diff --git a/.gitignore b/.gitignore index 673ff8651b..1e9a94b200 100644 --- a/.gitignore +++ b/.gitignore @@ -9,6 +9,7 @@ *.fdb_latexmk *.log +docs/spec/out/ test-out/ flamegraphs/ # sbt specific diff --git a/.travis.yml b/.travis.yml index e2c841e681..2b50641477 100644 --- a/.travis.yml +++ b/.travis.yml @@ -14,7 +14,7 @@ cache: language: scala jdk: - - oraclejdk8 + - oraclejdk9 script: - sbt -jvm-opts .travis.jvmopts test diff --git a/build.sbt b/build.sbt index adfa32043a..7b8e91e235 100644 --- a/build.sbt +++ b/build.sbt @@ -73,11 +73,11 @@ git.gitUncommittedChanges in ThisBuild := true val bouncycastleBcprov = "org.bouncycastle" % "bcprov-jdk15on" % "1.60" val scrypto = "org.scorexfoundation" %% "scrypto" % "2.1.6" -val scorexUtil = "org.scorexfoundation" %% "scorex-util" % "0.1.3" +val scorexUtil = "org.scorexfoundation" %% "scorex-util" % "0.1.4" val macroCompat = "org.typelevel" %% "macro-compat" % "1.1.1" val paradise = "org.scalamacros" %% "paradise" % "2.1.0" cross CrossVersion.full -val specialVersion = "master-5ffd1bf8-SNAPSHOT" +val specialVersion = "master-534cb6f5-SNAPSHOT" val specialCommon = "io.github.scalan" %% "common" % specialVersion val specialCore = "io.github.scalan" %% "core" % specialVersion val specialLibrary = "io.github.scalan" %% "library" % specialVersion @@ -91,7 +91,7 @@ val libraryconf = "io.github.scalan" %% "library-conf" % specialVersion val testingDependencies = Seq( "org.scalatest" %% "scalatest" % "3.0.5" % "test", "org.scalactic" %% "scalactic" % "3.0.+" % "test", - "org.scalacheck" %% "scalacheck" % "1.13.+" % "test", + "org.scalacheck" %% "scalacheck" % "1.14.+" % "test", "junit" % "junit" % "4.12" % "test", "com.novocode" % "junit-interface" % "0.11" % "test", specialCommon, (specialCommon % Test).classifier("tests"), @@ -115,6 +115,7 @@ libraryDependencies ++= Seq( "com.typesafe.akka" %% "akka-actor" % "2.4.+", "org.bitbucket.inkytonik.kiama" %% "kiama" % "2.1.0", "com.lihaoyi" %% "fastparse" % "1.0.0", + 
"org.spire-math" %% "debox" % "0.8.0" ) ++ testingDependencies @@ -192,7 +193,7 @@ lazy val sigma = (project in file(".")) .settings(commonSettings: _*) def runErgoTask(task: String, sigmastateVersion: String, log: Logger): Unit = { - val ergoBranch = "v2.0" + val ergoBranch = "sigma-validation-settings" log.info(s"Testing current build in Ergo (branch $ergoBranch):") val cwd = new File("").absolutePath val ergoPath = new File(cwd + "/ergo-tests/") diff --git a/docs/LangSpec.md b/docs/LangSpec.md index 308d4724ac..61f478da34 100644 --- a/docs/LangSpec.md +++ b/docs/LangSpec.md @@ -521,16 +521,6 @@ class Coll[A] { */ def flatMap[B](f: A => Coll[B]): Coll[B] - /** Computes length of longest segment whose elements all satisfy some predicate. - * - * @param p the predicate used to test elements. - * @param from the index where the search starts. - * @return the length of the longest segment of this collection starting from index `from` - * such that every element of the segment satisfies the predicate `p`. - * @since 2.0 - */ - def segmentLength(p: A => Boolean, from: Int): Int - /** Finds the first element of the $coll satisfying a predicate, if any. * * @param p the predicate used to test elements. diff --git a/docs/PR-review-checklist.md b/docs/PR-review-checklist.md new file mode 100644 index 0000000000..3f1827a041 --- /dev/null +++ b/docs/PR-review-checklist.md @@ -0,0 +1,13 @@ +## What should be checked during PR review + +### For each $TypeName.$methodName there should be + +1. test case in SigmaDslTests (checks SigmaDsl <-> ErgoScript equality) +2. test case in CostingSpecification +3. costing rule method in ${TypeName}Coster +4. 
for each SMethod registration + - .withInfo($description, $argsInfo) + - .withIRInfo($irBuilder, $opDescriptor) + +### For each PredefinedFunc registration there should be + - PredefFuncInfo($irBuilder, $opDescriptor) diff --git a/docs/conversions.dot b/docs/conversions.dot index ad3645d052..7727a4e294 100644 --- a/docs/conversions.dot +++ b/docs/conversions.dot @@ -64,7 +64,7 @@ digraph conversions { GroupElement -> Boolean [label=".isIdentity"] GroupElement -> Bytes [label=".nonce"] //todo remove compressed flag, use GroupElementSerializer - GroupElement -> Bytes [label=".getEncoded(compressed)" color=red] + GroupElement -> Bytes [label=".getEncoded" color=red] String -> Bytes [label="fromBase58(...)"] String -> Bytes [label="fromBase64(...)"] diff --git a/docs/sigma-dsl.md b/docs/sigma-dsl.md index 5ddf304f7b..ee9691186b 100644 --- a/docs/sigma-dsl.md +++ b/docs/sigma-dsl.md @@ -1,13 +1,15 @@ -# Sigma: Scala DSL for smart contracts with zero knowledge proof of knowledge +# SigmaDsl: Scala DSL for smart contracts with zero knowledge proof of knowledge ## Intro SigmaDsl is a domain-specific language embedded into Scala and designed to be source code compatible with SigmaScript. This means you can write SigmaDsl code directly in Scala IDE (e.g. IntelliJ IDEA) and copy-paste code snippets between SigmaDsl and SigmaScript. + Special Scala macros can also be used to automatically translate SigmaDsl to + Sigma byte code. -SigmaDsl is implemented as a library in the framework of -[Special](https://github.com/scalan/special) +SigmaDsl is implemented as Scala library using [Special](https://github.com/scalan/special) +framework. ## See also [Special](https://github.com/scalan/special) diff --git a/docs/soft-fork-log.md b/docs/soft-fork-log.md new file mode 100644 index 0000000000..209ef7ec13 --- /dev/null +++ b/docs/soft-fork-log.md @@ -0,0 +1,12 @@ + +## A log of changes leading to soft-fork + +This list should be updated every time something soft-forkable is added. 
+ +### Changes since 2.0 + + - new type (SGlobal.typeCode = 106) + - new method (SGlobal.groupGenerator.methodId = 1) + - new method (SAvlTree.updateDigest.methodId = 15) + - removed GroupElement.nonce (changed codes of getEncoded, exp, multiply, negate) + - change in Coll.filter serialization format (removed tagged variable id, changed condition type) \ No newline at end of file diff --git a/docs/spec/appendix_ergotree_serialization.tex b/docs/spec/appendix_ergotree_serialization.tex new file mode 100644 index 0000000000..b2cb8387d0 --- /dev/null +++ b/docs/spec/appendix_ergotree_serialization.tex @@ -0,0 +1,6 @@ +\section{Serialization format of ErgoTree nodes} +\label{sec:appendix:ergotree_serialization} + +\mnote{These subsections are autogenerated from instrumented ValueSerializers} + +\input{generated/ergotree_serialization1.tex} diff --git a/docs/spec/appendix_integer_encoding.tex b/docs/spec/appendix_integer_encoding.tex new file mode 100644 index 0000000000..45aae6c57c --- /dev/null +++ b/docs/spec/appendix_integer_encoding.tex @@ -0,0 +1,36 @@ +\section{Compressed encoding of integer values} + +\subsection{VLQ encoding} +\label{sec:vlq-encoding} + +\begin{verbatim} +public final void putULong(long value) { + while (true) { + if ((value & ~0x7FL) == 0) { + buffer[position++] = (byte) value; + return; + } else { + buffer[position++] = (byte) (((int) value & 0x7F) | 0x80); + value >>>= 7; + } + } +} +\end{verbatim} + +\subsection{ZigZag encoding} +\label{sec:zigzag-encoding} + +Encode a ZigZag-encoded 64-bit value. ZigZag encodes signed integers +into values that can be efficiently encoded with varint. (Otherwise, +negative values must be sign-extended to 64 bits to be varint encoded, +thus always taking 10 bytes in the buffer.) + +Parameter \lst{n} is a signed 64-bit integer. +This Java method returns an unsigned 64-bit integer, stored in a signed int because Java has no explicit unsigned support. 
+ +\begin{verbatim} + public static long encodeZigZag64(final long n) { + // Note: the right-shift must be arithmetic + return (n << 1) ^ (n >> 63); + } +\end{verbatim} \ No newline at end of file diff --git a/docs/spec/appendix_motivation.tex b/docs/spec/appendix_motivation.tex new file mode 100644 index 0000000000..aab78bcc34 --- /dev/null +++ b/docs/spec/appendix_motivation.tex @@ -0,0 +1,127 @@ +\section{Motivations} +\label{sec:appendix:motivation} + +\subsection{Type Serialization format rationale} +\label{sec:appendix:motivation:type} + +Some operations of \ASDag have type parameters, for which concrete types +should be specified (since \ASDag is monomorphic IR). When the operation +(such as \hyperref[sec:serialization:operation:ExtractRegisterAs]{\lst{ExtractRegisterAs}}) is serialized those types should also be +serialized as part of operation. The following encoding is designed to +minimize a number of bytes required to represent type in the serialization +format of \ASDag. + +In most cases type term serialises into a single byte. In the intermediate +representation of ErgoTree each type is represented by a tree of nodes where +leaves are primitive types and other nodes are type constructors. +Simple (but sub-optimal) way to serialize a type would be to give each +primitive type and each type constructor a unique type code. Then, to +serialize a node, we need to emit its code and then perform recursive descent +to serialize all children. +However, to save storage space, we use special encoding schema to save bytes +for the types that are used more often. + +We assume the most frequently used types are: +\begin{itemize} + \item primitive types (\lst{Int, Byte, Boolean, BigInt, GroupElement, + Box, AvlTree}) + \item Collections of primitive types (\lst{Coll[Byte]} etc) + \item Options of primitive types (\lst{Option[Int]} etc.) + \item Nested arrays of primitive types (\lst{Coll[Coll[Int]]} etc.) 
+ \item Functions of primitive types (\lst{Box => Boolean} etc.) + \item First biased pair of types (\lst{(_, Int)} when we know the first + component is a primitive type). + \item Second biased pair of types (\lst{(Int, _)} when we know the second + component is a primitive type) + \item Symmetric pair of types (\lst{(Int, Int)} when we know both types are + the same) +\end{itemize} + +All the types above should be represented in an optimized way (preferably by a single byte). +For other types, we do recursive descent down the type tree as it is defined in section~\ref{sec:ser:type} + +\subsection{Constant Segregation rationale} + +\subsubsection{Massive script validation} + +Consider a transaction \lst{tx} which has \lst{INPUTS} collection of boxes to +spend. Every input box can have a script protecting it (\lst{propositionBytes} +property). This script should be executed in the context of the current +transaction. The simplest transaction has 1 input box. Thus if we want to +have a sustained block validation of 1000 transactions per second we need to +be able to validate 1000 scripts per second. + +For every script (of input \lst{box}) the following is done in order to +validate it: +\begin{enumerate} + \item Context is created with \lst{SELF} = box + \item The script is deserialized into ErgoTree + \item ErgoTree is traversed to build costGraph and calcGraph, two graphs for + cost estimation function and script calculation function. 
+ \item Cost estimation is computed by evaluating costGraph with current context data + \item If cost and data size limits are not exceeded, calcGraph is + evaluated using context data to obtain sigma proposition (see + \hyperref[sec:type:SigmaProp]{\lst{SigmaProp}}) + \item Verification procedure is executed +\end{enumerate} + +\subsubsection{Potential for Script processing optimization} + +Before an \langname contract can be stored in a blockchain it should be first +compiled from its source text into ErgoTree and then serialized into byte +array. + +Because the language is purely functional and IR is graph-based, the +compilation process has an effect of normalization/unification. This means +that different original scripts may have identical ErgoTrees and as the +result identical serialized bytes. + +Because of normalization, and also because of script reusability, the number +of conceptually (or logically) different scripts is much less than the number +of individual scripts in a blockchain. For example we may have 1000s of +different scripts in a blockchain with millions of boxes. + +The average reusability ratio is 1000 in this case. And even those different +scripts may have different usage frequency. Having big reusability ratio we +can optimize script evaluation by performing steps 1 - 4 only once per unique +script. + +The compiled calcGraph can be cached in \lst{Map[Array[Byte], Context => +SigmaBoolean]}. Every script extracted from an input box can be used as a key +in this map to obtain ready to execute graph. + +However, we have a problem with constants embedded in contracts. There is one +obstacle to the optimization by caching. In many cases it is very natural to +embed constants in the script body, most notable scenario is when public keys +are embedded. As result two functionally identical scripts may serialize to +different byte arrays because they have different embedded constants. 
+ +\subsubsection{Constant-less ErgoTree} + +The solution to the problem with embedded constants is simple, we don't need +to embed constants. Each constant in the body of \ASDag can be replaced +with indexed placeholder (see \hyperref[sec:appendix:primops:ConstantPlaceholder]{\lst{ConstantPlaceholder}}). +Each placeholder has an index field. The index of the placeholder is +assigned by breadth-first topological order of the graph traversal. + +The transformation is part of compilation and is performed ahead of time. +Each \ASDag has an array of all the constants extracted from its body. Each +placeholder refers to the constant by the constant's index in the array. + +Thus the format of serialized script is shown in Figure~\ref{fig:ser:ergotree} which contains: +\begin{enumerate} + \item number of constants + \item constants collection + \item script expression with placeholders +\end{enumerate} + +The constants collection contains serialized constant data (using +ConstantSerializer) one after another. +The script expression is a serialized ErgoTree with placeholders. + +Using this new script format we can use script expression part as a key in +the cache. An observation is that after the constants are extracted, what +remains is a template. Thus instead of applying steps 1-4 to +\emph{constant-full} scripts we can apply them to \emph{constant-less} +templates. Before applying steps 4 and 5 we need to bind placeholders with +actual values taken from the constants collection. 
diff --git a/docs/spec/appendix_predeftypes.tex b/docs/spec/appendix_predeftypes.tex new file mode 100644 index 0000000000..87dd9b9e52 --- /dev/null +++ b/docs/spec/appendix_predeftypes.tex @@ -0,0 +1,143 @@ +\section{Predefined types} +\label{sec:appendix:predeftypes} + +\begin{table}[h] + \small + \begin{tabu}{|l |l |l |l |l |l |l |l|} + \hline + \rowfont{\bfseries} + Name & Code & IsConstSize & + isPrim\footnote{isPrim - primitive type} & + isEmbed & isNum & Set of values \\ + \hline + +\input{generated/predeftypes.tex} + + \hline + \end{tabu} + \caption{Predefined types of \langname} + \label{table:predeftypes} +\end{table} + +The following subsections are autogenerated from type descriptors +of \langname reference implementation. + +\subsection{Boolean type} +\label{sec:type:Boolean} +\input{generated/Boolean_methods.tex} + +\subsection{Byte type} +\label{sec:type:Byte} +\input{generated/Byte_methods.tex} + +\subsection{Short type} +\label{sec:type:Short} +\input{generated/Short_methods.tex} + +\subsection{Int type} +\label{sec:type:Int} +\input{generated/Int_methods.tex} + +\subsection{Long type} +\label{sec:type:Long} +\input{generated/Long_methods.tex} + +\subsection{BigInt type} +\label{sec:type:BigInt} +\input{generated/BigInt_methods.tex} + +\subsection{GroupElement type} +\label{sec:type:GroupElement} +\input{generated/GroupElement_methods.tex} + + +\subsection{SigmaProp type} +\label{sec:type:SigmaProp} + +Values of \lst{SigmaProp} type hold sigma propositions, which can be proved +and verified using Sigma protocols. Each sigma proposition is represented as +an expression where sigma protocol primitives such as \lst{ProveDlog}, and +\lst{ProveDHTuple} are used as constants and special sigma protocol +connectives like \lst{&&},\lst{||} and \lst{THRESHOLD} are used as operations. + +The abstract syntax of sigma propositions is shown in +Figure~\ref{fig:sigmaprop:tree}. 
+ +\begin{figure}[h] + \centering + \begin{tabular}{@{}l c l l l} + \hline + Set & & Syntax & Mnemonic & Description \\ + \hline + $Tree \ni t$ & := & \lst{Trivial(b)} & \lst{TrivialProp} & boolean value \lst{b} as sigma proposition \\ + & $\mid$ & \lst{Dlog(ge)} & \lst{ProveDLog} & knowledge of discrete logarithm of \lst{ge} \\ + & $\mid$ & \lst{DHTuple(g,h,u,v)} & \lst{ProveDHTuple} & knowledge of Diffie-Hellman tuple \\ + & $\mid$ & \lst{THRESHOLD}$(k,t_1,\dots,t_n)$ & \lst{THRESHOLD} & knowledge of $k$ out of $n$ secrets\\ + & $\mid$ & \lst{OR}$(t_1,\dots,t_n)$ & \lst{OR} & knowledge of any one of $n$ secrets\\ + & $\mid$ & \lst{AND}$(t_1,\dots,t_n)$ & \lst{AND} & knowledge of all $n$ secrets\\ + \end{tabular} + \caption{Abstract syntax of sigma propositions} + \label{fig:sigmaprop:tree} +\end{figure} + +Every well-formed tree of sigma proposition is a value of type +\lst{SigmaProp}, thus following the notation of Section~\ref{sec:evaluation} +we can define denotation of \lst{SigmaProp} +$$\Denot{\lst{SigmaProp}} = \Set{t \in Tree}$$ + + +The following methods can be called on all instances of \lst{SigmaProp} type. + +\input{generated/SigmaProp_methods.tex} + +For a list of primitive operations on \lst{SigmaProp} type see Appendix~\ref{sec:appendix:primops}. + +\subsection{Box type} +\label{sec:type:Box} +\input{generated/Box_methods.tex} + +\subsection{\lst{AvlTree} type} +\label{sec:type:AvlTree} + +% \subsubsection{\lst{AvlTree.digest} method (Code 100.1)} +% \noindent +% \begin{tabularx}{\textwidth}{| l | X |} +% \hline +% \bf{Description} & Returns digest of the state represent by this tree. +% Authenticated tree digest = root hash bytes ++ tree height. 
\\ +% \hline +% \bf{Parameters} & +% \(\begin{array}{l l l} +% \lst{key} & \lst{: Coll[Byte]} & \text{// key to lookup} \\ +% \lst{value} & \lst{: Coll[Byte]} & \text{// value to insert}\\ +% \end{array}\) \\ +% \hline +% \bf{Result} & \lst{Coll[Byte]} \\ +% \hline +% \end{tabularx} + +\input{generated/AvlTree_methods.tex} + +\subsection{Header type} +\label{sec:type:Header} +\input{generated/Header_methods.tex} + +\subsection{PreHeader type} +\label{sec:type:PreHeader} +\input{generated/PreHeader_methods.tex} + +\subsection{Context type} +\label{sec:type:Context} +\input{generated/Context_methods.tex} + +\subsection{Global type} +\label{sec:type:Global} +\input{generated/SigmaDslBuilder_methods.tex} + +\subsection{Coll type} +\label{sec:type:Coll} +\input{generated/SCollection_methods.tex} + +\subsection{Option type} +\label{sec:type:Option} +\input{generated/SOption_methods.tex} diff --git a/docs/spec/appendix_primops.tex b/docs/spec/appendix_primops.tex new file mode 100644 index 0000000000..326f3dc097 --- /dev/null +++ b/docs/spec/appendix_primops.tex @@ -0,0 +1,33 @@ +\section{Predefined global functions} +\label{sec:appendix:primops} + +% \begin{table}[h] + % \caption{Predefined primitive operations of \langname} + % \label{table:primops} + % \footnotesize + % \tiny + \tiny + \begin{longtable}[h]{|l |l | p{.25\linewidth} | p{.5\linewidth} |} + \hline + % \rowfont{\bfseries} +Code & Mnemonic & Signature & Description \\ + \hline + \input{generated/predeffunc_rows.tex} + +% SelectField & & $((\tau_1,\dots,\tau_n), i: Byte) \to \tau_i$ & $\SelectField{(e_1,\dots,e_n)}{i}$ & \\ +% \hline +% SomeValue & & $[T](x: T) \to Option[T]$ & $\Some{e}$ & injects value into non-empty optional value \\ +% \hline +% NoneValue & & $[T]()\to Option[T]$ & $\None{\tau}$ & constructs empty optional value of type $\tau$ \\ +% \hline +% Collection & & $[T](T, \dots, T)\to Coll[T]$ & $\Coll{e_1,\dots,e_n}$ & constructor of collection with $n$ items \\ +% \hline + \end{longtable} + 
\normalsize + +% \end{table} + +\mnote{This table is autogenerated from sigma operation descriptors. See +SigmaPredef.scala} + +\input{generated/predeffunc_sections.tex} diff --git a/docs/spec/cleanout.sh b/docs/spec/cleanout.sh new file mode 100755 index 0000000000..dd8f457670 --- /dev/null +++ b/docs/spec/cleanout.sh @@ -0,0 +1,13 @@ +#!/usr/bin/env sh + +rm appendix_integer_encoding.aux +rm costing.aux +rm evaluation.aux +rm graph.aux +rm language.aux +rm serialization.aux +rm types.aux +rm spec.aux +rm spec.out +rm spec.toc +rm spec.log \ No newline at end of file diff --git a/docs/spec/compile.sh b/docs/spec/compile.sh new file mode 100755 index 0000000000..53182d7dd9 --- /dev/null +++ b/docs/spec/compile.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env sh + +command -v pdflatex && command -v bibtex +if [[ "$?" != 0 ]]; then + echo "Command 'pdflatex' or 'bibtex' not exist, both must be installed. For Ubuntu, try:" + echo "sudo apt install texlive-latex-base texlive-binaries" + echo + echo "You may also need to install additional packages like fonts, etc. For Ubuntu, try:" + echo "sudo apt-get install texlive-fonts-recommended latex-xcolor texlive-latex-extra cm-super" + exit 1; +fi + +pdflatex -output-directory=out spec +bibtex spec +pdflatex -output-directory=out spec +pdflatex -output-directory=out spec + +./cleanout.sh diff --git a/docs/spec/costing.tex b/docs/spec/costing.tex new file mode 100644 index 0000000000..39c7b5ea10 --- /dev/null +++ b/docs/spec/costing.tex @@ -0,0 +1,37 @@ +\section{Costing} +\label{sec:costing} + +This is how the file name is specified +\begin{lstlisting} + + val env: ScriptEnv = Map( + ScriptNameProp -> s"filename_verify", + ... +\end{lstlisting} + +The file should be in \lst{test-out} directory. The graph should have +explicit nodes like \lst{CostOf(...)}, which represent access to CostTable +entries. The actual cost is counted in the nodes like this \lst{s1340: Int = +OpCost(2, List(s1361, s1360), s983)}. 
Each such node is handled like +\lst{costAccumulator.add(s1340, OpCost(2, List(s1361, s1360), s983), dataEnv)} +See \lst{CostAccumulator} + +How much cost is represented by OpCost node? +\begin{enumerate} + \item Symbols s1361, s1360 are dependencies. They represent cost that + should be accumulated before s983. + \item If upon handling of OpCost, the dependencies are not yet + accumulated, then they are accumulated first, and then s983 is + accumulated. + \item the value of s1340 is the value of s983. + \item Thus execution of OpCost, consists of 2 parts: a) data flow b) side + effect on CostAccumulator + \item OpCost is a special node, interpreted in a special way. See method + evaluate in Evaluation. +\end{enumerate} + +% For why this is necessary, consider a script where two different context +% variables are used. Without this, the same OpCost node is emitted twice, but +% only single is added in the graph because of unification of nodes. Thus +% adding costedValue.id make those nodes different and they both added to the +% graph. \ No newline at end of file diff --git a/docs/spec/evaluation.tex b/docs/spec/evaluation.tex new file mode 100644 index 0000000000..6f00766ef4 --- /dev/null +++ b/docs/spec/evaluation.tex @@ -0,0 +1,107 @@ +\section{Evaluation Semantics} +\label{sec:evaluation} + + Evaluation of \langname is specified by its translation to \corelang, whose +terms form a subset of \langname terms. Thus, typing rules of \corelang form +a subset of typing rules of \langname. + +Here we specify evaluation semantics of \corelang, which is based on +call-by-value (CBV) lambda calculus. Evaluation of \corelang is specified +using denotational semantics. To do that, we first specify denotations of +types, then typed terms and then equations of denotational semantics. 
+ +\begin{definition} + (values, producers) + \begin{itemize} + \item The following CBV terms are called values: + $$ V :== x \mid C(d, T) \mid \Lam{x}{M}$$ + \item All CBV terms are called producers. (This is because, when evaluated, they produce a value.) + \end{itemize} +\end{definition} + +We now describe and explain a denotational semantics for the \corelang +language. The key principle is that each type $A$ denotes a set $\Denot{A}$ +whose elements are the denotations of values of the type $A$. + +Thus the type \lst{Boolean} denotes the 2-element set +$\{\lst{true},\lst{false}\}$, because there are two values of type +\lst{Boolean}. Likewise the type $(T_1,\dots,T_n)$ denotes +$(\Denot{T_1},\dots,\Denot{T_n})$ because a value of type $(T_1,\dots,T_n)$ +must be of the form $(V_1,\dots,V_n)$, where each $V_i$ is a value of type +$T_i$. + +Given a value $V$ of type $A$, we write $\Denot{V}$ for the element of $\Denot{A}$ +that it denotes. Given a closed term $M$ of type $A$, we recall that it +produces a value $V$ of type $A$. So $M$ will denote an element $\Denot{M}$ +of $\Denot{A}$. + +A value of type $A \to B$ is of the form $\Lam{x}{M}$. This, when +applied to a value of type $A$ gives a value of type $B$. So $A \to B$ +denotes $\Denot{A} \to \Denot{B}$. It is true that the syntax appears to +allow us to apply $\Lam{x}{M}$ to any term $N$ of type $A$. But $N$ will be +evaluated before it interacts with $\Lam{x}{M}$, so $\Lam{x}{M}$ is really only applied to the value that $N$ produces. + +\begin{definition} + A \emph{context} $\Gamma$ is a finite sequence of identifiers with value + types $x_1:A_1, \dots ,x_n:A_n$. Sometimes we omit the identifiers and + write $\Gamma$ as a list of value types. +\end{definition} + +Given a context $\Gamma = x_1:A_1,\dots,x_n:A_n$, an environment (list of +bindings for identifiers) associates to each $x_i$ a value of type $A_i$. 
So +the environment denotes an element of $(\Denot{A_1},\dots,\Denot{A_n})$, and +we write $\Denot{\Gamma}$ for this set. + +Given a \corelang term $\DerEnv{M: B}$, we see that $M$, together with +environment, gives a closed term of type $B$. So $M$ denotes a function +$\Denot{M}$ from $\Denot{\Gamma}$ to $\Denot{B}$. + +In summary, the denotational semantics is organized as follows. + +\begin{itemize} + \item A type $A$ denotes a set $\Denot{A}$ + \item A context $x_1:A_1,\dots,x_n:A_n$ denotes the set $(\Denot{A_1},\dots,\Denot{A_n})$ + \item A term $\DerEnv{M: B}$ denotes a function $\Denot{M}: + \Denot{\Gamma} \to \Denot{B}$ +\end{itemize} + +The denotations of types and terms is given in Figure~\ref{fig:denotations}. + +\begin{figure}[h] + +The denotations of \corelang types + +\begin{center} + \(\begin{array}{ l c l } + \Denot{\lst{Boolean}} & = & \{ \lst{true}, \lst{false} \} \\ + \Denot{\lst{P}} & = & \text{see Appendix~\ref{sec:appendix:predeftypes}} \\ + \Denot{(T_1,\dots,T_n)} & = & (\Denot{T_1},\dots,\Denot{T_n}) \\ + \Denot{A \to B} & = & \Denot{A} \to \Denot{B} \\ + \end{array}\) +\end{center} + +The denotations of \corelang terms + +\begin{center} + \(\begin{array}{ l c l } + \Apply{ \Denot{\lst{x}} }{(\rho,\lst{x}\mapsto x, \rho')} & = & x \\ + \Apply{ \Denot{C(d, T)} }{\rho} & = & d \\ + \Apply{ \Denot{(\Ov{M_i})} }{\rho} & = & (\Ov{ \Apply{\Denot{M_i}}{\rho} }) \\ + + \Apply{ \Denot{\Apply{\delta}{N}} }{\rho} & = + & \Apply{ (\Apply{\Denot{\delta}}{\rho}) }{ v }~where~v = \Apply{\Denot{N}}{\rho} \\ + + \Apply{ \Denot{\Lam{\lst{x}}{M}} }{\rho} & = + & \Lam{x}{ \Apply{\Denot{M}}{(\rho, \lst{x}\mapsto x)} } \\ + + \Apply{ \Denot{\Apply{M_f}{N}} }{\rho} & = + & \Apply{ (\Apply{\Denot{M_f}}{\rho}) }{ v }~where~v = \Apply{\Denot{N}}{\rho} \\ + + \Apply{ \Denot{ \Apply{M_I.\lst{m}}{\Ov{N_i}} } }{\rho} & = + & \Apply{ (\Apply{\Denot{M_I}}{\rho}).m }{ \Ov{v_i} }~where~\Ov{v_i = \Apply{\Denot{N_i}}{\rho}} \\ + \end{array}\) +\end{center} + 
+\caption{Denotational semantics of \corelang} +\label{fig:denotations} +\end{figure} \ No newline at end of file diff --git a/docs/spec/figures/fig_language.tex b/docs/spec/figures/fig_language.tex new file mode 100644 index 0000000000..5ecd6b68e7 --- /dev/null +++ b/docs/spec/figures/fig_language.tex @@ -0,0 +1,29 @@ +\[\begin{tabular}{@{}l c l l l} +\hline +Set Name & & Syntax & Mnemonic & Description \\ +\hline +$\mathcal{T} \ni T$ & ::= & \lst{P} & \lst{SPredefType} & predefined types (see Appendix~\ref{sec:appendix:predeftypes}) \\ + + & $\mid$ & $\tau$ & \lst{STypeVar} & type variable \\ + & $\mid$ & $(T_1, \dots ,T_n) $ & \lst{STuple} & tuple of $n$ elements (see \lst{Tuple} type)\\ + + & $\mid$ & $(T_1,\dots,T_n) \to T $ & \lst{SFunc} & function of $n$ arguments (see \lst{Func} type) \\ + & $\mid$ & $\text{\lst{Coll}}[T]$ & \lst{SCollection} & collection of elements of type $T$ \\ + & $\mid$ & $\text{\lst{Option}}[T]$ & \lst{SOption} & optional value of type $T$ \\ + & & & & \\ + +$Term\ni e$ & ::= & $C(v, T)$ & \lst{Constant} & typed constants \\ + & $\mid$ & $x$ & \lst{ValUse} & variables \\ + & $\mid$ & $\TyLam{x_i}{T_i}{e}$ & \lst{FuncExpr} & lambda expression \\ + & $\mid$ & $\Apply{e_f}{\Ov{e_i}}$ & \lst{Apply} & application of functional expression \\ + & $\mid$ & $\Apply{e.m}{\Ov{e_i}}$ & \lst{MethodCall} & method invocation \\ + & $\mid$ & $\Tup{e_1, \dots ,e_n}$ & \lst{Tuple} & constructor of tuple with $n$ items \\ + & $\mid$ & $\Apply{\delta}{\Ov{e_i}}$ & & primitive application (see Appendix~\ref{sec:appendix:primops}) \\ + & $\mid$ & \lst{if} $(e_{cond})$ $e_1$ \lst{else} $e_2$ & \lst{If} & if-then-else expression \\ + & $\mid$ & $\{ \Ov{\text{\lst{val}}~x_i = e_i;}~e\}$ & \lst{BlockExpr} & block expression \\ + & & & & \\ +$cd$ & ::= & $\Trait{I}{\Ov{ms_i}}$ & \lst{STypeCompanion} & interface declaration \\ + & & & & \\ +$ms$ & ::= & $\MSig{m[\Ov{\tau_i}]}{\overline{x_i : T_i}}{T}$ & \lst{SMethod} & method signature declaration \\ 
+\end{tabular}\] + diff --git a/docs/spec/figures/fig_semantics.tex b/docs/spec/figures/fig_semantics.tex new file mode 100644 index 0000000000..2ea335b756 --- /dev/null +++ b/docs/spec/figures/fig_semantics.tex @@ -0,0 +1,30 @@ +\begin{center} +\(\begin{array}{c} +\multicolumn{1}{l}{\text{Reduction contexts of \langname}}\\ +\\ +\begin{tabular}{@{}l c l l} +$\Ctx$ & ::= & $\Hole$ & hole \\ + & $\mid$ & $\delta~\Ov{v}~\Ctx~\Ov{e}$ & \\ + & $\mid$ & $\Ctx~e$ & \\ + & $\mid$ & $(\Lam{x}{e})\Ctx$ & \\ + & & \\ +\end{tabular} \\ +\multicolumn{1}{l}{\text{Call-by-value evaluation relation}}\\ +\\ +\begin{tabular}{@{}l c l r} +$[(\Lam{x}{e})~v]\Ctx$ & $\mapsto$ & $[[v/x]e]\Ctx$ & (1) \\ +$[$ \lst{let} $x = v$ \lst{in} $e]\Ctx$ & $\mapsto$ & $[[v/x]e]\Ctx$ & (2) \\ +$[$\lst{case} $k~\Ov{v}$ \lst{of} \{ $k_i~\overline{x_i}$ $\rightarrow$ $e_i$ \}$]\Ctx$ & $\mapsto$ & $[[\overline{v}/\overline{x_j}]e_j]\Ctx$, if $k = k_j$ & (2) \\ +$\Apply{\Ctx}{\IfThenElse{v}{e_1}{e_2}}$ & $\mapsto$ & \begin{tabular}{ l l} + $\Apply{\Ctx}{e_1}$ & if $v = true$ \\ + $\Apply{\Ctx}{e_2}$ & otherwise + \end{tabular} & (3) \\ +$[\delta~\overline{v}]\Ctx$ & $\mapsto$ & $[l]\Ctx$, if $l = |[\Prim|]\overline{v}$ & (4) \\ + +\end{tabular} +\end{array}\) +\end{center} +% \begin{center} +% \small{Here $\mu\B{\alpha}.\B{T}$ stands for isorecursive type \cite{wiki_recursive_data_type}. 
+% Note that types are described using \emph{phantom types} notation~\cite{JamesCheney2003,wiki_phantom_types}} +% \end{center} diff --git a/docs/spec/figures/fig_typing.tex b/docs/spec/figures/fig_typing.tex new file mode 100644 index 0000000000..61ea751d0a --- /dev/null +++ b/docs/spec/figures/fig_typing.tex @@ -0,0 +1,70 @@ + +\begin{center} +% const, var, tuple +\(\begin{array}{c c c} + \frac{}{\Der{\Gamma}{C(\_, T)~:~T}}~\lst{(Const)} + & + \frac{}{\Der{\Gamma,x~:~T}{x~:~T}}~\lst{(Var)} + & + \frac{ + \Ov{\DerEnv{e_i:~T_i}}~~ + ptype(\delta, \Ov{T_i}) :~(T_1,\dots,T_n) \to T + }{ + \Apply{\delta}{\Ov{e_i}}:~T + }~\lst{(Prim)} \\ + & & \\ % blank line +\end{array}\) + + +% tuples +\(\begin{array}{c} +\frac{\DerEnv{e_1 :~T_1}~~\dots~~\DerEnv{e_n :~T_n}} + {\DerEnv{(e_1,\dots,e_n)~:~(T_1,\dots,T_n)}}~\lst{(Tuple)} \\ +\\ % blank line +\end{array}\) + +% MethodCall +\(\begin{array}{c} +\frac{ + \DerEnv{e~:~I,~e_i:~T_i}~~ + mtype(m, I, \Ov{T_i})~:~(I, T_1,\dots,T_n) \to T + } + { \Apply{e.m}{\Ov{e_i}}:~T }~\lst{(MethodCall)} \\ +\\ % blank line +\end{array}\) + +% functions +\(\begin{array}{c c} + \frac{\Der{\TEnv,\Ov{x_i:~T_i}}{e~:~T}} + {\Der{\Gamma}{\TyLam{x_i}{T_i}{e}~:~(T_0,\dots,T_n) \to T}}~\lst{(FuncExpr)} + & + \frac{ \Der{\TEnv}{e_f:~(T_1,\dots,T_n) \to T}~~~\Ov{\Der{\TEnv}{e_i:~T_i}} } + { \Der{\Gamma}{\Apply{e_f}{\Ov{e_i}}~:~T} }~\lst{(Apply)} \\ +& \\ % blank line +\end{array}\) + +% if +\(\begin{array}{c c} + \frac{ \DerEnv{e_{cond} :~\lst{Boolean}}~~\DerEnv{e_1 :~T}~~\DerEnv{e_2 :~T} } + { \DerEnv{\IfThenElse{e_{cond}}{e_1}{e_2}~:~T} }~\lst{(If)} + & + \\ + & \\ % blank line +\end{array}\) +\( + \frac{ + \DerEnv{e_1 :~T_1}~\wedge~ + \forall k\in\{2,\dots,n\}~\Der{\Gamma,x_1:~T_1,\dots,x_{k-1}:~T_{k-1}}{e_k:~T_k}~\wedge~ + \Der{\Gamma,x_1:~T_1,\dots,x_n:~T_n}{e:~T} + } + { \DerEnv{\{ \Ov{\text{\lst{val}}~x_i = e_i;}~e\}~:~T} }~\lst{(BlockExpr)} +\) +% let +% \( +% \frac{\Der{\TEnv,x : T_1}{e_2 : T_2}}{\Der{\Gamma}{\Let{x}{e_1}{e_2} : T_2}} +% \) + + 
+\end{center} + + diff --git a/docs/spec/generated/AvlTree_methods.tex b/docs/spec/generated/AvlTree_methods.tex new file mode 100644 index 0000000000..98fe28c7e2 --- /dev/null +++ b/docs/spec/generated/AvlTree_methods.tex @@ -0,0 +1,390 @@ + +\subsubsection{\lst{AvlTree.digest} method (Code 100.1)} +\label{sec:type:AvlTree:digest} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns digest of the state represented by this tree. + Authenticated tree \lst{digest} = \lst{root hash bytes} ++ \lst{tree height} + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{AvlTree.enabledOperations} method (Code 100.2)} +\label{sec:type:AvlTree:enabledOperations} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Flags of enabled operations packed in single byte. 
+ \lst{isInsertAllowed == (enabledOperations & 0x01) != 0}\newline + \lst{isUpdateAllowed == (enabledOperations & 0x02) != 0}\newline + \lst{isRemoveAllowed == (enabledOperations & 0x04) != 0} + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Byte} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{AvlTree.keyLength} method (Code 100.3)} +\label{sec:type:AvlTree:keyLength} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & + + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Int} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{AvlTree.valueLengthOpt} method (Code 100.4)} +\label{sec:type:AvlTree:valueLengthOpt} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & + + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Option[Int]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{AvlTree.isInsertAllowed} method (Code 100.5)} +\label{sec:type:AvlTree:isInsertAllowed} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & + + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{AvlTree.isUpdateAllowed} method (Code 100.6)} +\label{sec:type:AvlTree:isUpdateAllowed} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + 
\bf{Description} & + + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{AvlTree.isRemoveAllowed} method (Code 100.7)} +\label{sec:type:AvlTree:isRemoveAllowed} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & + + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{AvlTree.updateOperations} method (Code 100.8)} +\label{sec:type:AvlTree:updateOperations} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & + + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{AvlTree} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{AvlTree.contains} method (Code 100.9)} +\label{sec:type:AvlTree:contains} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & + + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{AvlTree.get} method (Code 100.10)} +\label{sec:type:AvlTree:get} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & + + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Option[Coll[Byte]]} \\ + \hline + + \bf{Serialized as} & 
\hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{AvlTree.getMany} method (Code 100.11)} +\label{sec:type:AvlTree:getMany} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & + + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Option[Coll[Byte]]]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{AvlTree.insert} method (Code 100.12)} +\label{sec:type:AvlTree:insert} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & + + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Option[AvlTree]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{AvlTree.update} method (Code 100.13)} +\label{sec:type:AvlTree:update} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & + + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Option[AvlTree]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{AvlTree.remove} method (Code 100.14)} +\label{sec:type:AvlTree:remove} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & + + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Option[AvlTree]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{AvlTree.updateDigest} method (Code 100.15)} 
+\label{sec:type:AvlTree:updateDigest} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & + + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{AvlTree} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} diff --git a/docs/spec/generated/BigInt_methods.tex b/docs/spec/generated/BigInt_methods.tex new file mode 100644 index 0000000000..c0ac012526 --- /dev/null +++ b/docs/spec/generated/BigInt_methods.tex @@ -0,0 +1,267 @@ + +\subsubsection{\lst{BigInt.toByte} method (Code 106.1)} +\label{sec:type:BigInt:toByte} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{Byte}, throwing exception if overflow. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Byte} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{BigInt.modQ} method (Code 6.1)} +\label{sec:type:BigInt:modQ} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns this \lst{mod} Q, i.e. remainder of division by Q, where Q is the order of the cryptographic group. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{BigInt} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ModQ]{\lst{ModQ}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{BigInt.toShort} method (Code 106.2)} +\label{sec:type:BigInt:toShort} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{Short}, throwing exception if overflow. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Short} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{BigInt.plusModQ} method (Code 6.2)} +\label{sec:type:BigInt:plusModQ} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Adds \lst{other} to this number modulo Q. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{other} & \lst{: BigInt} & \text{// Number to add to this.} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{BigInt} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PlusModQ]{\lst{PlusModQ}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{BigInt.toInt} method (Code 106.3)} +\label{sec:type:BigInt:toInt} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{Int}, throwing exception if overflow. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Int} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{BigInt.minusModQ} method (Code 6.3)} +\label{sec:type:BigInt:minusModQ} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Subtracts \lst{other} from this number modulo Q. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{other} & \lst{: BigInt} & \text{// Number to subtract from this.} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{BigInt} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MinusModQ]{\lst{MinusModQ}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{BigInt.toLong} method (Code 106.4)} +\label{sec:type:BigInt:toLong} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{Long}, throwing exception if overflow. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Long} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{BigInt.multModQ} method (Code 6.4)} +\label{sec:type:BigInt:multModQ} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Multiplies this number by \lst{other} modulo Q. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{other} & \lst{: BigInt} & \text{// Number to multiply with this.} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{BigInt} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{BigInt.toBigInt} method (Code 106.5)} +\label{sec:type:BigInt:toBigInt} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{BigInt} \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{BigInt} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{BigInt.toBytes} method (Code 106.6)} +\label{sec:type:BigInt:toBytes} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns a big-endian representation of this numeric value in a collection of bytes. + For example, the \lst{Int} value \lst{0x12131415} would yield the + collection of bytes \lst{[0x12, 0x13, 0x14, 0x15]}. + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{BigInt.toBits} method (Code 106.7)} +\label{sec:type:BigInt:toBits} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns a big-endian representation of this numeric in a collection of Booleans. + Each boolean corresponds to one bit. 
+ \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Boolean]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} diff --git a/docs/spec/generated/Boolean_methods.tex b/docs/spec/generated/Boolean_methods.tex new file mode 100644 index 0000000000..48ce891811 --- /dev/null +++ b/docs/spec/generated/Boolean_methods.tex @@ -0,0 +1,22 @@ + +\subsubsection{\lst{Boolean.toByte} method (Code 1.1)} +\label{sec:type:Boolean:toByte} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Convert true to 1 and false to 0 \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Byte} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} diff --git a/docs/spec/generated/Box_methods.tex b/docs/spec/generated/Box_methods.tex new file mode 100644 index 0000000000..21dd67334c --- /dev/null +++ b/docs/spec/generated/Box_methods.tex @@ -0,0 +1,437 @@ + +\subsubsection{\lst{Box.value} method (Code 99.1)} +\label{sec:type:Box:value} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Mandatory: Monetary value, in Ergo tokens (NanoErg unit of measure) \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Long} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ExtractAmount]{\lst{ExtractAmount}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Box.propositionBytes} method (Code 99.2)} +\label{sec:type:Box:propositionBytes} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Serialized bytes of guarding script, which should be evaluated to true in order to + open this box. 
(aka spend it in a transaction) \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ExtractScriptBytes]{\lst{ExtractScriptBytes}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Box.bytes} method (Code 99.3)} +\label{sec:type:Box:bytes} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Serialized bytes of this box's content, including proposition bytes. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ExtractBytes]{\lst{ExtractBytes}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Box.bytesWithoutRef} method (Code 99.4)} +\label{sec:type:Box:bytesWithoutRef} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Serialized bytes of this box's content, excluding transactionId and index of output. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ExtractBytesWithNoRef]{\lst{ExtractBytesWithNoRef}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Box.id} method (Code 99.5)} +\label{sec:type:Box:id} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Blake2b256 hash of this box's content, basically equals to \lst{blake2b256(bytes)} \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ExtractId]{\lst{ExtractId}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Box.creationInfo} method (Code 99.6)} +\label{sec:type:Box:creationInfo} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & If \lst{tx} is a transaction which generated this box, then \lst{creationInfo._1} + is a height of the tx's block. The \lst{creationInfo._2} is a serialized transaction + identifier followed by box index in the transaction outputs. + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{(Int,Coll[Byte])} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ExtractCreationInfo]{\lst{ExtractCreationInfo}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Box.getReg} method (Code 99.7)} +\label{sec:type:Box:getReg} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Extracts register by id and type. + Type param \lst{T} expected type of the register. 
+ Returns \lst{Some(value)} if the register is defined and has given type and \lst{None} otherwise + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{regId} & \lst{: Int} & \text{// zero-based identifier of the register.} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Option[T]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ExtractRegisterAs]{\lst{ExtractRegisterAs}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Box.tokens} method (Code 99.8)} +\label{sec:type:Box:tokens} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Secondary tokens \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[(Coll[Byte],Long)]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Box.R0} method (Code 99.9)} +\label{sec:type:Box:R0} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Monetary value, in Ergo tokens \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Option[T]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ExtractRegisterAs]{\lst{ExtractRegisterAs}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Box.R1} method (Code 99.10)} +\label{sec:type:Box:R1} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Guarding script \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Option[T]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ExtractRegisterAs]{\lst{ExtractRegisterAs}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Box.R2} method (Code 99.11)} +\label{sec:type:Box:R2} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & 
Secondary tokens \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Option[T]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ExtractRegisterAs]{\lst{ExtractRegisterAs}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Box.R3} method (Code 99.12)} +\label{sec:type:Box:R3} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Reference to transaction and output id where the box was created \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Option[T]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ExtractRegisterAs]{\lst{ExtractRegisterAs}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Box.R4} method (Code 99.13)} +\label{sec:type:Box:R4} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Non-mandatory register \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Option[T]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ExtractRegisterAs]{\lst{ExtractRegisterAs}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Box.R5} method (Code 99.14)} +\label{sec:type:Box:R5} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Non-mandatory register \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Option[T]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ExtractRegisterAs]{\lst{ExtractRegisterAs}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Box.R6} method (Code 99.15)} +\label{sec:type:Box:R6} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Non-mandatory register \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & 
\lst{Option[T]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ExtractRegisterAs]{\lst{ExtractRegisterAs}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Box.R7} method (Code 99.16)} +\label{sec:type:Box:R7} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Non-mandatory register \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Option[T]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ExtractRegisterAs]{\lst{ExtractRegisterAs}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Box.R8} method (Code 99.17)} +\label{sec:type:Box:R8} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Non-mandatory register \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Option[T]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ExtractRegisterAs]{\lst{ExtractRegisterAs}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Box.R9} method (Code 99.18)} +\label{sec:type:Box:R9} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Non-mandatory register \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Option[T]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ExtractRegisterAs]{\lst{ExtractRegisterAs}} \\ + \hline + +\end{tabularx} diff --git a/docs/spec/generated/Byte_methods.tex b/docs/spec/generated/Byte_methods.tex new file mode 100644 index 0000000000..d7e0f2f889 --- /dev/null +++ b/docs/spec/generated/Byte_methods.tex @@ -0,0 +1,171 @@ + +\subsubsection{\lst{Byte.toByte} method (Code 106.1)} +\label{sec:type:Byte:toByte} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{Byte}, throwing exception if overflow. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Byte} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Byte.toShort} method (Code 106.2)} +\label{sec:type:Byte:toShort} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{Short}, throwing exception if overflow. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Short} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Byte.toInt} method (Code 106.3)} +\label{sec:type:Byte:toInt} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{Int}, throwing exception if overflow. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Int} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Byte.toLong} method (Code 106.4)} +\label{sec:type:Byte:toLong} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{Long}, throwing exception if overflow. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Long} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Byte.toBigInt} method (Code 106.5)} +\label{sec:type:Byte:toBigInt} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{BigInt} \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{BigInt} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Byte.toBytes} method (Code 106.6)} +\label{sec:type:Byte:toBytes} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns a big-endian representation of this numeric value in a collection of bytes. + For example, the \lst{Int} value \lst{0x12131415} would yield the + collection of bytes \lst{[0x12, 0x13, 0x14, 0x15]}. + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Byte.toBits} method (Code 106.7)} +\label{sec:type:Byte:toBits} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns a big-endian representation of this numeric in a collection of Booleans. + Each boolean corresponds to one bit. 
+ \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Boolean]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} diff --git a/docs/spec/generated/Context_methods.tex b/docs/spec/generated/Context_methods.tex new file mode 100644 index 0000000000..351e8df433 --- /dev/null +++ b/docs/spec/generated/Context_methods.tex @@ -0,0 +1,230 @@ + +\subsubsection{\lst{Context.dataInputs} method (Code 101.1)} +\label{sec:type:Context:dataInputs} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Context} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Box]} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Context.headers} method (Code 101.2)} +\label{sec:type:Context:headers} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Context} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Header]} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Context.preHeader} method (Code 101.3)} +\label{sec:type:Context:preHeader} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Context} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{PreHeader} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Context.INPUTS} method (Code 101.4)} +\label{sec:type:Context:INPUTS} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Context} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Box]} \\ + \hline + 
+\end{tabularx} + + + +\subsubsection{\lst{Context.OUTPUTS} method (Code 101.5)} +\label{sec:type:Context:OUTPUTS} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Context} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Box]} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Context.HEIGHT} method (Code 101.6)} +\label{sec:type:Context:HEIGHT} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Context} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Int} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Context.SELF} method (Code 101.7)} +\label{sec:type:Context:SELF} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Context} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Box} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Context.selfBoxIndex} method (Code 101.8)} +\label{sec:type:Context:selfBoxIndex} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Context} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Int} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Context.LastBlockUtxoRootHash} method (Code 101.9)} +\label{sec:type:Context:LastBlockUtxoRootHash} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Context} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{AvlTree} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Context.minerPubKey} method (Code 101.10)} 
+\label{sec:type:Context:minerPubKey} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Context} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Context.getVar} method (Code 101.11)} +\label{sec:type:Context:getVar} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Context} & \text{// } \\ +\lst{arg1} & \lst{: Byte} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Option[T]} \\ + \hline + +\end{tabularx} diff --git a/docs/spec/generated/GroupElement_methods.tex b/docs/spec/generated/GroupElement_methods.tex new file mode 100644 index 0000000000..b31e24f30d --- /dev/null +++ b/docs/spec/generated/GroupElement_methods.tex @@ -0,0 +1,118 @@ + +\subsubsection{\lst{GroupElement.isIdentity} method (Code 7.1)} +\label{sec:type:GroupElement:isIdentity} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Checks if this value is the identity element of the elliptic curve group. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{GroupElement.getEncoded} method (Code 7.2)} +\label{sec:type:GroupElement:getEncoded} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Get an encoding of the point value. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{GroupElement.exp} method (Code 7.3)} +\label{sec:type:GroupElement:exp} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Exponentiate this \lst{GroupElement} to the given number. Returns this to the power of k \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{k} & \lst{: BigInt} & \text{// The power} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{GroupElement} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:Exponentiate]{\lst{Exponentiate}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{GroupElement.multiply} method (Code 7.4)} +\label{sec:type:GroupElement:multiply} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Group operation. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{other} & \lst{: GroupElement} & \text{// other element of the group} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{GroupElement} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MultiplyGroup]{\lst{MultiplyGroup}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{GroupElement.negate} method (Code 7.5)} +\label{sec:type:GroupElement:negate} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Inverse element of the group. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{GroupElement} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} diff --git a/docs/spec/generated/Header_methods.tex b/docs/spec/generated/Header_methods.tex new file mode 100644 index 0000000000..5779748d8a --- /dev/null +++ b/docs/spec/generated/Header_methods.tex @@ -0,0 +1,313 @@ + +\subsubsection{\lst{Header.id} method (Code 104.1)} +\label{sec:type:Header:id} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Header} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Header.version} method (Code 104.2)} +\label{sec:type:Header:version} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Header} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Byte} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Header.parentId} method (Code 104.3)} +\label{sec:type:Header:parentId} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Header} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Header.ADProofsRoot} method (Code 104.4)} +\label{sec:type:Header:ADProofsRoot} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Header} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + +\end{tabularx} + + + 
+\subsubsection{\lst{Header.stateRoot} method (Code 104.5)} +\label{sec:type:Header:stateRoot} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Header} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{AvlTree} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Header.transactionsRoot} method (Code 104.6)} +\label{sec:type:Header:transactionsRoot} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Header} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Header.timestamp} method (Code 104.7)} +\label{sec:type:Header:timestamp} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Header} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Long} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Header.nBits} method (Code 104.8)} +\label{sec:type:Header:nBits} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Header} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Long} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Header.height} method (Code 104.9)} +\label{sec:type:Header:height} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Header} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Int} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Header.extensionRoot} method (Code 104.10)} +\label{sec:type:Header:extensionRoot} +\noindent 
+\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Header} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Header.minerPk} method (Code 104.11)} +\label{sec:type:Header:minerPk} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Header} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{GroupElement} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Header.powOnetimePk} method (Code 104.12)} +\label{sec:type:Header:powOnetimePk} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Header} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{GroupElement} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Header.powNonce} method (Code 104.13)} +\label{sec:type:Header:powNonce} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Header} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Header.powDistance} method (Code 104.14)} +\label{sec:type:Header:powDistance} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Header} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{BigInt} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Header.votes} method (Code 104.15)} +\label{sec:type:Header:votes} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + 
\bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Header} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + +\end{tabularx} diff --git a/docs/spec/generated/Int_methods.tex b/docs/spec/generated/Int_methods.tex new file mode 100644 index 0000000000..6c75d1a13c --- /dev/null +++ b/docs/spec/generated/Int_methods.tex @@ -0,0 +1,171 @@ + +\subsubsection{\lst{Int.toByte} method (Code 106.1)} +\label{sec:type:Int:toByte} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{Byte}, throwing exception if overflow. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Byte} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Int.toShort} method (Code 106.2)} +\label{sec:type:Int:toShort} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{Short}, throwing exception if overflow. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Short} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Int.toInt} method (Code 106.3)} +\label{sec:type:Int:toInt} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{Int}, throwing exception if overflow. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Int} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Int.toLong} method (Code 106.4)} +\label{sec:type:Int:toLong} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{Long}, throwing exception if overflow. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Long} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Int.toBigInt} method (Code 106.5)} +\label{sec:type:Int:toBigInt} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{BigInt} \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{BigInt} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Int.toBytes} method (Code 106.6)} +\label{sec:type:Int:toBytes} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns a big-endian representation of this numeric value in a collection of bytes. + For example, the \lst{Int} value \lst{0x12131415} would yield the + collection of bytes \lst{[0x12, 0x13, 0x14, 0x15]}. 
+ \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Int.toBits} method (Code 106.7)} +\label{sec:type:Int:toBits} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns a big-endian representation of this numeric in a collection of Booleans. + Each boolean corresponds to one bit. + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Boolean]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} diff --git a/docs/spec/generated/Long_methods.tex b/docs/spec/generated/Long_methods.tex new file mode 100644 index 0000000000..0429e22219 --- /dev/null +++ b/docs/spec/generated/Long_methods.tex @@ -0,0 +1,171 @@ + +\subsubsection{\lst{Long.toByte} method (Code 106.1)} +\label{sec:type:Long:toByte} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{Byte}, throwing exception if overflow. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Byte} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Long.toShort} method (Code 106.2)} +\label{sec:type:Long:toShort} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{Short}, throwing exception if overflow. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Short} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Long.toInt} method (Code 106.3)} +\label{sec:type:Long:toInt} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{Int}, throwing exception if overflow. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Int} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Long.toLong} method (Code 106.4)} +\label{sec:type:Long:toLong} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{Long}, throwing exception if overflow. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Long} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Long.toBigInt} method (Code 106.5)} +\label{sec:type:Long:toBigInt} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{BigInt} \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{BigInt} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Long.toBytes} method (Code 106.6)} +\label{sec:type:Long:toBytes} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns a big-endian representation of this numeric value in a collection of bytes. + For example, the \lst{Long} value \lst{0x1213141516171819L} would yield the + collection of bytes \lst{[0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19]}. + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Long.toBits} method (Code 106.7)} +\label{sec:type:Long:toBits} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns a big-endian representation of this numeric in a collection of Booleans. + Each boolean corresponds to one bit. 
+ \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Boolean]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} diff --git a/docs/spec/generated/PreHeader_methods.tex b/docs/spec/generated/PreHeader_methods.tex new file mode 100644 index 0000000000..8b8118840a --- /dev/null +++ b/docs/spec/generated/PreHeader_methods.tex @@ -0,0 +1,145 @@ + +\subsubsection{\lst{PreHeader.version} method (Code 105.1)} +\label{sec:type:PreHeader:version} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: PreHeader} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Byte} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{PreHeader.parentId} method (Code 105.2)} +\label{sec:type:PreHeader:parentId} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: PreHeader} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{PreHeader.timestamp} method (Code 105.3)} +\label{sec:type:PreHeader:timestamp} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: PreHeader} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Long} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{PreHeader.nBits} method (Code 105.4)} +\label{sec:type:PreHeader:nBits} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: PreHeader} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Long} \\ + 
\hline + +\end{tabularx} + + + +\subsubsection{\lst{PreHeader.height} method (Code 105.5)} +\label{sec:type:PreHeader:height} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: PreHeader} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Int} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{PreHeader.minerPk} method (Code 105.6)} +\label{sec:type:PreHeader:minerPk} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: PreHeader} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{GroupElement} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{PreHeader.votes} method (Code 105.7)} +\label{sec:type:PreHeader:votes} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: PreHeader} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + +\end{tabularx} diff --git a/docs/spec/generated/SCollection_methods.tex b/docs/spec/generated/SCollection_methods.tex new file mode 100644 index 0000000000..d75b349151 --- /dev/null +++ b/docs/spec/generated/SCollection_methods.tex @@ -0,0 +1,744 @@ + +\subsubsection{\lst{SCollection.size} method (Code 12.1)} +\label{sec:type:SCollection:size} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & The size of the collection in elements. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Int} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:SizeOf]{\lst{SizeOf}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.getOrElse} method (Code 12.2)} +\label{sec:type:SCollection:getOrElse} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Return the element of collection if \lst{index} is in range \lst{0 .. size-1} \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{index} & \lst{: Int} & \text{// index of the element of this collection} \\ +\lst{default} & \lst{: IV} & \text{// value to return when \lst{index} is out of range} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{IV} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ByIndex]{\lst{ByIndex}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.map} method (Code 12.3)} +\label{sec:type:SCollection:map} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Builds a new collection by applying a function to all elements of this collection. + Returns a new collection of type \lst{Coll[B]} resulting from applying the given function + \lst{f} to each element of this collection and collecting the results. + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{f} & \lst{: (IV) => OV} & \text{// the function to apply to each element} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[OV]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MapCollection]{\lst{MapCollection}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.exists} method (Code 12.4)} +\label{sec:type:SCollection:exists} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Tests whether a predicate holds for at least one element of this collection. 
+Returns \lst{true} if the given predicate \lst{p} is satisfied by at least one element of this collection, otherwise \lst{false} + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{p} & \lst{: (IV) => Boolean} & \text{// the predicate used to test elements} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:Exists]{\lst{Exists}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.fold} method (Code 12.5)} +\label{sec:type:SCollection:fold} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Applies a binary operator to a start value and all elements of this collection, going left to right. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{zero} & \lst{: OV} & \text{// a starting value} \\ +\lst{op} & \lst{: (OV,IV) => OV} & \text{// the binary operator} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{OV} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:Fold]{\lst{Fold}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.forall} method (Code 12.6)} +\label{sec:type:SCollection:forall} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Tests whether a predicate holds for all elements of this collection. +Returns \lst{true} if this collection is empty or the given predicate \lst{p} +holds for all elements of this collection, otherwise \lst{false}. 
+ \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{p} & \lst{: (IV) => Boolean} & \text{// the predicate used to test elements} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ForAll]{\lst{ForAll}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.slice} method (Code 12.7)} +\label{sec:type:SCollection:slice} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Selects an interval of elements. The returned collection is made up + of all elements \lst{x} which satisfy the invariant: + \lst{ + from <= indexOf(x) < until + } + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{from} & \lst{: Int} & \text{// the lowest index to include from this collection} \\ +\lst{until} & \lst{: Int} & \text{// the lowest index to EXCLUDE from this collection} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[IV]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:Slice]{\lst{Slice}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.filter} method (Code 12.8)} +\label{sec:type:SCollection:filter} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Selects all elements of this collection which satisfy a predicate. + Returns a new collection consisting of all elements of this collection that satisfy the given + predicate \lst{p}. The order of the elements is preserved. 
+ \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{p} & \lst{: (IV) => Boolean} & \text{// the predicate used to test elements.} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[IV]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:Filter]{\lst{Filter}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.append} method (Code 12.9)} +\label{sec:type:SCollection:append} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Puts the elements of other collection after the elements of this collection (concatenation of 2 collections) \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{other} & \lst{: Coll[IV]} & \text{// the collection to append at the end of this} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[IV]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:Append]{\lst{Append}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.apply} method (Code 12.10)} +\label{sec:type:SCollection:apply} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & The element at given index. + Indices start at \lst{0}; \lst{xs.apply(0)} is the first element of collection \lst{xs}. + Note the indexing syntax \lst{xs(i)} is a shorthand for \lst{xs.apply(i)}. + Returns the element at the given index. 
+ Throws an exception if \lst{i < 0} or \lst{length <= i} + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{i} & \lst{: Int} & \text{// the index} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{IV} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ByIndex]{\lst{ByIndex}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.<<} method (Code 12.11)} +\label{sec:type:SCollection:<<} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Coll[IV]} & \text{// } \\ +\lst{arg1} & \lst{: Int} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[IV]} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.>>} method (Code 12.12)} +\label{sec:type:SCollection:>>} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Coll[IV]} & \text{// } \\ +\lst{arg1} & \lst{: Int} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[IV]} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.>>>} method (Code 12.13)} +\label{sec:type:SCollection:>>>} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Coll[Boolean]} & \text{// } \\ +\lst{arg1} & \lst{: Int} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Boolean]} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.indices} method (Code 12.14)} +\label{sec:type:SCollection:indices} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Produces the range of all indices of this collection as a new collection + containing [0 .. length-1] values. 
+ \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Int]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.flatMap} method (Code 12.15)} +\label{sec:type:SCollection:flatMap} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Builds a new collection by applying a function to all elements of this collection + and using the elements of the resulting collections. + Function \lst{f} is constrained to be of the form \lst{x => x.someProperty}, otherwise + it is illegal. + Returns a new collection of type \lst{Coll[B]} resulting from applying the given collection-valued function + \lst{f} to each element of this collection and concatenating the results. + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{f} & \lst{: (IV) => Coll[OV]} & \text{// the function to apply to each element.} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[OV]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.patch} method (Code 12.19)} +\label{sec:type:SCollection:patch} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[IV]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.updated} method (Code 12.20)} +\label{sec:type:SCollection:updated} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[IV]} \\ + \hline + + 
\bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.updateMany} method (Code 12.21)} +\label{sec:type:SCollection:updateMany} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[IV]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.unionSets} method (Code 12.22)} +\label{sec:type:SCollection:unionSets} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[IV]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.diff} method (Code 12.23)} +\label{sec:type:SCollection:diff} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[IV]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.intersect} method (Code 12.24)} +\label{sec:type:SCollection:intersect} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[IV]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.prefixLength} method (Code 12.25)} 
+\label{sec:type:SCollection:prefixLength} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Int} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.indexOf} method (Code 12.26)} +\label{sec:type:SCollection:indexOf} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Int} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.lastIndexOf} method (Code 12.27)} +\label{sec:type:SCollection:lastIndexOf} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Int} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.find} method (Code 12.28)} +\label{sec:type:SCollection:find} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Option[IV]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.zip} method (Code 12.29)} +\label{sec:type:SCollection:zip} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & 
\lst{Coll[(IV,OV)]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.distinct} method (Code 12.30)} +\label{sec:type:SCollection:distinct} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[IV]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.startsWith} method (Code 12.31)} +\label{sec:type:SCollection:startsWith} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.endsWith} method (Code 12.32)} +\label{sec:type:SCollection:endsWith} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SCollection.mapReduce} method (Code 12.34)} +\label{sec:type:SCollection:mapReduce} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[(K,V)]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} diff --git 
a/docs/spec/generated/SOption_methods.tex b/docs/spec/generated/SOption_methods.tex new file mode 100644 index 0000000000..78419c9a7d --- /dev/null +++ b/docs/spec/generated/SOption_methods.tex @@ -0,0 +1,228 @@ + +\subsubsection{\lst{SOption.isEmpty} method (Code 36.1)} +\label{sec:type:SOption:isEmpty} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{arg0} & \lst{: Option[T]} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SOption.isDefined} method (Code 36.2)} +\label{sec:type:SOption:isDefined} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns \lst{true} if the option is an instance of \lst{Some}, \lst{false} otherwise. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:OptionIsDefined]{\lst{OptionIsDefined}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SOption.get} method (Code 36.3)} +\label{sec:type:SOption:get} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns the option's value. The option must be nonempty. Throws exception if the option is empty. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{T} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:OptionGet]{\lst{OptionGet}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SOption.getOrElse} method (Code 36.4)} +\label{sec:type:SOption:getOrElse} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns the option's value if the option is nonempty, otherwise +return the result of evaluating \lst{default}. 
+ \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{default} & \lst{: T} & \text{// the default value} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{T} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:OptionGetOrElse]{\lst{OptionGetOrElse}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SOption.fold} method (Code 36.5)} +\label{sec:type:SOption:fold} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns the result of applying \lst{f} to this option's + value if the option is nonempty. Otherwise, evaluates + expression \lst{ifEmpty}. + This is equivalent to \lst{option map f getOrElse ifEmpty}. + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{ifEmpty} & \lst{: R} & \text{// the expression to evaluate if empty} \\ +\lst{f} & \lst{: (T) => R} & \text{// the function to apply if nonempty} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{R} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SOption.toColl} method (Code 36.6)} +\label{sec:type:SOption:toColl} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Convert this Option to a collection with zero or one element. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[T]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SOption.map} method (Code 36.7)} +\label{sec:type:SOption:map} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns a \lst{Some} containing the result of applying \lst{f} to this option's + value if this option is nonempty. + Otherwise return \lst{None}. 
+ \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{f} & \lst{: (T) => R} & \text{// the function to apply} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Option[R]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SOption.filter} method (Code 36.8)} +\label{sec:type:SOption:filter} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns this option if it is nonempty and applying the predicate \lst{p} to + this option's value returns true. Otherwise, return \lst{None}. + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{p} & \lst{: (T) => Boolean} & \text{// the predicate used for testing} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Option[T]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SOption.flatMap} method (Code 36.9)} +\label{sec:type:SOption:flatMap} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns the result of applying \lst{f} to this option's value if + this option is nonempty. + Returns \lst{None} if this option is empty. + Slightly different from \lst{map} in that \lst{f} is expected to + return an option (which could be \lst{None}). 
+ \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{f} & \lst{: (T) => Option[R]} & \text{// the function to apply} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Option[R]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:MethodCall]{\lst{MethodCall}} \\ + \hline + +\end{tabularx} diff --git a/docs/spec/generated/Short_methods.tex b/docs/spec/generated/Short_methods.tex new file mode 100644 index 0000000000..35a7b9852f --- /dev/null +++ b/docs/spec/generated/Short_methods.tex @@ -0,0 +1,171 @@ + +\subsubsection{\lst{Short.toByte} method (Code 106.1)} +\label{sec:type:Short:toByte} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{Byte}, throwing exception if overflow. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Byte} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Short.toShort} method (Code 106.2)} +\label{sec:type:Short:toShort} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{Short}, throwing exception if overflow. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Short} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Short.toInt} method (Code 106.3)} +\label{sec:type:Short:toInt} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{Int}, throwing exception if overflow. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Int} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Short.toLong} method (Code 106.4)} +\label{sec:type:Short:toLong} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{Long}, throwing exception if overflow. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Long} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Short.toBigInt} method (Code 106.5)} +\label{sec:type:Short:toBigInt} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts this numeric value to \lst{BigInt} \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{BigInt} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Short.toBytes} method (Code 106.6)} +\label{sec:type:Short:toBytes} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns a big-endian representation of this numeric value in a collection of bytes. + For example, the \lst{Int} value \lst{0x12131415} would yield the + collection of bytes \lst{[0x12, 0x13, 0x14, 0x15]}. 
+ \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{Short.toBits} method (Code 106.7)} +\label{sec:type:Short:toBits} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns a big-endian representation of this numeric in a collection of Booleans. + Each boolean corresponds to one bit. + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Boolean]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:PropertyCall]{\lst{PropertyCall}} \\ + \hline + +\end{tabularx} diff --git a/docs/spec/generated/SigmaDslBuilder_methods.tex b/docs/spec/generated/SigmaDslBuilder_methods.tex new file mode 100644 index 0000000000..0232ed2e76 --- /dev/null +++ b/docs/spec/generated/SigmaDslBuilder_methods.tex @@ -0,0 +1,22 @@ + +\subsubsection{\lst{SigmaDslBuilder.groupGenerator} method (Code 106.1)} +\label{sec:type:SigmaDslBuilder:groupGenerator} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{GroupElement} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:GroupGenerator]{\lst{GroupGenerator}} \\ + \hline + +\end{tabularx} diff --git a/docs/spec/generated/SigmaProp_methods.tex b/docs/spec/generated/SigmaProp_methods.tex new file mode 100644 index 0000000000..725a6fc74d --- /dev/null +++ b/docs/spec/generated/SigmaProp_methods.tex @@ -0,0 +1,43 @@ + +\subsubsection{\lst{SigmaProp.propBytes} method (Code 8.1)} +\label{sec:type:SigmaProp:propBytes} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Serialized bytes of this sigma 
proposition taken as ErgoTree. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:SigmaPropBytes]{\lst{SigmaPropBytes}} \\ + \hline + +\end{tabularx} + + + +\subsubsection{\lst{SigmaProp.isProven} method (Code 8.2)} +\label{sec:type:SigmaProp:isProven} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Verify that sigma proposition is proven. (FRONTEND ONLY) \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + +\end{tabularx} diff --git a/docs/spec/generated/ergotree_serialization.tex b/docs/spec/generated/ergotree_serialization.tex new file mode 100644 index 0000000000..0d2844f2c6 --- /dev/null +++ b/docs/spec/generated/ergotree_serialization.tex @@ -0,0 +1,79 @@ +\subsubsection{\lst{ByIndex} operation (OpCode 178)} + +\noindent +\(\begin{array}{| l | l | l | l |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + \lst{input} & \lst{Value} & variable & \\ + \hline + \multicolumn{4}{l}{\lst{optional default}} \\ + \hline + ~~\lst{tag} & \lst{Byte} & 1 & \text{0 - no value; 1 - has value} \\ + \hline + \multicolumn{4}{l}{~~\lst{when tag == 1}} \\ + \hline + ~~~~\lst{value} & \lst{Value} & variable & \\ + \hline + \multicolumn{4}{l}{\lst{end optional}} \\ + \end{array}\) + + +\subsubsection{\lst{EQ} operation (OpCode 147)} + +\noindent +\(\begin{array}{| l | l | l | l |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + \multicolumn{4}{l}{\lst{match (left, right)}} \\ + \multicolumn{4}{l}{~~\lst{with (Constant(left, SBoolean), Constant(right, SBoolean))}} \\ + \hline + ~~~~\lst{opCode} & \lst{Byte} & 1 & \lst{ConcreteCollectionBooleanConstantCode} \\ + \hline + ~~~~\lst{(left,right)} & \lst{Bits} & 1 & \text{two bits in 1 byte} \\ + \hline + 
\multicolumn{4}{l}{~~\lst{otherwise}} \\ + \hline + ~~~~\lst{left} & \lst{Value} & variable & \\ + \hline + ~~~~\lst{right} & \lst{Value} & variable & \\ + \hline + \multicolumn{4}{l}{\lst{end match}} \\ + \end{array}\) + + \subsubsection{\lst{Tuple} operation (OpCode 134)} + + \noindent + \(\begin{array}{| l | l | l | l |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + \lst{numItems} & \lst{UByte} & 1 & \\ + \hline + \multicolumn{4}{l}{\lst{for i=1 to numItems}} \\ + \hline + \lst{item} & \lst{Value} & variable & \\ + \hline + \multicolumn{4}{l}{\lst{end for}} \\ + \end{array}\) + + \subsubsection{\lst{Fold} operation (OpCode 176)} + + \noindent + \(\begin{array}{| l | l | l | l |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + \lst{this} & \lst{Value} & variable & \\ + \hline + \lst{zero} & \lst{Value} & variable & \\ + \hline + \lst{op} & \lst{Value} & variable & \\ + \hline + \end{array}\) + +% SerScope(Fold$(176), ArrayBuffer((this,DataScope(DataInfo(ArgInfo(this,this +% instance),ValueFmt))), (zero,DataScope(DataInfo(ArgInfo(zero,a starting +% value),ValueFmt))), (op,DataScope(DataInfo(ArgInfo(op,the binary +% operator),ValueFmt))))) diff --git a/docs/spec/generated/ergotree_serialization1.tex b/docs/spec/generated/ergotree_serialization1.tex new file mode 100644 index 0000000000..468756773c --- /dev/null +++ b/docs/spec/generated/ergotree_serialization1.tex @@ -0,0 +1,1398 @@ + +\subsubsection{\lst{ConcreteCollection} operation (OpCode 131)} +\label{sec:serialization:operation:ConcreteCollection} + + + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ numItems $ & \lst{VLQ(UShort)} & [1, *] & number of item in a collection of expressions \\ + \hline + $ elementType $ & \lst{Type} & [1, *] & type of each expression in the collection \\ + \hline + 
 \multicolumn{4}{l}{\lst{for}~$i=1$~\lst{to}~$numItems$} \\ + \hline + ~~ $ item_i $ & \lst{Expr} & [1, *] & expression in i-th position \\ + \hline + \multicolumn{4}{l}{\lst{end for}} \\ +\end{tabularx}\) + + +\subsubsection{\lst{ConcreteCollectionBooleanConstant} operation (OpCode 133)} +\label{sec:serialization:operation:ConcreteCollectionBooleanConstant} + + + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ numBits $ & \lst{VLQ(UShort)} & [1, *] & number of items in a collection of Boolean values \\ + \hline + $ bits $ & \lst{Bits} & [1, 1024] & Boolean values encoded as bits (rightmost byte is zero-padded on the right) \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{Tuple} operation (OpCode 134)} +\label{sec:serialization:operation:Tuple} + + + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ numItems $ & \lst{UByte} & 1 & number of items in the tuple \\ + \hline + \multicolumn{4}{l}{\lst{for}~$i=1$~\lst{to}~$numItems$} \\ + \hline + ~~ $ item_i $ & \lst{Expr} & [1, *] & tuple's item in i-th position \\ + \hline + \multicolumn{4}{l}{\lst{end for}} \\ +\end{tabularx}\) + + +\subsubsection{\lst{SelectField} operation (OpCode 140)} +\label{sec:serialization:operation:SelectField} + +Select tuple field by its 1-based index. E.g. 
\lst{input._1} is transformed to \lst{SelectField(input, 1)} See~\hyperref[sec:appendix:primops:SelectField]{\lst{selectField}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ input $ & \lst{Expr} & [1, *] & tuple of items \\ + \hline + $ fieldIndex $ & \lst{Byte} & 1 & index of an item to select \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{LT} operation (OpCode 143)} +\label{sec:serialization:operation:LT} + +Returns \lst{true} if the left operand is less than the right operand, \lst{false} otherwise. See~\hyperref[sec:appendix:primops:LT]{\lst{<}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + \multicolumn{4}{l}{\lst{match}~$ (left, right) $} \\ + + \multicolumn{4}{l}{~~\lst{otherwise} } \\ + \hline + ~~~~ $ left $ & \lst{Expr} & [1, *] & left operand \\ + \hline + ~~~~ $ right $ & \lst{Expr} & [1, *] & right operand \\ + \hline + \multicolumn{4}{l}{\lst{end match}} \\ +\end{tabularx}\) + + +\subsubsection{\lst{LE} operation (OpCode 144)} +\label{sec:serialization:operation:LE} + +Returns \lst{true} if the left operand is less than or equal to the right operand, \lst{false} otherwise. 
See~\hyperref[sec:appendix:primops:LE]{\lst{<=}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + \multicolumn{4}{l}{\lst{match}~$ (left, right) $} \\ + + \multicolumn{4}{l}{~~\lst{otherwise} } \\ + \hline + ~~~~ $ left $ & \lst{Expr} & [1, *] & left operand \\ + \hline + ~~~~ $ right $ & \lst{Expr} & [1, *] & right operand \\ + \hline + \multicolumn{4}{l}{\lst{end match}} \\ +\end{tabularx}\) + + +\subsubsection{\lst{GT} operation (OpCode 145)} +\label{sec:serialization:operation:GT} + +Returns \lst{true} if the left operand is greater than the right operand, \lst{false} otherwise. See~\hyperref[sec:appendix:primops:GT]{\lst{>}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + \multicolumn{4}{l}{\lst{match}~$ (left, right) $} \\ + + \multicolumn{4}{l}{~~\lst{otherwise} } \\ + \hline + ~~~~ $ left $ & \lst{Expr} & [1, *] & left operand \\ + \hline + ~~~~ $ right $ & \lst{Expr} & [1, *] & right operand \\ + \hline + \multicolumn{4}{l}{\lst{end match}} \\ +\end{tabularx}\) + + +\subsubsection{\lst{GE} operation (OpCode 146)} +\label{sec:serialization:operation:GE} + +Returns \lst{true} if the left operand is greater than or equal to the right operand, \lst{false} otherwise. 
See~\hyperref[sec:appendix:primops:GE]{\lst{>=}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + \multicolumn{4}{l}{\lst{match}~$ (left, right) $} \\ + + \multicolumn{4}{l}{~~\lst{otherwise} } \\ + \hline + ~~~~ $ left $ & \lst{Expr} & [1, *] & left operand \\ + \hline + ~~~~ $ right $ & \lst{Expr} & [1, *] & right operand \\ + \hline + \multicolumn{4}{l}{\lst{end match}} \\ +\end{tabularx}\) + + +\subsubsection{\lst{EQ} operation (OpCode 147)} +\label{sec:serialization:operation:EQ} + +Compare equality of \lst{left} and \lst{right} arguments See~\hyperref[sec:appendix:primops:EQ]{\lst{==}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + \multicolumn{4}{l}{\lst{match}~$ (left, right) $} \\ + + \multicolumn{4}{l}{~~\lst{otherwise} } \\ + \hline + ~~~~ $ left $ & \lst{Expr} & [1, *] & left operand \\ + \hline + ~~~~ $ right $ & \lst{Expr} & [1, *] & right operand \\ + \hline + \multicolumn{4}{l}{\lst{end match}} \\ +\end{tabularx}\) + + +\subsubsection{\lst{NEQ} operation (OpCode 148)} +\label{sec:serialization:operation:NEQ} + +Compare inequality of \lst{left} and \lst{right} arguments See~\hyperref[sec:appendix:primops:NEQ]{\lst{!=}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + \multicolumn{4}{l}{\lst{match}~$ (left, right) $} \\ + + \multicolumn{4}{l}{~~\lst{otherwise} } \\ + \hline + ~~~~ $ left $ & \lst{Expr} & [1, *] & left operand \\ + \hline + ~~~~ $ right $ & \lst{Expr} & [1, *] & right operand \\ + \hline + \multicolumn{4}{l}{\lst{end match}} \\ +\end{tabularx}\) + + +\subsubsection{\lst{If} operation (OpCode 149)} +\label{sec:serialization:operation:If} + +Compute condition, if true then compute trueBranch else compute falseBranch 
See~\hyperref[sec:appendix:primops:If]{\lst{if}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ condition $ & \lst{Expr} & [1, *] & condition expression \\ + \hline + $ trueBranch $ & \lst{Expr} & [1, *] & expression to execute when \lst{condition == true} \\ + \hline + $ falseBranch $ & \lst{Expr} & [1, *] & expression to execute when \lst{condition == false} \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{AND} operation (OpCode 150)} +\label{sec:serialization:operation:AND} + +Returns true if \emph{all} the elements in collection are \lst{true}. See~\hyperref[sec:appendix:primops:AND]{\lst{allOf}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ conditions $ & \lst{Expr} & [1, *] & a collection of conditions \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{OR} operation (OpCode 151)} +\label{sec:serialization:operation:OR} + +Returns true if \emph{any} of the elements in collection are \lst{true}. See~\hyperref[sec:appendix:primops:OR]{\lst{anyOf}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ conditions $ & \lst{Expr} & [1, *] & a collection of conditions \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{AtLeast} operation (OpCode 152)} +\label{sec:serialization:operation:AtLeast} + + Logical threshold. + AtLeast has two inputs: integer \lst{bound} and \lst{children} same as in AND/OR. + The result is true if at least \lst{bound} children are proven. 
+ See~\hyperref[sec:appendix:primops:AtLeast]{\lst{atLeast}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ bound $ & \lst{Expr} & [1, *] & required minimum of proven children \\ + \hline + $ children $ & \lst{Expr} & [1, *] & proposition to be proven/validated \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{Minus} operation (OpCode 153)} +\label{sec:serialization:operation:Minus} + +Returns a result of subtracting second numeric operand from the first. See~\hyperref[sec:appendix:primops:Minus]{\lst{-}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ left $ & \lst{Expr} & [1, *] & left operand \\ + \hline + $ right $ & \lst{Expr} & [1, *] & right operand \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{Plus} operation (OpCode 154)} +\label{sec:serialization:operation:Plus} + +Returns a sum of two numeric operands See~\hyperref[sec:appendix:primops:Plus]{\lst{+}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ left $ & \lst{Expr} & [1, *] & left operand \\ + \hline + $ right $ & \lst{Expr} & [1, *] & right operand \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{Xor} operation (OpCode 155)} +\label{sec:serialization:operation:Xor} + +Byte-wise XOR of two collections of bytes See~\hyperref[sec:appendix:primops:Xor]{\lst{binary_|}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ left $ & \lst{Expr} & [1, *] & left operand \\ + \hline + $ right $ & \lst{Expr} & [1, *] & right operand \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{Multiply} operation (OpCode 156)} +\label{sec:serialization:operation:Multiply} + +Returns a multiplication 
of two numeric operands See~\hyperref[sec:appendix:primops:Multiply]{\lst{*}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ left $ & \lst{Expr} & [1, *] & left operand \\ + \hline + $ right $ & \lst{Expr} & [1, *] & right operand \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{Division} operation (OpCode 157)} +\label{sec:serialization:operation:Division} + +Integer division of the first operand by the second operand. See~\hyperref[sec:appendix:primops:Division]{\lst{/}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ left $ & \lst{Expr} & [1, *] & left operand \\ + \hline + $ right $ & \lst{Expr} & [1, *] & right operand \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{Modulo} operation (OpCode 158)} +\label{sec:serialization:operation:Modulo} + +Remainder from division of the first operand by the second operand. See~\hyperref[sec:appendix:primops:Modulo]{\lst{\%}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ left $ & \lst{Expr} & [1, *] & left operand \\ + \hline + $ right $ & \lst{Expr} & [1, *] & right operand \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{Exponentiate} operation (OpCode 159)} +\label{sec:serialization:operation:Exponentiate} + +Exponentiate this \lst{GroupElement} to the given number. 
Returns this to the power of k See~\hyperref[sec:type:GroupElement:exp]{\lst{GroupElement.exp}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ this $ & \lst{Expr} & [1, *] & this instance \\ + \hline + $ k $ & \lst{Expr} & [1, *] & The power \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{MultiplyGroup} operation (OpCode 160)} +\label{sec:serialization:operation:MultiplyGroup} + +Group operation. See~\hyperref[sec:type:GroupElement:multiply]{\lst{GroupElement.multiply}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ this $ & \lst{Expr} & [1, *] & this instance \\ + \hline + $ other $ & \lst{Expr} & [1, *] & other element of the group \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{Min} operation (OpCode 161)} +\label{sec:serialization:operation:Min} + +Minimum value of two operands. See~\hyperref[sec:appendix:primops:Min]{\lst{min}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ left $ & \lst{Expr} & [1, *] & left operand \\ + \hline + $ right $ & \lst{Expr} & [1, *] & right operand \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{Max} operation (OpCode 162)} +\label{sec:serialization:operation:Max} + +Maximum value of two operands. 
See~\hyperref[sec:appendix:primops:Max]{\lst{max}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ left $ & \lst{Expr} & [1, *] & left operand \\ + \hline + $ right $ & \lst{Expr} & [1, *] & right operand \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{MapCollection} operation (OpCode 173)} +\label{sec:serialization:operation:MapCollection} + + Builds a new collection by applying a function to all elements of this collection. + Returns a new collection of type \lst{Coll[B]} resulting from applying the given function + \lst{f} to each element of this collection and collecting the results. + See~\hyperref[sec:type:SCollection:map]{\lst{SCollection.map}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ this $ & \lst{Expr} & [1, *] & this instance \\ + \hline + $ f $ & \lst{Expr} & [1, *] & the function to apply to each element \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{Exists} operation (OpCode 174)} +\label{sec:serialization:operation:Exists} + +Tests whether a predicate holds for at least one element of this collection. +Returns \lst{true} if the given predicate \lst{p} is satisfied by at least one element of this collection, otherwise \lst{false} + See~\hyperref[sec:type:SCollection:exists]{\lst{SCollection.exists}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ this $ & \lst{Expr} & [1, *] & this instance \\ + \hline + $ p $ & \lst{Expr} & [1, *] & the predicate used to test elements \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{ForAll} operation (OpCode 175)} +\label{sec:serialization:operation:ForAll} + +Tests whether a predicate holds for all elements of this collection. 
+Returns \lst{true} if this collection is empty or the given predicate \lst{p} +holds for all elements of this collection, otherwise \lst{false}. + See~\hyperref[sec:type:SCollection:forall]{\lst{SCollection.forall}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ this $ & \lst{Expr} & [1, *] & this instance \\ + \hline + $ p $ & \lst{Expr} & [1, *] & the predicate used to test elements \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{Fold} operation (OpCode 176)} +\label{sec:serialization:operation:Fold} + +Applies a binary operator to a start value and all elements of this collection, going left to right. See~\hyperref[sec:type:SCollection:fold]{\lst{SCollection.fold}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ this $ & \lst{Expr} & [1, *] & this instance \\ + \hline + $ zero $ & \lst{Expr} & [1, *] & a starting value \\ + \hline + $ op $ & \lst{Expr} & [1, *] & the binary operator \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{SizeOf} operation (OpCode 177)} +\label{sec:serialization:operation:SizeOf} + +The size of the collection in elements. See~\hyperref[sec:type:SCollection:size]{\lst{SCollection.size}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ this $ & \lst{Expr} & [1, *] & this instance \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{ByIndex} operation (OpCode 178)} +\label{sec:serialization:operation:ByIndex} + +Return the element of collection if \lst{index} is in range \lst{0 .. 
size-1} See~\hyperref[sec:type:SCollection:getOrElse]{\lst{SCollection.getOrElse}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ this $ & \lst{Expr} & [1, *] & this instance \\ + \hline + $ index $ & \lst{Expr} & [1, *] & index of the element of this collection \\ + \hline + \multicolumn{4}{l}{\lst{optional}~$default$} \\ + \hline + ~~$tag$ & \lst{Byte} & 1 & 0 - no value; 1 - has value \\ + \hline + \multicolumn{4}{l}{~~\lst{when}~$tag == 1$} \\ + \hline + ~~~~ $ default $ & \lst{Expr} & [1, *] & value to return when \lst{index} is out of range \\ + \hline + \multicolumn{4}{l}{\lst{end optional}} \\ +\end{tabularx}\) + + +\subsubsection{\lst{Append} operation (OpCode 179)} +\label{sec:serialization:operation:Append} + +Puts the elements of other collection after the elements of this collection (concatenation of 2 collections) See~\hyperref[sec:type:SCollection:append]{\lst{SCollection.append}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ this $ & \lst{Expr} & [1, *] & this instance \\ + \hline + $ other $ & \lst{Expr} & [1, *] & the collection to append at the end of this \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{Slice} operation (OpCode 180)} +\label{sec:serialization:operation:Slice} + +Selects an interval of elements. 
The returned collection is made up + of all elements \lst{x} which satisfy the invariant: + \lst{ + from <= indexOf(x) < until + } + See~\hyperref[sec:type:SCollection:slice]{\lst{SCollection.slice}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ this $ & \lst{Expr} & [1, *] & this instance \\ + \hline + $ from $ & \lst{Expr} & [1, *] & the lowest index to include from this collection \\ + \hline + $ until $ & \lst{Expr} & [1, *] & the lowest index to EXCLUDE from this collection \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{ExtractAmount} operation (OpCode 193)} +\label{sec:serialization:operation:ExtractAmount} + +Mandatory: Monetary value, in Ergo tokens (NanoErg unit of measure) See~\hyperref[sec:type:Box:value]{\lst{Box.value}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ this $ & \lst{Expr} & [1, *] & this instance \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{ExtractScriptBytes} operation (OpCode 194)} +\label{sec:serialization:operation:ExtractScriptBytes} + +Serialized bytes of guarding script, which should be evaluated to true in order to + open this box. (aka spend it in a transaction) See~\hyperref[sec:type:Box:propositionBytes]{\lst{Box.propositionBytes}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ this $ & \lst{Expr} & [1, *] & this instance \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{ExtractBytes} operation (OpCode 195)} +\label{sec:serialization:operation:ExtractBytes} + +Serialized bytes of this box's content, including proposition bytes. 
See~\hyperref[sec:type:Box:bytes]{\lst{Box.bytes}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ this $ & \lst{Expr} & [1, *] & this instance \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{ExtractBytesWithNoRef} operation (OpCode 196)} +\label{sec:serialization:operation:ExtractBytesWithNoRef} + +Serialized bytes of this box's content, excluding transactionId and index of output. See~\hyperref[sec:type:Box:bytesWithoutRef]{\lst{Box.bytesWithoutRef}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ this $ & \lst{Expr} & [1, *] & this instance \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{ExtractId} operation (OpCode 197)} +\label{sec:serialization:operation:ExtractId} + +Blake2b256 hash of this box's content, basically equals to \lst{blake2b256(bytes)} See~\hyperref[sec:type:Box:id]{\lst{Box.id}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ this $ & \lst{Expr} & [1, *] & this instance \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{ExtractRegisterAs} operation (OpCode 198)} +\label{sec:serialization:operation:ExtractRegisterAs} + + Extracts register by id and type. + Type param \lst{T} expected type of the register. + Returns \lst{Some(value)} if the register is defined and has given type and \lst{None} otherwise + See~\hyperref[sec:type:Box:getReg]{\lst{Box.getReg}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ this $ & \lst{Expr} & [1, *] & this instance \\ + \hline + $ regId $ & \lst{Byte} & 1 & zero-based identifier of the register. 
\\ + \hline + $ type $ & \lst{Type} & [1, *] & expected type of the value in register \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{ExtractCreationInfo} operation (OpCode 199)} +\label{sec:serialization:operation:ExtractCreationInfo} + + If \lst{tx} is a transaction which generated this box, then \lst{creationInfo._1} + is a height of the tx's block. The \lst{creationInfo._2} is a serialized transaction + identifier followed by box index in the transaction outputs. + See~\hyperref[sec:type:Box:creationInfo]{\lst{Box.creationInfo}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ this $ & \lst{Expr} & [1, *] & this instance \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{CalcBlake2b256} operation (OpCode 203)} +\label{sec:serialization:operation:CalcBlake2b256} + +Calculate Blake2b hash from \lst{input} bytes. See~\hyperref[sec:appendix:primops:CalcBlake2b256]{\lst{blake2b256}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ input $ & \lst{Expr} & [1, *] & collection of bytes \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{CalcSha256} operation (OpCode 204)} +\label{sec:serialization:operation:CalcSha256} + +Calculate Sha256 hash from \lst{input} bytes. See~\hyperref[sec:appendix:primops:CalcSha256]{\lst{sha256}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ input $ & \lst{Expr} & [1, *] & collection of bytes \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{CreateProveDlog} operation (OpCode 205)} +\label{sec:serialization:operation:CreateProveDlog} + +ErgoTree operation to create a new \lst{SigmaProp} value representing public key + of discrete logarithm signature protocol. 
+ See~\hyperref[sec:appendix:primops:CreateProveDlog]{\lst{proveDlog}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ value $ & \lst{Expr} & [1, *] & element of elliptic curve group \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{CreateProveDHTuple} operation (OpCode 206)} +\label{sec:serialization:operation:CreateProveDHTuple} + + ErgoTree operation to create a new SigmaProp value representing public key + of Diffie Hellman signature protocol. + Common input: (g,h,u,v) + See~\hyperref[sec:appendix:primops:CreateProveDHTuple]{\lst{proveDHTuple}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ g $ & \lst{Expr} & [1, *] & \\ + \hline + $ h $ & \lst{Expr} & [1, *] & \\ + \hline + $ u $ & \lst{Expr} & [1, *] & \\ + \hline + $ v $ & \lst{Expr} & [1, *] & \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{SigmaPropBytes} operation (OpCode 208)} +\label{sec:serialization:operation:SigmaPropBytes} + +Serialized bytes of this sigma proposition taken as ErgoTree. See~\hyperref[sec:type:SigmaProp:propBytes]{\lst{SigmaProp.propBytes}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ this $ & \lst{Expr} & [1, *] & this instance \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{BoolToSigmaProp} operation (OpCode 209)} +\label{sec:serialization:operation:BoolToSigmaProp} + +Embedding of \lst{Boolean} values to \lst{SigmaProp} values. + As an example, this operation allows boolean experessions + to be used as arguments of \lst{atLeast(..., sigmaProp(boolExpr), ...)} operation. + During execution results to either \lst{TrueProp} or \lst{FalseProp} values of \lst{SigmaProp} type. 
+ See~\hyperref[sec:appendix:primops:BoolToSigmaProp]{\lst{sigmaProp}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ condition $ & \lst{Expr} & [1, *] & boolean value to embed in SigmaProp value \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{DeserializeContext} operation (OpCode 212)} +\label{sec:serialization:operation:DeserializeContext} + +Extracts context variable as \lst{Coll[Byte]}, deserializes it to script + and then executes this script in the current context. + The original \lst{Coll[Byte]} of the script is available as \lst{getVar[Coll[Byte]](id)}. + Type parameter \lst{V} result type of the deserialized script. + Throws an exception if the actual script type doesn't conform to T. + Returns a result of the script execution in the current context + See~\hyperref[sec:appendix:primops:DeserializeContext]{\lst{executeFromVar}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ type $ & \lst{Type} & [1, *] & expected type of the deserialized script \\ + \hline + $ id $ & \lst{Byte} & 1 & identifier of the context variable \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{DeserializeRegister} operation (OpCode 213)} +\label{sec:serialization:operation:DeserializeRegister} + +Extracts SELF register as \lst{Coll[Byte]}, deserializes it to script + and then executes this script in the current context. + The original \lst{Coll[Byte]} of the script is available as \lst{SELF.getReg[Coll[Byte]](id)}. + Type parameter \lst{T} result type of the deserialized script. + Throws an exception if the actual script type doesn't conform to \lst{T}. 
+ Returns a result of the script execution in the current context + See~\hyperref[sec:appendix:primops:DeserializeRegister]{\lst{executeFromSelfReg}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ id $ & \lst{Byte} & 1 & identifier of the register \\ + \hline + $ type $ & \lst{Type} & [1, *] & expected type of the deserialized script \\ + \hline + \multicolumn{4}{l}{\lst{optional}~$default$} \\ + \hline + ~~$tag$ & \lst{Byte} & 1 & 0 - no value; 1 - has value \\ + \hline + \multicolumn{4}{l}{~~\lst{when}~$tag == 1$} \\ + \hline + ~~~~ $ default $ & \lst{Expr} & [1, *] & optional default value, if register is not available \\ + \hline + \multicolumn{4}{l}{\lst{end optional}} \\ +\end{tabularx}\) + + +\subsubsection{\lst{ValDef} operation (OpCode 214)} +\label{sec:serialization:operation:ValDef} + + + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + % skipped OptionalScope(type arguments, ArrayBuffer()) + +\end{tabularx}\) + + +\subsubsection{\lst{FunDef} operation (OpCode 215)} +\label{sec:serialization:operation:FunDef} + + + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + % skipped OptionalScope(type arguments, ArrayBuffer()) + +\end{tabularx}\) + + +\subsubsection{\lst{BlockValue} operation (OpCode 216)} +\label{sec:serialization:operation:BlockValue} + + + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ numItems $ & \lst{VLQ(UInt)} & [1, *] & number of block items \\ + \hline + \multicolumn{4}{l}{\lst{for}~$i=1$~\lst{to}~$numItems$} \\ + \hline + ~~ $ item_i $ & \lst{Expr} & [1, *] & block's item in i-th position \\ + \hline + \multicolumn{4}{l}{\lst{end for}} \\\hline + $ 
result $ & \lst{Expr} & [1, *] & result expression of the block \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{FuncValue} operation (OpCode 217)} +\label{sec:serialization:operation:FuncValue} + + + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ numArgs $ & \lst{VLQ(UInt)} & [1, *] & number of function arguments \\ + \hline + \multicolumn{4}{l}{\lst{for}~$i=1$~\lst{to}~$numArgs$} \\ + \hline + ~~ $ id_i $ & \lst{VLQ(UInt)} & [1, *] & identifier of the i-th argument \\ + \hline + ~~ $ type_i $ & \lst{Type} & [1, *] & type of the i-th argument \\ + \hline + \multicolumn{4}{l}{\lst{end for}} \\\hline + $ body $ & \lst{Expr} & [1, *] & function body, which is parameterized by arguments \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{Apply} operation (OpCode 218)} +\label{sec:serialization:operation:Apply} + +Apply the function to the arguments. See~\hyperref[sec:appendix:primops:Apply]{\lst{apply}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ func $ & \lst{Expr} & [1, *] & function which is applied \\ + \hline + $ \#items $ & \lst{VLQ(UInt)} & [1, *] & number of items in the collection \\ + \hline + \multicolumn{4}{l}{\lst{for}~$i=1$~\lst{to}~$\#items$} \\ + \hline + ~~ $ args_i $ & \lst{Expr} & [1, *] & i-th item in the list of arguments \\ + \hline + \multicolumn{4}{l}{\lst{end for}} \\ +\end{tabularx}\) + + +\subsubsection{\lst{PropertyCall} operation (OpCode 219)} +\label{sec:serialization:operation:PropertyCall} + +Invocation of an object's property (i.e. a method call without arguments). + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ typeCode $ & \lst{Byte} & 1 & type of the method (see Table~\ref{table:predeftypes}) \\ + \hline + $ methodCode $ & \lst{Byte} & 1 & a code of the property 
\\ + \hline + $ obj $ & \lst{Expr} & [1, *] & receiver object of this property call \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{MethodCall} operation (OpCode 220)} +\label{sec:serialization:operation:MethodCall} + + + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ typeCode $ & \lst{Byte} & 1 & type of the method (see Table~\ref{table:predeftypes}) \\ + \hline + $ methodCode $ & \lst{Byte} & 1 & a code of the method \\ + \hline + $ obj $ & \lst{Expr} & [1, *] & receiver object of this method call \\ + \hline + $ \#items $ & \lst{VLQ(UInt)} & [1, *] & number of items in the collection \\ + \hline + \multicolumn{4}{l}{\lst{for}~$i=1$~\lst{to}~$\#items$} \\ + \hline + ~~ $ args_i $ & \lst{Expr} & [1, *] & i-th item in the arguments of the method call \\ + \hline + \multicolumn{4}{l}{\lst{end for}} \\ +\end{tabularx}\) + + +\subsubsection{\lst{GetVar} operation (OpCode 227)} +\label{sec:serialization:operation:GetVar} + +Get context variable with given \lst{varId} and type. See~\hyperref[sec:appendix:primops:GetVar]{\lst{getVar}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ varId $ & \lst{Byte} & 1 & \lst{Byte} identifier of context variable \\ + \hline + $ type $ & \lst{Type} & [1, *] & expected type of context variable \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{OptionGet} operation (OpCode 228)} +\label{sec:serialization:operation:OptionGet} + +Returns the option's value. The option must be nonempty. Throws exception if the option is empty. 
See~\hyperref[sec:type:SOption:get]{\lst{SOption.get}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ this $ & \lst{Expr} & [1, *] & this instance \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{OptionGetOrElse} operation (OpCode 229)} +\label{sec:serialization:operation:OptionGetOrElse} + +Returns the option's value if the option is nonempty, otherwise +return the result of evaluating \lst{default}. + See~\hyperref[sec:type:SOption:getOrElse]{\lst{SOption.getOrElse}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ this $ & \lst{Expr} & [1, *] & this instance \\ + \hline + $ default $ & \lst{Expr} & [1, *] & the default value \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{OptionIsDefined} operation (OpCode 230)} +\label{sec:serialization:operation:OptionIsDefined} + +Returns \lst{true} if the option is an instance of \lst{Some}, \lst{false} otherwise. See~\hyperref[sec:type:SOption:isDefined]{\lst{SOption.isDefined}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ this $ & \lst{Expr} & [1, *] & this instance \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{SigmaAnd} operation (OpCode 234)} +\label{sec:serialization:operation:SigmaAnd} + +Returns sigma proposition which is proven when \emph{all} the elements in collection are proven. 
See~\hyperref[sec:appendix:primops:SigmaAnd]{\lst{allZK}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ \#items $ & \lst{VLQ(UInt)} & [1, *] & number of items in the collection \\ + \hline + \multicolumn{4}{l}{\lst{for}~$i=1$~\lst{to}~$\#items$} \\ + \hline + ~~ $ propositions_i $ & \lst{Expr} & [1, *] & i-th item in the a collection of propositions \\ + \hline + \multicolumn{4}{l}{\lst{end for}} \\ +\end{tabularx}\) + + +\subsubsection{\lst{SigmaOr} operation (OpCode 235)} +\label{sec:serialization:operation:SigmaOr} + +Returns sigma proposition which is proven when \emph{any} of the elements in collection is proven. See~\hyperref[sec:appendix:primops:SigmaOr]{\lst{anyZK}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ \#items $ & \lst{VLQ(UInt)} & [1, *] & number of items in the collection \\ + \hline + \multicolumn{4}{l}{\lst{for}~$i=1$~\lst{to}~$\#items$} \\ + \hline + ~~ $ propositions_i $ & \lst{Expr} & [1, *] & i-th item in the a collection of propositions \\ + \hline + \multicolumn{4}{l}{\lst{end for}} \\ +\end{tabularx}\) + + +\subsubsection{\lst{BinOr} operation (OpCode 236)} +\label{sec:serialization:operation:BinOr} + +Logical OR of two operands See~\hyperref[sec:appendix:primops:BinOr]{\lst{||}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + \multicolumn{4}{l}{\lst{match}~$ (left, right) $} \\ + + \multicolumn{4}{l}{~~\lst{otherwise} } \\ + \hline + ~~~~ $ left $ & \lst{Expr} & [1, *] & left operand \\ + \hline + ~~~~ $ right $ & \lst{Expr} & [1, *] & right operand \\ + \hline + \multicolumn{4}{l}{\lst{end match}} \\ +\end{tabularx}\) + + +\subsubsection{\lst{BinAnd} operation (OpCode 237)} +\label{sec:serialization:operation:BinAnd} + +Logical AND of 
two operands See~\hyperref[sec:appendix:primops:BinAnd]{\lst{&&}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + \multicolumn{4}{l}{\lst{match}~$ (left, right) $} \\ + + \multicolumn{4}{l}{~~\lst{otherwise} } \\ + \hline + ~~~~ $ left $ & \lst{Expr} & [1, *] & left operand \\ + \hline + ~~~~ $ right $ & \lst{Expr} & [1, *] & right operand \\ + \hline + \multicolumn{4}{l}{\lst{end match}} \\ +\end{tabularx}\) + + +\subsubsection{\lst{DecodePoint} operation (OpCode 238)} +\label{sec:serialization:operation:DecodePoint} + +Convert \lst{Coll[Byte]} to \lst{GroupElement} using \lst{GroupElementSerializer} See~\hyperref[sec:appendix:primops:DecodePoint]{\lst{decodePoint}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ input $ & \lst{Expr} & [1, *] & serialized bytes of some \lst{GroupElement} value \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{LogicalNot} operation (OpCode 239)} +\label{sec:serialization:operation:LogicalNot} + +Logical NOT operation. Returns \lst{true} if input is \lst{false} and \lst{false} if input is \lst{true}. See~\hyperref[sec:appendix:primops:LogicalNot]{\lst{unary_!}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ input $ & \lst{Expr} & [1, *] & input \lst{Boolean} value \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{Negation} operation (OpCode 240)} +\label{sec:serialization:operation:Negation} + +Negates numeric value \lst{x} by returning \lst{-x}. 
See~\hyperref[sec:appendix:primops:Negation]{\lst{unary_-}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ input $ & \lst{Expr} & [1, *] & value of numeric type \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{XorOf} operation (OpCode 255)} +\label{sec:serialization:operation:XorOf} + +Similar to \lst{allOf}, but performing logical XOR operation between all conditions instead of \lst{&&} See~\hyperref[sec:appendix:primops:XorOf]{\lst{xorOf}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ conditions $ & \lst{Expr} & [1, *] & a collection of conditions \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{SubstConstants} operation (OpCode 116)} +\label{sec:serialization:operation:SubstConstants} + +Transforms serialized bytes of ErgoTree with segregated constants by replacing constants + at given positions with new values. This operation allow to use serialized scripts as + pre-defined templates. + The typical usage is "check that output box have proposition equal to given script bytes, + where minerPk (constants(0)) is replaced with currentMinerPk". + Each constant in original scriptBytes have SType serialized before actual data (see ConstantSerializer). + During substitution each value from newValues is checked to be an instance of the corresponding type. + This means, the constants during substitution cannot change their types. + + Returns original scriptBytes array where only specified constants are replaced and all other bytes remain exactly the same. 
+ See~\hyperref[sec:appendix:primops:SubstConstants]{\lst{substConstants}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ scriptBytes $ & \lst{Expr} & [1, *] & serialized ErgoTree with ConstantSegregationFlag set to 1. \\ + \hline + $ positions $ & \lst{Expr} & [1, *] & zero based indexes in ErgoTree.constants array which should be replaced with new values \\ + \hline + $ newValues $ & \lst{Expr} & [1, *] & new values to be injected into the corresponding positions in ErgoTree.constants array \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{LongToByteArray} operation (OpCode 122)} +\label{sec:serialization:operation:LongToByteArray} + +Converts \lst{Long} value to big-endian bytes representation. See~\hyperref[sec:appendix:primops:LongToByteArray]{\lst{longToByteArray}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ input $ & \lst{Expr} & [1, *] & value to convert \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{ByteArrayToBigInt} operation (OpCode 123)} +\label{sec:serialization:operation:ByteArrayToBigInt} + +Convert big-endian bytes representation (Coll[Byte]) to BigInt value. See~\hyperref[sec:appendix:primops:ByteArrayToBigInt]{\lst{byteArrayToBigInt}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ input $ & \lst{Expr} & [1, *] & collection of bytes in big-endian format \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{ByteArrayToLong} operation (OpCode 124)} +\label{sec:serialization:operation:ByteArrayToLong} + +Convert big-endian bytes representation (Coll[Byte]) to Long value. 
See~\hyperref[sec:appendix:primops:ByteArrayToLong]{\lst{byteArrayToLong}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ input $ & \lst{Expr} & [1, *] & collection of bytes in big-endian format \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{Downcast} operation (OpCode 125)} +\label{sec:serialization:operation:Downcast} + +Cast this numeric value to a smaller type (e.g. Long to Int). Throws exception if overflow. See~\hyperref[sec:appendix:primops:Downcast]{\lst{downcast}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ input $ & \lst{Expr} & [1, *] & value to cast \\ + \hline + $ type $ & \lst{Type} & [1, *] & resulting type of the cast operation \\ + \hline + +\end{tabularx}\) + + +\subsubsection{\lst{Upcast} operation (OpCode 126)} +\label{sec:serialization:operation:Upcast} + +Cast this numeric value to a bigger type (e.g. Int to Long) See~\hyperref[sec:appendix:primops:Upcast]{\lst{upcast}} + +\noindent +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + $ input $ & \lst{Expr} & [1, *] & value to cast \\ + \hline + $ type $ & \lst{Type} & [1, *] & resulting type of the cast operation \\ + \hline + +\end{tabularx}\) + \ No newline at end of file diff --git a/docs/spec/generated/predeffunc_rows.tex b/docs/spec/generated/predeffunc_rows.tex new file mode 100644 index 0000000000..401d345644 --- /dev/null +++ b/docs/spec/generated/predeffunc_rows.tex @@ -0,0 +1,112 @@ + 115 & \hyperref[sec:serialization:operation:ConstantPlaceholder]{\lst{ConstantPlaceholder}} & \parbox{4cm}{\lst{placeholder:} \\ \lst{(Int)} \\ \lst{ => T}} & Create special ErgoTree node which can be replaced by constant with given id. 
\\ + \hline + 116 & \hyperref[sec:serialization:operation:SubstConstants]{\lst{SubstConstants}} & \parbox{4cm}{\lst{substConstants:} \\ \lst{(Coll[Byte], Coll[Int], Coll[T])} \\ \lst{ => Coll[Byte]}} & ... \\ + \hline + 122 & \hyperref[sec:serialization:operation:LongToByteArray]{\lst{LongToByteArray}} & \parbox{4cm}{\lst{longToByteArray:} \\ \lst{(Long)} \\ \lst{ => Coll[Byte]}} & Converts \lst{Long} value to big-endian bytes representation. \\ + \hline + 123 & \hyperref[sec:serialization:operation:ByteArrayToBigInt]{\lst{ByteArrayToBigInt}} & \parbox{4cm}{\lst{byteArrayToBigInt:} \\ \lst{(Coll[Byte])} \\ \lst{ => BigInt}} & Convert big-endian bytes representation (Coll[Byte]) to BigInt value. \\ + \hline + 124 & \hyperref[sec:serialization:operation:ByteArrayToLong]{\lst{ByteArrayToLong}} & \parbox{4cm}{\lst{byteArrayToLong:} \\ \lst{(Coll[Byte])} \\ \lst{ => Long}} & Convert big-endian bytes representation (Coll[Byte]) to Long value. \\ + \hline + 125 & \hyperref[sec:serialization:operation:Downcast]{\lst{Downcast}} & \parbox{4cm}{\lst{downcast:} \\ \lst{(T)} \\ \lst{ => R}} & Cast this numeric value to a smaller type (e.g. Long to Int). Throws exception if overflow. \\ + \hline + 126 & \hyperref[sec:serialization:operation:Upcast]{\lst{Upcast}} & \parbox{4cm}{\lst{upcast:} \\ \lst{(T)} \\ \lst{ => R}} & Cast this numeric value to a bigger type (e.g. Int to Long) \\ + \hline + 140 & \hyperref[sec:serialization:operation:SelectField]{\lst{SelectField}} & \parbox{4cm}{\lst{selectField:} \\ \lst{(T, Byte)} \\ \lst{ => R}} & Select tuple field by its 1-based index. E.g. \lst{input._1} is transformed to \lst{SelectField(input, 1)} \\ + \hline + 143 & \hyperref[sec:serialization:operation:LT]{\lst{LT}} & \parbox{4cm}{\lst{<:} \\ \lst{(T, T)} \\ \lst{ => Boolean}} & Returns \lst{true} if the left operand is less than the right operand, \lst{false} otherwise. 
\\ + \hline + 144 & \hyperref[sec:serialization:operation:LE]{\lst{LE}} & \parbox{4cm}{\lst{<=:} \\ \lst{(T, T)} \\ \lst{ => Boolean}} & Returns \lst{true} if the left operand is less than or equal to the right operand, \lst{false} otherwise. \\ + \hline + 145 & \hyperref[sec:serialization:operation:GT]{\lst{GT}} & \parbox{4cm}{\lst{>:} \\ \lst{(T, T)} \\ \lst{ => Boolean}} & Returns \lst{true} if the left operand is greater than the right operand, \lst{false} otherwise. \\ + \hline + 146 & \hyperref[sec:serialization:operation:GE]{\lst{GE}} & \parbox{4cm}{\lst{>=:} \\ \lst{(T, T)} \\ \lst{ => Boolean}} & Returns \lst{true} if the left operand is greater than or equal to the right operand, \lst{false} otherwise. \\ + \hline + 147 & \hyperref[sec:serialization:operation:EQ]{\lst{EQ}} & \parbox{4cm}{\lst{==:} \\ \lst{(T, T)} \\ \lst{ => Boolean}} & Compare equality of \lst{left} and \lst{right} arguments \\ + \hline + 148 & \hyperref[sec:serialization:operation:NEQ]{\lst{NEQ}} & \parbox{4cm}{\lst{!=:} \\ \lst{(T, T)} \\ \lst{ => Boolean}} & Compare inequality of \lst{left} and \lst{right} arguments \\ + \hline + 149 & \hyperref[sec:serialization:operation:If]{\lst{If}} & \parbox{4cm}{\lst{if:} \\ \lst{(Boolean, T, T)} \\ \lst{ => T}} & Compute condition, if true then compute trueBranch else compute falseBranch \\ + \hline + 150 & \hyperref[sec:serialization:operation:AND]{\lst{AND}} & \parbox{4cm}{\lst{allOf:} \\ \lst{(Coll[Boolean])} \\ \lst{ => Boolean}} & Returns true if \emph{all} the elements in collection are \lst{true}. \\ + \hline + 151 & \hyperref[sec:serialization:operation:OR]{\lst{OR}} & \parbox{4cm}{\lst{anyOf:} \\ \lst{(Coll[Boolean])} \\ \lst{ => Boolean}} & Returns true if \emph{any} of the elements in collection are \lst{true}. \\ + \hline + 152 & \hyperref[sec:serialization:operation:AtLeast]{\lst{AtLeast}} & \parbox{4cm}{\lst{atLeast:} \\ \lst{(Int, Coll[SigmaProp])} \\ \lst{ => SigmaProp}} & ... 
\\ + \hline + 153 & \hyperref[sec:serialization:operation:Minus]{\lst{Minus}} & \parbox{4cm}{\lst{-:} \\ \lst{(T, T)} \\ \lst{ => T}} & Returns a result of subtracting second numeric operand from the first. \\ + \hline + 154 & \hyperref[sec:serialization:operation:Plus]{\lst{Plus}} & \parbox{4cm}{\lst{+:} \\ \lst{(T, T)} \\ \lst{ => T}} & Returns a sum of two numeric operands \\ + \hline + 155 & \hyperref[sec:serialization:operation:Xor]{\lst{Xor}} & \parbox{4cm}{\lst{binary_|:} \\ \lst{(Coll[Byte], Coll[Byte])} \\ \lst{ => Coll[Byte]}} & Byte-wise XOR of two collections of bytes \\ + \hline + 156 & \hyperref[sec:serialization:operation:Multiply]{\lst{Multiply}} & \parbox{4cm}{\lst{*:} \\ \lst{(T, T)} \\ \lst{ => T}} & Returns a multiplication of two numeric operands \\ + \hline + 157 & \hyperref[sec:serialization:operation:Division]{\lst{Division}} & \parbox{4cm}{\lst{/:} \\ \lst{(T, T)} \\ \lst{ => T}} & Integer division of the first operand by the second operand. \\ + \hline + 158 & \hyperref[sec:serialization:operation:Modulo]{\lst{Modulo}} & \parbox{4cm}{\lst{\%:} \\ \lst{(T, T)} \\ \lst{ => T}} & Remainder from division of the first operand by the second operand. \\ + \hline + 161 & \hyperref[sec:serialization:operation:Min]{\lst{Min}} & \parbox{4cm}{\lst{min:} \\ \lst{(T, T)} \\ \lst{ => T}} & Minimum value of two operands. \\ + \hline + 162 & \hyperref[sec:serialization:operation:Max]{\lst{Max}} & \parbox{4cm}{\lst{max:} \\ \lst{(T, T)} \\ \lst{ => T}} & Maximum value of two operands. \\ + \hline + 182 & \hyperref[sec:serialization:operation:CreateAvlTree]{\lst{CreateAvlTree}} & \parbox{4cm}{\lst{avlTree:} \\ \lst{(Byte, Coll[Byte], Int, Option[Int])} \\ \lst{ => AvlTree}} & Construct a new authenticated dictionary with given parameters and tree root digest. 
\\ + \hline + 183 & \hyperref[sec:serialization:operation:TreeLookup]{\lst{TreeLookup}} & \parbox{4cm}{\lst{treeLookup:} \\ \lst{(AvlTree, Coll[Byte], Coll[Byte])} \\ \lst{ => Option[Coll[Byte]]}} & \\ + \hline + 203 & \hyperref[sec:serialization:operation:CalcBlake2b256]{\lst{CalcBlake2b256}} & \parbox{4cm}{\lst{blake2b256:} \\ \lst{(Coll[Byte])} \\ \lst{ => Coll[Byte]}} & Calculate Blake2b hash from \lst{input} bytes. \\ + \hline + 204 & \hyperref[sec:serialization:operation:CalcSha256]{\lst{CalcSha256}} & \parbox{4cm}{\lst{sha256:} \\ \lst{(Coll[Byte])} \\ \lst{ => Coll[Byte]}} & Calculate Sha256 hash from \lst{input} bytes. \\ + \hline + 205 & \hyperref[sec:serialization:operation:CreateProveDlog]{\lst{CreateProveDlog}} & \parbox{4cm}{\lst{proveDlog:} \\ \lst{(GroupElement)} \\ \lst{ => SigmaProp}} & ErgoTree operation to create a new \lst{SigmaProp} value representing public key + of discrete logarithm signature protocol. + \\ + \hline + 206 & \hyperref[sec:serialization:operation:CreateProveDHTuple]{\lst{CreateProveDHTuple}} & \parbox{4cm}{\lst{proveDHTuple:} \\ \lst{(GroupElement, GroupElement, GroupElement, GroupElement)} \\ \lst{ => SigmaProp}} & ErgoTree operation to create a new SigmaProp value representing public key + of Diffie Hellman signature protocol. + Common input: (g,h,u,v) + \\ + \hline + 209 & \hyperref[sec:serialization:operation:BoolToSigmaProp]{\lst{BoolToSigmaProp}} & \parbox{4cm}{\lst{sigmaProp:} \\ \lst{(Boolean)} \\ \lst{ => SigmaProp}} & ... \\ + \hline + 212 & \hyperref[sec:serialization:operation:DeserializeContext]{\lst{DeserializeContext}} & \parbox{4cm}{\lst{executeFromVar:} \\ \lst{(Byte)} \\ \lst{ => T}} & ... \\ + \hline + 213 & \hyperref[sec:serialization:operation:DeserializeRegister]{\lst{DeserializeRegister}} & \parbox{4cm}{\lst{executeFromSelfReg:} \\ \lst{(Byte, Option[T])} \\ \lst{ => T}} & ... 
\\ + \hline + 218 & \hyperref[sec:serialization:operation:Apply]{\lst{Apply}} & \parbox{4cm}{\lst{apply:} \\ \lst{((T) => R, T)} \\ \lst{ => R}} & Apply the function to the arguments. \\ + \hline + 227 & \hyperref[sec:serialization:operation:GetVar]{\lst{GetVar}} & \parbox{4cm}{\lst{getVar:} \\ \lst{(Byte)} \\ \lst{ => Option[T]}} & Get context variable with given \lst{varId} and type. \\ + \hline + 234 & \hyperref[sec:serialization:operation:SigmaAnd]{\lst{SigmaAnd}} & \parbox{4cm}{\lst{allZK:} \\ \lst{(Coll[SigmaProp])} \\ \lst{ => SigmaProp}} & Returns sigma proposition which is proven when \emph{all} the elements in collection are proven. \\ + \hline + 235 & \hyperref[sec:serialization:operation:SigmaOr]{\lst{SigmaOr}} & \parbox{4cm}{\lst{anyZK:} \\ \lst{(Coll[SigmaProp])} \\ \lst{ => SigmaProp}} & Returns sigma proposition which is proven when \emph{any} of the elements in collection is proven. \\ + \hline + 236 & \hyperref[sec:serialization:operation:BinOr]{\lst{BinOr}} & \parbox{4cm}{\lst{||:} \\ \lst{(Boolean, Boolean)} \\ \lst{ => Boolean}} & Logical OR of two operands \\ + \hline + 237 & \hyperref[sec:serialization:operation:BinAnd]{\lst{BinAnd}} & \parbox{4cm}{\lst{&&:} \\ \lst{(Boolean, Boolean)} \\ \lst{ => Boolean}} & Logical AND of two operands \\ + \hline + 238 & \hyperref[sec:serialization:operation:DecodePoint]{\lst{DecodePoint}} & \parbox{4cm}{\lst{decodePoint:} \\ \lst{(Coll[Byte])} \\ \lst{ => GroupElement}} & Convert \lst{Coll[Byte]} to \lst{GroupElement} using \lst{GroupElementSerializer} \\ + \hline + 239 & \hyperref[sec:serialization:operation:LogicalNot]{\lst{LogicalNot}} & \parbox{4cm}{\lst{unary_!:} \\ \lst{(Boolean)} \\ \lst{ => Boolean}} & Logical NOT operation. Returns \lst{true} if input is \lst{false} and \lst{false} if input is \lst{true}. 
\\ + \hline + 240 & \hyperref[sec:serialization:operation:Negation]{\lst{Negation}} & \parbox{4cm}{\lst{unary_-:} \\ \lst{(T)} \\ \lst{ => T}} & Negates numeric value \lst{x} by returning \lst{-x}. \\ + \hline + 241 & \hyperref[sec:serialization:operation:BitInversion]{\lst{BitInversion}} & \parbox{4cm}{\lst{unary_~:} \\ \lst{(T)} \\ \lst{ => T}} & Invert every bit of the numeric value. \\ + \hline + 242 & \hyperref[sec:serialization:operation:BitOr]{\lst{BitOr}} & \parbox{4cm}{\lst{bit_|:} \\ \lst{(T, T)} \\ \lst{ => T}} & Bitwise OR of two numeric operands. \\ + \hline + 243 & \hyperref[sec:serialization:operation:BitAnd]{\lst{BitAnd}} & \parbox{4cm}{\lst{bit_&:} \\ \lst{(T, T)} \\ \lst{ => T}} & Bitwise AND of two numeric operands. \\ + \hline + 244 & \hyperref[sec:serialization:operation:BinXor]{\lst{BinXor}} & \parbox{4cm}{\lst{^:} \\ \lst{(Boolean, Boolean)} \\ \lst{ => Boolean}} & Logical XOR of two operands \\ + \hline + 245 & \hyperref[sec:serialization:operation:BitXor]{\lst{BitXor}} & \parbox{4cm}{\lst{bit_^:} \\ \lst{(T, T)} \\ \lst{ => T}} & Bitwise XOR of two numeric operands. \\ + \hline + 246 & \hyperref[sec:serialization:operation:BitShiftRight]{\lst{BitShiftRight}} & \parbox{4cm}{\lst{bit_>>:} \\ \lst{(T, T)} \\ \lst{ => T}} & Right shift of bits. \\ + \hline + 247 & \hyperref[sec:serialization:operation:BitShiftLeft]{\lst{BitShiftLeft}} & \parbox{4cm}{\lst{bit_<<:} \\ \lst{(T, T)} \\ \lst{ => T}} & Left shift of bits. \\ + \hline + 248 & \hyperref[sec:serialization:operation:BitShiftRightZeroed]{\lst{BitShiftRightZeroed}} & \parbox{4cm}{\lst{bit_>>>:} \\ \lst{(T, T)} \\ \lst{ => T}} & Right shift of bits. 
\\ + \hline + 255 & \hyperref[sec:serialization:operation:XorOf]{\lst{XorOf}} & \parbox{4cm}{\lst{xorOf:} \\ \lst{(Coll[Boolean])} \\ \lst{ => Boolean}} & Similar to \lst{allOf}, but performing logical XOR operation between all conditions instead of \lst{&&} \\ + \hline + \ No newline at end of file diff --git a/docs/spec/generated/predeffunc_sections.tex b/docs/spec/generated/predeffunc_sections.tex new file mode 100644 index 0000000000..8d6ac2f782 --- /dev/null +++ b/docs/spec/generated/predeffunc_sections.tex @@ -0,0 +1,1239 @@ + +\subsubsection{\lst{placeholder} method (Code 115)} +\label{sec:appendix:primops:ConstantPlaceholder} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Create special ErgoTree node which can be replaced by constant with given id. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{index} & \lst{: Int} & \text{// index of the constant in ErgoTree header} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{T} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ConstantPlaceholder]{\lst{ConstantPlaceholder}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{substConstants} method (Code 116)} +\label{sec:appendix:primops:SubstConstants} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Transforms serialized bytes of ErgoTree with segregated constants by replacing constants + at given positions with new values. This operation allows using serialized scripts as + pre-defined templates. + The typical usage is "check that output box has proposition equal to given script bytes, + where minerPk (constants(0)) is replaced with currentMinerPk". + Each constant in original scriptBytes has SType serialized before actual data (see ConstantSerializer). + During substitution each value from newValues is checked to be an instance of the corresponding type. + This means the constants during substitution cannot change their types. 
+ + Returns original scriptBytes array where only specified constants are replaced and all other bytes remain exactly the same. + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{scriptBytes} & \lst{: Coll[Byte]} & \text{// serialized ErgoTree with ConstantSegregationFlag set to 1.} \\ +\lst{positions} & \lst{: Coll[Int]} & \text{// zero based indexes in ErgoTree.constants array which should be replaced with new values} \\ +\lst{newValues} & \lst{: Coll[T]} & \text{// new values to be injected into the corresponding positions in ErgoTree.constants array} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:SubstConstants]{\lst{SubstConstants}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{longToByteArray} method (Code 122)} +\label{sec:appendix:primops:LongToByteArray} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Converts \lst{Long} value to big-endian bytes representation. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{input} & \lst{: Long} & \text{// value to convert} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:LongToByteArray]{\lst{LongToByteArray}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{byteArrayToBigInt} method (Code 123)} +\label{sec:appendix:primops:ByteArrayToBigInt} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Convert big-endian bytes representation (Coll[Byte]) to BigInt value. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{input} & \lst{: Coll[Byte]} & \text{// collection of bytes in big-endian format} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{BigInt} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ByteArrayToBigInt]{\lst{ByteArrayToBigInt}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{byteArrayToLong} method (Code 124)} +\label{sec:appendix:primops:ByteArrayToLong} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Convert big-endian bytes representation (Coll[Byte]) to Long value. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{input} & \lst{: Coll[Byte]} & \text{// collection of bytes in big-endian format} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Long} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:ByteArrayToLong]{\lst{ByteArrayToLong}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{downcast} method (Code 125)} +\label{sec:appendix:primops:Downcast} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Cast this numeric value to a smaller type (e.g. Long to Int). Throws exception if overflow. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{input} & \lst{: T} & \text{// value to cast} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{R} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:Downcast]{\lst{Downcast}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{upcast} method (Code 126)} +\label{sec:appendix:primops:Upcast} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Cast this numeric value to a bigger type (e.g. 
 Int to Long) \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{input} & \lst{: T} & \text{// value to cast} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{R} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:Upcast]{\lst{Upcast}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{selectField} method (Code 140)} +\label{sec:appendix:primops:SelectField} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Select tuple field by its 1-based index. E.g. \lst{input._1} is transformed to \lst{SelectField(input, 1)} \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{input} & \lst{: T} & \text{// tuple of items} \\ +\lst{fieldIndex} & \lst{: Byte} & \text{// index of an item to select} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{R} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:SelectField]{\lst{SelectField}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{<} method (Code 143)} +\label{sec:appendix:primops:LT} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns \lst{true} if the left operand is less than the right operand, \lst{false} otherwise. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: T} & \text{// left operand} \\ +\lst{right} & \lst{: T} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:LT]{\lst{LT}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{<=} method (Code 144)} +\label{sec:appendix:primops:LE} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns \lst{true} if the left operand is less than or equal to the right operand, \lst{false} otherwise. 
 \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: T} & \text{// left operand} \\ +\lst{right} & \lst{: T} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:LE]{\lst{LE}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{>} method (Code 145)} +\label{sec:appendix:primops:GT} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns \lst{true} if the left operand is greater than the right operand, \lst{false} otherwise. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: T} & \text{// left operand} \\ +\lst{right} & \lst{: T} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:GT]{\lst{GT}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{>=} method (Code 146)} +\label{sec:appendix:primops:GE} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns \lst{true} if the left operand is greater than or equal to the right operand, \lst{false} otherwise. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: T} & \text{// left operand} \\ +\lst{right} & \lst{: T} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:GE]{\lst{GE}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{==} method (Code 147)} +\label{sec:appendix:primops:EQ} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Compare equality of \lst{left} and \lst{right} arguments \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: T} & \text{// left operand} \\ +\lst{right} & \lst{: T} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:EQ]{\lst{EQ}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{!=} method (Code 148)} +\label{sec:appendix:primops:NEQ} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Compare inequality of \lst{left} and \lst{right} arguments \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: T} & \text{// left operand} \\ +\lst{right} & \lst{: T} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:NEQ]{\lst{NEQ}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{if} method (Code 149)} +\label{sec:appendix:primops:If} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Compute condition, if true then compute trueBranch else compute falseBranch \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{condition} & \lst{: Boolean} & \text{// condition expression} \\ +\lst{trueBranch} & \lst{: T} & \text{// expression to execute when \lst{condition == true}} \\ +\lst{falseBranch} & \lst{: T} & \text{// expression to 
 execute when \lst{condition == false}} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{T} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:If]{\lst{If}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{allOf} method (Code 150)} +\label{sec:appendix:primops:AND} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns true if \emph{all} the elements in collection are \lst{true}. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{conditions} & \lst{: Coll[Boolean]} & \text{// a collection of conditions} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:AND]{\lst{AND}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{anyOf} method (Code 151)} +\label{sec:appendix:primops:OR} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns true if \emph{any} of the elements in collection are \lst{true}. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{conditions} & \lst{: Coll[Boolean]} & \text{// a collection of conditions} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:OR]{\lst{OR}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{atLeast} method (Code 152)} +\label{sec:appendix:primops:AtLeast} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Logical threshold. + AtLeast has two inputs: integer \lst{bound} and \lst{children} same as in AND/OR. + The result is true if at least \lst{bound} children are proven. 
+ \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{bound} & \lst{: Int} & \text{// required minimum of proven children} \\ +\lst{children} & \lst{: Coll[SigmaProp]} & \text{// proposition to be proven/validated} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{SigmaProp} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:AtLeast]{\lst{AtLeast}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{-} method (Code 153)} +\label{sec:appendix:primops:Minus} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns a result of subtracting second numeric operand from the first. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: T} & \text{// left operand} \\ +\lst{right} & \lst{: T} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{T} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:Minus]{\lst{Minus}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{+} method (Code 154)} +\label{sec:appendix:primops:Plus} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns a sum of two numeric operands \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: T} & \text{// left operand} \\ +\lst{right} & \lst{: T} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{T} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:Plus]{\lst{Plus}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{binary_|} method (Code 155)} +\label{sec:appendix:primops:Xor} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Byte-wise XOR of two collections of bytes \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: Coll[Byte]} & \text{// left operand} \\ +\lst{right} & \lst{: Coll[Byte]} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & 
 \lst{Coll[Byte]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:Xor]{\lst{Xor}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{*} method (Code 156)} +\label{sec:appendix:primops:Multiply} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns a multiplication of two numeric operands \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: T} & \text{// left operand} \\ +\lst{right} & \lst{: T} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{T} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:Multiply]{\lst{Multiply}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{/} method (Code 157)} +\label{sec:appendix:primops:Division} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Integer division of the first operand by the second operand. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: T} & \text{// left operand} \\ +\lst{right} & \lst{: T} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{T} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:Division]{\lst{Division}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{\%} method (Code 158)} +\label{sec:appendix:primops:Modulo} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Remainder from division of the first operand by the second operand. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: T} & \text{// left operand} \\ +\lst{right} & \lst{: T} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{T} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:Modulo]{\lst{Modulo}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{min} method (Code 161)} +\label{sec:appendix:primops:Min} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Minimum value of two operands. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: T} & \text{// left operand} \\ +\lst{right} & \lst{: T} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{T} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:Min]{\lst{Min}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{max} method (Code 162)} +\label{sec:appendix:primops:Max} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Maximum value of two operands. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: T} & \text{// left operand} \\ +\lst{right} & \lst{: T} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{T} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:Max]{\lst{Max}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{avlTree} method (Code 182)} +\label{sec:appendix:primops:CreateAvlTree} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Construct a new authenticated dictionary with given parameters and tree root digest. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{operationFlags} & \lst{: Byte} & \text{// flags of available operations} \\ +\lst{digest} & \lst{: Coll[Byte]} & \text{// hash of merkle tree root} \\ +\lst{keyLength} & \lst{: Int} & \text{// length of dictionary keys in bytes} \\ +\lst{valueLengthOpt} & \lst{: Option[Int]} & \text{// optional width of dictionary values in bytes} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{AvlTree} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:CreateAvlTree]{\lst{CreateAvlTree}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{treeLookup} method (Code 183)} +\label{sec:appendix:primops:TreeLookup} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{tree} & \lst{: AvlTree} & \text{// tree to lookup the key} \\ +\lst{key} & \lst{: Coll[Byte]} & \text{// a key of an item in the \lst{tree} to lookup} \\ +\lst{proof} & \lst{: Coll[Byte]} & \text{// proof to perform verification of the operation} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Option[Coll[Byte]]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:TreeLookup]{\lst{TreeLookup}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{blake2b256} method (Code 203)} +\label{sec:appendix:primops:CalcBlake2b256} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Calculate Blake2b hash from \lst{input} bytes. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{input} & \lst{: Coll[Byte]} & \text{// collection of bytes} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:CalcBlake2b256]{\lst{CalcBlake2b256}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{sha256} method (Code 204)} +\label{sec:appendix:primops:CalcSha256} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Calculate Sha256 hash from \lst{input} bytes. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{input} & \lst{: Coll[Byte]} & \text{// collection of bytes} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Coll[Byte]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:CalcSha256]{\lst{CalcSha256}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{proveDlog} method (Code 205)} +\label{sec:appendix:primops:CreateProveDlog} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & ErgoTree operation to create a new \lst{SigmaProp} value representing public key + of discrete logarithm signature protocol. + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{value} & \lst{: GroupElement} & \text{// element of elliptic curve group} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{SigmaProp} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:CreateProveDlog]{\lst{CreateProveDlog}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{proveDHTuple} method (Code 206)} +\label{sec:appendix:primops:CreateProveDHTuple} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & ErgoTree operation to create a new SigmaProp value representing public key + of Diffie Hellman signature protocol. 
 + Common input: (g,h,u,v) + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{g} & \lst{: GroupElement} & \text{// } \\ +\lst{h} & \lst{: GroupElement} & \text{// } \\ +\lst{u} & \lst{: GroupElement} & \text{// } \\ +\lst{v} & \lst{: GroupElement} & \text{// } \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{SigmaProp} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:CreateProveDHTuple]{\lst{CreateProveDHTuple}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{sigmaProp} method (Code 209)} +\label{sec:appendix:primops:BoolToSigmaProp} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Embedding of \lst{Boolean} values to \lst{SigmaProp} values. + As an example, this operation allows boolean expressions + to be used as arguments of \lst{atLeast(..., sigmaProp(boolExpr), ...)} operation. + During execution results in either \lst{TrueProp} or \lst{FalseProp} values of \lst{SigmaProp} type. + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{condition} & \lst{: Boolean} & \text{// boolean value to embed in SigmaProp value} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{SigmaProp} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:BoolToSigmaProp]{\lst{BoolToSigmaProp}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{executeFromVar} method (Code 212)} +\label{sec:appendix:primops:DeserializeContext} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Extracts context variable as \lst{Coll[Byte]}, deserializes it to script + and then executes this script in the current context. + The original \lst{Coll[Byte]} of the script is available as \lst{getVar[Coll[Byte]](id)}. + Type parameter \lst{V} result type of the deserialized script. + Throws an exception if the actual script type doesn't conform to T. 
+ Returns a result of the script execution in the current context + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{id} & \lst{: Byte} & \text{// identifier of the context variable} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{T} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:DeserializeContext]{\lst{DeserializeContext}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{executeFromSelfReg} method (Code 213)} +\label{sec:appendix:primops:DeserializeRegister} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Extracts SELF register as \lst{Coll[Byte]}, deserializes it to script + and then executes this script in the current context. + The original \lst{Coll[Byte]} of the script is available as \lst{SELF.getReg[Coll[Byte]](id)}. + Type parameter \lst{T} result type of the deserialized script. + Throws an exception if the actual script type doesn't conform to \lst{T}. + Returns a result of the script execution in the current context + \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{id} & \lst{: Byte} & \text{// identifier of the register} \\ +\lst{default} & \lst{: Option[T]} & \text{// optional default value, if register is not available} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{T} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:DeserializeRegister]{\lst{DeserializeRegister}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{apply} method (Code 218)} +\label{sec:appendix:primops:Apply} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Apply the function to the arguments. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{func} & \lst{: (T) => R} & \text{// function which is applied} \\ +\lst{args} & \lst{: T} & \text{// list of arguments} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{R} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:Apply]{\lst{Apply}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{getVar} method (Code 227)} +\label{sec:appendix:primops:GetVar} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Get context variable with given \lst{varId} and type. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{varId} & \lst{: Byte} & \text{// \lst{Byte} identifier of context variable} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Option[T]} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:GetVar]{\lst{GetVar}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{allZK} method (Code 234)} +\label{sec:appendix:primops:SigmaAnd} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns sigma proposition which is proven when \emph{all} the elements in collection are proven. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{propositions} & \lst{: Coll[SigmaProp]} & \text{// a collection of propositions} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{SigmaProp} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:SigmaAnd]{\lst{SigmaAnd}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{anyZK} method (Code 235)} +\label{sec:appendix:primops:SigmaOr} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Returns sigma proposition which is proven when \emph{any} of the elements in collection is proven. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{propositions} & \lst{: Coll[SigmaProp]} & \text{// a collection of propositions} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{SigmaProp} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:SigmaOr]{\lst{SigmaOr}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{||} method (Code 236)} +\label{sec:appendix:primops:BinOr} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Logical OR of two operands \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: Boolean} & \text{// left operand} \\ +\lst{right} & \lst{: Boolean} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:BinOr]{\lst{BinOr}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{&&} method (Code 237)} +\label{sec:appendix:primops:BinAnd} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Logical AND of two operands \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: Boolean} & \text{// left operand} \\ +\lst{right} & \lst{: Boolean} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:BinAnd]{\lst{BinAnd}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{decodePoint} method (Code 238)} +\label{sec:appendix:primops:DecodePoint} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Convert \lst{Coll[Byte]} to \lst{GroupElement} using \lst{GroupElementSerializer} \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{input} & \lst{: Coll[Byte]} & \text{// serialized bytes of some \lst{GroupElement} value} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{GroupElement} \\ + \hline + + \bf{Serialized as} & 
\hyperref[sec:serialization:operation:DecodePoint]{\lst{DecodePoint}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{unary_!} method (Code 239)} +\label{sec:appendix:primops:LogicalNot} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Logical NOT operation. Returns \lst{true} if input is \lst{false} and \lst{false} if input is \lst{true}. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{input} & \lst{: Boolean} & \text{// input \lst{Boolean} value} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:LogicalNot]{\lst{LogicalNot}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{unary_-} method (Code 240)} +\label{sec:appendix:primops:Negation} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Negates numeric value \lst{x} by returning \lst{-x}. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{input} & \lst{: T} & \text{// value of numeric type} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{T} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:Negation]{\lst{Negation}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{unary_~} method (Code 241)} +\label{sec:appendix:primops:BitInversion} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Invert every bit of the numeric value. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{input} & \lst{: T} & \text{// value of numeric type} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{T} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:BitInversion]{\lst{BitInversion}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{bit_|} method (Code 242)} +\label{sec:appendix:primops:BitOr} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Bitwise OR of two numeric operands. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: T} & \text{// left operand} \\ +\lst{right} & \lst{: T} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{T} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:BitOr]{\lst{BitOr}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{bit_&} method (Code 243)} +\label{sec:appendix:primops:BitAnd} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Bitwise AND of two numeric operands. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: T} & \text{// left operand} \\ +\lst{right} & \lst{: T} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{T} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:BitAnd]{\lst{BitAnd}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{^} method (Code 244)} +\label{sec:appendix:primops:BinXor} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Logical XOR of two operands \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: Boolean} & \text{// left operand} \\ +\lst{right} & \lst{: Boolean} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:BinXor]{\lst{BinXor}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{bit_^} method (Code 245)} +\label{sec:appendix:primops:BitXor} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Bitwise XOR of two numeric operands. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: T} & \text{// left operand} \\ +\lst{right} & \lst{: T} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{T} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:BitXor]{\lst{BitXor}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{bit_>>} method (Code 246)} +\label{sec:appendix:primops:BitShiftRight} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Right shift of bits. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: T} & \text{// left operand} \\ +\lst{right} & \lst{: T} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{T} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:BitShiftRight]{\lst{BitShiftRight}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{bit_<<} method (Code 247)} +\label{sec:appendix:primops:BitShiftLeft} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Left shift of bits. \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: T} & \text{// left operand} \\ +\lst{right} & \lst{: T} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{T} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:BitShiftLeft]{\lst{BitShiftLeft}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{bit_>>>} method (Code 248)} +\label{sec:appendix:primops:BitShiftRightZeroed} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Right shift of bits. 
\\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{left} & \lst{: T} & \text{// left operand} \\ +\lst{right} & \lst{: T} & \text{// right operand} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{T} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:BitShiftRightZeroed]{\lst{BitShiftRightZeroed}} \\ + \hline + +\end{tabularx} + +\subsubsection{\lst{xorOf} method (Code 255)} +\label{sec:appendix:primops:XorOf} +\noindent +\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Description} & Similar to \lst{allOf}, but performing logical XOR operation between all conditions instead of \lst{&&} \\ + + \hline + \bf{Parameters} & + \(\begin{array}{l l l} + \lst{conditions} & \lst{: Coll[Boolean]} & \text{// a collection of conditions} \\ + \end{array}\) \\ + + \hline + \bf{Result} & \lst{Boolean} \\ + \hline + + \bf{Serialized as} & \hyperref[sec:serialization:operation:XorOf]{\lst{XorOf}} \\ + \hline + +\end{tabularx} diff --git a/docs/spec/generated/predeftypes.tex b/docs/spec/generated/predeftypes.tex new file mode 100644 index 0000000000..a0d2e76d8f --- /dev/null +++ b/docs/spec/generated/predeftypes.tex @@ -0,0 +1,27 @@ +\lst{Boolean} & $1$ & \lst{true} & \lst{true} & \lst{true} & \lst{false} & $\Set{\lst{true}, \lst{false}}$ \\ +\hline +\lst{Byte} & $2$ & \lst{true} & \lst{true} & \lst{true} & \lst{true} & $\Set{-2^{7} \dots 2^{7}-1}$~\ref{sec:type:Byte} \\ +\hline +\lst{Short} & $3$ & \lst{true} & \lst{true} & \lst{true} & \lst{true} & $\Set{-2^{15} \dots 2^{15}-1}$~\ref{sec:type:Short} \\ +\hline +\lst{Int} & $4$ & \lst{true} & \lst{true} & \lst{true} & \lst{true} & $\Set{-2^{31} \dots 2^{31}-1}$~\ref{sec:type:Int} \\ +\hline +\lst{Long} & $5$ & \lst{true} & \lst{true} & \lst{true} & \lst{true} & $\Set{-2^{63} \dots 2^{63}-1}$~\ref{sec:type:Long} \\ +\hline +\lst{BigInt} & $6$ & \lst{true} & \lst{true} & \lst{true} & \lst{true} & $\Set{-2^{255} \dots 2^{255}-1}$~\ref{sec:type:BigInt} \\ +\hline +\lst{GroupElement} & 
$7$ & \lst{true} & \lst{true} & \lst{true} & \lst{false} & $\Set{p \in \lst{SecP256K1Point}}$ \\ +\hline +\lst{SigmaProp} & $8$ & \lst{false} & \lst{true} & \lst{true} & \lst{false} & Sec.~\ref{sec:type:SigmaProp} \\ +\hline +\lst{Box} & $99$ & \lst{false} & \lst{false} & \lst{false} & \lst{false} & Sec.~\ref{sec:type:Box} \\ +\hline +\lst{AvlTree} & $100$ & \lst{false} & \lst{false} & \lst{false} & \lst{false} & Sec.~\ref{sec:type:AvlTree} \\ +\hline +\lst{Context} & $101$ & \lst{false} & \lst{false} & \lst{false} & \lst{false} & Sec.~\ref{sec:type:Context} \\ +\hline +\lst{Header} & $104$ & \lst{true} & \lst{false} & \lst{false} & \lst{false} & Sec.~\ref{sec:type:Header} \\ +\hline +\lst{PreHeader} & $105$ & \lst{true} & \lst{false} & \lst{false} & \lst{false} & Sec.~\ref{sec:type:PreHeader} \\ +\hline +\lst{Global} & $106$ & \lst{true} & \lst{false} & \lst{false} & \lst{false} & Sec.~\ref{sec:type:Global} \\ \ No newline at end of file diff --git a/docs/spec/graph.tex b/docs/spec/graph.tex new file mode 100644 index 0000000000..a0a07f5dec --- /dev/null +++ b/docs/spec/graph.tex @@ -0,0 +1,2 @@ +\section{The Graph} +\label{sec:graph} \ No newline at end of file diff --git a/docs/spec/language.tex b/docs/spec/language.tex new file mode 100644 index 0000000000..159db621e0 --- /dev/null +++ b/docs/spec/language.tex @@ -0,0 +1,114 @@ +\section{Language} +\label{sec:language} + +Here we define abstract syntax for \langname language. It is a typed +functional language with tuples, collections, optional types and \lst{val} +binding expressions. The semantics of \langname is specified by first +translating it to a core calculus (\corelang) and then by giving its +evaluation semantics. Typing rules is given in Section~\ref{sec:typing} and +evaluation semantics is given in Section~\ref{sec:evaluation}. + +\langname is defined here using abstract syntax notation as shown in +Figure~\ref{fig:language}. 
This corresponds to \lst{ErgoTree} data structure, +which can be serialized to an array of bytes. The mnemonics shown in the +figure correspond to classes of \lst{ErgoTree} reference implementation. + +\begin{figure}[h] + \footnotesize + \input{figures/fig_language.tex} + \caption{Abstract syntax of ErgoScript language} + \label{fig:language} +\end{figure} + +We assign types to the terms in a standard way following typing rules shown +in Figure~\ref{fig:typing}. + +Constants keep both the type and the data value of that type. To be +well-formed the type of the constant should correspond to its value. + +Variables are always typed and identified by unique $id$, which refers to +either lambda bound variable or \lst{val} bound variable. The encoding of +variables and their resolution is described in Section~\ref{sec:blocks}. + +Lambda expressions can take a list of lambda-bound variables which can be +used in the body expression, which can be a \emph{block expression}. + +Function application takes an expression of functional type (e.g. $T_1 \to +T_n$) and a list of arguments. The reason we do not write it $e_f(\Ov{e})$ +is that this notation suggests that $(\Ov{e})$ is a subterm, which it is not. + +Method invocation allows one to apply functions defined as methods of +\emph{interface types}. If expression $e$ has interface type $I$ and +method $m$ is declared in the interface $I$ then method invocation +$e.m(args)$ is defined for the appropriate $args$. + +Conditional expressions of \langname are strict in condition and lazy in both +of the branches. Each branch is an expression which is executed depending on +the result of condition. This laziness of branches is specified by lowering to +\corelang (see Figure~\ref{fig:lowering}). + +Block expression contains a list of \lst{val} definitions of variables. To be +well-formed each subsequent definition can only refer to the previously defined +variables. 
Result of block execution is the result of the resulting +expression $e$, which can use any variable of the block. + +Each type may be associated with a list of method declarations, in which case +we say that \emph{the type has methods}. The semantics of the methods is the +same as in Java. Having an instance of some type with methods it is possible +to call methods on the instance with some additional arguments. +Each method can be parameterized by type variables, which +can be used in method signature. Because \langname supports only monomorphic +values each method call is monomorphic and all type variables are assigned to +concrete types (see \lst{MethodCall} typing rule in Figure~\ref{fig:typing}). + +The semantics of \langname is specified by translating all its terms to a +somewhat lower and simplified language, which we call \corelang. This +\emph{lowering} translation is shown in Figure~\ref{fig:lowering}. + +\begin{figure}[h] +\begin{center} +\begin{tabular}{ l c l } + \hline +$Term_{\langname}$ & & $Term_{Core}$ \\ + \hline + +$\Low{ \TyLam{x_i}{T_i}{e} }$ & \To & + $\Lam{x:(T_0,\dots,T_n)}{ \Low{ \{ \Ov{\lst{val}~x_i: T_i = x.\_i;}~e\} } }$ \\ + +$\Low{ \Apply{e_f}{\Ov{e_i}} }$ & \To & $\Apply{ \Low{e_f} }{ \Low{(\Ov{e_i})} }$ \\ +$\Low{ \Apply{e.m}{\Ov{e_i}} }$ & \To & $\Apply{ \Low{e}.m}{\Ov{ \Low{e_i} }}$ \\ +$\Low{ \Tup{e_1, \dots ,e_n} }$ & \To & $\Tup{\Low{e_1}, \dots ,\Low{e_n}}$ \\ + +$\Low{ e_1~\text{\lst{||}}~e_2 }$ & \To & $\Low{ \IfThenElse{ e_1 }{ \True }{ e_2 } }$ \\ +$\Low{ e_1~\text{\lst{\&\&}}~e_2 }$ & \To & $\Low{ \IfThenElse{ e_1 }{ e_2 }{ \False } }$ \\ + +$\Low{ \IfThenElse{e_{cond}}{e_1}{e_2} }$ & \To & + $\Apply{(\lst{if}(\Low{e_{cond}} ,~\Lam{(\_:Unit)}{\Low{e_1}} ,~\Lam{(\_:Unit)}{\Low{e_2}} ))}{}$ \\ + +$\Low{ \{ \Ov{\text{\lst{val}}~x_i: T_i = e_i;}~e\} }$ & \To & + $\Apply{ (\Lam{(x_1:T_1)}{( \dots \Apply{(\Lam{(x_n:T_n)}{\Low{e}})}{\Low{e_n}} \dots )}) }{\Low{e_1}}$\\ + +$\Low{ \Apply{\delta}{\Ov{e_i}} }$ & \To & 
$\Apply{\delta}{\Ov{ \Low{e_i} }}$ \\ +$\Low{ e }$ & \To & $e$ \\ +\end{tabular} +\end{center} +\caption{Lowering to \corelang} +\label{fig:lowering} +\end{figure} + +All $n$-ary lambdas when $n>1$ are transformed to single-argument lambdas +using tupled arguments. +Note that the $\IfThenElse{e_{cond}}{e_1}{e_2}$ term of \langname has lazy +evaluation of its branches whereas right-hand-side \lst{if} is a primitive +operation and has strict evaluation of the arguments. The laziness +is achieved by using lambda expressions of \lst{Unit} $\to$ \lst{Boolean} +type. + +We translate logical operations (\lst{||}, \lst{&&}) of \langname, which are +lazy in the second argument, to \lst{if} term of \langname, which is recursively +translated to the corresponding \corelang term. + +Syntactic blocks of \langname are completely eliminated and translated to +nested lambda expressions, which unambiguously specify evaluation semantics +of blocks. The \corelang is specified in Section~\ref{sec:evaluation}. + diff --git a/docs/spec/llncs.cls b/docs/spec/llncs.cls new file mode 100644 index 0000000000..1d49f3d238 --- /dev/null +++ b/docs/spec/llncs.cls @@ -0,0 +1,1207 @@ +% LLNCS DOCUMENT CLASS -- version 2.17 (12-Jul-2010) +% Springer Verlag LaTeX2e support for Lecture Notes in Computer Science +% +%% +%% \CharacterTable +%% {Upper-case \A\B\C\D\E\F\G\H\I\J\K\L\M\N\O\P\Q\R\S\T\U\V\W\X\Y\Z +%% Lower-case \a\b\c\d\e\f\g\h\i\j\k\l\m\n\o\p\q\r\s\t\u\v\w\x\y\z +%% Digits \0\1\2\3\4\5\6\7\8\9 +%% Exclamation \! Double quote \" Hash (number) \# +%% Dollar \$ Percent \% Ampersand \& +%% Acute accent \' Left paren \( Right paren \) +%% Asterisk \* Plus \+ Comma \, +%% Minus \- Point \. Solidus \/ +%% Colon \: Semicolon \; Less than \< +%% Equals \= Greater than \> Question mark \? 
+%% Commercial at \@ Left bracket \[ Backslash \\ +%% Right bracket \] Circumflex \^ Underscore \_ +%% Grave accent \` Left brace \{ Vertical bar \| +%% Right brace \} Tilde \~} +%% +\NeedsTeXFormat{LaTeX2e}[1995/12/01] +\ProvidesClass{llncs}[2010/07/12 v2.17 +^^J LaTeX document class for Lecture Notes in Computer Science] +% Options +\let\if@envcntreset\iffalse +\DeclareOption{envcountreset}{\let\if@envcntreset\iftrue} +\DeclareOption{citeauthoryear}{\let\citeauthoryear=Y} +\DeclareOption{oribibl}{\let\oribibl=Y} +\let\if@custvec\iftrue +\DeclareOption{orivec}{\let\if@custvec\iffalse} +\let\if@envcntsame\iffalse +\DeclareOption{envcountsame}{\let\if@envcntsame\iftrue} +\let\if@envcntsect\iffalse +\DeclareOption{envcountsect}{\let\if@envcntsect\iftrue} +\let\if@runhead\iffalse +\DeclareOption{runningheads}{\let\if@runhead\iftrue} + +\let\if@openright\iftrue +\let\if@openbib\iffalse +\DeclareOption{openbib}{\let\if@openbib\iftrue} + +% languages +\let\switcht@@therlang\relax +\def\ds@deutsch{\def\switcht@@therlang{\switcht@deutsch}} +\def\ds@francais{\def\switcht@@therlang{\switcht@francais}} + +\DeclareOption*{\PassOptionsToClass{\CurrentOption}{article}} + +\ProcessOptions + +\LoadClass[twoside]{article} +\RequirePackage{multicol} % needed for the list of participants, index +\RequirePackage{aliascnt} + +\setlength{\textwidth}{12.2cm} +\setlength{\textheight}{19.3cm} +\renewcommand\@pnumwidth{2em} +\renewcommand\@tocrmarg{3.5em} +% +\def\@dottedtocline#1#2#3#4#5{% + \ifnum #1>\c@tocdepth \else + \vskip \z@ \@plus.2\p@ + {\leftskip #2\relax \rightskip \@tocrmarg \advance\rightskip by 0pt plus 2cm + \parfillskip -\rightskip \pretolerance=10000 + \parindent #2\relax\@afterindenttrue + \interlinepenalty\@M + \leavevmode + \@tempdima #3\relax + \advance\leftskip \@tempdima \null\nobreak\hskip -\leftskip + {#4}\nobreak + \leaders\hbox{$\m@th + \mkern \@dotsep mu\hbox{.}\mkern \@dotsep + mu$}\hfill + \nobreak + \hb@xt@\@pnumwidth{\hfil\normalfont \normalcolor #5}% + 
\par}% + \fi} +% +\def\switcht@albion{% +\def\abstractname{Abstract.} +\def\ackname{Acknowledgement.} +\def\andname{and} +\def\lastandname{\unskip, and} +\def\appendixname{Appendix} +\def\chaptername{Chapter} +\def\claimname{Claim} +\def\conjecturename{Conjecture} +\def\contentsname{Table of Contents} +\def\corollaryname{Corollary} +\def\definitionname{Definition} +\def\examplename{Example} +\def\exercisename{Exercise} +\def\figurename{Fig.} +\def\keywordname{{\bf Keywords:}} +\def\indexname{Index} +\def\lemmaname{Lemma} +\def\contriblistname{List of Contributors} +\def\listfigurename{List of Figures} +\def\listtablename{List of Tables} +\def\mailname{{\it Correspondence to\/}:} +\def\noteaddname{Note added in proof} +\def\notename{Note} +\def\partname{Part} +\def\problemname{Problem} +\def\proofname{Proof} +\def\propertyname{Property} +\def\propositionname{Proposition} +\def\questionname{Question} +\def\remarkname{Remark} +\def\seename{see} +\def\solutionname{Solution} +\def\subclassname{{\it Subject Classifications\/}:} +\def\tablename{Table} +\def\theoremname{Theorem}} +\switcht@albion +% Names of theorem like environments are already defined +% but must be translated if another language is chosen +% +% French section +\def\switcht@francais{%\typeout{On parle francais.}% + \def\abstractname{R\'esum\'e.}% + \def\ackname{Remerciements.}% + \def\andname{et}% + \def\lastandname{ et}% + \def\appendixname{Appendice} + \def\chaptername{Chapitre}% + \def\claimname{Pr\'etention}% + \def\conjecturename{Hypoth\`ese}% + \def\contentsname{Table des mati\`eres}% + \def\corollaryname{Corollaire}% + \def\definitionname{D\'efinition}% + \def\examplename{Exemple}% + \def\exercisename{Exercice}% + \def\figurename{Fig.}% + \def\keywordname{{\bf Mots-cl\'e:}} + \def\indexname{Index} + \def\lemmaname{Lemme}% + \def\contriblistname{Liste des contributeurs} + \def\listfigurename{Liste des figures}% + \def\listtablename{Liste des tables}% + \def\mailname{{\it Correspondence to\/}:} + 
\def\noteaddname{Note ajout\'ee \`a l'\'epreuve}% + \def\notename{Remarque}% + \def\partname{Partie}% + \def\problemname{Probl\`eme}% + \def\proofname{Preuve}% + \def\propertyname{Caract\'eristique}% +%\def\propositionname{Proposition}% + \def\questionname{Question}% + \def\remarkname{Remarque}% + \def\seename{voir} + \def\solutionname{Solution}% + \def\subclassname{{\it Subject Classifications\/}:} + \def\tablename{Tableau}% + \def\theoremname{Th\'eor\`eme}% +} +% +% German section +\def\switcht@deutsch{%\typeout{Man spricht deutsch.}% + \def\abstractname{Zusammenfassung.}% + \def\ackname{Danksagung.}% + \def\andname{und}% + \def\lastandname{ und}% + \def\appendixname{Anhang}% + \def\chaptername{Kapitel}% + \def\claimname{Behauptung}% + \def\conjecturename{Hypothese}% + \def\contentsname{Inhaltsverzeichnis}% + \def\corollaryname{Korollar}% +%\def\definitionname{Definition}% + \def\examplename{Beispiel}% + \def\exercisename{\"Ubung}% + \def\figurename{Abb.}% + \def\keywordname{{\bf Schl\"usselw\"orter:}} + \def\indexname{Index} +%\def\lemmaname{Lemma}% + \def\contriblistname{Mitarbeiter} + \def\listfigurename{Abbildungsverzeichnis}% + \def\listtablename{Tabellenverzeichnis}% + \def\mailname{{\it Correspondence to\/}:} + \def\noteaddname{Nachtrag}% + \def\notename{Anmerkung}% + \def\partname{Teil}% +%\def\problemname{Problem}% + \def\proofname{Beweis}% + \def\propertyname{Eigenschaft}% +%\def\propositionname{Proposition}% + \def\questionname{Frage}% + \def\remarkname{Anmerkung}% + \def\seename{siehe} + \def\solutionname{L\"osung}% + \def\subclassname{{\it Subject Classifications\/}:} + \def\tablename{Tabelle}% +%\def\theoremname{Theorem}% +} + +% Ragged bottom for the actual page +\def\thisbottomragged{\def\@textbottom{\vskip\z@ plus.0001fil +\global\let\@textbottom\relax}} + +\renewcommand\small{% + \@setfontsize\small\@ixpt{11}% + \abovedisplayskip 8.5\p@ \@plus3\p@ \@minus4\p@ + \abovedisplayshortskip \z@ \@plus2\p@ + \belowdisplayshortskip 4\p@ \@plus2\p@ 
\@minus2\p@ + \def\@listi{\leftmargin\leftmargini + \parsep 0\p@ \@plus1\p@ \@minus\p@ + \topsep 8\p@ \@plus2\p@ \@minus4\p@ + \itemsep0\p@}% + \belowdisplayskip \abovedisplayskip +} + +\frenchspacing +\widowpenalty=10000 +\clubpenalty=10000 + +\setlength\oddsidemargin {63\p@} +\setlength\evensidemargin {63\p@} +\setlength\marginparwidth {90\p@} + +\setlength\headsep {16\p@} + +\setlength\footnotesep{7.7\p@} +\setlength\textfloatsep{8mm\@plus 2\p@ \@minus 4\p@} +\setlength\intextsep {8mm\@plus 2\p@ \@minus 2\p@} + +\setcounter{secnumdepth}{2} + +\newcounter {chapter} +\renewcommand\thechapter {\@arabic\c@chapter} + +\newif\if@mainmatter \@mainmattertrue +\newcommand\frontmatter{\cleardoublepage + \@mainmatterfalse\pagenumbering{Roman}} +\newcommand\mainmatter{\cleardoublepage + \@mainmattertrue\pagenumbering{arabic}} +\newcommand\backmatter{\if@openright\cleardoublepage\else\clearpage\fi + \@mainmatterfalse} + +\renewcommand\part{\cleardoublepage + \thispagestyle{empty}% + \if@twocolumn + \onecolumn + \@tempswatrue + \else + \@tempswafalse + \fi + \null\vfil + \secdef\@part\@spart} + +\def\@part[#1]#2{% + \ifnum \c@secnumdepth >-2\relax + \refstepcounter{part}% + \addcontentsline{toc}{part}{\thepart\hspace{1em}#1}% + \else + \addcontentsline{toc}{part}{#1}% + \fi + \markboth{}{}% + {\centering + \interlinepenalty \@M + \normalfont + \ifnum \c@secnumdepth >-2\relax + \huge\bfseries \partname~\thepart + \par + \vskip 20\p@ + \fi + \Huge \bfseries #2\par}% + \@endpart} +\def\@spart#1{% + {\centering + \interlinepenalty \@M + \normalfont + \Huge \bfseries #1\par}% + \@endpart} +\def\@endpart{\vfil\newpage + \if@twoside + \null + \thispagestyle{empty}% + \newpage + \fi + \if@tempswa + \twocolumn + \fi} + +\newcommand\chapter{\clearpage + \thispagestyle{empty}% + \global\@topnum\z@ + \@afterindentfalse + \secdef\@chapter\@schapter} +\def\@chapter[#1]#2{\ifnum \c@secnumdepth >\m@ne + \if@mainmatter + \refstepcounter{chapter}% + \typeout{\@chapapp\space\thechapter.}% + 
\addcontentsline{toc}{chapter}% + {\protect\numberline{\thechapter}#1}% + \else + \addcontentsline{toc}{chapter}{#1}% + \fi + \else + \addcontentsline{toc}{chapter}{#1}% + \fi + \chaptermark{#1}% + \addtocontents{lof}{\protect\addvspace{10\p@}}% + \addtocontents{lot}{\protect\addvspace{10\p@}}% + \if@twocolumn + \@topnewpage[\@makechapterhead{#2}]% + \else + \@makechapterhead{#2}% + \@afterheading + \fi} +\def\@makechapterhead#1{% +% \vspace*{50\p@}% + {\centering + \ifnum \c@secnumdepth >\m@ne + \if@mainmatter + \large\bfseries \@chapapp{} \thechapter + \par\nobreak + \vskip 20\p@ + \fi + \fi + \interlinepenalty\@M + \Large \bfseries #1\par\nobreak + \vskip 40\p@ + }} +\def\@schapter#1{\if@twocolumn + \@topnewpage[\@makeschapterhead{#1}]% + \else + \@makeschapterhead{#1}% + \@afterheading + \fi} +\def\@makeschapterhead#1{% +% \vspace*{50\p@}% + {\centering + \normalfont + \interlinepenalty\@M + \Large \bfseries #1\par\nobreak + \vskip 40\p@ + }} + +\renewcommand\section{\@startsection{section}{1}{\z@}% + {-18\p@ \@plus -4\p@ \@minus -4\p@}% + {12\p@ \@plus 4\p@ \@minus 4\p@}% + {\normalfont\large\bfseries\boldmath + \rightskip=\z@ \@plus 8em\pretolerance=10000 }} +\renewcommand\subsection{\@startsection{subsection}{2}{\z@}% + {-18\p@ \@plus -4\p@ \@minus -4\p@}% + {8\p@ \@plus 4\p@ \@minus 4\p@}% + {\normalfont\normalsize\bfseries\boldmath + \rightskip=\z@ \@plus 8em\pretolerance=10000 }} +\renewcommand\subsubsection{\@startsection{subsubsection}{3}{\z@}% + {-18\p@ \@plus -4\p@ \@minus -4\p@}% + {-0.5em \@plus -0.22em \@minus -0.1em}% + {\normalfont\normalsize\bfseries\boldmath}} +\renewcommand\paragraph{\@startsection{paragraph}{4}{\z@}% + {-12\p@ \@plus -4\p@ \@minus -4\p@}% + {-0.5em \@plus -0.22em \@minus -0.1em}% + {\normalfont\normalsize\itshape}} +\renewcommand\subparagraph[1]{\typeout{LLNCS warning: You should not use + \string\subparagraph\space with this class}\vskip0.5cm +You should not use \verb|\subparagraph| with this class.\vskip0.5cm} + 
+\DeclareMathSymbol{\Gamma}{\mathalpha}{letters}{"00} +\DeclareMathSymbol{\Delta}{\mathalpha}{letters}{"01} +\DeclareMathSymbol{\Theta}{\mathalpha}{letters}{"02} +\DeclareMathSymbol{\Lambda}{\mathalpha}{letters}{"03} +\DeclareMathSymbol{\Xi}{\mathalpha}{letters}{"04} +\DeclareMathSymbol{\Pi}{\mathalpha}{letters}{"05} +\DeclareMathSymbol{\Sigma}{\mathalpha}{letters}{"06} +\DeclareMathSymbol{\Upsilon}{\mathalpha}{letters}{"07} +\DeclareMathSymbol{\Phi}{\mathalpha}{letters}{"08} +\DeclareMathSymbol{\Psi}{\mathalpha}{letters}{"09} +\DeclareMathSymbol{\Omega}{\mathalpha}{letters}{"0A} + +\let\footnotesize\small + +\if@custvec +\def\vec#1{\mathchoice{\mbox{\boldmath$\displaystyle#1$}} +{\mbox{\boldmath$\textstyle#1$}} +{\mbox{\boldmath$\scriptstyle#1$}} +{\mbox{\boldmath$\scriptscriptstyle#1$}}} +\fi + +\def\squareforqed{\hbox{\rlap{$\sqcap$}$\sqcup$}} +\def\qed{\ifmmode\squareforqed\else{\unskip\nobreak\hfil +\penalty50\hskip1em\null\nobreak\hfil\squareforqed +\parfillskip=0pt\finalhyphendemerits=0\endgraf}\fi} + +\def\getsto{\mathrel{\mathchoice {\vcenter{\offinterlineskip +\halign{\hfil +$\displaystyle##$\hfil\cr\gets\cr\to\cr}}} +{\vcenter{\offinterlineskip\halign{\hfil$\textstyle##$\hfil\cr\gets +\cr\to\cr}}} +{\vcenter{\offinterlineskip\halign{\hfil$\scriptstyle##$\hfil\cr\gets +\cr\to\cr}}} +{\vcenter{\offinterlineskip\halign{\hfil$\scriptscriptstyle##$\hfil\cr +\gets\cr\to\cr}}}}} +\def\lid{\mathrel{\mathchoice {\vcenter{\offinterlineskip\halign{\hfil +$\displaystyle##$\hfil\cr<\cr\noalign{\vskip1.2pt}=\cr}}} +{\vcenter{\offinterlineskip\halign{\hfil$\textstyle##$\hfil\cr<\cr +\noalign{\vskip1.2pt}=\cr}}} +{\vcenter{\offinterlineskip\halign{\hfil$\scriptstyle##$\hfil\cr<\cr +\noalign{\vskip1pt}=\cr}}} +{\vcenter{\offinterlineskip\halign{\hfil$\scriptscriptstyle##$\hfil\cr +<\cr +\noalign{\vskip0.9pt}=\cr}}}}} +\def\gid{\mathrel{\mathchoice {\vcenter{\offinterlineskip\halign{\hfil +$\displaystyle##$\hfil\cr>\cr\noalign{\vskip1.2pt}=\cr}}} 
+{\vcenter{\offinterlineskip\halign{\hfil$\textstyle##$\hfil\cr>\cr +\noalign{\vskip1.2pt}=\cr}}} +{\vcenter{\offinterlineskip\halign{\hfil$\scriptstyle##$\hfil\cr>\cr +\noalign{\vskip1pt}=\cr}}} +{\vcenter{\offinterlineskip\halign{\hfil$\scriptscriptstyle##$\hfil\cr +>\cr +\noalign{\vskip0.9pt}=\cr}}}}} +\def\grole{\mathrel{\mathchoice {\vcenter{\offinterlineskip +\halign{\hfil +$\displaystyle##$\hfil\cr>\cr\noalign{\vskip-1pt}<\cr}}} +{\vcenter{\offinterlineskip\halign{\hfil$\textstyle##$\hfil\cr +>\cr\noalign{\vskip-1pt}<\cr}}} +{\vcenter{\offinterlineskip\halign{\hfil$\scriptstyle##$\hfil\cr +>\cr\noalign{\vskip-0.8pt}<\cr}}} +{\vcenter{\offinterlineskip\halign{\hfil$\scriptscriptstyle##$\hfil\cr +>\cr\noalign{\vskip-0.3pt}<\cr}}}}} +\def\bbbr{{\rm I\!R}} %reelle Zahlen +\def\bbbm{{\rm I\!M}} +\def\bbbn{{\rm I\!N}} %natuerliche Zahlen +\def\bbbf{{\rm I\!F}} +\def\bbbh{{\rm I\!H}} +\def\bbbk{{\rm I\!K}} +\def\bbbp{{\rm I\!P}} +\def\bbbone{{\mathchoice {\rm 1\mskip-4mu l} {\rm 1\mskip-4mu l} +{\rm 1\mskip-4.5mu l} {\rm 1\mskip-5mu l}}} +\def\bbbc{{\mathchoice {\setbox0=\hbox{$\displaystyle\rm C$}\hbox{\hbox +to0pt{\kern0.4\wd0\vrule height0.9\ht0\hss}\box0}} +{\setbox0=\hbox{$\textstyle\rm C$}\hbox{\hbox +to0pt{\kern0.4\wd0\vrule height0.9\ht0\hss}\box0}} +{\setbox0=\hbox{$\scriptstyle\rm C$}\hbox{\hbox +to0pt{\kern0.4\wd0\vrule height0.9\ht0\hss}\box0}} +{\setbox0=\hbox{$\scriptscriptstyle\rm C$}\hbox{\hbox +to0pt{\kern0.4\wd0\vrule height0.9\ht0\hss}\box0}}}} +\def\bbbq{{\mathchoice {\setbox0=\hbox{$\displaystyle\rm +Q$}\hbox{\raise +0.15\ht0\hbox to0pt{\kern0.4\wd0\vrule height0.8\ht0\hss}\box0}} +{\setbox0=\hbox{$\textstyle\rm Q$}\hbox{\raise +0.15\ht0\hbox to0pt{\kern0.4\wd0\vrule height0.8\ht0\hss}\box0}} +{\setbox0=\hbox{$\scriptstyle\rm Q$}\hbox{\raise +0.15\ht0\hbox to0pt{\kern0.4\wd0\vrule height0.7\ht0\hss}\box0}} +{\setbox0=\hbox{$\scriptscriptstyle\rm Q$}\hbox{\raise +0.15\ht0\hbox to0pt{\kern0.4\wd0\vrule height0.7\ht0\hss}\box0}}}} 
+\def\bbbt{{\mathchoice {\setbox0=\hbox{$\displaystyle\rm +T$}\hbox{\hbox to0pt{\kern0.3\wd0\vrule height0.9\ht0\hss}\box0}} +{\setbox0=\hbox{$\textstyle\rm T$}\hbox{\hbox +to0pt{\kern0.3\wd0\vrule height0.9\ht0\hss}\box0}} +{\setbox0=\hbox{$\scriptstyle\rm T$}\hbox{\hbox +to0pt{\kern0.3\wd0\vrule height0.9\ht0\hss}\box0}} +{\setbox0=\hbox{$\scriptscriptstyle\rm T$}\hbox{\hbox +to0pt{\kern0.3\wd0\vrule height0.9\ht0\hss}\box0}}}} +\def\bbbs{{\mathchoice +{\setbox0=\hbox{$\displaystyle \rm S$}\hbox{\raise0.5\ht0\hbox +to0pt{\kern0.35\wd0\vrule height0.45\ht0\hss}\hbox +to0pt{\kern0.55\wd0\vrule height0.5\ht0\hss}\box0}} +{\setbox0=\hbox{$\textstyle \rm S$}\hbox{\raise0.5\ht0\hbox +to0pt{\kern0.35\wd0\vrule height0.45\ht0\hss}\hbox +to0pt{\kern0.55\wd0\vrule height0.5\ht0\hss}\box0}} +{\setbox0=\hbox{$\scriptstyle \rm S$}\hbox{\raise0.5\ht0\hbox +to0pt{\kern0.35\wd0\vrule height0.45\ht0\hss}\raise0.05\ht0\hbox +to0pt{\kern0.5\wd0\vrule height0.45\ht0\hss}\box0}} +{\setbox0=\hbox{$\scriptscriptstyle\rm S$}\hbox{\raise0.5\ht0\hbox +to0pt{\kern0.4\wd0\vrule height0.45\ht0\hss}\raise0.05\ht0\hbox +to0pt{\kern0.55\wd0\vrule height0.45\ht0\hss}\box0}}}} +\def\bbbz{{\mathchoice {\hbox{$\mathsf\textstyle Z\kern-0.4em Z$}} +{\hbox{$\mathsf\textstyle Z\kern-0.4em Z$}} +{\hbox{$\mathsf\scriptstyle Z\kern-0.3em Z$}} +{\hbox{$\mathsf\scriptscriptstyle Z\kern-0.2em Z$}}}} + +\let\ts\, + +\setlength\leftmargini {17\p@} +\setlength\leftmargin {\leftmargini} +\setlength\leftmarginii {\leftmargini} +\setlength\leftmarginiii {\leftmargini} +\setlength\leftmarginiv {\leftmargini} +\setlength \labelsep {.5em} +\setlength \labelwidth{\leftmargini} +\addtolength\labelwidth{-\labelsep} + +\def\@listI{\leftmargin\leftmargini + \parsep 0\p@ \@plus1\p@ \@minus\p@ + \topsep 8\p@ \@plus2\p@ \@minus4\p@ + \itemsep0\p@} +\let\@listi\@listI +\@listi +\def\@listii {\leftmargin\leftmarginii + \labelwidth\leftmarginii + \advance\labelwidth-\labelsep + \topsep 0\p@ \@plus2\p@ \@minus\p@} 
+\def\@listiii{\leftmargin\leftmarginiii + \labelwidth\leftmarginiii + \advance\labelwidth-\labelsep + \topsep 0\p@ \@plus\p@\@minus\p@ + \parsep \z@ + \partopsep \p@ \@plus\z@ \@minus\p@} + +\renewcommand\labelitemi{\normalfont\bfseries --} +\renewcommand\labelitemii{$\m@th\bullet$} + +\setlength\arraycolsep{1.4\p@} +\setlength\tabcolsep{1.4\p@} + +\def\tableofcontents{\chapter*{\contentsname\@mkboth{{\contentsname}}% + {{\contentsname}}} + \def\authcount##1{\setcounter{auco}{##1}\setcounter{@auth}{1}} + \def\lastand{\ifnum\value{auco}=2\relax + \unskip{} \andname\ + \else + \unskip \lastandname\ + \fi}% + \def\and{\stepcounter{@auth}\relax + \ifnum\value{@auth}=\value{auco}% + \lastand + \else + \unskip, + \fi}% + \@starttoc{toc}\if@restonecol\twocolumn\fi} + +\def\l@part#1#2{\addpenalty{\@secpenalty}% + \addvspace{2em plus\p@}% % space above part line + \begingroup + \parindent \z@ + \rightskip \z@ plus 5em + \hrule\vskip5pt + \large % same size as for a contribution heading + \bfseries\boldmath % set line in boldface + \leavevmode % TeX command to enter horizontal mode. 
+ #1\par + \vskip5pt + \hrule + \vskip1pt + \nobreak % Never break after part entry + \endgroup} + +\def\@dotsep{2} + +\let\phantomsection=\relax + +\def\hyperhrefextend{\ifx\hyper@anchor\@undefined\else +{}\fi} + +\def\addnumcontentsmark#1#2#3{% +\addtocontents{#1}{\protect\contentsline{#2}{\protect\numberline + {\thechapter}#3}{\thepage}\hyperhrefextend}}% +\def\addcontentsmark#1#2#3{% +\addtocontents{#1}{\protect\contentsline{#2}{#3}{\thepage}\hyperhrefextend}}% +\def\addcontentsmarkwop#1#2#3{% +\addtocontents{#1}{\protect\contentsline{#2}{#3}{0}\hyperhrefextend}}% + +\def\@adcmk[#1]{\ifcase #1 \or +\def\@gtempa{\addnumcontentsmark}% + \or \def\@gtempa{\addcontentsmark}% + \or \def\@gtempa{\addcontentsmarkwop}% + \fi\@gtempa{toc}{chapter}% +} +\def\addtocmark{% +\phantomsection +\@ifnextchar[{\@adcmk}{\@adcmk[3]}% +} + +\def\l@chapter#1#2{\addpenalty{-\@highpenalty} + \vskip 1.0em plus 1pt \@tempdima 1.5em \begingroup + \parindent \z@ \rightskip \@tocrmarg + \advance\rightskip by 0pt plus 2cm + \parfillskip -\rightskip \pretolerance=10000 + \leavevmode \advance\leftskip\@tempdima \hskip -\leftskip + {\large\bfseries\boldmath#1}\ifx0#2\hfil\null + \else + \nobreak + \leaders\hbox{$\m@th \mkern \@dotsep mu.\mkern + \@dotsep mu$}\hfill + \nobreak\hbox to\@pnumwidth{\hss #2}% + \fi\par + \penalty\@highpenalty \endgroup} + +\def\l@title#1#2{\addpenalty{-\@highpenalty} + \addvspace{8pt plus 1pt} + \@tempdima \z@ + \begingroup + \parindent \z@ \rightskip \@tocrmarg + \advance\rightskip by 0pt plus 2cm + \parfillskip -\rightskip \pretolerance=10000 + \leavevmode \advance\leftskip\@tempdima \hskip -\leftskip + #1\nobreak + \leaders\hbox{$\m@th \mkern \@dotsep mu.\mkern + \@dotsep mu$}\hfill + \nobreak\hbox to\@pnumwidth{\hss #2}\par + \penalty\@highpenalty \endgroup} + +\def\l@author#1#2{\addpenalty{\@highpenalty} + \@tempdima=15\p@ %\z@ + \begingroup + \parindent \z@ \rightskip \@tocrmarg + \advance\rightskip by 0pt plus 2cm + \pretolerance=10000 + \leavevmode 
\advance\leftskip\@tempdima %\hskip -\leftskip + \textit{#1}\par + \penalty\@highpenalty \endgroup} + +\setcounter{tocdepth}{0} +\newdimen\tocchpnum +\newdimen\tocsecnum +\newdimen\tocsectotal +\newdimen\tocsubsecnum +\newdimen\tocsubsectotal +\newdimen\tocsubsubsecnum +\newdimen\tocsubsubsectotal +\newdimen\tocparanum +\newdimen\tocparatotal +\newdimen\tocsubparanum +\tocchpnum=\z@ % no chapter numbers +\tocsecnum=15\p@ % section 88. plus 2.222pt +\tocsubsecnum=23\p@ % subsection 88.8 plus 2.222pt +\tocsubsubsecnum=27\p@ % subsubsection 88.8.8 plus 1.444pt +\tocparanum=35\p@ % paragraph 88.8.8.8 plus 1.666pt +\tocsubparanum=43\p@ % subparagraph 88.8.8.8.8 plus 1.888pt +\def\calctocindent{% +\tocsectotal=\tocchpnum +\advance\tocsectotal by\tocsecnum +\tocsubsectotal=\tocsectotal +\advance\tocsubsectotal by\tocsubsecnum +\tocsubsubsectotal=\tocsubsectotal +\advance\tocsubsubsectotal by\tocsubsubsecnum +\tocparatotal=\tocsubsubsectotal +\advance\tocparatotal by\tocparanum} +\calctocindent + +\def\l@section{\@dottedtocline{1}{\tocchpnum}{\tocsecnum}} +\def\l@subsection{\@dottedtocline{2}{\tocsectotal}{\tocsubsecnum}} +\def\l@subsubsection{\@dottedtocline{3}{\tocsubsectotal}{\tocsubsubsecnum}} +\def\l@paragraph{\@dottedtocline{4}{\tocsubsubsectotal}{\tocparanum}} +\def\l@subparagraph{\@dottedtocline{5}{\tocparatotal}{\tocsubparanum}} + +\def\listoffigures{\@restonecolfalse\if@twocolumn\@restonecoltrue\onecolumn + \fi\section*{\listfigurename\@mkboth{{\listfigurename}}{{\listfigurename}}} + \@starttoc{lof}\if@restonecol\twocolumn\fi} +\def\l@figure{\@dottedtocline{1}{0em}{1.5em}} + +\def\listoftables{\@restonecolfalse\if@twocolumn\@restonecoltrue\onecolumn + \fi\section*{\listtablename\@mkboth{{\listtablename}}{{\listtablename}}} + \@starttoc{lot}\if@restonecol\twocolumn\fi} +\let\l@table\l@figure + +\renewcommand\listoffigures{% + \section*{\listfigurename + \@mkboth{\listfigurename}{\listfigurename}}% + \@starttoc{lof}% + } + +\renewcommand\listoftables{% + 
\section*{\listtablename + \@mkboth{\listtablename}{\listtablename}}% + \@starttoc{lot}% + } + +\ifx\oribibl\undefined +\ifx\citeauthoryear\undefined +\renewenvironment{thebibliography}[1] + {\section*{\refname} + \def\@biblabel##1{##1.} + \small + \list{\@biblabel{\@arabic\c@enumiv}}% + {\settowidth\labelwidth{\@biblabel{#1}}% + \leftmargin\labelwidth + \advance\leftmargin\labelsep + \if@openbib + \advance\leftmargin\bibindent + \itemindent -\bibindent + \listparindent \itemindent + \parsep \z@ + \fi + \usecounter{enumiv}% + \let\p@enumiv\@empty + \renewcommand\theenumiv{\@arabic\c@enumiv}}% + \if@openbib + \renewcommand\newblock{\par}% + \else + \renewcommand\newblock{\hskip .11em \@plus.33em \@minus.07em}% + \fi + \sloppy\clubpenalty4000\widowpenalty4000% + \sfcode`\.=\@m} + {\def\@noitemerr + {\@latex@warning{Empty `thebibliography' environment}}% + \endlist} +\def\@lbibitem[#1]#2{\item[{[#1]}\hfill]\if@filesw + {\let\protect\noexpand\immediate + \write\@auxout{\string\bibcite{#2}{#1}}}\fi\ignorespaces} +\newcount\@tempcntc +\def\@citex[#1]#2{\if@filesw\immediate\write\@auxout{\string\citation{#2}}\fi + \@tempcnta\z@\@tempcntb\m@ne\def\@citea{}\@cite{\@for\@citeb:=#2\do + {\@ifundefined + {b@\@citeb}{\@citeo\@tempcntb\m@ne\@citea\def\@citea{,}{\bfseries + ?}\@warning + {Citation `\@citeb' on page \thepage \space undefined}}% + {\setbox\z@\hbox{\global\@tempcntc0\csname b@\@citeb\endcsname\relax}% + \ifnum\@tempcntc=\z@ \@citeo\@tempcntb\m@ne + \@citea\def\@citea{,}\hbox{\csname b@\@citeb\endcsname}% + \else + \advance\@tempcntb\@ne + \ifnum\@tempcntb=\@tempcntc + \else\advance\@tempcntb\m@ne\@citeo + \@tempcnta\@tempcntc\@tempcntb\@tempcntc\fi\fi}}\@citeo}{#1}} +\def\@citeo{\ifnum\@tempcnta>\@tempcntb\else + \@citea\def\@citea{,\,\hskip\z@skip}% + \ifnum\@tempcnta=\@tempcntb\the\@tempcnta\else + {\advance\@tempcnta\@ne\ifnum\@tempcnta=\@tempcntb \else + \def\@citea{--}\fi + \advance\@tempcnta\m@ne\the\@tempcnta\@citea\the\@tempcntb}\fi\fi} +\else 
+\renewenvironment{thebibliography}[1] + {\section*{\refname} + \small + \list{}% + {\settowidth\labelwidth{}% + \leftmargin\parindent + \itemindent=-\parindent + \labelsep=\z@ + \if@openbib + \advance\leftmargin\bibindent + \itemindent -\bibindent + \listparindent \itemindent + \parsep \z@ + \fi + \usecounter{enumiv}% + \let\p@enumiv\@empty + \renewcommand\theenumiv{}}% + \if@openbib + \renewcommand\newblock{\par}% + \else + \renewcommand\newblock{\hskip .11em \@plus.33em \@minus.07em}% + \fi + \sloppy\clubpenalty4000\widowpenalty4000% + \sfcode`\.=\@m} + {\def\@noitemerr + {\@latex@warning{Empty `thebibliography' environment}}% + \endlist} + \def\@cite#1{#1}% + \def\@lbibitem[#1]#2{\item[]\if@filesw + {\def\protect##1{\string ##1\space}\immediate + \write\@auxout{\string\bibcite{#2}{#1}}}\fi\ignorespaces} + \fi +\else +\@cons\@openbib@code{\noexpand\small} +\fi + +\def\idxquad{\hskip 10\p@}% space that divides entry from number + +\def\@idxitem{\par\hangindent 10\p@} + +\def\subitem{\par\setbox0=\hbox{--\enspace}% second order + \noindent\hangindent\wd0\box0}% index entry + +\def\subsubitem{\par\setbox0=\hbox{--\,--\enspace}% third + \noindent\hangindent\wd0\box0}% order index entry + +\def\indexspace{\par \vskip 10\p@ plus5\p@ minus3\p@\relax} + +\renewenvironment{theindex} + {\@mkboth{\indexname}{\indexname}% + \thispagestyle{empty}\parindent\z@ + \parskip\z@ \@plus .3\p@\relax + \let\item\par + \def\,{\relax\ifmmode\mskip\thinmuskip + \else\hskip0.2em\ignorespaces\fi}% + \normalfont\small + \begin{multicols}{2}[\@makeschapterhead{\indexname}]% + } + {\end{multicols}} + +\renewcommand\footnoterule{% + \kern-3\p@ + \hrule\@width 2truecm + \kern2.6\p@} + \newdimen\fnindent + \fnindent1em +\long\def\@makefntext#1{% + \parindent \fnindent% + \leftskip \fnindent% + \noindent + \llap{\hb@xt@1em{\hss\@makefnmark\ }}\ignorespaces#1} + +\long\def\@makecaption#1#2{% + \small + \vskip\abovecaptionskip + \sbox\@tempboxa{{\bfseries #1.} #2}% + \ifdim \wd\@tempboxa >\hsize + 
{\bfseries #1.} #2\par + \else + \global \@minipagefalse + \hb@xt@\hsize{\hfil\box\@tempboxa\hfil}% + \fi + \vskip\belowcaptionskip} + +\def\fps@figure{htbp} +\def\fnum@figure{\figurename\thinspace\thefigure} +\def \@floatboxreset {% + \reset@font + \small + \@setnobreak + \@setminipage +} +\def\fps@table{htbp} +\def\fnum@table{\tablename~\thetable} +\renewenvironment{table} + {\setlength\abovecaptionskip{0\p@}% + \setlength\belowcaptionskip{10\p@}% + \@float{table}} + {\end@float} +\renewenvironment{table*} + {\setlength\abovecaptionskip{0\p@}% + \setlength\belowcaptionskip{10\p@}% + \@dblfloat{table}} + {\end@dblfloat} + +\long\def\@caption#1[#2]#3{\par\addcontentsline{\csname + ext@#1\endcsname}{#1}{\protect\numberline{\csname + the#1\endcsname}{\ignorespaces #2}}\begingroup + \@parboxrestore + \@makecaption{\csname fnum@#1\endcsname}{\ignorespaces #3}\par + \endgroup} + +% LaTeX does not provide a command to enter the authors institute +% addresses. The \institute command is defined here. 
+ +\newcounter{@inst} +\newcounter{@auth} +\newcounter{auco} +\newdimen\instindent +\newbox\authrun +\newtoks\authorrunning +\newtoks\tocauthor +\newbox\titrun +\newtoks\titlerunning +\newtoks\toctitle + +\def\clearheadinfo{\gdef\@author{No Author Given}% + \gdef\@title{No Title Given}% + \gdef\@subtitle{}% + \gdef\@institute{No Institute Given}% + \gdef\@thanks{}% + \global\titlerunning={}\global\authorrunning={}% + \global\toctitle={}\global\tocauthor={}} + +\def\institute#1{\gdef\@institute{#1}} + +\def\institutename{\par + \begingroup + \parskip=\z@ + \parindent=\z@ + \setcounter{@inst}{1}% + \def\and{\par\stepcounter{@inst}% + \noindent$^{\the@inst}$\enspace\ignorespaces}% + \setbox0=\vbox{\def\thanks##1{}\@institute}% + \ifnum\c@@inst=1\relax + \gdef\fnnstart{0}% + \else + \xdef\fnnstart{\c@@inst}% + \setcounter{@inst}{1}% + \noindent$^{\the@inst}$\enspace + \fi + \ignorespaces + \@institute\par + \endgroup} + +\def\@fnsymbol#1{\ensuremath{\ifcase#1\or\star\or{\star\star}\or + {\star\star\star}\or \dagger\or \ddagger\or + \mathchar "278\or \mathchar "27B\or \|\or **\or \dagger\dagger + \or \ddagger\ddagger \else\@ctrerr\fi}} + +\def\inst#1{\unskip$^{#1}$} +\def\fnmsep{\unskip$^,$} +\def\email#1{{\tt#1}} +\AtBeginDocument{\@ifundefined{url}{\def\url#1{#1}}{}% +\@ifpackageloaded{babel}{% +\@ifundefined{extrasenglish}{}{\addto\extrasenglish{\switcht@albion}}% +\@ifundefined{extrasfrenchb}{}{\addto\extrasfrenchb{\switcht@francais}}% +\@ifundefined{extrasgerman}{}{\addto\extrasgerman{\switcht@deutsch}}% +}{\switcht@@therlang}% +\providecommand{\keywords}[1]{\par\addvspace\baselineskip +\noindent\keywordname\enspace\ignorespaces#1}% +} +\def\homedir{\~{ }} + +\def\subtitle#1{\gdef\@subtitle{#1}} +\clearheadinfo +% +%%% to avoid hyperref warnings +\providecommand*{\toclevel@author}{999} +%%% to make title-entry parent of section-entries +\providecommand*{\toclevel@title}{0} +% +\renewcommand\maketitle{\newpage +\phantomsection + \refstepcounter{chapter}% + 
\stepcounter{section}% + \setcounter{section}{0}% + \setcounter{subsection}{0}% + \setcounter{figure}{0} + \setcounter{table}{0} + \setcounter{equation}{0} + \setcounter{footnote}{0}% + \begingroup + \parindent=\z@ + \renewcommand\thefootnote{\@fnsymbol\c@footnote}% + \if@twocolumn + \ifnum \col@number=\@ne + \@maketitle + \else + \twocolumn[\@maketitle]% + \fi + \else + \newpage + \global\@topnum\z@ % Prevents figures from going at top of page. + \@maketitle + \fi + \thispagestyle{empty}\@thanks +% + \def\\{\unskip\ \ignorespaces}\def\inst##1{\unskip{}}% + \def\thanks##1{\unskip{}}\def\fnmsep{\unskip}% + \instindent=\hsize + \advance\instindent by-\headlineindent + \if!\the\toctitle!\addcontentsline{toc}{title}{\@title}\else + \addcontentsline{toc}{title}{\the\toctitle}\fi + \if@runhead + \if!\the\titlerunning!\else + \edef\@title{\the\titlerunning}% + \fi + \global\setbox\titrun=\hbox{\small\rm\unboldmath\ignorespaces\@title}% + \ifdim\wd\titrun>\instindent + \typeout{Title too long for running head. Please supply}% + \typeout{a shorter form with \string\titlerunning\space prior to + \string\maketitle}% + \global\setbox\titrun=\hbox{\small\rm + Title Suppressed Due to Excessive Length}% + \fi + \xdef\@title{\copy\titrun}% + \fi +% + \if!\the\tocauthor!\relax + {\def\and{\noexpand\protect\noexpand\and}% + \protected@xdef\toc@uthor{\@author}}% + \else + \def\\{\noexpand\protect\noexpand\newline}% + \protected@xdef\scratch{\the\tocauthor}% + \protected@xdef\toc@uthor{\scratch}% + \fi + \addtocontents{toc}{\noexpand\protect\noexpand\authcount{\the\c@auco}}% + \addcontentsline{toc}{author}{\toc@uthor}% + \if@runhead + \if!\the\authorrunning! + \value{@inst}=\value{@auth}% + \setcounter{@auth}{1}% + \else + \edef\@author{\the\authorrunning}% + \fi + \global\setbox\authrun=\hbox{\small\unboldmath\@author\unskip}% + \ifdim\wd\authrun>\instindent + \typeout{Names of authors too long for running head. 
Please supply}% + \typeout{a shorter form with \string\authorrunning\space prior to + \string\maketitle}% + \global\setbox\authrun=\hbox{\small\rm + Authors Suppressed Due to Excessive Length}% + \fi + \xdef\@author{\copy\authrun}% + \markboth{\@author}{\@title}% + \fi + \endgroup + \setcounter{footnote}{\fnnstart}% + \clearheadinfo} +% +\def\@maketitle{\newpage + \markboth{}{}% + \def\lastand{\ifnum\value{@inst}=2\relax + \unskip{} \andname\ + \else + \unskip \lastandname\ + \fi}% + \def\and{\stepcounter{@auth}\relax + \ifnum\value{@auth}=\value{@inst}% + \lastand + \else + \unskip, + \fi}% + \begin{center}% + \let\newline\\ + {\Large \bfseries\boldmath + \pretolerance=10000 + \@title \par}\vskip .8cm +\if!\@subtitle!\else {\large \bfseries\boldmath + \vskip -.65cm + \pretolerance=10000 + \@subtitle \par}\vskip .8cm\fi + \setbox0=\vbox{\setcounter{@auth}{1}\def\and{\stepcounter{@auth}}% + \def\thanks##1{}\@author}% + \global\value{@inst}=\value{@auth}% + \global\value{auco}=\value{@auth}% + \setcounter{@auth}{1}% +{\lineskip .5em +\noindent\ignorespaces +\@author\vskip.35cm} + {\small\institutename} + \end{center}% + } + +% definition of the "\spnewtheorem" command. +% +% Usage: +% +% \spnewtheorem{env_nam}{caption}[within]{cap_font}{body_font} +% or \spnewtheorem{env_nam}[numbered_like]{caption}{cap_font}{body_font} +% or \spnewtheorem*{env_nam}{caption}{cap_font}{body_font} +% +% New is "cap_font" and "body_font". It stands for +% fontdefinition of the caption and the text itself. +% +% "\spnewtheorem*" gives a theorem without number. +% +% A defined spnewthoerem environment is used as described +% by Lamport. 
+% +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%% + +\def\@thmcountersep{} +\def\@thmcounterend{.} + +\def\spnewtheorem{\@ifstar{\@sthm}{\@Sthm}} + +% definition of \spnewtheorem with number + +\def\@spnthm#1#2{% + \@ifnextchar[{\@spxnthm{#1}{#2}}{\@spynthm{#1}{#2}}} +\def\@Sthm#1{\@ifnextchar[{\@spothm{#1}}{\@spnthm{#1}}} + +\def\@spxnthm#1#2[#3]#4#5{\expandafter\@ifdefinable\csname #1\endcsname + {\@definecounter{#1}\@addtoreset{#1}{#3}% + \expandafter\xdef\csname the#1\endcsname{\expandafter\noexpand + \csname the#3\endcsname \noexpand\@thmcountersep \@thmcounter{#1}}% + \expandafter\xdef\csname #1name\endcsname{#2}% + \global\@namedef{#1}{\@spthm{#1}{\csname #1name\endcsname}{#4}{#5}}% + \global\@namedef{end#1}{\@endtheorem}}} + +\def\@spynthm#1#2#3#4{\expandafter\@ifdefinable\csname #1\endcsname + {\@definecounter{#1}% + \expandafter\xdef\csname the#1\endcsname{\@thmcounter{#1}}% + \expandafter\xdef\csname #1name\endcsname{#2}% + \global\@namedef{#1}{\@spthm{#1}{\csname #1name\endcsname}{#3}{#4}}% + \global\@namedef{end#1}{\@endtheorem}}} + +\def\@spothm#1[#2]#3#4#5{% + \@ifundefined{c@#2}{\@latexerr{No theorem environment `#2' defined}\@eha}% + {\expandafter\@ifdefinable\csname #1\endcsname + {\newaliascnt{#1}{#2}% + \expandafter\xdef\csname #1name\endcsname{#3}% + \global\@namedef{#1}{\@spthm{#1}{\csname #1name\endcsname}{#4}{#5}}% + \global\@namedef{end#1}{\@endtheorem}}}} + +\def\@spthm#1#2#3#4{\topsep 7\p@ \@plus2\p@ \@minus4\p@ +\refstepcounter{#1}% +\@ifnextchar[{\@spythm{#1}{#2}{#3}{#4}}{\@spxthm{#1}{#2}{#3}{#4}}} + +\def\@spxthm#1#2#3#4{\@spbegintheorem{#2}{\csname the#1\endcsname}{#3}{#4}% + \ignorespaces} + +\def\@spythm#1#2#3#4[#5]{\@spopargbegintheorem{#2}{\csname + the#1\endcsname}{#5}{#3}{#4}\ignorespaces} + +\def\@spbegintheorem#1#2#3#4{\trivlist + \item[\hskip\labelsep{#3#1\ #2\@thmcounterend}]#4} + +\def\@spopargbegintheorem#1#2#3#4#5{\trivlist + \item[\hskip\labelsep{#4#1\ #2}]{#4(#3)\@thmcounterend\ }#5} + +% definition of 
\spnewtheorem* without number + +\def\@sthm#1#2{\@Ynthm{#1}{#2}} + +\def\@Ynthm#1#2#3#4{\expandafter\@ifdefinable\csname #1\endcsname + {\global\@namedef{#1}{\@Thm{\csname #1name\endcsname}{#3}{#4}}% + \expandafter\xdef\csname #1name\endcsname{#2}% + \global\@namedef{end#1}{\@endtheorem}}} + +\def\@Thm#1#2#3{\topsep 7\p@ \@plus2\p@ \@minus4\p@ +\@ifnextchar[{\@Ythm{#1}{#2}{#3}}{\@Xthm{#1}{#2}{#3}}} + +\def\@Xthm#1#2#3{\@Begintheorem{#1}{#2}{#3}\ignorespaces} + +\def\@Ythm#1#2#3[#4]{\@Opargbegintheorem{#1} + {#4}{#2}{#3}\ignorespaces} + +\def\@Begintheorem#1#2#3{#3\trivlist + \item[\hskip\labelsep{#2#1\@thmcounterend}]} + +\def\@Opargbegintheorem#1#2#3#4{#4\trivlist + \item[\hskip\labelsep{#3#1}]{#3(#2)\@thmcounterend\ }} + +\if@envcntsect + \def\@thmcountersep{.} + \spnewtheorem{theorem}{Theorem}[section]{\bfseries}{\itshape} +\else + \spnewtheorem{theorem}{Theorem}{\bfseries}{\itshape} + \if@envcntreset + \@addtoreset{theorem}{section} + \else + \@addtoreset{theorem}{chapter} + \fi +\fi + +%definition of divers theorem environments +\spnewtheorem*{claim}{Claim}{\itshape}{\rmfamily} +\spnewtheorem*{proof}{Proof}{\itshape}{\rmfamily} +\if@envcntsame % alle Umgebungen wie Theorem. 
+ \def\spn@wtheorem#1#2#3#4{\@spothm{#1}[theorem]{#2}{#3}{#4}} +\else % alle Umgebungen mit eigenem Zaehler + \if@envcntsect % mit section numeriert + \def\spn@wtheorem#1#2#3#4{\@spxnthm{#1}{#2}[section]{#3}{#4}} + \else % nicht mit section numeriert + \if@envcntreset + \def\spn@wtheorem#1#2#3#4{\@spynthm{#1}{#2}{#3}{#4} + \@addtoreset{#1}{section}} + \else + \def\spn@wtheorem#1#2#3#4{\@spynthm{#1}{#2}{#3}{#4} + \@addtoreset{#1}{chapter}}% + \fi + \fi +\fi +\spn@wtheorem{case}{Case}{\itshape}{\rmfamily} +\spn@wtheorem{conjecture}{Conjecture}{\itshape}{\rmfamily} +\spn@wtheorem{corollary}{Corollary}{\bfseries}{\itshape} +\spn@wtheorem{definition}{Definition}{\bfseries}{\itshape} +\spn@wtheorem{example}{Example}{\itshape}{\rmfamily} +\spn@wtheorem{exercise}{Exercise}{\itshape}{\rmfamily} +\spn@wtheorem{lemma}{Lemma}{\bfseries}{\itshape} +\spn@wtheorem{note}{Note}{\itshape}{\rmfamily} +\spn@wtheorem{problem}{Problem}{\itshape}{\rmfamily} +\spn@wtheorem{property}{Property}{\itshape}{\rmfamily} +\spn@wtheorem{proposition}{Proposition}{\bfseries}{\itshape} +\spn@wtheorem{question}{Question}{\itshape}{\rmfamily} +\spn@wtheorem{solution}{Solution}{\itshape}{\rmfamily} +\spn@wtheorem{remark}{Remark}{\itshape}{\rmfamily} + +\def\@takefromreset#1#2{% + \def\@tempa{#1}% + \let\@tempd\@elt + \def\@elt##1{% + \def\@tempb{##1}% + \ifx\@tempa\@tempb\else + \@addtoreset{##1}{#2}% + \fi}% + \expandafter\expandafter\let\expandafter\@tempc\csname cl@#2\endcsname + \expandafter\def\csname cl@#2\endcsname{}% + \@tempc + \let\@elt\@tempd} + +\def\theopargself{\def\@spopargbegintheorem##1##2##3##4##5{\trivlist + \item[\hskip\labelsep{##4##1\ ##2}]{##4##3\@thmcounterend\ }##5} + \def\@Opargbegintheorem##1##2##3##4{##4\trivlist + \item[\hskip\labelsep{##3##1}]{##3##2\@thmcounterend\ }} + } + +\renewenvironment{abstract}{% + \list{}{\advance\topsep by0.35cm\relax\small + \leftmargin=1cm + \labelwidth=\z@ + \listparindent=\z@ + \itemindent\listparindent + 
\rightmargin\leftmargin}\item[\hskip\labelsep + \bfseries\abstractname]} + {\endlist} + +\newdimen\headlineindent % dimension for space between +\headlineindent=1.166cm % number and text of headings. + +\def\ps@headings{\let\@mkboth\@gobbletwo + \let\@oddfoot\@empty\let\@evenfoot\@empty + \def\@evenhead{\normalfont\small\rlap{\thepage}\hspace{\headlineindent}% + \leftmark\hfil} + \def\@oddhead{\normalfont\small\hfil\rightmark\hspace{\headlineindent}% + \llap{\thepage}} + \def\chaptermark##1{}% + \def\sectionmark##1{}% + \def\subsectionmark##1{}} + +\def\ps@titlepage{\let\@mkboth\@gobbletwo + \let\@oddfoot\@empty\let\@evenfoot\@empty + \def\@evenhead{\normalfont\small\rlap{\thepage}\hspace{\headlineindent}% + \hfil} + \def\@oddhead{\normalfont\small\hfil\hspace{\headlineindent}% + \llap{\thepage}} + \def\chaptermark##1{}% + \def\sectionmark##1{}% + \def\subsectionmark##1{}} + +\if@runhead\ps@headings\else +\ps@empty\fi + +\setlength\arraycolsep{1.4\p@} +\setlength\tabcolsep{1.4\p@} + +\endinput +%end of file llncs.cls diff --git a/docs/spec/serialization.tex b/docs/spec/serialization.tex new file mode 100644 index 0000000000..8e488a1267 --- /dev/null +++ b/docs/spec/serialization.tex @@ -0,0 +1,433 @@ +\section{Serialization} +\label{sec:serialization} + +This section defines a binary format, which is used to store \langname +contracts in persistent stores, to transfer them over wire and to enable +cross-platform interoperation. + +Terms of the language described in Section~\ref{sec:language} can be +serialized to array of bytes to be stored in Ergo blockchain (e.g. as +Box.propositionBytes). + +When the guarding script of an input box of a transaction is validated the +\lst{propositionBytes} array is deserialized to an \langname IR (called \ASDag), which can +be evaluated as it is specified in Section~\ref{sec:evaluation}. + +Here we specify the serialization procedure in general. 
The serialization
+format of \langname terms and types is specified in
+Appendix~\ref{sec:appendix:ergotree_serialization} and
+~\ref{sec:appendix:type_serialization} correspondingly.
+
+Table~\ref{table:ser:limits} shows size limits which are checked during
+contract deserialization.
+
+\begin{table}[h]
+ \footnotesize
+\(\begin{tabularx}{\textwidth}{| l | p{2cm} | X |}
+ \hline
+ \bf{Name} & \bf{Value} & \bf{Description} \\
+ \hline
+ $\MaxVlqSize$ & $10$ & Maximum size of VLQ encoded byte sequence (See VLQ formats) \\
+ \hline
+ $\MaxTypeSize$ & $100$ & Maximum size of serialized type term (see Type format) \\
+ \hline
+ $\MaxDataSize$ & $10Kb$ & Maximum size of serialized data instance (see Data format) \\
+ \hline
+ $\MaxConstSize$ & $=\MaxTypeSize + \MaxDataSize$ & Maximum size of serialized constant (see Const format) \\
+ \hline
+ $\MaxExprSize$ & $1Kb$ & Maximum size of serialized \langname term (see Expr format) \\
+ \hline
+ $\MaxErgoTreeSize$ & $24Kb$ & Maximum size of serialized \langname contract (see ErgoTree format) \\
+ \hline
+\end{tabularx}\)
+\caption{Serialization limits}
+\label{table:ser:limits}
+\end{table}
+
+All serialization formats which are used and defined throughout this
+section are listed in Table~\ref{table:ser:formats}. 
+
+\begin{table}[h]
+ \footnotesize
+\(\begin{tabularx}{\textwidth}{| l | l | X |}
+ \hline
+ \bf{Format} & \bf{\#bytes} & \bf{Description} \\
+ \hline
+ \lst{Byte} & $1$ & 8-bit signed two's-complement integer \\
+ \hline
+ \lst{Short} & $2$ & 16-bit signed two's-complement integer (big-endian) \\
+ \hline
+ \lst{Int} & $4$ & 32-bit signed two's-complement integer (big-endian) \\
+ \hline
+ \lst{Long} & $8$ & 64-bit signed two's-complement integer (big-endian) \\
+ \hline
+ \lst{UByte} & $1$ & 8-bit unsigned integer \\
+ \hline
+ \lst{UShort} & $2$ & 16-bit unsigned integer (big-endian) \\
+ \hline
+ \lst{UInt} & $4$ & 32-bit unsigned integer (big-endian) \\
+ \hline
+ \lst{ULong} & $8$ & 64-bit unsigned integer (big-endian) \\
+
+ \hline
+ \lst{VLQ(UShort)} & $[1..3]$ & Encoded unsigned \lst{Short} value using VLQ. See~\cite{VLQWikipedia,VLQRosetta} and~\ref{sec:vlq-encoding} \\
+ \hline
+ \lst{VLQ(UInt)} & $[1..5]$ & Encoded unsigned 32-bit integer using VLQ. \\
+ \hline
+ \lst{VLQ(ULong)} & $[1..\MaxVlqSize]$ & Encoded unsigned 64-bit integer using VLQ. \\
+
+ \hline
+ \lst{Bits} & $[1..\MaxBits]$ & A collection of bits packed in a sequence of bytes. \\
+ \hline
+ \lst{Bytes} & $[1..\MaxBytes]$ & A sequence (block) of bytes.
+ The size of the block should either be stored elsewhere or be well known. \\
+
+ \hline
+ \lst{Type} & $[1..\MaxTypeSize]$ & Serialized type terms of \langname. See~\ref{sec:ser:type} \\
+ \hline
+ \lst{Data} & $[1..\MaxDataSize]$ & Serialized \langname values. See~\ref{sec:ser:data} \\
+ \hline
+ \lst{GroupElement} & $33$ & Serialized elements of elliptic curve group. See~\ref{sec:ser:data:groupelement} \\
+ \hline
+ \lst{SigmaProp} & $[1..\MaxSigmaProp]$ & Serialized sigma propositions. See~\ref{sec:ser:data:sigmaprop} \\
+ \hline
+ \lst{Box} & $[1..\MaxBox]$ & Serialized box data. See~\ref{sec:ser:data:box} \\
+ \hline
+ \lst{AvlTree} & $44$ & Serialized dynamic dictionary digest. 
See~\ref{sec:ser:data:avltree} \\
+ \hline
+ \lst{Const} & $[1..\MaxConstSize]$ & Serialized \langname constants (values with types). See~\ref{sec:ser:const} \\
+ \hline
+ \lst{Expr} & $[1..\MaxExprSize]$ & Serialized expression terms of \langname. See~\ref{sec:ser:expr} \\
+ \hline
+ \lst{ErgoTree} & $[1..\MaxErgoTreeSize]$ & Serialized instances of \langname contracts. See~\ref{sec:ser:ergotree} \\
+ \hline
+\end{tabularx}\)
+\caption{Serialization formats}
+\label{table:ser:formats}
+\end{table}
+
+Table~\ref{table:ser:formats} introduces a name for each format and also shows
+the number of bytes each format may occupy in the byte stream. We use $[1..n]$
+notation when serialization may produce from 1 to n bytes depending on the
+actual data instance.
+
+Serialization format of \ASDag is optimized for compact storage. In many
+cases the serialization procedure is data dependent and thus has branching
+logic. To express this complex serialization logic we use
+\emph{pseudo-language operators} like \lst{for, match, if, optional} which
+allow us to specify a \emph{structure} on \emph{simple serialization slots}.
+Each \emph{slot} specifies a fragment of the serialized stream of bytes, whereas
+\emph{operators} specify how the slots are combined together to form the
+stream of bytes.
+
+\input{type_serialization.tex}
+
+\subsection{Data Serialization}
+\label{sec:ser:data}
+
+In \langname all runtime data values have an associated type also available
+at runtime (this is called \emph{type reification}\cite{Reification}).
+However, the serialization format separates data values from their type
+descriptors. This saves space when, for example, a collection of items is
+serialized.
+
+The contents of a typed data structure can be fully described by a type tree. 
+For example having a typed data object \lst{d: (Int, Coll[Byte], Boolean)} we can +tell that \lst{d} has 3 items, the first item contain 32-bit integer, the second +- collection of bytes, and the third - logical true/false value. + +To serialize/deserialize typed data we need to know its type descriptor (type +tree). Serialization procedure is recursive over type tree and the +corresponding subcomponents of an object. For primitive types (the leaves of +the type tree) the format is fixed. The data values of \langname types are +serialized using predefined function shown in Figure~\ref{fig:ser:data}. + +\begin{figure}[h] +\footnotesize +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + \hline + \multicolumn{4}{l}{\lst{def serializeData(}$t, v$\lst{)}} \\ + \multicolumn{4}{l}{~~\lst{match} $(t, v)$ } \\ + + \multicolumn{4}{l}{~~~~\lst{with} $(Unit, v \in \Denot{Unit})$~~~// nothing serialized } \\ + \multicolumn{4}{l}{~~~~\lst{with} $(Boolean, v \in \Denot{Boolean})$} \\ + \hline + $~~~~~~v$ & \lst{Byte} & 1 & 0 or 1 in a single byte \\ + + \hline + \multicolumn{4}{l}{~~~~\lst{with} $(Byte, v \in \Denot{Byte})$} \\ + \hline + $~~~~~~v$ & \lst{Byte} & 1 & in a single byte \\ + + \hline + \multicolumn{4}{l}{~~~~\lst{with} $(N, v \in \Denot{Short}), N \in {Short, Int, Long}$} \\ + \hline + $~~~~~~v$ & \lst{VLQ(ZigZag($$N$$))} & [1..3] & + 16,32,64-bit signed integer encoded using \hyperref[sec:zigzag-encoding]{ZigZag} + and then using \hyperref[sec:vlq-encoding]{VLQ} \\ + + \hline + \multicolumn{4}{l}{~~~~\lst{with} $(BigInt, v \in \Denot{BigInt})$} \\ + \multicolumn{4}{l}{~~~~~~$bytes = v$\lst{.toByteArray} } \\ + \hline + $~~~~~~numBytes$ & \lst{VLQ(UInt)} & & number of bytes in $bytes$ array \\ + \hline + $~~~~~~bytes$ & \lst{Bytes} & & serialized $bytes$ array \\ + + \hline + \multicolumn{4}{l}{~~~~\lst{with} $(GroupElement, v \in \Denot{GroupElement})$} \\ + \hline + ~~~~~~$v$ & 
\lst{GroupElement} & & serialization of GroupElement data. See~\ref{sec:ser:data:groupelement} \\ + + \hline + \multicolumn{4}{l}{~~~~\lst{with} $(SigmaProp, v \in \Denot{SigmaProp})$} \\ + \hline + ~~~~~~$v$ & \lst{SigmaProp} & & serialization of SigmaProp data. See~\ref{sec:ser:data:sigmaprop} \\ + + \hline + \multicolumn{4}{l}{~~~~\lst{with} $(Box, v \in \Denot{Box})$} \\ + \hline + ~~~~~~$v$ & \lst{Box} & & serialization of Box data. See~\ref{sec:ser:data:box} \\ + + \hline + \multicolumn{4}{l}{~~~~\lst{with} $(AvlTree, v \in \Denot{AvlTree})$} \\ + \hline + ~~~~~~$v$ & \lst{AvlTree} & & serialization of AvlTree data. See~\ref{sec:ser:data:avltree} \\ + + \hline + \multicolumn{4}{l}{~~~~\lst{with} $(Coll[T], v \in \Denot{Coll[T]})$} \\ + \hline + $~~~~~~len$ & \lst{VLQ(UShort)} & [1..3] & length of the collection \\ + \hline + \multicolumn{4}{l}{~~~~~~\lst{match} $(T, v)$ } \\ + + \multicolumn{4}{l}{~~~~~~~~\lst{with} $(Boolean, v \in \Denot{Coll[Boolean]})$} \\ + \hline + $~~~~~~~~~~items$ & \lst{Bits} & [1..1024] & boolean values packed in bits \\ + \hline + + \multicolumn{4}{l}{~~~~~~~~\lst{with} $(Byte, v \in \Denot{Coll[Byte]})$} \\ + \hline + $~~~~~~~~~~items$ & \lst{Bytes} & $[1..len]$ & items of the collection \\ + \hline + \multicolumn{4}{l}{~~~~~~~~\lst{otherwise} } \\ + \multicolumn{4}{l}{~~~~~~~~~~\lst{for}~$i=1$~\lst{to}~$len$} \\ + \multicolumn{4}{l}{~~~~~~~~~~~~\lst{serializeData(}$T, v_i$\lst{)}} \\ + \multicolumn{4}{l}{~~~~~~~~~~\lst{end for}} \\ + \multicolumn{4}{l}{~~~~~~\lst{end match}} \\ + + \multicolumn{4}{l}{~~\lst{end match}} \\ + \multicolumn{4}{l}{\lst{end serializeData}} \\ + \hline + \hline +\end{tabularx}\) +\caption{Data serialization format} +\label{fig:ser:data} +\end{figure} + +\subsubsection{GroupElement serialization} +\label{sec:ser:data:groupelement} + +\begin{figure}[h] +\footnotesize +\(\begin{tabularx}{\textwidth}{| l | l | l | X |} + \hline + \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\ + \hline + 
\multicolumn{4}{l}{\lst{def serialize(}$ge$\lst{)}} \\
+ \multicolumn{4}{l}{~~\lst{if} $ge.isIdentity$ \lst{then}} \\
+ \hline
+ ~~~~$ $ & \lst{Const} & $ $ & \\
+ \hline
+ \multicolumn{4}{l}{~~\lst{else}} \\
+ \hline
+ ~~~~$ $ & & $ $ & \\
+ \hline
+ \multicolumn{4}{l}{~~\lst{end if}} \\
+ \multicolumn{4}{l}{\lst{end def}} \\
+\end{tabularx}\)
+\caption{GroupElement serialization format}
+\label{fig:ser:data:groupelement}
+\end{figure}
+
+\subsubsection{SigmaProp serialization}
+\label{sec:ser:data:sigmaprop}
+\begin{figure}[h]
+\footnotesize
+\(\begin{tabularx}{\textwidth}{| l | l | l | X |}
+ \hline
+ \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\
+ \hline
+\end{tabularx}\)
+\caption{SigmaProp serialization format}
+\label{fig:ser:data:sigmaprop}
+\end{figure}
+
+\subsubsection{Box serialization}
+\label{sec:ser:data:box}
+\begin{figure}[h]
+\footnotesize
+\(\begin{tabularx}{\textwidth}{| l | l | l | X |}
+ \hline
+ \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\
+ \hline
+\end{tabularx}\)
+\caption{Box serialization format}
+\label{fig:ser:data:box}
+\end{figure}
+
+\subsubsection{AvlTree serialization}
+\label{sec:ser:data:avltree}
+\begin{figure}[h]
+\footnotesize
+\(\begin{tabularx}{\textwidth}{| l | l | l | X |}
+ \hline
+ \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\
+ \hline
+\end{tabularx}\)
+\caption{AvlTree serialization format}
+\label{fig:ser:data:avltree}
+\end{figure}
+
+\subsection{Constant Serialization}
+\label{sec:ser:const}
+
+\lst{Constant} format is simple and self-sufficient to represent any data value in
+\langname. Every data block of \lst{Constant} format contains both type and
+data, such that it can be stored or transferred over the wire and then later
+unambiguously interpreted. 
The format is shown in Figure~\ref{fig:ser:const}.
+
+\begin{figure}[h]
+\footnotesize
+\(\begin{tabularx}{\textwidth}{| l | l | l | X |}
+ \hline
+ \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\
+ \hline
+ $type$ & \lst{Type} & $[1..\MaxTypeSize]$ & type of the data instance (see~\ref{sec:ser:type}) \\
+ \hline
+ $value$ & \lst{Data} & $[1..\MaxDataSize]$ & serialized data instance (see~\ref{sec:ser:data}) \\
+ \hline
+\end{tabularx}\)
+\caption{Constant serialization format}
+\label{fig:ser:const}
+\end{figure}
+
+\subsection{Expression Serialization}
+\label{sec:ser:expr}
+
+Expressions of \langname are serialized as a tree data structure using the
+recursive procedure described here.
+
+\begin{figure}[h]
+\footnotesize
+\(\begin{tabularx}{\textwidth}{| l | l | l | X |}
+ \hline
+ \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\
+ \hline
+ \multicolumn{4}{l}{\lst{def serializeExpr(}$e$\lst{)}} \\
+ \hline
+ ~~$e.opCode$ & \lst{Byte} & $1$ & opcode of ErgoTree node,
+ used for selection of an appropriate node serializer from Appendix~\ref{sec:appendix:ergotree_serialization} \\
+ \hline
+ \multicolumn{4}{l}{~~\lst{if} $opCode <= LastConstantCode$ \lst{then}} \\
+ \hline
+ ~~~~$c$ & \lst{Const} & $[1..\MaxConstSize]$ & Constant serialization slot \\
+ \hline
+ \multicolumn{4}{l}{~~\lst{else}} \\
+ \hline
+ ~~~~$body$ & Op & $[1..\MaxExprSize]$ & serialization of operation arguments
+ depending on $e.opCode$ as defined in Appendix~\ref{sec:appendix:ergotree_serialization} \\
+ \hline
+ \multicolumn{4}{l}{~~\lst{end if}} \\
+ \multicolumn{4}{l}{\lst{end serializeExpr}} \\
+ \hline
+\end{tabularx}\)
+\caption{Expression serialization format}
+\label{fig:ser:expr}
+\end{figure}
+
+
+\subsection{\ASDag~serialization}
+\label{sec:ser:ergotree}
+
+The root of a serializable \langname term is a data structure called \ASDag,
+whose serialization format is shown in Figure~\ref{fig:ser:ergotree}.
+
+\begin{figure}[h]
+\footnotesize
+\(\begin{tabularx}{\textwidth}{| 
l | l | l | X |}
+  \hline
+  \bf{Slot} & \bf{Format} & \bf{\#bytes} & \bf{Description} \\
+  \hline
+  $ header $ & \lst{VLQ(UInt)} & [1, *] & the first bytes of the serialized byte array, which
+  determine the interpretation of the rest of the array \\
+  \hline
+  $numConstants$ & \lst{VLQ(UInt)} & [1, *] & size of the $constants$ array \\
+  \hline
+  \multicolumn{4}{l}{\lst{for}~$i=1$~\lst{to}~$numConstants$} \\
+  \hline
+  ~~ $ const_i $ & \lst{Const} & [1, *] & constant in i-th position \\
+  \hline
+  \multicolumn{4}{l}{\lst{end for}} \\
+  \hline
+  $ root $ & \lst{Expr} & [1, *] & If constantSegregationFlag is true, then the tree contains ConstantPlaceholder nodes instead of some Constant nodes.
+  Otherwise it may not contain placeholders.
+  It is possible to have both constants and placeholders in the tree, but for every placeholder
+  there should be a constant in the $constants$ array. \\
+  \hline
+\end{tabularx}\)
+\caption{\ASDag serialization format}
+\label{fig:ser:ergotree}
+\end{figure}
+
+
+Serialized instances of \ASDag are self-sufficient and can be stored and passed around.
+The \ASDag format defines the top-level serialization format of \langname scripts.
+The interpretation of the byte array depends on the first $header$ bytes, which use VLQ encoding of up to 30 bits.
+Currently we define the meaning of only the first byte, which may be extended in future versions. 
+
+\begin{figure}[h]
+  \footnotesize
+\(\begin{tabularx}{\textwidth}{| l | l | X |}
+  \hline
+  \bf{Bits} & \bf{Default Value} & \bf{Description} \\
+  \hline
+  Bits 0-2 & 0 & language version (current version == 0) \\
+  \hline
+  Bit 3 & 0 & reserved (should be 0) \\
+  \hline
+  Bit 4 & 0 & == 1 if constant segregation is used for this ErgoTree (see Section~\ref{sec:ser:constant_segregation}) \\
+  \hline
+  Bit 5 & 0 & == 1 - reserved for context dependent costing (should be = 0) \\
+  \hline
+  Bit 6 & 0 & reserved for GZIP compression (should be 0) \\
+  \hline
+  Bit 7 & 0 & == 1 if the header contains more than 1 byte (should be 0) \\
+  \hline
+\end{tabularx}\)
+\caption{\ASDag $header$ bits}
+\label{fig:ergotree:header}
+\end{figure}
+
+Currently we don't specify the interpretation for the second and other bytes of
+the header. We reserve the possibility to extend the header by using Bit 7 == 1
+and chain additional bytes as in VLQ. Once the new bytes are required, a new
+version of the language should be created and implemented via
+soft-forkability. That new language will give an interpretation for the new
+bytes.
+
+The default behavior of ErgoTreeSerializer is to preserve the original structure
+of \ASDag and check consistency. In case of any inconsistency the
+serializer throws an exception.
+
+If the constant segregation bit is set to 1 then the $constants$ collection contains
+the constants for which there may be \lst{ConstantPlaceholder} nodes in the
+tree. If however the constant segregation bit is 0, then the \lst{constants}
+collection should be empty and any placeholder in the tree will lead to an
+exception. 
+ +\subsection{Constant Segregation} +\label{sec:ser:constant_segregation} + diff --git a/docs/spec/spec.bib b/docs/spec/spec.bib new file mode 100644 index 0000000000..e35d60bf51 --- /dev/null +++ b/docs/spec/spec.bib @@ -0,0 +1,1277 @@ +@inproceedings{NW06, + author = {Turlough Neary and + Damien Woods}, + title = {P-completeness of Cellular Automaton Rule 110}, + booktitle = {Automata, Languages and Programming, 33rd International Colloquium, + {ICALP} 2006, Venice, Italy, July 10-14, 2006, Proceedings, Part {I}}, + pages = {132--143}, + year = {2006}, + crossref = {DBLP:conf/icalp/2006-1}, + url = {https://doi.org/10.1007/11786986\_13}, + doi = {10.1007/11786986\_13}, + timestamp = {Fri, 02 Jun 2017 13:01:07 +0200}, + biburl = {https://dblp.org/rec/bib/conf/icalp/NearyW06}, + bibsource = {dblp computer science bibliography, https://dblp.org} +} +@proceedings{DBLP:conf/icalp/2006-1, + editor = {Michele Bugliesi and + Bart Preneel and + Vladimiro Sassone and + Ingo Wegener}, + title = {Automata, Languages and Programming, 33rd International Colloquium, + {ICALP} 2006, Venice, Italy, July 10-14, 2006, Proceedings, Part {I}}, + series = {Lecture Notes in Computer Science}, + volume = {4051}, + publisher = {Springer}, + year = {2006}, + url = {https://doi.org/10.1007/11786986}, + doi = {10.1007/11786986}, + isbn = {3-540-35904-4}, + timestamp = {Fri, 02 Jun 2017 13:01:07 +0200}, + biburl = {https://dblp.org/rec/bib/conf/icalp/2006-1}, + bibsource = {dblp computer science bibliography, https://dblp.org} +} +@book{wolfram1986theory, + title={Theory and applications of cellular automata: including selected + papers 1983-1986}, + author={Wolfram, Stephen}, + year={1986}, + publisher={World scientific} +} +@article{cook2004universality, + title={Universality in elementary cellular automata}, + author={Cook, Matthew}, + journal={Complex systems}, + volume={15}, + number={1}, + pages={1--40}, + year={2004}, + publisher={[Champaign, IL, USA: Complex Systems + Publications, 
Inc., c1987-}
+}
+@misc{Scilla,
+title = {Scilla: a {S}mart {C}ontract {I}ntermediate-{L}evel {LA}nguage},
+author = {Ilya Sergey and Amrit Kumar and Aquinas Hobor},
+year = {2018},
+Eprint = {arXiv:1801.00687},
+note = {\url{https://arxiv.org/abs/1801.00687}},
+}
+@misc{CKM18,
+Author = {Alexander Chepurnoy and Vasily Kharin and Dmitry Meshkov},
+Title = {Self-Reproducing Coins as Universal Turing Machine},
+Year = {2018},
+Eprint = {arXiv:1806.10116},
+note = {\url{https://arxiv.org/abs/1806.10116}},
+}
+@misc{Nol13,
+author={Tier Nolan},
+title = {Alt chains and atomic transfers},
+url={https://bitcointalk.org/index.php?topic=193281.msg2224949#msg2224949},
+year = 2013
+}
+@phdthesis{Cra96,
+author = {Ronald Cramer},
+title = {Modular Design of Secure, yet Practical Cryptographic Protocols},
+school = {University of Amsterdam},
+year = 1996
+}
+
+@misc{Dam10,
+author = {Ivan Damg{\aa}rd},
+title = {{On $\Sigma$-Protocols}},
+year = 2010,
+note = {\url{http://www.cs.au.dk/~ivan/Sigma.pdf}}
+}
+
+
+@book{HL10,
+author= {Carmit Hazay and Yehuda Lindell},
+title = {Efficient Secure Two-Party Protocols: Techniques and Constructions},
+year = 2010,
+publisher = {Springer}
+}
+
+
+@inproceedings{CL06,
+  author = {Melissa Chase and
+  Anna Lysyanskaya},
+  title = {On Signatures of Knowledge},
+  booktitle = {Advances in Cryptology - {CRYPTO} 2006, 26th Annual International
+  Cryptology Conference, Santa Barbara, California, USA, August 20-24,
+  2006, Proceedings},
+  pages = {78--96},
+  year = {2006},
+  crossref = {DBLP:conf/crypto/2006},
+  url = {https://doi.org/10.1007/11818175\_5},
+  doi = {10.1007/11818175\_5},
+  timestamp = {Fri, 02 Jun 2017 13:01:07 +0200},
+  biburl = {https://dblp.org/rec/bib/conf/crypto/ChaseL06},
+  bibsource = {dblp computer science bibliography, https://dblp.org}
+}
+@proceedings{DBLP:conf/crypto/2006,
+  editor = {Cynthia Dwork},
+  title = {Advances in Cryptology - {CRYPTO} 2006, 26th Annual International
+  Cryptology Conference, Santa Barbara, 
California, USA, August 20-24, + 2006, Proceedings}, + series = {Lecture Notes in Computer Science}, + volume = {4117}, + publisher = {Springer}, + year = {2006}, + url = {https://doi.org/10.1007/11818175}, + doi = {10.1007/11818175}, + isbn = {3-540-37432-9}, + timestamp = {Fri, 02 Jun 2017 13:01:07 +0200}, + biburl = {https://dblp.org/rec/bib/conf/crypto/2006}, + bibsource = {dblp computer science bibliography, https://dblp.org} +} + +@Article{Sch91, + author = "Claus-Peter Schnorr", + title = "Efficient Signature Generation by Smart Cards", + pages = "161--174", + volume = 4, + month = "", + publisher = "Springer", + year = 1991, + journal = "Journal of Cryptology", + number = 3, +} +@book{rfc8032, +author={S. Josefsson and I. Liusvaara}, +title={{RFC} 8032: Edwards-Curve Digital Signature Algorithm ({EdDSA})}, +publisher={IETF}, +year=2017, +note={\url{https://tools.ietf.org/html/rfc8032}}, +} +@inproceedings{CDS94, + author = {Ronald Cramer and + Ivan Damg{\aa}rd and + Berry Schoenmakers}, + title = {Proofs of Partial Knowledge and Simplified Design of Witness Hiding + Protocols}, + booktitle = {Advances in Cryptology - {CRYPTO} '94, 14th Annual International Cryptology + Conference, Santa Barbara, California, USA, August 21-25, 1994, Proceedings}, + pages = {174--187}, + year = {1994}, + crossref = {DBLP:conf/crypto/1994}, + url = {https://doi.org/10.1007/3-540-48658-5_19}, + doi = {10.1007/3-540-48658-5_19}, + timestamp = {Sat, 20 May 2017 15:32:52 +0200}, + biburl = {https://dblp.org/rec/bib/conf/crypto/CramerDS94}, + bibsource = {dblp computer science bibliography, https://dblp.org}, +note = {\url{http://www.win.tue.nl/~berry/papers/crypto94.pdf}} +} +@proceedings{DBLP:conf/crypto/1994, + editor = {Yvo Desmedt}, + title = {Advances in Cryptology - {CRYPTO} '94, 14th Annual International Cryptology + Conference, Santa Barbara, California, USA, August 21-25, 1994, Proceedings}, + series = {Lecture Notes in Computer Science}, + volume = {839}, + publisher = 
{Springer}, + year = {1994}, + url = {https://doi.org/10.1007/3-540-48658-5}, + doi = {10.1007/3-540-48658-5}, + isbn = {3-540-58333-5}, + timestamp = {Sat, 20 May 2017 15:32:52 +0200}, + biburl = {https://dblp.org/rec/bib/conf/crypto/1994}, + bibsource = {dblp computer science bibliography, https://dblp.org} +} +@techreport{ourPaperFull, + author = {Leonid Reyzin and + Dmitry Meshkov and + Alexander Chepurnoy and + Sasha Ivanov}, + title = {Improving Authenticated Dynamic Dictionaries, with Applications to + Cryptocurrencies}, + institution = {{IACR} Cryptology ePrint Archive}, + number = {2016/994}, + year = 2016, + note = {\url{http://eprint.iacr.org/2016/994}}, +} + +@misc{peterToddTrees, +title = {Making {UTXO} Set Growth Irrelevant With Low-Latency Delayed {TXO} Commitments}, +author = {Peter Todd}, +year = 2016, +note = {\url{https://petertodd.org/2016/delayed-txo-commitments}}, +} +@misc{tendermint, +author={Jae Kwon}, +title={Tendermint Go-Merkle}, +note={\url{https://github.com/tendermint/go-merkle}}, +year=2016 +} + +@article{BDLSY12, + author = {Daniel J. Bernstein and + Niels Duif and + Tanja Lange and + Peter Schwabe and + Bo{-}Yin Yang}, + title = {High-speed high-security signatures}, + journal = {J. 
Cryptographic Engineering}, + volume = {2}, + number = {2}, + pages = {77--89}, + year = {2012}, + url = {http://dx.doi.org/10.1007/s13389-012-0027-1}, + doi = {10.1007/s13389-012-0027-1}, + timestamp = {Tue, 30 Oct 2012 13:34:04 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/jce/BernsteinDLSY12}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Avialable at \url{https://ed25519.cr.yp.to/}} +} + + +@misc{Mil12, +author = {Andrew Miller}, +title = {Storing {UTXOs} in a Balanced {Merkle} Tree (zero-trust nodes with {O}(1)-storage)}, +note = {\url{https://bitcointalk.org/index.php?topic=101734.msg1117428}}, +year = 2012 +} + +@misc{utxo, +author = {Jameson Lopp}, +title = {Unspent Transactions Outputs in {B}itcoin}, +note = {\url{http://statoshi.info/dashboard/db/unspent-transaction-output-set}, accessed Nov 7, 2016} +} + +@misc{gzip, +key={gzip}, +author={{Jean-loup} Gailly and Mark Adler}, +title={{gzip}}, +note={\url{http://www.gzip.org/}} +} + +@misc{Mil16, +author = {Andrew Miller}, +title = {Private Communication}, +year = 2016 +} + +@inproceedings{Rog06, + author = {Phillip Rogaway}, + title = {Formalizing Human Ignorance}, + booktitle = {Progress in Cryptology - {VIETCRYPT} 2006, First International Conferenceon + Cryptology in Vietnam, Hanoi, Vietnam, September 25-28, 2006, Revised + Selected Papers}, + pages = {211--228}, + year = {2006}, + crossref = {DBLP:conf/vietcrypt/2006}, + url = {http://dx.doi.org/10.1007/11958239_14}, + doi = {10.1007/11958239_14}, + timestamp = {Fri, 06 Mar 2015 14:11:06 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/vietcrypt/Rogaway06}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Available at \url{https://eprint.iacr.org/2006/281.pdf}} +} +@proceedings{DBLP:conf/vietcrypt/2006, + editor = {Phong Q. 
Nguyen}, + title = {Progressin Cryptology - {VIETCRYPT} 2006, First International Conferenceon + Cryptology in Vietnam, Hanoi, Vietnam, September 25-28, 2006, Revised + Selected Papers}, + series = {Lecture Notes in Computer Science}, + volume = {4341}, + publisher = {Springer}, + year = {2006}, + url = {http://dx.doi.org/10.1007/11958239}, + doi = {10.1007/11958239}, + isbn = {3-540-68799-8}, + timestamp = {Fri, 06 Mar 2015 14:11:06 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/vietcrypt/2006}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@book{Knu98v3, +author ={Knuth, Donald}, +title={The Art of Computer Programming: Volume 3: Sorting and Searching}, +edition={2nd}, +publisher={Addison-Wesley}, +year = 1998 +} + +@misc{ethereum-benchmark, +key={eth}, +title = {Transactions Per Block}, +note = {\url{https://github.com/ethereum/wiki/wiki/Benchmarks}} +} + + +@misc{tbp, +key={tbp}, +title = {Transactions Per Block}, +note = {\url{https://blockchain.info/charts/n-transactions-per-block}} +} + +@misc{spv, +key = {spv}, +title={{SPV}, Simplified Payment Verification}, +note = {\url{https://bitcoin.org/en/glossary/simplified-payment-verification}} +} + +@inproceedings{EK13, + author = {Mohammad Etemad and + Alptekin K{\"{u}}p{\c{c}}{\"{u}}}, + title = {Database Outsourcing with Hierarchical Authenticated Data Structures}, + booktitle = {Information Security and Cryptology - {ICISC} 2013 - 16th International + Conference, Seoul, Korea, November 27-29, 2013, Revised Selected Papers}, + pages = {381--399}, + year = {2013}, + crossref = {DBLP:conf/icisc/2013}, + url = {http://dx.doi.org/10.1007/978-3-319-12160-4_23}, + doi = {10.1007/978-3-319-12160-4_23}, + timestamp = {Wed, 22 Oct 2014 16:56:48 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/icisc/EtemadK13}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Available at \url{http://eprint.iacr.org/2015/351}} +} 
+@proceedings{DBLP:conf/icisc/2013, + editor = {Hyang{-}Sook Lee and + Dong{-}Guk Han}, + title = {Information Security and Cryptology - {ICISC} 2013 - 16th International + Conference, Seoul, Korea, November 27-29, 2013, Revised Selected Papers}, + series = {Lecture Notes in Computer Science}, + volume = {8565}, + publisher = {Springer}, + year = {2014}, + url = {http://dx.doi.org/10.1007/978-3-319-12160-4}, + doi = {10.1007/978-3-319-12160-4}, + isbn = {978-3-319-12159-8}, + timestamp = {Wed, 22 Oct 2014 16:55:58 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/icisc/2013}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} +@inproceedings{BP07, + author = {Giuseppe Di Battista and + Bernardo Palazzi}, + title = {Authenticated Relational Tables and Authenticated Skip Lists}, + booktitle = {Data and Applications Security XXI, 21st Annual {IFIP} {WG} 11.3 Working + Conference on Data and Applications Security, Redondo Beach, CA, USA, + July 8-11, 2007, Proceedings}, + pages = {31--46}, + year = {2007}, + crossref = {DBLP:conf/dbsec/2007}, + url = {http://dx.doi.org/10.1007/978-3-540-73538-0_3}, + doi = {10.1007/978-3-540-73538-0_3}, + timestamp = {Tue, 30 Jun 2015 09:52:18 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/dbsec/BattistaP07}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Available at \url{http://www.ece.umd.edu/~cpap/published/alex-ber-cpap-rt-08b.pdf}} +} +@proceedings{DBLP:conf/dbsec/2007, + editor = {Steve Barker and + Gail{-}Joon Ahn}, + title = {Data and Applications Security XXI, 21st Annual {IFIP} {WG} 11.3 Working + Conference on Data and Applications Security, Redondo Beach, CA, USA, + July 8-11, 2007, Proceedings}, + series = {Lecture Notes in Computer Science}, + volume = {4602}, + publisher = {Springer}, + year = {2007}, + isbn = {978-3-540-73533-5}, + timestamp = {Tue, 30 Jun 2015 09:52:18 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/dbsec/2007}, + 
bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@inproceedings{HPPT08, + author = {Alexander Heitzmann and + Bernardo Palazzi and + Charalampos Papamanthou and + Roberto Tamassia}, + title = {Efficient integrity checking of untrusted network storage}, + booktitle = {Proceedings of the 2008 {ACM} Workshop On Storage Security And Survivability, + StorageSS 2008, Alexandria, VA, USA, October 31, 2008}, + pages = {43--54}, + year = {2008}, + crossref = {DBLP:conf/storagess/2008}, + url = {http://doi.acm.org/10.1145/1456469.1456479}, + doi = {10.1145/1456469.1456479}, + timestamp = {Tue, 11 Nov 2008 13:01:27 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/storagess/HeitzmannPPT08}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Available at \url{http://www.ece.umd.edu/~cpap/published/alex-ber-cpap-rt-08b.pdf}} +} +@proceedings{DBLP:conf/storagess/2008, + editor = {Yongdae Kim and + William Yurcik}, + title = {Proceedings of the 2008 {ACM} Workshop On Storage Security And Survivability, + StorageSS 2008, Alexandria, VA, USA, October 31, 2008}, + publisher = {{ACM}}, + year = {2008}, + isbn = {978-1-60558-299-3}, + timestamp = {Tue, 11 Nov 2008 12:59:27 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/storagess/2008}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@inproceedings{GPTT08, + author = {Michael T. Goodrich and + Charalampos Papamanthou and + Roberto Tamassia and + Nikos Triandopoulos}, + title = {Athos: Efficient Authentication of Outsourced File Systems}, + booktitle = {Information Security, 11th International Conference, {ISC} 2008, Taipei, + Taiwan, September 15-18, 2008. 
Proceedings},
+  pages = {80--96},
+  year = {2008},
+  crossref = {DBLP:conf/isw/2008},
+  url = {http://dx.doi.org/10.1007/978-3-540-85886-7_6},
+  doi = {10.1007/978-3-540-85886-7_6},
+  timestamp = {Mon, 22 Sep 2008 15:16:10 +0200},
+  biburl = {http://dblp.uni-trier.de/rec/bib/conf/isw/GoodrichPTT08},
+  bibsource = {dblp computer science bibliography, http://dblp.org},
+  note = {Available at \url{http://www.ece.umd.edu/~cpap/published/mtg-cpap-rt-nikos-08.pdf}}
+}
+@proceedings{DBLP:conf/isw/2008,
+  editor = {Tzong{-}Chen Wu and
+  Chin{-}Laung Lei and
+  Vincent Rijmen and
+  Der{-}Tsai Lee},
+  title = {Information Security, 11th International Conference, {ISC} 2008, Taipei,
+  Taiwan, September 15-18, 2008. Proceedings},
+  series = {Lecture Notes in Computer Science},
+  volume = {5222},
+  publisher = {Springer},
+  year = {2008},
+  isbn = {978-3-540-85884-3},
+  timestamp = {Mon, 22 Sep 2008 15:14:53 +0200},
+  biburl = {http://dblp.uni-trier.de/rec/bib/conf/isw/2008},
+  bibsource = {dblp computer science bibliography, http://dblp.org}
+}
+@article{Pug90,
+  author = {William Pugh},
+  title = {Skip Lists: {A} Probabilistic Alternative to Balanced Trees},
+  journal = {Commun. {ACM}},
+  volume = {33},
+  number = {6},
+  pages = {668--676},
+  year = {1990},
+  url = {http://doi.acm.org/10.1145/78973.78977},
+  doi = {10.1145/78973.78977},
+  timestamp = {Tue, 07 Jun 2011 16:52:04 +0200},
+  biburl = {http://dblp.uni-trier.de/rec/bib/journals/cacm/Pugh90},
+  bibsource = {dblp computer science bibliography, http://dblp.org},
+  note = {Available from \url{http://citeseer.ist.psu.edu/viewdoc/summary?doi=10.1.1.15.9072}}
+}
+
+@phdthesis{Pap11,
+author = {Charalampos Papamanthou},
+title = {Cryptography for Efficiency: New Directions in Authenticated Data Structures},
+school = {Brown University},
+year = 2011,
+note = {Available at \url{http://www.ece.umd.edu/~cpap/published/theses/cpap-phd.pdf}}
+}
+
+@inproceedings{GSTW03,
+  author = {Michael T. 
Goodrich and + Michael Shin and + Roberto Tamassia and + William H. Winsborough}, + title = {Authenticated Dictionaries for Fresh Attribute Credentials}, + booktitle = {Trust Management, First International Conference, iTrust 2003, Heraklion, + Crete, Greece, May 28-30, 2002, Proceedings}, + pages = {332--347}, + year = {2003}, + crossref = {DBLP:conf/itrust/2003}, + url = {http://dx.doi.org/10.1007/3-540-44875-6_24}, + doi = {10.1007/3-540-44875-6_24}, + timestamp = {Tue, 05 Jul 2011 11:07:29 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/itrust/GoodrichSTW03}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Available at \url{http://cs.brown.edu/cgc/stms/papers/itrust2003.pdf}} +} +@proceedings{DBLP:conf/itrust/2003, + editor = {Paddy Nixon and + Sotirios Terzis}, + title = {Trust Management, First International Conference, iTrust 2003, Heraklion, + Crete, Greece, May 28-30, 2002, Proceedings}, + series = {Lecture Notes in Computer Science}, + volume = {2692}, + publisher = {Springer}, + year = {2003}, + isbn = {3-540-40224-1}, + timestamp = {Fri, 30 May 2003 15:06:43 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/itrust/2003}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@article{CLHML15, + author = {Xiaofeng Chen and + Jin Li and + Xinyi Huang and + Jianfeng Ma and + Wenjing Lou}, + title = {New Publicly Verifiable Databases with Efficient Updates}, + journal = {{IEEE} Trans. Dependable Sec. 
Comput.},
+  volume = {12},
+  number = {5},
+  pages = {546--556},
+  year = {2015},
+  url = {http://dx.doi.org/10.1109/TDSC.2014.2366471},
+  doi = {10.1109/TDSC.2014.2366471},
+  timestamp = {Thu, 21 Jul 2016 12:12:02 +0200},
+  biburl = {http://dblp.uni-trier.de/rec/bib/journals/tdsc/0001LHML15},
+  bibsource = {dblp computer science bibliography, http://dblp.org}
+}
+@article{CLWML16,
+  author = {Xiaofeng Chen and
+  Jin Li and
+  Jian Weng and
+  Jianfeng Ma and
+  Wenjing Lou},
+  title = {Verifiable Computation over Large Database with Incremental Updates},
+  journal = {{IEEE} Trans. Computers},
+  volume = {65},
+  number = {10},
+  pages = {3184--3195},
+  year = {2016},
+  url = {http://dx.doi.org/10.1109/TC.2015.2512870},
+  doi = {10.1109/TC.2015.2512870},
+  timestamp = {Mon, 19 Sep 2016 15:58:56 +0200},
+  biburl = {http://dblp.uni-trier.de/rec/bib/journals/tc/00010WML16},
+  bibsource = {dblp computer science bibliography, http://dblp.org}
+}
+
+
+@article{MWMS16,
+title = {Publicly verifiable databases with efficient insertion/deletion operations},
+author = {Meixia Miao and Jianfeng Wang and Jianfeng Ma and Willy Susilo},
+year = 2016,
+journal = {Journal of Computer and System Sciences},
+note = {Available on-line at \url{http://dx.doi.org/10.1016/j.jcss.2016.07.005}. To appear in print.}
+}
+
+@inproceedings{BGV11,
+  author = {Siavosh Benabbas and
+  Rosario Gennaro and
+  Yevgeniy Vahlis},
+  title = {Verifiable Delegation of Computation over Large Datasets},
+  booktitle = {Advances in Cryptology - {CRYPTO} 2011 - 31st Annual Cryptology Conference,
+  Santa Barbara, CA, USA, August 14-18, 2011. 
Proceedings}, + pages = {111--131}, + year = {2011}, + crossref = {DBLP:conf/crypto/2011}, + url = {http://dx.doi.org/10.1007/978-3-642-22792-9_7}, + doi = {10.1007/978-3-642-22792-9_7}, + timestamp = {Mon, 15 Aug 2011 21:29:40 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/crypto/BenabbasGV11}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Available at \url{http://eprint.iacr.org/2011/132}} +} +@proceedings{DBLP:conf/crypto/2011, + editor = {Phillip Rogaway}, + title = {Advances in Cryptology - {CRYPTO} 2011 - 31st Annual Cryptology Conference, + Santa Barbara, CA, USA, August 14-18, 2011. Proceedings}, + series = {Lecture Notes in Computer Science}, + volume = {6841}, + publisher = {Springer}, + year = {2011}, + url = {http://dx.doi.org/10.1007/978-3-642-22792-9}, + doi = {10.1007/978-3-642-22792-9}, + isbn = {978-3-642-22791-2}, + timestamp = {Mon, 15 Aug 2011 21:26:36 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/crypto/2011}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@inproceedings{CF13, + author = {Dario Catalano and + Dario Fiore}, + title = {Vector Commitments and Their Applications}, + booktitle = {Public-Key Cryptography - {PKC} 2013 - 16th International Conference + on Practice and Theory in Public-Key Cryptography, Nara, Japan, February + 26 - March 1, 2013. 
Proceedings}, + pages = {55--72}, + year = {2013}, + crossref = {DBLP:conf/pkc/2013}, + url = {http://dx.doi.org/10.1007/978-3-642-36362-7_5}, + doi = {10.1007/978-3-642-36362-7_5}, + timestamp = {Tue, 05 Feb 2013 13:29:13 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/pkc/CatalanoF13}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Available at \url{http://eprint.iacr.org/2011/495}} +} +@proceedings{DBLP:conf/pkc/2013, + editor = {Kaoru Kurosawa and + Goichiro Hanaoka}, + title = {Public-Key Cryptography - {PKC} 2013 - 16th International Conference + on Practice and Theory in Public-Key Cryptography, Nara, Japan, February + 26 - March 1, 2013. Proceedings}, + series = {Lecture Notes in Computer Science}, + volume = {7778}, + publisher = {Springer}, + year = {2013}, + url = {http://dx.doi.org/10.1007/978-3-642-36362-7}, + doi = {10.1007/978-3-642-36362-7}, + isbn = {978-3-642-36361-0}, + timestamp = {Tue, 05 Feb 2013 13:28:08 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/pkc/2013}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@unpublished{Whi15, +author = {Bill White}, +title = {A Theory for Lightweight Cryptocurrency +Ledgers}, +year = 2015, +note = {Available at \url{http://qeditas.org/lightcrypto.pdf} (see also code at \url{https://github.com/bitemyapp/ledgertheory})} +} + +@article{PTT16, + author = {Charalampos Papamanthou and + Roberto Tamassia and + Nikos Triandopoulos}, + title = {Authenticated Hash Tables Based on Cryptographic Accumulators}, + journal = {Algorithmica}, + volume = {74}, + number = {2}, + pages = {664--712}, + year = {2016}, + url = {http://dx.doi.org/10.1007/s00453-014-9968-3}, + doi = {10.1007/s00453-014-9968-3}, + timestamp = {Wed, 27 Jan 2016 13:00:22 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/algorithmica/PapamanthouTT16}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@book{Wei06, +author = {Mark Allen 
Weiss}, +title = {Data Structures and Algorithm Analysis in Java (Second Edition)}, +publisher = {Pearson}, +year = 2006 +} + +@misc{Pfa02, +author = {Ben Pfaff}, +title = {{GNU} libavl 2.0.2}, +note = {Available at \url{http://adtinfo.org/libavl.html/index.html}}, +year = 2002 +} + +@misc{impl, +key = {code}, +title = {Implementation of Authenticated Data Structures within Scorex}, +note = {\url{https://github.com/input-output-hk/scrypto/}} +} + +@misc{bscript, +key = {BSC}, +title = {Script - Bitcoin Wiki}, +note = {\url{https://en.bitcoin.it/wiki/Script}} +} + +@misc{Java, +key = {JAV}, +title = {The {Java} Programming Language}, +note = {\url{http://www.java.org/}} +} + +@misc{scala, +key = {SCL}, +title = {The {Scala} Programming Language}, +note = {\url{http://www.scala-lang.org/}} +} + +@misc{scapi, +key = {SCP}, +title = {Secure Computation API}, +note = {\url{https://cyber.biu.ac.il/scapi/}} +} + +@misc{bouncycastle, +key = {LBC}, +title = {The Legion of the Bouncy Castle}, +note = {\url{https://www.bouncycastle.org}} +} + +@misc{nxt, +key = {nxt}, +title = {The {Nxt} Cryptocurrency}, +note = {\url{https://nxt.org/}} +} + +@article{AVL62, + author = {Adel'son-Vel'skii and Landis}, + title = {An algorithm for the organization of information}, +journal = {Dokladi Akademia Nauk SSSR}, + volume = 146, +number = 2, +page = {263--266}, +year=1962, +note ={English translation in Soviet Math. Doklady 3, 1962, 1259--1263} +} +@misc{Sed08, + author = {Robert Sedgewick}, + title = {Left-leaning Red-Black Trees}, + year = 2008, +note = {Available at \url{http://www.cs.princeton.edu/~rs/talks/LLRB/LLRB.pdf}} +} +@inproceedings{GB78, + author = {Leonidas J. 
Guibas and + Robert Sedgewick}, + title = {A Dichromatic Framework for Balanced Trees}, + booktitle = {19th Annual Symposium on Foundations of Computer Science, Ann Arbor, + Michigan, USA, 16-18 October 1978}, + pages = {8--21}, + year = {1978}, + crossref = {DBLP:conf/focs/FOCS19}, + url = {http://dx.doi.org/10.1109/SFCS.1978.3}, + doi = {10.1109/SFCS.1978.3}, + timestamp = {Tue, 16 Dec 2014 09:57:19 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/focs/GuibasS78}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Available from \url{http://professor.ufabc.edu.br/~jesus.mena/courses/mc3305-2q-2015/AED2-13-redblack-paper.pdf}} +} +@proceedings{DBLP:conf/focs/FOCS19, + title = {19th Annual Symposium on Foundations of Computer Science, Ann Arbor, + Michigan, USA, 16-18 October 1978}, + publisher = {{IEEE} Computer Society}, + year = {1978}, + url = {http://ieeexplore.ieee.org/xpl/mostRecentIssue.jsp?punumber=4567951}, + timestamp = {Mon, 15 Dec 2014 18:48:44 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/focs/FOCS19}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} +@article{MR98, + author = {Conrado Mart{\'{\i}}nez and + Salvador Roura}, + title = {Randomized Binary Search Trees}, + journal = {J. {ACM}}, + volume = {45}, + number = {2}, + pages = {288--323}, + year = {1998}, + url = {http://doi.acm.org/10.1145/274787.274812}, + doi = {10.1145/274787.274812}, + timestamp = {Wed, 19 Apr 2006 10:08:18 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/jacm/MartinezR98}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Availabel at \url{http://citeseer.ist.psu.edu/viewdoc/summary?doi=10.1.1.17.243}} +} +@article{SA96, + author = {Raimund Seidel and + Cecilia R. 
Aragon}, + title = {Randomized Search Trees}, + journal = {Algorithmica}, + volume = {16}, + number = {4/5}, + pages = {464--497}, + year = {1996}, + url = {http://dx.doi.org/10.1007/BF01940876}, + doi = {10.1007/BF01940876}, + timestamp = {Wed, 18 May 2011 16:16:57 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/algorithmica/SeidelA96}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Available at \url{https://faculty.washington.edu/aragon/pubs/rst96.pdf}} +} + +@article{CW11, + author = {Scott A. Crosby and + Dan S. Wallach}, + title = {Authenticated Dictionaries: Real-World Costs and Trade-Offs}, + journal = {{ACM} Trans. Inf. Syst. Secur.}, + volume = {14}, + number = {2}, + pages = {17}, + year = {2011}, + url = {http://doi.acm.org/10.1145/2019599.2019602}, + doi = {10.1145/2019599.2019602}, + timestamp = {Thu, 03 Nov 2011 15:33:19 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/tissec/CrosbyW11}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Available at \url{http://tamperevident.cs.rice.edu/Storage.html}} +} +@inproceedings{AGT01, + author = {Aris Anagnostopoulos and + Michael T. Goodrich and + Roberto Tamassia}, + title = {Persistent Authenticated Dictionaries and Their Applications}, + booktitle = {Information Security, 4th International Conference, {ISC} 2001, Malaga, + Spain, October 1-3, 2001, Proceedings}, + pages = {379--393}, + year = {2001}, + crossref = {DBLP:conf/isw/2001}, + url = {http://dx.doi.org/10.1007/3-540-45439-X_26}, + doi = {10.1007/3-540-45439-X_26}, + timestamp = {Tue, 28 Jun 2011 15:13:09 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/isw/AnagnostopoulosGT01}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Available at \url{http://aris.me/pubs/pad.pdf}} +} +@proceedings{DBLP:conf/isw/2001, + editor = {George I. 
Davida and + Yair Frankel}, + title = {Information Security, 4th International Conference, {ISC} 2001, Malaga, + Spain, October 1-3, 2001, Proceedings}, + series = {Lecture Notes in Computer Science}, + volume = {2200}, + publisher = {Springer}, + year = {2001}, + isbn = {3-540-42662-0}, + timestamp = {Wed, 17 Apr 2002 08:43:13 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/isw/2001}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@article{NN00, + author = {Moni Naor and + Kobbi Nissim}, + title = {Certificate revocation and certificate update}, + journal = {{IEEE} Journal on Selected Areas in Communications}, + volume = {18}, + number = {4}, + pages = {561--570}, + year = {2000}, + url = {http://dx.doi.org/10.1109/49.839932}, + doi = {10.1109/49.839932}, + timestamp = {Fri, 17 Feb 2012 09:46:39 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/jsac/NaorN00}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Available at \url{http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.41.7072}} +} + +@inproceedings{BEGKN91, + author = {Manuel Blum and + William S. Evans and + Peter Gemmell and + Sampath Kannan and + Moni Naor}, + title = {Checking the Correctness of Memories}, + booktitle = {32nd Annual Symposium on Foundations of Computer Science, San Juan, + Puerto Rico, 1-4 October 1991}, + pages = {90--99}, + year = {1991}, + crossref = {DBLP:conf/focs/FOCS32}, + url = {http://dx.doi.org/10.1109/SFCS.1991.185352}, + doi = {10.1109/SFCS.1991.185352}, + timestamp = {Tue, 16 Dec 2014 09:57:25 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/focs/BlumEGKN91}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Later appears as \cite{BEGKN94}, which is available at \url{http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.29.2991}} +} + +@article{BEGKN94, + author = {Manuel Blum and + William S. 
Evans and + Peter Gemmell and + Sampath Kannan and + Moni Naor}, + title = {Checking the Correctness of Memories}, + journal = {Algorithmica}, + volume = {12}, + number = {2/3}, + pages = {225--244}, + year = {1994}, + url = {http://dx.doi.org/10.1007/BF01185212}, + doi = {10.1007/BF01185212}, + timestamp = {Wed, 18 May 2011 16:16:50 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/algorithmica/BlumEGKN94}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Available at \url{http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.29.2991}} +} + +@proceedings{DBLP:conf/focs/FOCS32, + title = {32nd Annual Symposium on Foundations of Computer Science, San Juan, + Puerto Rico, 1-4 October 1991}, + publisher = {{IEEE} Computer Society}, + year = {1991}, + url = {http://ieeexplore.ieee.org/xpl/mostRecentIssue.jsp?punumber=379}, + isbn = {0-8186-2445-0}, + timestamp = {Mon, 15 Dec 2014 18:48:44 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/focs/FOCS32}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} +@unpublished{Nak08, +author = {Satoshi Nakamoto}, +title = {Bitcoin: A Peer-to-Peer Electronic Cash System}, +year = 2008, +note = {\url{https://bitcoin.org/bitcoin.pdf}} +} + +@inproceedings{PT07, + author = {Charalampos Papamanthou and + Roberto Tamassia}, + title = {Time and Space Efficient Algorithms for Two-Party Authenticated Data + Structures}, + booktitle = {Information and Communications Security, 9th International Conference, + {ICICS} 2007, Zhengzhou, China, December 12-15, 2007, Proceedings}, + pages = {1--15}, + year = {2007}, + crossref = {DBLP:conf/icics/2007}, + url = {http://dx.doi.org/10.1007/978-3-540-77048-0_1}, + doi = {10.1007/978-3-540-77048-0_1}, + timestamp = {Tue, 21 Oct 2008 14:44:58 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/icics/PapamanthouT07}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Available at 
\url{http://www.ece.umd.edu/~cpap/published/cpap-rt-07.pdf}} +} +@proceedings{DBLP:conf/icics/2007, + editor = {Sihan Qing and + Hideki Imai and + Guilin Wang}, + title = {Information and Communications Security, 9th International Conference, + {ICICS} 2007, Zhengzhou, China, December 12-15, 2007, Proceedings}, + series = {Lecture Notes in Computer Science}, + volume = {4861}, + publisher = {Springer}, + year = {2008}, + isbn = {978-3-540-77047-3}, + timestamp = {Tue, 21 Oct 2008 14:44:58 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/icics/2007}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@inproceedings{DJ07, + author = {Brian C. Dean and + Zachary H. Jones}, + title = {Exploring the duality between skip lists and binary search trees}, + booktitle = {Proceedings of the 45th Annual Southeast Regional Conference, 2007, + Winston-Salem, North Carolina, USA, March 23-24, 2007}, + pages = {395--399}, + year = {2007}, + crossref = {DBLP:conf/ACMse/2007}, + url = {http://doi.acm.org/10.1145/1233341.1233413}, + doi = {10.1145/1233341.1233413}, + timestamp = {Wed, 02 May 2007 15:10:53 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/ACMse/DeanJ07}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Available at \url{https://people.cs.clemson.edu/~bcdean/skip_bst.pdf}} +} +@proceedings{DBLP:conf/ACMse/2007, + editor = {David John and + Sandria N. Kerr}, + title = {Proceedings of the 45th Annual Southeast Regional Conference, 2007, + Winston-Salem, North Carolina, USA, March 23-24, 2007}, + publisher = {{ACM}}, + year = {2007}, + isbn = {978-1-59593-629-5}, + timestamp = {Wed, 02 May 2007 15:10:53 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/ACMse/2007}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@inproceedings{GPT07, + author = {Michael T. 
Goodrich and + Charalampos Papamanthou and + Roberto Tamassia}, + title = {On the Cost of Persistence and Authentication in Skip Lists}, + booktitle = {Experimental Algorithms, 6th International Workshop, {WEA} 2007, Rome, + Italy, June 6-8, 2007, Proceedings}, + pages = {94--107}, + year = {2007}, + crossref = {DBLP:conf/wea/2007}, + url = {http://dx.doi.org/10.1007/978-3-540-72845-0_8}, + doi = {10.1007/978-3-540-72845-0_8}, + timestamp = {Thu, 28 Jun 2007 15:26:47 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/wea/GoodrichPT07}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Available at \url{http://cs.brown.edu/cgc/stms/papers/pers-auth.pdf}} +} +@proceedings{DBLP:conf/wea/2007, + editor = {Camil Demetrescu}, + title = {Experimental Algorithms, 6th International Workshop, {WEA} 2007, Rome, + Italy, June 6-8, 2007, Proceedings}, + series = {Lecture Notes in Computer Science}, + volume = {4525}, + publisher = {Springer}, + year = {2007}, + isbn = {978-3-540-72844-3}, + timestamp = {Thu, 28 Jun 2007 15:26:47 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/wea/2007}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} +@inproceedings{MHKS14, + author = {Andrew Miller and + Michael Hicks and + Jonathan Katz and + Elaine Shi}, + title = {Authenticated data structures, generically}, + booktitle = {The 41st Annual {ACM} {SIGPLAN-SIGACT} Symposium on Principles of + Programming Languages, {POPL} '14, San Diego, CA, USA, January 20-21, + 2014}, + pages = {411--424}, + year = {2014}, + crossref = {DBLP:conf/popl/2014}, + url = {http://doi.acm.org/10.1145/2535838.2535851}, + doi = {10.1145/2535838.2535851}, + timestamp = {Thu, 09 Jan 2014 08:32:32 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/popl/MillerHKS14}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Project page and full version at \url{http://amiller.github.io/lambda-auth/paper.html}} +} 
+@proceedings{DBLP:conf/popl/2014, + editor = {Suresh Jagannathan and + Peter Sewell}, + title = {The 41st Annual {ACM} {SIGPLAN-SIGACT} Symposium on Principles of + Programming Languages, {POPL} '14, San Diego, CA, USA, January 20-21, + 2014}, + publisher = {{ACM}}, + year = {2014}, + url = {http://dl.acm.org/citation.cfm?id=2535838}, + isbn = {978-1-4503-2544-8}, + timestamp = {Thu, 09 Jan 2014 08:21:22 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/popl/2014}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@article{ORS04, + author = {Rafail Ostrovsky and + Charles Rackoff and + Adam D. Smith}, + title = {Efficient Consistency Proofs for Generalized Queries on a Committed + Database}, + journal = {{IACR} Cryptology ePrint Archive}, + volume = {2004}, + pages = {170}, + year = {2004}, + url = {http://eprint.iacr.org/2004/170}, + timestamp = {Fri, 08 Apr 2016 07:34:31 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/iacr/OstrovskyRS04}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Available at \url{http://eprint.iacr.org/2004/170}} +} + +@article{MNDGKS04, + author = {Charles U. Martel and + Glen Nuckolls and + Premkumar T. Devanbu and + Michael Gertz and + April Kwong and + Stuart G. Stubblebine}, + title = {A General Model for Authenticated Data Structures}, + journal = {Algorithmica}, + volume = {39}, + number = {1}, + pages = {21--41}, + year = {2004}, + url = {http://dx.doi.org/10.1007/s00453-003-1076-8}, + doi = {10.1007/s00453-003-1076-8}, + timestamp = {Fri, 16 Jul 2004 14:34:49 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/journals/algorithmica/MartelNDGKS04}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Available at \url{http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.75.3658}} +} + +@unpublished{GTS01, +author={M.T. Goodrich and R. Tamassia and A. 
Schwerin}, +title={Implementation of an authenticated dictionary with skip lists and commutative hashing}, +note={Available at \url{http://cs.brown.edu/cgc/stms/papers/discex2001.pdf}; also presented in Proc. {DARPA} Information +Survivability Conference \& Exposition {II} {(DISCEX II)}}, +year = 2001 +} + + +@unpublished{GT00, + author = {M. T. Goodrich and R. Tamassia}, +title={Efficient authenticated dictionaries +with skip lists and commutative hashing}, +note = {Technical +Report, Johns Hopkins Information Security Institute; available at \url{http://cs.brown.edu/cgc/stms/papers/hashskip.pdf}}, +year= 2000 +} + +@inproceedings{Mer89, + author = {Ralph C. Merkle}, + title = {A Certified Digital Signature}, + booktitle = {Advances in Cryptology - {CRYPTO} '89, 9th Annual International Cryptology + Conference, Santa Barbara, California, USA, August 20-24, 1989, Proceedings}, + pages = {218--238}, + year = {1989}, + crossref = {DBLP:conf/crypto/1989}, + url = {http://dx.doi.org/10.1007/0-387-34805-0_21}, + doi = {10.1007/0-387-34805-0_21}, + timestamp = {Fri, 18 Sep 2009 09:03:51 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/crypto/Merkle89}, + bibsource = {dblp computer science bibliography, http://dblp.org}, + note = {Available at \url{http://www.merkle.com/papers/Certified1979.pdf}} +} +@proceedings{DBLP:conf/crypto/1989, + editor = {Gilles Brassard}, + title = {Advances in Cryptology - {CRYPTO} '89, 9th Annual International Cryptology + Conference, Santa Barbara, California, USA, August 20-24, 1989, Proceedings}, + series = {Lecture Notes in Computer Science}, + volume = {435}, + publisher = {Springer}, + year = {1990}, + isbn = {3-540-97317-6}, + timestamp = {Thu, 07 Feb 2002 09:41:00 +0100}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/crypto/1989}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + +@inproceedings{BGM16, + author = {Iddo Bentov and + Ariel Gabizon and + Alex Mizrahi}, + title = {Cryptocurrencies 
Without Proof of Work}, + booktitle = {Financial Cryptography and Data Security - {FC} 2016 International + Workshops, BITCOIN, VOTING, and WAHC, Christ Church, Barbados, February + 26, 2016, Revised Selected Papers}, + pages = {142--157}, + year = {2016}, + crossref = {DBLP:conf/fc/2016w}, + url = {http://dx.doi.org/10.1007/978-3-662-53357-4_10}, + doi = {10.1007/978-3-662-53357-4_10}, + timestamp = {Wed, 31 Aug 2016 11:20:19 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/fc/BentovGM16}, + bibsource = {dblp computer science bibliography, http://dblp.org}, +note = {Available at \url{http://arxiv.org/abs/1406.5694}} +} + +@proceedings{DBLP:conf/fc/2016w, + editor = {Jeremy Clark and + Sarah Meiklejohn and + Peter Y. A. Ryan and + Dan S. Wallach and + Michael Brenner and + Kurt Rohloff}, + title = {Financial Cryptography and Data Security - {FC} 2016 International + Workshops, BITCOIN, VOTING, and WAHC, Christ Church, Barbados, February + 26, 2016, Revised Selected Papers}, + series = {Lecture Notes in Computer Science}, + volume = {9604}, + publisher = {Springer}, + year = {2016}, + url = {http://dx.doi.org/10.1007/978-3-662-53357-4}, + doi = {10.1007/978-3-662-53357-4}, + isbn = {978-3-662-53356-7}, + timestamp = {Wed, 31 Aug 2016 11:18:50 +0200}, + biburl = {http://dblp.uni-trier.de/rec/bib/conf/fc/2016w}, + bibsource = {dblp computer science bibliography, http://dblp.org} +} + + +@misc{kiayias2016, + author = {Aggelos Kiayias and Ioannis Konstantinou and Alexander Russell and Bernardo David and Roman Oliynykov}, + title = {A Provably Secure Proof-of-Stake Blockchain Protocol}, + howpublished = {Cryptology ePrint Archive, Report 2016/889}, + year = {2016}, + note = {\url{http://eprint.iacr.org/2016/889}}, +} + +@unpublished{wood2014ethereum, + title={Ethereum: A secure decentralised generalised transaction ledger}, + author={Wood, Gavin}, + year=2014, + note={Available at \url{http://gavwood.com/Paper.pdf}} +} + +@inproceedings{croman2016scaling, + 
title={On scaling decentralized blockchains}, + author={Croman, Kyle and Decker, Christian and Eyal, Ittay and Gencer, Adem Efe and Juels, Ari and Kosba, Ahmed and Miller, Andrew and Saxena, Prateek and Shi, Elaine and G{\"u}n, Emin}, + booktitle={Proc. 3rd Workshop on Bitcoin and Blockchain Research}, + year={2016} +} + +@inproceedings{decker2013information, + title={Information propagation in the bitcoin network}, + author={Decker, Christian and Wattenhofer, Roger}, + booktitle={IEEE P2P 2013 Proceedings}, + pages={1--10}, + year={2013}, + organization={IEEE} +} + +@inproceedings{sompolinsky2015secure, + title={Secure high-rate transaction processing in Bitcoin}, + author={Sompolinsky, Yonatan and Zohar, Aviv}, + booktitle={International Conference on Financial Cryptography and Data Security}, + pages={507--527}, + year={2015}, + organization={Springer} +} + +@inproceedings{eyal2016bitcoin, + title={Bitcoin-NG: A scalable blockchain protocol}, + author={Eyal, Ittay and Gencer, Adem Efe and Sirer, Emin G{\"u}n and Van Renesse, Robbert}, + booktitle={13th USENIX Symposium on Networked Systems Design and Implementation (NSDI 16)}, + pages={45--59}, + year={2016} +} + +@article{kokoris2016enhancing, + title={Enhancing bitcoin security and performance with strong consistency via collective signing}, + author={Kokoris-Kogias, Eleftherios and Jovanovic, Philipp and Gailly, Nicolas and Khoffi, Ismail and Gasser, Linus and Ford, Bryan}, + journal={arXiv preprint arXiv:1602.06997}, + year={2016} +} + +@inproceedings{aumasson2013blake2, + title={{BLAKE2}: simpler, smaller, fast as {MD5}}, + author={Aumasson, Jean-Philippe and Neves, Samuel and Wilcox-O’Hearn, Zooko and Winnerlein, Christian}, + booktitle={International Conference on Applied Cryptography and Network Security}, + pages={119--135}, + year={2013}, + organization={Springer} +} + +@misc{fullnodes, + title={The Decline in Bitcoin Full Nodes}, + author={Parker, Luke}, + year=2015, + 
note={\url{http://bravenewcoin.com/news/the-decline-in-bitcoins-full-nodes/}} +} + +@misc{bitcoindos, + title={{CVE}-2013-2293: New {DoS} vulnerability by Forcing Continuous Hard Disk Seek/Read Activity}, + author={Bitcoin Wiki}, + year=2013, + note={\url{https://en.bitcoin.it/wiki/CVE-2013-2293}} +} + +@misc{ethdos, + title={Transaction spam attack: Next Steps}, + author={Vitalik Buterin}, + year=2016, + note={\url{https://blog.ethereum.org/2016/09/22/transaction-spam-attack-next-steps/}} +} + +@misc{geth, + title={Official golang implementation of the Ethereum protocol}, + author={The Go Ethereum Team}, + year=2016, + note={\url{http://geth.ethereum.org/}} +} + +@misc{fivehrs, + title={A Bitcoin transaction that takes 5 hours to verify}, + author={Sergio Lerner}, + year=2017, + note={\url{https://bitslog.wordpress.com/2017/01/08/a-bitcoin-transaction-that-takes-5-hours-to-verify/}} +} + +@misc{ethattacks, + title={Ethereum Network Attacker’s IP Address Is Traceable}, + author={Bok Khoo}, + year=2016, + note={\url{https://www.bokconsulting.com.au/blog/ethereum-network-attackers-ip-address-is-traceable/}} +} + +@misc{VLQWikipedia, + title={Variable-length quantity}, + note={\url{https://en.wikipedia.org/wiki/Variable-length_quantity}} +} + +@misc{VLQRosetta, + title={Variable-length quantity}, + note={\url{https://rosettacode.org/wiki/Variable-length_quantity}} +} + +@misc{Reification, + title={Reification}, + note={\url{https://en.wikipedia.org/wiki/Reification_(computer_science)}} +} + +@misc{UbiqLang, + title={Ubiquitous Language}, + note={\url{https://martinfowler.com/bliki/UbiquitousLanguage.html}} +} + +@misc{WTFLang, + title={The most WTF-y programming languages}, + note={\url{https://www.itworld.com/article/2833252/the-most-wtf-y-programming-languages.html}} +} \ No newline at end of file diff --git a/docs/spec/spec.tex b/docs/spec/spec.tex new file mode 100644 index 0000000000..38f90a1bf7 --- /dev/null +++ b/docs/spec/spec.tex 
@@ -0,0 +1,299 @@ +\documentclass[11pt]{article} + +\usepackage{fullpage} + +\usepackage{mathtools,color,xcolor,hyperref,graphicx,wrapfig,listings,array,xspace,tabu,stmaryrd,tabularx,verbatim,longtable} + +% "define" Scala +\lstdefinelanguage{scala}{ + morekeywords={abstract,case,catch,class,def,% + do,else,extends,false,final,finally,% + for,if,implicit,import,match,mixin,% + new,null,object,override,package,% + private,protected,requires,return,sealed,% + super,this,throw,trait,true,try,% + type,val,var,while,with,yield}, + otherkeywords={=>,<-,<\%,<:,>:,\#,@}, + sensitive=true, + morecomment=[l]{//}, + morecomment=[n]{/*}{*/}, + morestring=[b]", + morestring=[b]', + morestring=[b]""" +} + +\newcommand{\authnote}[2]{\textsf{#1 \textcolor{blue}{: #2}}} + +\newcommand{\knote}[1]{{\authnote{\textcolor{green}{kushti}}{#1}}} +\newcommand{\mnote}[1]{{\authnote{\textcolor{red}{Morphic}}{#1}}} +\newcommand{\dnote}[1]{{\authnote{\textcolor{brown}{Dima}}{#1}}} + + +\newcommand{\ret}{\mathsf{ret}} +\newcommand{\new}{\mathsf{new}} +\newcommand{\hnew}{h_\mathsf{new}} +\newcommand{\old}{\mathsf{old}} +\newcommand{\op}{\mathsf{op}} +\newcommand{\verifier}{\mathcal{V}} +\newcommand{\prover}{\mathcal{P}} +\newcommand{\key}{\mathsf{key}} +\newcommand{\nextkey}{\mathsf{nextKey}} +\newcommand{\node}{\mathsf{t}} +\newcommand{\parent}{\mathsf{p}} +\newcommand{\leaf}{\mathsf{f}} +\newcommand{\vl}{\mathsf{value}} +\newcommand{\balance}{\mathsf{balance}} +\newcommand{\lft}{\mathsf{left}} +\newcommand{\rgt}{\mathsf{right}} +\newcommand{\lbl}{\mathsf{label}} +\newcommand{\direction}{\mathsf{d}} +\newcommand{\oppositedirection}{\bar{\mathsf{d}}} +\newcommand{\found}{\mathsf{found}} +\newcommand{\mypar}[1]{\smallskip\noindent\textbf{#1.}\ \ \ } +\newcommand{\ignore}[1]{} + +\newcommand{\langname}{ErgoTree\xspace} +\newcommand{\corelang}{$\lst{Core-}\lambda$\xspace} +\newcommand{\lst}[1]{\text{\lstinline[basicstyle={\ttfamily}]$#1$}} + +\newcommand{\andnode}{\ensuremath{\mathsf{AND}}} 
+\newcommand{\ornode}{\ensuremath{\mathsf{OR}}} +\newcommand{\tnode}{\ensuremath{\mathsf{THRESHOLD}}} +\newcommand{\GF}{\ensuremath{\mathrm{GF}}} + +\newcommand{\ASDag}{ErgoTree\xspace} + +\newcommand{\I}[1]{\mathit{#1}} +\newcommand{\B}[1]{\mathbf{#1}} +\newcommand{\PA}[1]{\I{PA}\langle\I{#1}\rangle} +\newcommand{\NA}[1]{\I{NA}\langle\I{#1}\rangle} +\newcommand{\nlindent}[1][0.2cm]{\newline\hangindent=#1} +\newcommand{\MU}[1]{\mu\B{\alpha}.\B{#1}} +\newcommand{\Monoid}[1]{\I{Monoid}\TY{#1}} +%\newcommand{\indentline}{\hangindent=0.7cm} +\newcommand{\tick}{\checkmark} +\newcommand{\Left}[3]{\text{\lst{l}}[#1,#2]\cdot #3} +\newcommand{\Right}[3]{\text{\lst{r}}[#1,#2]\cdot #3} +\newcommand{\SelectField}[2]{\text{\lst{Field}}(#1, #2)} +\newcommand{\Fst}[1]{\text{\lst{fst}}~#1} +\newcommand{\Snd}[1]{\text{\lst{snd}}~#1} +% \newcommand{\Fst}[1]{$#1$\lst{.fst}} +% \newcommand{\Snd}[1]{$#1$\lst{.snd}} +\newcommand{\Ctx}{\mathcal{E}} +\newcommand{\Apply}[2]{#1\langle#2\rangle} +\newcommand{\RCtx}{\mathcal{R}} +\newcommand{\RMatch}[1]{(#1 :: \mathcal{R})} +\newcommand{\RCtxEmpty}{\epsilon} +\newcommand{\Frame}{\mathcal{F}} +\newcommand{\Prim}{\delta} +\newcommand{\Sp}{\mathcal{S}} +\newcommand{\Spec}[1]{\mathcal{S}|[#1|]} +\newcommand{\Build}[1]{\mathcal{B}|[#1|]} +\newcommand{\Hole}{\diamondsuit{}} +\newcommand{\Trait}[2]{\text{\lst{trait}}~#1~\{ #2 \}} +\newcommand{\Class}[3]{\text{\lst{class}}~#1~\text{\lst{extends}}~#2 \{ #3 \}} +\newcommand{\MSig}[3]{\text{\lst{def}}~#1(#2): #3} +\newcommand{\CaseOfxxx}[3]{\lst{case} $#1$ \lst{of} \{ $#2 \to #3$ \}} +\newcommand{\LetXXX}[3]{\lst{let} $#1$ \lst{=} $#2$ \lst{in} $#3$} +\newcommand{\LetrecXXX}[3]{\lst{letrec} $#1$ \lst{=} $#2$ \lst{in} $#3$} +\newcommand{\CaseOfXX}[2]{\text{\lst{case}}~#1~\text{\lst{of}}~\{ #2 \}} +\newcommand{\CaseOf}[3]{\text{\lst{case}}~#1~\text{\lst{of}}~\{ #2 \to #3 \}} +\newcommand{\True}{\text{\lst{true}}} +\newcommand{\False}{\text{\lst{false}}} 
+\newcommand{\IfThenElse}[3]{\text{\lst{if}}~(#1)~#2~\text{\lst{else}}~#3} +\newcommand{\Let}[3]{\text{\lst{let}}~#1~\text{\lst{=}}~#2~\text{\lst{in}}~#3} +\newcommand{\Field}[2]{#1.\text{\lst{#2}}} +\newcommand{\FDecl}[2]{\text{\lst{val}}~#1 : #2} +\newcommand{\New}[1]{\text{\lst{new}}~#1} +\newcommand{\Meth}[2]{\text{\lst{#1.#2}}} + +\newcommand{\KSet}{\mathcal{K}} +\newcommand{\VSet}{\mathcal{V}} +\newcommand{\LSet}{\mathcal{L}} +\newcommand{\Low}[1]{\mathcal{L}\llbracket#1\rrbracket} +\newcommand{\Denot}[1]{\llbracket#1\rrbracket} +\newcommand{\PSet}{\mathcal{P}} +\newcommand{\DSet}{\mathcal{D}} +\newcommand{\CSet}{\mathcal{CLS}} +\newcommand{\ISet}{\mathcal{ABS}} + +\newcommand{\Ov}[1]{\overline{#1}} +\newcommand{\Un}[1]{\underline{#1}} +\newcommand{\Tup}[1]{(#1)} +\newcommand{\Coll}[1]{\text{\lst{Coll}}(#1)} +\newcommand{\Some}[1]{\text{\lst{Some}}(#1)} +\newcommand{\None}[1]{\text{\lst{None}}[#1]} +\newcommand{\Def}[1]{\llparenthesis#1\rrparenthesis} +\newcommand{\ByDef}{\overset{def}{=}} +\newcommand{\Dag}{\Delta} +\newcommand{\Dom}[1]{\mathcal{D}om~#1} +\newcommand{\TAddr}{Addr} +\newcommand{\TDef}{Def} +\newcommand{\TNode}{Node} +\newcommand{\TDag}{Dag} +\newcommand{\TPair}[2]{#1\times#2} +\newcommand{\TList}[1]{List~#1} +\newcommand{\TMDag}{\TDag * \TAddr} +\newcommand{\Focus}[1]{\langle#1\rangle} +\newcommand{\MDag}[1]{\Delta\Focus{#1}} +\newcommand{\MDagPr}[1]{\Delta'\Focus{#1}} +\newcommand{\Map}[2]{#1 \mapsto #2} +\newcommand{\AddMap}[3]{#1 \cup \{#2 \mapsto #3\}} +\newcommand{\To}{$\mapsto$} +\newcommand{\TP}[2]{#1 \to #2} +\newcommand{\Set}[1]{\{ #1 \}} +\newcommand{\DHole}[2]{d~#1\Hole#2} +\newcommand{\PrimPat}{\Prim~\overline{\beta}} +\newcommand{\DefPat}{d~(\Ov{\beta})} +\newcommand{\Lam}[2]{\lambda#1.#2} +\newcommand{\TyLam}[3]{\lambda(\Ov{#1:#2}).#3} +\newcommand{\LamPat}[2]{\lambda#1.#2} +\newcommand{\DagPat}[2]{\{ \TP{#1}{#2} \}} +\newcommand{\MDagPat}[4]{\{ \TP{#1}{#2} \}^#3\Focus{#4}} +\newcommand{\Inj}[3]{#1\xleftarrow{}#3} 
+\newcommand{\SE}[3]{SE'|[#1|]~#2~#3} +\newcommand{\SEtop}[2]{SE|[#1|]~#2} +\newcommand{\TEnv}{\Gamma} +\newcommand{\Der}[2]{#1~\vdash~#2} +\newcommand{\DerV}[2]{#1~\vdash^{\text{\lst{v}}}~#2} +\newcommand{\DerC}[2]{#1~\vdash^{\text{\lst{c}}}~#2} +\newcommand{\DerEnv}[1]{\Der{\TEnv}{#1}} +\newcommand{\DerEnvV}[1]{\DerV{\TEnv}{#1}} +\newcommand{\DerEnvC}[1]{\DerC{\TEnv}{#1}} +\newcommand{\Dif}[1]{\partial#1} +\newcommand{\View}[2]{#1\sphericalangle #2} +\newcommand{\This}{\text{\lst{this}}} +\newcommand{\Section}[1]{Section~\ref{section:#1}} +\newcommand{\MaxVlqSize}{VLQ_{max}} +\newcommand{\MaxBits}{Bits_{max}} +\newcommand{\MaxBytes}{Bytes_{max}} +\newcommand{\MaxTypeSize}{Type_{max}} +\newcommand{\MaxDataSize}{Data_{max}} +\newcommand{\MaxBox}{Box_{max}} +\newcommand{\MaxSigmaProp}{SigmaProp_{max}} +\newcommand{\MaxAvlTree}{AvlTree_{max}} +\newcommand{\MaxConstSize}{Const_{max}} +\newcommand{\MaxExprSize}{Expr_{max}} +\newcommand{\MaxErgoTreeSize}{ErgoTree_{max}} + +\newtheorem{definition}{Definition} + +\setcounter{tocdepth}{2} + +\begin{document} + +\title{ErgoTree Specification} + +\author{authors} + + +\maketitle + +\begin{abstract} +In this document we consider typed abstract syntax of the language +called \ASDag which defines semantics of a condition which protects a closed +box in the Ergo Platform blockchain. Serialized graph is written into a box. +Most of Ergo users are unaware of the graph since they are developing contracts in higher-level languages, such as +ErgoScript. However, for developers of alternative higher-level languages, client libraries and clients knowledge of +internals would be highly useful. This document is providing the internals, namely, the following data structures and +algorithms: +\begin{itemize} +\item{} Serialization to a binary format and graph deserialization from the binary form. +\item{} When a graph is considered to be well-formed and when not. +\item{} Type system and typing rules. 
+\item{} How graph is transformed into an execution trace. +\item{} How execution trace is costed. +\item{} How execution trace is reduced into a Sigma-expression. +\item{} How Sigma-expression is proven and verified. +\end{itemize} +\end{abstract} + +\knote{Please note that the document is intended for a general high-skilled tech audience, so avoid describing Scala +classes etc.} + +\tableofcontents + +\section{Introduction} +\label{sec:intro} + +The design space of programming languages is very broad, ranging from +general-purpose languages like C, Java, Python up to specialized languages +like SQL, HTML, CSS, etc. + +Since Ergo's goal is to provide a platform for contractual money, the choice +of the language for writing contracts is very important. + +First of all, the language and contract execution environment should be +\emph{deterministic}. Once created and stored in the Ergo blockchain, a smart +contract should always behave predictably and deterministically; it should only depend on a well-defined data context and nothing else. +As long as the data context doesn't change, any execution of the contract +should return the same value any time it is executed, on any execution +platform, and even on any \emph{compliant} language implementation. +No general purpose programming language is deterministic because +all of them provide non-deterministic operations. ErgoScript doesn't have +non-deterministic operations. + +Second, the language should be \emph{spam-resistant}, meaning it should +facilitate defending against attacks when malicious contracts can overload +network nodes and bring the blockchain down. To fulfill this goal ErgoScript +supports \emph{ahead-of-time cost estimation}, a fast check performed before +contract execution to ensure the evaluation cost is within acceptable +bounds. 
In general, such cost prediction is not possible; however, if the +language is simple enough (which is the case of ErgoScript) and if operations +are carefully selected, then costing is possible and doesn't require +usage of Gas~\mnote{cite Ethereum} and allows avoiding related problems~\mnote{cite Gas related problems}. + +Third, while being simple, the contract language should be \emph{expressive +enough}. It should be possible to implement most of the practical scenarios, +which is the case of ErgoScript. In our experience expressivity of a contract +language comes hand in hand with the design and capabilities of the Ergo blockchain +platform itself, making the whole system \emph{Turing-complete} as we +demonstrated in \mnote{cite TuringPaper}. + +Fourth, simplicity and expressivity are often characteristics of +domain-specific languages~\mnote{cite DSL}. From this perspective ErgoScript +is a DSL for writing smart contracts. The language directly captures the +Ubiquitous Language~\cite{UbiqLang} of the smart contracts +domain, directly manipulating first-class Boxes, Tokens, Zero-Knowledge +Sigma-Propositions etc.; these are the novel features Ergo aims to provide as a platform/service +for custom user applications. The domain-specific nature of ErgoScript also facilitates spam-resistance, +because the operations of ErgoScript are all carefully selected to be \emph{costing +friendly}. + +And last, but not least, we wanted our new language to be, nevertheless, +\emph{familiar to most} since we aim to address as large an audience of +programmers as possible with minimum surprise and WTF ratio +\cite{WTFLang}. +The syntax of ErgoScript is inspired by Scala/Kotlin, but in fact it shares a +common subset with Java and C\#, thus if you are proficient in any of these +languages you will be right at home with ErgoScript as well. + +Guided by these requirements we designed ErgoScript as a new yet familiar +looking language which directly supports all novel features of the Ergo blockchain. 
+We also implemented reference implementation of the specification described in this document. + +\include{language} + +\include{types} + +\include{evaluation} + +\include{serialization} + +\include{graph} + +\include{costing} + + +\bibliographystyle{alpha} +\bibliography{spec.bib} + +\appendix + +\include{appendix_predeftypes} +\include{appendix_primops} +\include{appendix_ergotree_serialization} +\include{appendix_motivation} +\include{appendix_integer_encoding} + +\end{document} \ No newline at end of file diff --git a/docs/spec/type_serialization.tex b/docs/spec/type_serialization.tex new file mode 100644 index 0000000000..18d92a4630 --- /dev/null +++ b/docs/spec/type_serialization.tex @@ -0,0 +1,156 @@ +\subsection{Type Serialization} +\label{sec:ser:type} + +In this section we describe how the types (like \lst{Int}, \lst{Coll[Byte]}, +etc.) are serialized, then we define serialization of typed data. This will +give us a basis to describe serialization of Constant nodes of \ASDag. From +that we proceed to serialization of arbitrary \ASDag trees. + +For motivation behind this type encoding please see Appendix~\ref{sec:appendix:motivation:type}. 
+ +\subsubsection{Distribution of type codes} +\label{sec:ser:type:codedist} + +The whole space of 256 codes is divided as the following: + +\begin{figure}[h] \footnotesize +\(\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Interval} & \bf{Distribution} \\ + \hline + \lst{0x00} & special value to represent undefined type (\lst{NoType} in \ASDag) \\ + \hline + \lst{0x01 - 0x6F(111)} & data types including primitive types, arrays, options + aka nullable types, classes (in future), 111 = 255 - 144 different codes \\ + \hline + \lst{0x70(112) - 0xFF(255)} & function types \lst{T1 => T2}, 144 = 12 x 12 + different codes \\ + \hline +\end{tabularx}\) +\caption{Distribution of type codes} +\label{fig:ser:type:codedist} +\end{figure} + +\subsubsection{Encoding Data Types} + +There are 9 different values for primitive types and 2 more are reserved for future extensions. +Each primitive type has an id in a range {1,...,11} as the following. + +\begin{figure}[h] \footnotesize + \(\begin{tabularx}{\textwidth}{| l | X |} + \hline + \bf{Id} & \bf{Type} \\ \hline +1 & Boolean \\ \hline +2 & Byte\\ \hline +3 & Short (16 bit)\\ \hline +4 & Int (32 bit)\\ \hline +5 & Long (64 bit)\\ \hline +6 & BigInt (java.math.BigInteger)\\ \hline +7 & GroupElement (org.bouncycastle.math.ec.ECPoint)\\ \hline +8 & SigmaProp \\ \hline +9 & reserved for Char \\ \hline +10 & reserved for Double \\ \hline +11 & reserved \\ \hline +\end{tabularx}\) +\label{fig:ser:type:primtypes} +\end{figure} + +For each type constructor like \lst{Coll} or \lst{Option} we use the encoding +schema defined below. Type constructor has associated \emph{base code} (e.g. +12 for \lst{Coll[_]}, 24 for \lst{Coll[Coll[_]]} etc. ), which is multiple of +12. +Base code can be added to primitive type id to produce code of constructed +type, for example 12 + 1 = 13 is a code of \lst{Coll[Byte]}. The code of type +constructor (12 in this example) is used when type parameter is non-primitive +type (e.g. 
\lst{Coll[(Byte, Int)]}). In this case the code of type +constructor is read first, and then recursive descent is performed to read +bytes of the parameter type (in this case \lst{(Byte, Int)}). This encoding +allows very simple and quick decoding by using div and mod operations. + +The interval of codes for data types is divided as follows: + +\begin{figure}[h] \footnotesize + \(\begin{tabularx}{\textwidth}{| l | l | X |} +\hline +\bf{Interval} & \bf{Type constructor} & \bf{Description} \\ \hline +0x01 - 0x0B(11) & & primitive types (including 2 reserved) \\ \hline +0x0C(12) & \lst{Coll[_]} & Collection of non-primitive types (\lst{Coll[(Int,Boolean)]}) \\ \hline +0x0D(13) - 0x17(23) & \lst{Coll[_]} & Collection of primitive types (\lst{Coll[Byte]}, \lst{Coll[Int]}, etc.) \\ \hline +0x18(24) & \lst{Coll[Coll[_]]} & Nested collection of non-primitive types (\lst{Coll[Coll[(Int,Boolean)]]}) \\ \hline +0x19(25) - 0x23(35) & \lst{Coll[Coll[_]]} & Nested collection of primitive types (\lst{Coll[Coll[Byte]]}, \lst{Coll[Coll[Int]]}) \\ \hline +0x24(36) & \lst{Option[_]} & Option of non-primitive type (\lst{Option[(Int, Byte)]}) \\ \hline +0x25(37) - 0x2F(47) & \lst{Option[_]} & Option of primitive type (\lst{Option[Int]}) \\ \hline +0x30(48) & \lst{Option[Coll[_]]} & Option of Coll of non-primitive type (\lst{Option[Coll[(Int, Boolean)]]}) \\ \hline +0x31(49) - 0x3B(59) & \lst{Option[Coll[_]]} & Option of Coll of primitive type (\lst{Option[Coll[Int]]}) \\ \hline +0x3C(60) & \lst{(_,_)} & Pair of non-primitive types (\lst{((Int, Byte), (Boolean,Box))}, etc.) 
\\ \hline +0x3D(61) - 0x47(71) & \lst{(Int, _)} & Pair of types where first is primitive (\lst{(Int, _)}) \\ \hline +0x48(72) & \lst{(_,_,_)} & Triple of types \\ \hline +0x49(73) - 0x53(83) & \lst{(_, Int)} & Pair of types where second is primitive (\lst{(_, Int)}) \\ \hline +0x54(84) & \lst{(_,_,_,_)} & Quadruple of types \\ \hline +0x55(85) - 0x5F(95) & \lst{(_, _)} & Symmetric pair of primitive types (\lst{(Int, Int)}, \lst{(Byte,Byte)}, etc.) \\ \hline +0x60(96) & \lst{(_,...,_)} & \lst{Tuple} type with more than 4 items \lst{(Int, Byte, Box, Boolean, Int)} \\ \hline +0x61(97) & \lst{Any} & Any type \\ \hline +0x62(98) & \lst{Unit} & Unit type \\ \hline +0x63(99) & \lst{Box} & Box type \\ \hline +0x64(100) & \lst{AvlTree} & AvlTree type \\ \hline +0x65(101) & \lst{Context} & Context type \\ \hline +0x66(102) & \lst{String} & String \\ \hline +0x67(103) & \lst{IV} & TypeIdent \\ \hline +0x68(104)- 0x6E(110)& & reserved for future use \\ \hline +0x6F(111) & & Reserved for future \lst{Class} type (e.g. user-defined types) \\ \hline +\end{tabularx}\) +\label{fig:ser:type:primtypes} +\end{figure} + +\subsubsection{Encoding Function Types} + +We use $12$ different values for both domain and range types of functions. This +gives us $12 * 12 = 144$ function types in total and allows us to represent $11 * +11 = 121$ functions over primitive types using just a single byte. + +Each code $F$ in a range of function types can be represented as +$F = D * 12 + R + 112$, where $D, R \in \{0,\dots,11\}$ - indices of domain and range types respectively, +$112$ - is the first code in an interval of function types. + +If $D = 0$ then domain type is not primitive and recursive descent is necessary to write/read domain type. + +If $R = 0$ then range type is not primitive and recursive descent is necessary to write/read range type. + +\subsubsection{Recursive Descent} + +When an argument of a type constructor is not a primitive type we fallback to +the simple encoding schema.
+ +In such a case we emit the special code for the type constructor according to +the table above and descend recursively to every child node of the type tree. + +We do this descent only for those children whose code cannot be embedded in +the parent code. For example, serialization of \lst{Coll[(Int,Boolean)]} +proceeds as follows: +\begin{enumerate} +\item emit \lst{0x0C} because element of collection is not primitive +\item recursively serialize \lst{(Int, Boolean)} +\item emit \lst{0x3D} because first item in the pair is primitive +\item recursively serialize \lst{Boolean} +\item emit \lst{0x02} - the code for primitive type \lst{Boolean} +\end{enumerate} + +\noindent Examples + +\begin{figure}[h] \footnotesize +\(\begin{tabularx}{\textwidth}{| l | c | c | l | c | X |} +\hline +\bf{Type} &\bf{D} & \bf{R} & \bf{Bytes} & \bf{\#Bytes} & \bf{Comments} \\ \hline +\lst{Byte} & & & 1 & 1 & \\ \hline +\lst{Coll[Byte]} & & & 12 + 1 = 13 & 1 & \\ \hline +\lst{Coll[Coll[Byte]]} & & & 24 + 1 = 25 & 1 & \\ \hline +\lst{Option[Byte]} & & & 36 + 1 = 37 & 1 & register \\ \hline +\lst{Option[Coll[Byte]]} & & & 48 + 1 = 49 & 1 & register \\ \hline +\lst{(Int,Int)} & & & 84 + 3 = 87 & 1 & fold \\ \hline +\lst{Box=>Boolean} & 7 & 2 & 198 = 7*12+2+112 & 1 & exist, forall \\ \hline +\lst{(Int,Int)=>Int} & 0 & 3 & 115=0*12+3+112, 87 & 2 & fold \\ \hline +\lst{(Int,Boolean)} & & & 60 + 3, 2 & 2 & \\ \hline +\lst{(Int,Box)=>Boolean} & 0 & 2 & 0*12+2+112, 60+3, 7 & 3 & \\ \hline +\end{tabularx}\) +\label{fig:ser:type:primtypes} +\end{figure} diff --git a/docs/spec/types.tex b/docs/spec/types.tex new file mode 100644 index 0000000000..7e973617d4 --- /dev/null +++ b/docs/spec/types.tex @@ -0,0 +1,29 @@ +\section{Typing} +\label{sec:typing} + +\langname is a strictly typed language, in which every term should have a +type in order to be well-formed and evaluated. Typing judgement of the form +$\Der{\Gamma}{e : T}$ says that $e$ is a term of type $T$ in the typing +context $\Gamma$.
+ +\begin{figure}[h] +\input{figures/fig_typing.tex} +\caption{Typing rules of \langname} +\label{fig:typing} +\end{figure} + +Note that each well-typed term has exactly one type, hence we assume there +exists a function $termType: Term \to \mathcal{T}$ which relates each well-typed +term with the corresponding type. + +Primitive operations can be parameterized with type variables, for example +addition (Table~\ref{table:primops}) has the signature $+~:~ (T,T) \to T$ +where $T$ is a numeric type (Table~\ref{table:predeftypes}). Function $ptype$, defined in Appendix~\ref{sec:appendix:primops}, returns a type of primitive operation specialized for concrete types of its arguments, for example $ptype(+,\lst{Int}, \lst{Int}) = (\lst{Int}, \lst{Int}) \to \lst{Int}$. + +Similarly, the function $mtype$ returns a type of method specialized for concrete types of the arguments of the \lst{MethodCall} term. + +\lst{BlockExpr} rule defines a type of well-formed block expression. It +assumes a total ordering on \lst{val} definitions. If a block expression is +not well-formed then it cannot be typed and evaluated. + +The rest of the rules are standard for typed lambda calculus.
\ No newline at end of file diff --git a/docs/wpaper/sigma.bib b/docs/wpaper/sigma.bib index e8fc15d421..6a79c96d36 100644 --- a/docs/wpaper/sigma.bib +++ b/docs/wpaper/sigma.bib @@ -1249,4 +1249,42 @@ @misc{ethattacks author={Bok Khoo}, year=2016, note={\url{https://www.bokconsulting.com.au/blog/ethereum-network-attackers-ip-address-is-traceable/}} -} \ No newline at end of file +} + +@inproceedings{seijas2018marlowe, + title={Marlowe: Financial Contracts on Blockchain}, + author={Seijas, Pablo Lamela and Thompson, Simon}, + booktitle={International Symposium on Leveraging Applications of Formal Methods}, + pages={356--375}, + year={2018}, + organization={Springer} +} + +@article{sergey2018scilla, + title={Scilla: a smart contract intermediate-level language}, + author={Sergey, Ilya and Kumar, Amrit and Hobor, Aquinas}, + journal={arXiv preprint arXiv:1801.00687}, + year={2018} +} + +@inproceedings{crary2015peer, + title={Peer-to-peer affine commitment using Bitcoin}, + author={Crary, Karl and Sullivan, Michael J}, + booktitle={ACM SIGPLAN Notices}, + volume={50}, + number={6}, + pages={479--488}, + year={2015}, + organization={ACM} +} + +@inproceedings{o2017simplicity, + title={Simplicity: A new language for blockchains}, + author={O'Connor, Russell}, + booktitle={Proceedings of the 2017 Workshop on Programming Languages and Analysis for Security}, + pages={107--120}, + year={2017}, + organization={ACM} +} + + diff --git a/docs/wpaper/sigma.tex b/docs/wpaper/sigma.tex index 4ae0b932fb..cb0bfc85f3 100644 --- a/docs/wpaper/sigma.tex +++ b/docs/wpaper/sigma.tex @@ -62,14 +62,14 @@ \title{\langname, a Cryptocurrency Scripting Language\\Supporting Noninteractive Zero-Knowledge Proofs} -\author{authors} +\author{Ergo Developers} \maketitle \begin{abstract} -This paper describes \langname, a powerful and protocol-friendly scripting language for cryptocurrencies. Programs in \langname are used to specify the conditions under which currency can be spent. 
The language supports a type of non-interactive zero-knowledge proofs called $\Sigma$-protocols and is flexible enough to allow for ring-signatures, multisignatures, multiple currencies, atomic transactions, self-replicating scripts, and long-term computation. \lnote{this list of features could use some improvement} +This paper describes \langname, a powerful and protocol-friendly scripting language for cryptocurrencies. Programs in \langname are used to specify the conditions under which currency can be spent. The language supports a type of non-interactive zero-knowledge proofs called $\Sigma$-protocols and is flexible enough to allow for ring-signatures, multisignatures, multiple currencies, atomic swaps, self-replicating scripts, and long-term computation. \end{abstract} @@ -81,16 +81,18 @@ \subsection{Background} The Bitcoin Script language is a primitive stack-based language without loops~\cite{bscript}. To spend an output protected by a program, a spending transaction must provide a program in the same language, and the concatenation of the two programs must evaluate to \emph{true}. The creator of the spending transaction can be viewed as a prover (for example, proving knowledge of the secret key by producing a signature), where the statement that needs to be proven is specified by the output that is being spent. Transaction validity is verified by evaluating programs. Bounded validation time is ensured by the absence of loops in the programming language and a maximum program size of 10 kilobytes. Even so, some denial-of-service attacks exploiting script validation time have appeared \cite{bitcoindos, fivehrs, ethattacks}. On the other hand, the deliberate simplicity of the programming language limits the kinds of contracts that can be created on the Bitcoin platform. -On other end of the generality spectrum, Ethereum allows for arbitrary Turing-complete programs~\cite{wood2014ethereum}. 
This approach requires charging for computation \knote{it is also required to have gas limit per block, otherwise DoS is possible anyway if there are enough resources to pay for it, also, a miner can do DoS for free} in order to prevent denial-of-service attacks, because the running time of a Turing-complete program cannot, in general, be estimated without actually running the program. +On other end of the generality spectrum, Ethereum allows for arbitrary Turing-complete programs~\cite{wood2014ethereum}. This approach requires charging for computation, in order to prevent denial-of-service attacks, because the running time of a Turing-complete program cannot, in general, be estimated without actually running the program. It is also needed in this case to have gas limit per block, otherwise, DoS is possible anyway, for example, a miner can do attack the network for free. -A variety of cryptocurrency languages have appeared. We do not survey them here, but refer the reader to \cite[p. 11]{Scilla}. \lnote{if time, can talk about Simplicity, Plutus, TypeCoin, Rholang, Scilla \dots} \knote{As detailed comparison would be time-consuming, lets just provide links to all the competitors. } +A variety of cryptocurrency languages have appeared. We do not survey them here, but refer the reader to Scilla~\cite{Scilla}, +Simplicity~\cite{o2017simplicity}, Marlowe~\cite{seijas2018marlowe}, TypeCoin~\cite{crary2015peer}, and many other languages. \subsection{Our Contribution: \langname} -In this paper we introduce a new language called \langname that is specifically designed to be friendly to cryptographic protocols and applications. \langname is considerably more powerful than Bitcoin Script. As \langname contains no looping or recursive constructs, individual scripts in \langname are not Turing-complete. In fact, given a program in \langname, it is easy to obtain an estimate of its running time (see \ref{sec:runningtime}). 
However, because \langname allows for self-replication, \langname can be used to create Turing-complete processes in a blockchain, as shown in \cite{CKM18} (see Section \ref{sec:self-replicating}). +In this paper we introduce a new language called \langname that is specifically designed to be friendly to cryptographic protocols and applications. \langname is considerably more powerful than Bitcoin Script. As \langname contains no unbounded looping or recursive constructs, individual scripts in \langname are not Turing-complete. In fact, given a program in \langname, it is easy to obtain an estimate of its running time. However, because \langname allows for self-replication, \langname can be used to create Turing-complete processes in a blockchain, as shown in \cite{CKM18} (see also Section \ref{sec:self-replicating}). \paragraph{Built-in $\Sigma$-protocols} -Our new language incorporates proving and verifying as first-class primitives, giving developers to a subclass of cryptographic proof systems known as non-interactive $\Sigma$-protocols (pronounced ``sigma-protocols''). Thus, a script protecting a transaction output can contain statements ($\Sigma$-statements) that need to proven (by producing $\Sigma$-proofs) in order to spend the output. + +Our new language incorporates proving and verifying as first-class primitives, giving developers access to a subclass of cryptographic proof systems known as non-interactive $\Sigma$-protocols (pronounced ``sigma-protocols''). Thus, a script protecting a transaction output can contain statements ($\Sigma$-statements) that need to be proven (by producing $\Sigma$-proofs) in order to spend the output. Conceptually, $\Sigma$-proofs \cite{Cra96} are generalizations~\cite{CL06} of digital signatures. 
In fact, Schnorr signature scheme~\cite{Sch91} (whose more recent version is popularly known as EdDSA \cite{BDLSY12,rfc8032}) is the canonical example of a $\Sigma$-proof: it proves that the recipient knows the discrete logarithm of the public key (the proof is attached to a specific message, such as a particular transaction, and thus becomes a signature on the message; all $\Sigma$-proofs described here are attached to specific messages). $\Sigma$-protocols exist for proving a variety of properties and, importantly for \langname, elementary $\Sigma$-protocols can be combined into more sophisticated ones using the techniques of \cite{CDS94}. For an introduction to $\Sigma$-protocols, we refer the reader to \cite{Dam10} and \cite[Chapter 6]{HL10}. @@ -100,52 +102,51 @@ \subsection{Our Contribution: \langname} \item A proof of knowledge of discrete logarithm with respect to a fixed group generator: given a group element $h$, the proof convinces a verifier that the prover knows $w$ such that $h=g^w$, where $g$ is the group generator (also known as base point), without revealing $w$. This is the same as a Schnorr signature with public key $h$. \item A proof that of equality of discrete logarithms (i.e., a proof of a Diffie-Hellman tuple): given group elements $g_1, g_2, u_1, u_2$, the proof convinces a verifier that the prover knows $w$ such that $u_1=g_1^w$ and $u_2=g_2^w$, without revealing $w$ \end{itemize} -\lnote{This is not the notation used in the code, but in the code, $g$ is overloaded because it's the base point in item 1 and an arbitrary point in item 2; $h$ is also overloaded. Perhaps we should change the code to better notation and then make this text match it.} \langname also provides the ability to build more sophisticated $\Sigma$-protocols by using connectives $\andnode$, $\ornode$, and $\tnode$ (also known as $k$-out-of-$n$). 
Crucially, the proof for an $\ornode$ and a $\tnode$ connective does not reveal which of the relevant values the prover knows: for example, in \langname a ring signature by public keys $h_1, \dots, h_n$ can be specified as an $\ornode$ of $\Sigma$-protocols for proving knowledge of discrete logarithms of $h_1, \dots, h_n$. The proof can be constructed with the knowledge of just one such discrete logarithm, and does not reveal which one was used in its construction. -Our implementation of these protocols is in Scala \cite{scala} and Java \cite{java}. The implementation was informed by SCAPI \cite{scapi}, but does not use SCAPI code. We use Bouncy Castle \cite{bouncycastle} for big integer and elliptic curve operations; the implementation of arithmetic in fields of characteristic 2 (for $\tnode$ connectives) is our own. \lnote{any other credits or background info?} +Our implementation of these protocols is in Scala \cite{scala} and Java \cite{java}. The implementation was informed by SCAPI \cite{scapi}, but does not use SCAPI code. We use Bouncy Castle \cite{bouncycastle} for big integer and elliptic curve operations; the implementation of arithmetic in fields of characteristic 2 (for $\tnode$ connectives) is our own. \paragraph{Rich context, enabling self-replication} In addition to $\Sigma$-protocols, \langname allows for predicates over the state of the blockchain and the current transaction. These predicates can be combined, via Boolean connectives, with $\Sigma$-statements, and are used during transaction validation. The set of predicates is richer than in Bitcoin, but still lean in order to allow for efficient processing even by light clients. Like in Bitcoin, we allow the use of current height of the blockchain; unlike Bitcoin, we also allow the use of information contained in the spending transaction, such as inputs it is trying to spend and outputs it is trying to create. 
This feature enables self-replication and sophisticated (even Turing-complete) long-term script behaviour, as described in examples below. -\langname is statically typed (with compile-time type checking) and allows the usual operations, such as integer arithmetic. - -\paragraph{Running time estimation and safety checks} -\lnote{someone should fill this in, because I know very little about it} -See Section \ref{sec:safety} for more details. - - +\langname is statically typed (with compile-time type checking) functional +language with first-class lambda expressions, collection, tuple and optional +type values, it allows standard operations, such as integer arithmetic, +logical and comparison operations as well as operations on group elements and authenticated dictionaries. +\ignore{ + \paragraph{Running time estimation and safety checks} + \lnote{someone should fill this in, because I know very little about it} + See Section \ref{sec:safety} for more details. +} \section{\langname Language Description} -\lnote{Should we add, for every code example, a link to the code where it appears? That would help the reader.} - -The language syntax is similar to Scala, and therefore many of the constructs are easy to read for those familiar with Scala. +The language syntax is a subset of Scala with the same meaning, and therefore many of the constructs are easy to read for those familiar with Scala. Before we describe the language, let us fix some terminology. A \emph{box} (often called a ``coin'' in other literature) contains some amount (\emph{value}, measured in Ergo tokens) and is protected by a \emph{script} (boxes also contain additional information, such as other tokens; this information is described in detail in Section \ref{sec:box-registers}). A \emph{transaction} spends the value of boxes that are its \emph{inputs} (which are outputs of some earlier transaction) and produces boxes that are its \emph{outputs}. 
In a given transaction, the sum of the values of the inputs must equal the sum of the values of the outputs (as we describe below, the scripting language is rich enough to allow for payment transactions fees and for minting of new coins without violating this rule). -All the unspent transaction outputs (\emph{UTXO}) at a given time represent the value stored in the blockchain. A script for a box must evaluate to ``true'' when this box is used as an input to a transaction. This evaluation is helped by a \emph{proof} (for $\Sigma$-statements) and a \emph{context}, which are part of the transaction. The proof is produced by someone who knows the relevant secrets, such as secret keys; the context contains information about the transaction, such as details of its inputs and outputs, and the current state of the blockchain, such as the current number of blocks (height) and the root of the Merkle tree that contains the UTXO set. -\dnote{This should change in future, e.g. fixed number of last headers is going to be added here (and header contains height and root hash).} \lnote{adjust this as needed} +All the unspent transaction outputs (\emph{UTXO}) at a given time represent the value stored in the blockchain. A script for a box must evaluate to ``true'' when this box is used as an input to a transaction. This evaluation is helped by a \emph{proof} (for $\Sigma$-statements) and a \emph{context}, which are part of the transaction. The proof is produced by someone who knows the relevant secrets, such as secret keys; the context contains information about the spending transaction, such as details of its inputs and outputs, and the current state of the blockchain, such as the current height of the blockchain and last 10 block headers from the blockchain. \subsection{$\Sigma$-Statements} - -\lnote{every script example here should be also in our test code exactly as written, to make sure our own paper passes our own tests. 
These should be edited as the language evolves.} - -\lnote{If we want to show examples of the underlying syntax tree to which the language gets compiled, we can to that here, in parallel with examples in the language} +\label{sec:sigma-statements} The simplest script allows the owner of a public key to spend an output box in a future transaction by issuing a signature with the corresponding secret key. If the variable \texttt{pk} holds the public key, then this script is specified simply as a string \begin{verbatim} pk \end{verbatim} -In order for the compiler to know what value the variable \texttt{pk} is referring to, the compiler needs to be supplied with an \emph{environment}, which, in this case, is a single-element map, mapping the string \texttt{"pk"} to the object holding the public key.\footnote{This map is an object in Scala, and is passed to the compiler as a parameter together with the script (which is passed in as a string) when the compiler is invoked from within Scala code. We postpone the discussion of how to invoke the compiler outside of Scala code.\lnote{we should add this discussion at some point}} -Thus, the constant public key is hardwired into the script at compile time. When the script is later evaluated (i.e., when the box is used as a transaction input), a $\Sigma$-proof of knowledge of the corresponding secret key must be supplied by the prover. +In order for the compiler to know what value the variable \texttt{pk} is referring to, the compiler needs to be supplied with an \emph{environment}, which, in this case, is a single-element map, mapping the string \texttt{"pk"} to the object holding the public key.\footnote{This map is an object in Scala, and is passed to the compiler as a parameter together with the script (which is passed in as a string) when the compiler is invoked from within Scala code. 
We defer the discussion of how to invoke the compiler outside of Scala code to another article.} +The value of public key is hardwired into the script at compile time, thus +\texttt{"pk"} can also be called \emph{named constant} to reflect the fact +that it cannot change. When the script is later evaluated (i.e., when the box +is used as a transaction input), a $\Sigma$-proof of knowledge of the +corresponding secret key must be supplied by the prover. A slightly more complex script may allow either one of two people to spend an box. If Alice owns public key \texttt{pkA} and Bob owns public key \texttt{pkB} (with corresponding secret key \texttt{skA} and \texttt{skB}), then the script that would allow either one of them to spend the box is \begin{verbatim} @@ -188,9 +189,6 @@ \subsection{$\Sigma$-Statements} \end{verbatim} The same result could be achieved by writing an \texttt{anyOf} of all possible 3-out-of-6 (twenty) combinations. - -\lnote{should probably do a diffie-hellman example, and maybe another one below with getting some of the DH values from a context. E.g., $g_1$ is just $g$, $g_2$ is $pk_A$, $h_1$ is a hash of a message in a context, and $h_2$ is from the context. } - \subsection{Mixing $\Sigma$-statements with other statements} \langname allows combining statements that require proofs with other boolean statements. These statements can refer to the \emph{context}, which has predefined variables with information about the transaction in which the script is evaluated (i.e., the box is spent). For example, @@ -203,20 +201,30 @@ \subsection{Mixing $\Sigma$-statements with other statements} In general, script evaluation reduces the script to a $\Sigma$-statement by first evaluating all the Boolean predicates that are not $\Sigma$-statements. As we saw in the above example, the resulting $\Sigma$-statement will, in general, depend on the values of the Boolean predicates (such as whether \texttt{HEIGHT > 500}). 
-We emphasize that this evaluation is not the same as the usual left-to-right lazy evaluation of logical expressions, because expressions involving $\Sigma$-statements are not treated the same way as usual boolean expressions: they are evaluated last and in zero-knowledge. In fact, $\texttt{pkA}$ is not of type \texttt{Boolean}. It is of type \texttt{GroupElement}; the compiler automatically converts it to \texttt{proveDlog(\texttt{pkA})}, which is of type \texttt{ProveDlog}. Type \texttt{ProveDlog} is special because it is used differently by the prover (who constructs the proof) and the verifier (who checks it); moreover, variables of this type cannot be stored in boxes (Section \ref{sec:context}) or context extensions (Section \ref{sec:extension}). \lnote{the type system is undergoing changes, so this needs fixing} - - -\lnote{Should we have an example with fees? According to Dima, fees are implemented as boxes with TrueLeaf as a proposition, last transaction in a block consumes all this just created boxes and in favor of miner.} - +We emphasize that this evaluation is not the same as the usual left-to-right +lazy evaluation of logical expressions, because expressions involving +$\Sigma$-statements are not treated the same way as usual boolean +expressions: they are evaluated last and in zero-knowledge. + +In fact, $\texttt{pkA}$ is not of type \texttt{Boolean}. It is a constant of +type \texttt{SigmaProp} with the concrete value +\texttt{ProveDlog(\texttt{ge})}, for some public key \texttt{ge} of +\texttt{GroupElement} type. The type \texttt{SigmaProp} is special in +\langname because it is used differently by the prover (who constructs the +proof) and the verifier (who checks it). 
In this case \texttt{ProveDlog} +requires: 1) the prover (when the transaction is created) to provide a proof +of knowledge of the discrete logarithm corresponding to the value \texttt{ge}; +2) the verifier (when the transaction is added to a block) to check that the +proof was indeed provided. \subsection{Accessing the Context and Box Contents} \label{sec:context} -In addition to the predefined variable \texttt{HEIGHT}, the context contains predefined collections \texttt{INPUTS} and \texttt{OUTPUTS}, which refer to the inputs and outputs of the transaction. Elements of these collections are of type \texttt{Box}. The script also has access to its own box via the context variable \texttt{SELF} of type \texttt{Box}. Note that \texttt{SELF} is also an element of the \texttt{INPUTS} collection, because the script is executed when the box is being spent. +In addition to the predefined variable \texttt{HEIGHT}, the context contains predefined collections \texttt{INPUTS} and \texttt{OUTPUTS}, which refer to the inputs and outputs of the spending transaction. Elements of these collections are of type \texttt{Box}. The script also has access to its own box via the context variable \texttt{SELF} of type \texttt{Box}. Note that \texttt{SELF} is also an element of the \texttt{INPUTS} collection, because the script is executed when the box is being spent. -To access information inside a box \texttt{b}, scripts can use \texttt{b.value} for the amount, \texttt{b.propositionBytes} for the protecting script, and \texttt{b.id} for the identifier of the box, which is the BLAKE2b-256 hash of the contents of the box. Boxes include additional information in \emph{registers}; each box is unique, because one of its registers includes the transaction id in which it was created as an output, and its own index in the \texttt{OUTPUTS} collection, accessible through \texttt{b.R3} (see Section \ref{sec:box-registers} for more on registers).
+To access information inside a box \texttt{b}, scripts can use \texttt{b.value} for the amount, \texttt{b.propositionBytes} for the protecting script, and \texttt{b.id} for the identifier of the box, which is the BLAKE2b-256 hash of the contents of the box. Boxes include additional information in \emph{registers}; each box is unique, because one of its registers includes the transaction id in which it was created as an output, and its own index in the outputs of the transaction which created the box, accessible through \texttt{b.R3} (see Section \ref{sec:box-registers} for more on registers). \paragraph{Example: two boxes together} -Access to this information allows us, for example, to create an output box that can be spent only in the same transaction as another output box, and only if no other inputs are present in the transaction. If \texttt{friend} stands for an already existing box (per the environment mapping), then we create a new box that can be spent only together with \texttt{friend} and no other input by the following script (note that it uses the collection property \texttt{size} and collection indexing, starting at 0, denoted by parentheses): +Access to this information allows us, for example, to create an output box that can be spent only in the same transaction as another known box, and only if no other inputs are present in the transaction. If \texttt{friend} stands for an already existing box (per the environment mapping), then we create a new box that can be spent only together with \texttt{friend} and no other input by the following script (note that it uses the collection property \texttt{size} and collection indexing, starting at 0, denoted by parentheses): \begin{verbatim} INPUTS.size == 2 && INPUTS(0).id == friend.id @@ -227,7 +235,7 @@ \subsection{Accessing the Context and Box Contents} We can be more permissive and allow for other inputs in addition to the friend box. 
To do so, we will examine the input collection using the \texttt{exists} operator, which applies a boolean function to each collection element until it finds one that satisfies the function or finds that none exists. To define a function, we use lambda syntax; the argument type (in this case \texttt{Box}) is specified with a colon after the argument name \texttt{inputBox}. We name the function using the \texttt{val} keyword. \begin{verbatim} { - val isFriend = { (inputBox: Box) => inputBox.id == friend.id } + def isFriend(inputBox: Box) = inputBox.id == friend.id INPUTS.exists (isFriend) } \end{verbatim} @@ -242,7 +250,7 @@ \subsection{Accessing the Context and Box Contents} \paragraph{Example: crowdfunding} Access to the context allows us to create a script for the following crowdfunding situation: a project backer (with key \texttt{backerPubKey}) wishes to give money to a project (with key \texttt{projectPubKey}), but only if the project raises enough money (at least \texttt{minToRaise}) from other sources by a deadline (expressed in terms of \texttt{HEIGHT}). -To give money to the project, the backer will create an output box protected by the following script. The script contains two conditions: one for the case the deadline has passed (enabling the backer to get the money back) and one for the case it succeeded (enabling the project to spend the money if the amount is at least \texttt{minToRaise} before the deadline). In order to ensure enough money has been raised, the script will search the output collection for a box with a sufficient value going to the \texttt{projectPubKey}. To check where the value of the output box is going, the script will read the script protecting the output box and compare it to the script corresponding to \texttt{proveDlog(projectPubKey)}; this script can be obtained by \texttt{projectPubKey.propBytes}. +To give money to the project, the backer will create an output box protected by the following script. 
The script contains two conditions: one for the case the deadline has passed (enabling the backer to get the money back) and one for the case it succeeded (enabling the project to spend the money if the amount is at least \texttt{minToRaise} before the deadline). In order to ensure enough money has been raised, the script will search the output collection for a box with a sufficient value going to the \texttt{projectPubKey}. To check where the value of the output box is going, the script will read the script protecting the output box and compare it to the script \texttt{"projectPubKey"} (that is the simple script described in Section~\ref{sec:sigma-statements}); bytes of this script can be obtained by \texttt{projectPubKey.propBytes}. \begin{verbatim} { @@ -273,8 +281,7 @@ \subsection{Context Extension and Hashing} says that spending can be done only using the signature of Alice, and only if the preimage of \texttt{hashOutput} is written in the context. Specifically, the script requires that the context extension should contain a variable (with id \texttt{1}), which is a collection of bytes that hashes to the value of \texttt{hashOutput} (the value of \texttt{hashOutput}, like \texttt{pkA}, is defined in the environment and is hardwired into the script at compile time). Note that although the script requires both the secret key corresponding to \texttt{pkA} and the hash preimage corresponding to \texttt{hashOutput}, there is the stark difference between how these two values are used: the secret key is not revealed in the proof (by the zero-knowledge property of $\Sigma$-proofs), while the hash preimage is explicitly written into the context extension and can be seen by anyone once the transaction takes place. \paragraph{Example: atomic transactions and cross-chain trading} -Suppose there are two separate blockchains, for two different asset types. Alice wants to receive some assets in her blockchain in exchange for giving some assets to Bob in his blockchain. 
\langname allows to accomplish in a simpler way than proposed for Bitcoin, for example, in \cite{Nol13}. -\lnote{is this the right text to go along with the reference to \cite{Nol13}?} +Suppose there are two separate blockchains, for two different asset types. Alice wants to receive some assets in her blockchain in exchange for giving some assets to Bob in his blockchain. \langname allows to accomplish it in a simpler way than proposed for Bitcoin, for example, in \cite{Nol13}. Alice creates a random secret \texttt{x} of 256 bits (32 bytes), hashes it to obtain the value \texttt{hx}, and creates a transaction in Bob's blockchain with the output box protected by the following script: \begin{verbatim} @@ -287,38 +294,39 @@ \subsection{Context Extension and Hashing} From this output, Bob learns \texttt{hx}. He creates a transaction in Alice's blockchain with an output box protected by the following script: \begin{verbatim} + val x = getVar[Coll[Byte]](1).get anyOf( Coll( HEIGHT > deadlineAlice && pkB, allOf( Coll( pkA, - getVar[Coll[Byte]](1).get.size < 33, - blake2b256(getVar[Coll[Byte]](1).get) == hx + x.size < 33, + blake2b256(x) == hx )) )) \end{verbatim} If Alice is satisfied with the amount Bob is giving her, she claims the value of this box by revealing \texttt{x}. Alice is protected as long as the hash function is one-way and she keeps her \texttt{x} secret until she claims the value of this box. (She should be careful to submit her transaction in enough time before \texttt{deadlineAlice} to make sure it gets processed before Bob can reclaim this money, because once she submits the transaction, \texttt{x} is public and thus, if the \texttt{deadlineAlice} passes before the transaction is processed, Bob can both reclaim this box and claim the box Alice left in his blockchain.) 
-Bob is protected, because in order for Alice to claim the value of this box, she must present a hash preimage of \texttt{hx} as a context extension in the transaction that uses this box as input. But once she does, Bob also learns this hash preimage, and is able to claim the value of the box that Alice placed into his blockchain. Note that Bob needs to choose \texttt{deadlineAlice} early enough to make sure that he is able to learn the preimage of \texttt{hx} from the transaction in Alice's block chain, and create a transaction in his blockchain, all before \texttt{deadlineBob} that Alice chose. Note also that \texttt{HEIGHT} in the two scripts is with respect to two different blockchains, which may be growing at a different rate. Bob also needs to make sure that he can use Alice's \texttt{x} as a context extension; to make sure Alice cannot cheat by making this \texttt{x} so long that it will not be allowed as a context extension in his blockchain, he uses the constraint \texttt{getVar[Coll[Byte]](1).get.size < 33}. +Bob is protected, because in order for Alice to claim the value of this box, she must present a hash preimage of \texttt{hx} as a context extension in the transaction that uses this box as input. But once she does, Bob also learns this hash preimage, and is able to claim the value of the box that Alice placed into his blockchain. Note that Bob needs to choose \texttt{deadlineAlice} early enough to make sure that he is able to learn the preimage of \texttt{hx} from the transaction in Alice's block chain, and create a transaction in his blockchain, all before \texttt{deadlineBob} that Alice chose. Note also that \texttt{HEIGHT} in the two scripts is with respect to two different blockchains, which may be growing at a different rate. 
Bob also needs to make sure that he can use Alice's \texttt{x} as a context extension; to make sure Alice cannot cheat by making this \texttt{x} so long that it will not be allowed as a context extension in his blockchain, he uses the constraint \texttt{x.size < 33}. The same approach can be used to trade different assets on the same blockchain, in case of multiasset blockchains. However, for transactions on a single blockchain, an alternative approach is also possible. We describe it below. \subsection{Box Registers and Additional Tokens} \label{sec:box-registers} -In addition to its value and protecting script, a box can contain up to 10 numbered registers, \texttt{R0} through \texttt{R9}. The first four of these have fixed meaning, as follows. For a box \texttt{b}, \texttt{b.R0} is the same as \texttt{b.value} and \texttt{b.R1} is the same as \texttt{b.propositionBytes}. - - -The third register, \texttt{b.R2}, is for specifying additional, secondary tokens contained in the box (the primary token amount is specified in \texttt{b.value}). \texttt{b.R2} contains a collection of pairs, the first element of each pair specifying the token id (as a byte string) and the second element specifying the amount (as a long constant). The maximum number of tokens in a box is set to 4. For every token id, the sum of amounts in inputs boxes should equal the sum of amounts in output boxes. There is one exception to this rule for the creation of new tokens. When a new token type gets created in a transaction, its id is equal to the id of the input box 0. Thus, the exception for the creation of new tokens is that if the token id in some output box is equal to the id of input box 0, then an arbitrary amount of this token can be output. Because each box has a unique id (see Section~\ref{sec:context}, this exception can be applied exactly once per token type. A newly created token can be emitted in a time-controlled fashion---see Section~\ref{sec:self-replicating}. 
+Together with its value and protecting script, a box can contain up to 10 numbered registers, \texttt{R0} through \texttt{R9}. The first four of these have fixed meaning, as follows. For a box \texttt{b}, \texttt{b.R0} is the same as \texttt{b.value} and \texttt{b.R1} is the same as \texttt{b.propositionBytes}. +The third register, \texttt{b.R2}, is for specifying additional, secondary tokens contained in the box (the primary token amount is specified in \texttt{b.value}). \texttt{b.R2} contains a collection of pairs, the first element of each pair specifying the token id (as a collection of 32 bytes) and the second element specifying the amount (as a long value). The maximum number of tokens in a box is set to 4. For every token id, the sum of amounts in inputs boxes be no less than the sum of amounts in output boxes. There is one exception to this rule for the creation of new tokens. When a new token type gets created in a transaction, its id is equal to the id of the input box 0. Thus, the exception for the creation of new tokens is that if the token id in some output box is equal to the id of input box 0, then an arbitrary amount of this token can be output. Because each box has a unique id (see Section~\ref{sec:context}, this exception can be applied exactly once per token type. A newly created token can be emitted in a time-controlled fashion---see Section~\ref{sec:self-replicating}. -The fourth register, \texttt{b.R3}, contains a 34-byte collection. This collection specifies the 32-byte transaction id \lnote{how is transaction id computed?} where this box appears as an output followed by a 2-byte sequence number of this box in the \texttt{OUTPUTS} collection of that transaction. This ensures that each box has unique \texttt{R3} and therefore a unique \texttt{id} as long as there are no hash collisions (because the \texttt{id} of the box is computed by hashing its content, including \texttt{R3}). 
+The fourth register, \texttt{b.R3}, contains a pair of integer and 34-byte collection~(its type then is $(Int, Coll[Byte])$). The collection specifies the 32-byte unique transaction id where this box appears as an output followed by a 2-byte sequence number of this box in the \texttt{OUTPUTS} collection of that transaction. This ensures that each box has unique \texttt{R3} and therefore a unique \texttt{id} as long as there are no hash collisions (because the \texttt{id} of the box is computed by hashing its content, including \texttt{R3}). +The first element of the pair contains creation height provided by user created the box. This height could only be less or equal than real inclusion height~ +(a transaction could not be included into the blockchain if it contains an output with creation height being no more than current blockchain height). The remaining six registers can be used arbitrarily. To access a register, the type of the register needs to be specified in brackets following the register number (for example, \texttt{b.R4[Int]}). Note that \texttt{b.R4[T]} is of type \texttt{Option[T]}; \texttt{b.R4[T].isDefined} indicates whether it actually contains a value of type \texttt{T}, and \texttt{b.R4[T].get} obtains this value. -In addition to registers, scripts can access two serialized versions of the box: \texttt{b.bytes} is a serialization of the entire box including the value, all the registers, and the script \lnote{anything else?}, and \texttt{b.byteWithNoRef}, which the same but without \texttt{R3} (so that a box can be viewed independently of where it appeared \lnote{is there a good reason someone would need this? Can we explain it here?} \knote{As output id and bytes are not predictable before spending transaction is generated, if a box is willing to require spending transaction to create box w. some semantics, the only way to do it is to use box bytes cleared from transaction-specific information. 
A simple example of such requirement is hash(Outputs(0).bytesWithNoRef) == "0x..."}). \lnote{Are there any other box properties that scripts can access?} +In addition to registers, scripts can access two serialized versions of the box: \texttt{b.bytes} is a serialization of the entire box including all its registers, and \texttt{b.byteWithNoRef}, which the same but without the transation identifier and the output index~(so that a box can be viewed independently of where it appeared. \paragraph{Example: atomic exchange on a single block chain} These box registers provide additional capabilities to \langname. Consider, for example, Alice and Bob who want to exchange tokens: they agree that Alice will give Bob 60 tokens of type \texttt{token1} (this type is mapped an actual token id in the environment map) in exchange for 100 Ergo tokens. Alice could create an output box with value 100 and protect it with following script: @@ -330,48 +338,26 @@ \subsection{Box Registers and Additional Tokens} tokenData._1 == token1, tokenData._2 >= 60L, OUTPUTS(0).propositionBytes == pkA.propBytes, - OUTPUTS(0).value >= 1L + OUTPUTS(0).R4[Coll[Byte]].get == SELF.id )) } \end{verbatim} This script ensures that the box can be spent only in a transaction that produces an output with 60 tokens of type token1 and gives them to Alice (Alice can reclaim the box after the deadline). +Moreover, the last condition (\texttt{OUTPUTS(0).R4[Col[Byte]].get == SELF.id}) ensures that if Alice has multiple such boxes outstanding at a given time, each will produce a separate output that identifies the corresponding input. This condition prevents the following attack: if Alice has two such boxes outstanding but the last condition is not present, then they can be both used in a single transaction that contains just one output with 60 tokens of type ``token1" --- the script of each input box will be individually satisfied, but Alice will get less only half of what owed to her. 
-Bob, similarly, could create an output box with value 0 -\lnote{for some reason the example in the code we want value 1 --- why? It's not explained. Similarly, why do we need \texttt{OUTPUTS(0).value >= 1L} in the script above? } and 60 tokens of type \texttt{token1} and protect it by the following script: +Bob, similarly, could create an output box with value about 0 and 60 tokens of type \texttt{token1} and protect it by the following script: \begin{verbatim} (HEIGHT > deadline && pkB) || allOf( Coll( - OUTPUTS(1).value >= 100, - OUTPUTS(1).propositionBytes == pkB.propBytes + OUTPUTS(1).value >= 100L, + OUTPUTS(1).propositionBytes == pkB.propBytes, + OUTPUTS(1).R4[Coll[Byte]].get == SELF.id, )) \end{verbatim} -\lnote{Is the \texttt{L} after constants 60, 1, and 100 necessary? Won't conversion happen automatically? Some constants in other scripts above that probably need to be long, like 100 for height, don't have it. We should be consistent and clarify this for the reader. Also, make sure that scripts here match testing code.} - A transaction containing these two boxes as inputs must produce two outputs: the first giving at least 60 tokens of type1 to Alice and the second giving at least 100 tokens of type2 to Bob. Once the two boxes are on the blockchain, anyone can create such a transaction using the two boxes as inputs, and thus effect the exchange between Alice and Bob. Unlike the cross-chain trading example above using hashing (which requires one side to go first), there are no potential problems with synchronization here, because the exchange will happen in a single transaction or will not happen at all. - -We caution that for security, Alice cannot have two such boxes outstanding at any given time, because they can be both used in a single transaction that contains just one output with 60 tokens of type ``token1" --- the script of each box will be individually satisfied, but Alice will get less only half of what owed to her. Same for Bob. 
- -\lnote{this seems like a pretty big security vulnerability and we should highlight it and show how to fix it. Restricting the number of inputs and outputs to just 2 is not enough. For example, suppose Alice has another box C in the UTXO with value 100 Ergo and 60 token1, and the following script:} -\begin{verbatim} - (HEIGHT > deadline && pkA) || { - val tokenData = OUTPUTS(0).R2[Coll[(Coll[Byte], Long)]].get(0) - allOf(Coll( - tokenData._1 == token1, - tokenData._2 >= 60L, - OUTPUTS(0).propositionBytes == pkA.propBytes, - OUTPUTS(0).value >= 100L - )) - } - || ... -\end{verbatim} -\lnote{Basically, this script allows Alice to simply reclaim what she has in B; it may also also her to do other things, which is why ``\texttt{|| ...}'' is there in the script. I don't know the purpose of such a script, but it seems plausible. Now, an adversary can create a transaction with two inputs: Alice's box above (which requires to give her 60 token1 in the output) and box C; and two outputs: output with value 100 Ergo + 60 token1 to Alice, and output with value 100 Ergo to the adversary). Alice just lost 100 Ergo.} - - -\lnote{Another question: why do this instead of hash-based example?} - \subsection{Self-Replicating Code} \label{sec:self-replicating} Access to box registers allow us to create self-replicating boxes, because a script can check that an output box contains the same script as \texttt{SELF}. As shown in \cite{CKM18}, this powerful tool allows for Turing-completeness as computation evolves from box to box, even if each individual script is not Turing-complete. We will demonstrate two examples of complex behavior via self-replication. 
@@ -393,29 +379,24 @@ \subsection{Self-Replicating Code} val heightCorrect = out.R4[Int].get == HEIGHT val lastCoins = SELF.value <= oneEpochReduction allOf(Coll( - correctCoinsConsumed, heightCorrect, heightIncreased, - sameScriptRule)) + sameScriptRule, + correctCoinsConsumed)) || (heightIncreased && lastCoins) } \end{verbatim} -\lnote{Not including demurrage example because it required more explanation about checking from miners and consensus about what registers are for what} - - \paragraph{Example: arbitrary computation via a simple cellular automaton} -The example in the paragraph is not meant for practical implementation; rather, it is here merely to demonstrate the Turing-complete power of self-replication. It implements the so-called ``rule 110'' one-dimensional cellular automaton \cite{wolfram1986theory}, which is known to be Turing-complete \cite{cook2004universality} (with only polynomial-time overheard --- i.e., $P$-complete \cite{NW06}). See \cite{CKM18} for more details. The code for this example is too complex to be put here; it is available at \lnote{give a reference to the code once it is written in our \langname and is sitting online somewhere}. - -\lnote{maybe give a the simple example from \url{https://github.com/ScorexFoundation/sigmastate-interpreter/blob/master/src/test/scala/sigmastate/utxo/examples/Rule110Specification.scala\#L40?}} - +The example in the paragraph is not meant for practical implementation; rather, it is here merely to demonstrate the Turing-complete power of self-replication. It implements the so-called ``rule 110'' one-dimensional cellular automaton \cite{wolfram1986theory}, which is known to be Turing-complete \cite{cook2004universality} (with only polynomial-time overheard --- i.e., $P$-complete \cite{NW06}). See \cite{CKM18} for more details. 
The code for this example is too complex to be put here; it is available at +\url{https://github.com/ScorexFoundation/sigmastate-interpreter/blob/master/src/test/scala/sigmastate/utxo/examples/Rule110Specification.scala}. +\ignore{ \subsection{Merkle Trees} Explain \texttt{isMember} and provide an example of usage. Explain that the context also contains the root hash of the all the unspent output boxes in the previous block \lnote{check: previous or current block? It would seem that the current is not available, so it should be previous.}, available via the predefined variable \texttt{LastBlockUtxoRootHash}. Give examples of usage, such as oracle, MAST, FSM. (For the oracle, explain that our language is rich enough to support signature verification within the script.) - -\lnote{All of this has to wait until there are working code examples in \langname (currently, the examples in \texttt{OracleExamplesSpecification} and \texttt{MASTExampleSpecification} and \texttt{FsmExampleSpecification} are all written as ASTs, not as compilable code.} +} @@ -426,29 +407,46 @@ \section{Implementation} \begin{itemize} \item We estimate the time required to process the script and, if it exceeds a certain bound, refuses to evaluate it in order to prevent a denial-of-service attack. -\item The evaluation converts the script not to a Boolean value, but to a $\Sigma$-statement. This statement is a tree, with \texttt{proveDlog} or \texttt{proveDHtuple} nodes for leaves, and $\andnode$ (\texttt{\&\&}), $\ornode$ (\texttt{||}), or $\tnode$ (\lnote{insert \langname notation here}) for non-leaves. The prover (when trying to use in a transaction the box that is protected by the script) generates a proof this $\Sigma$-statement. The verifier verifies it, obtaining a Boolean value. +\item The evaluation converts the script not to a Boolean value, but to a $\Sigma$-statement. 
This statement is a tree, with \texttt{proveDlog} or \texttt{proveDHtuple} nodes for leaves, and $\andnode$ (\texttt{\&\&}), $\ornode$ (\texttt{||}), or $\tnode$ for non-leaves. The prover (when trying to use in a transaction the box that is protected by the script) generates a proof this $\Sigma$-statement. The verifier verifies it, obtaining a Boolean value. \end{itemize} -\lnote{Is the above accurate about everything else being relatively standard? Is there anything we want to describe?} +We describe the latter non-standard step in Appendix \ref{app:crypto}, while the former will be described in a separate +upcoming document. + +\section{Further Work} +\label{sec:further} + +The next steps we plan to do~(and have partially done) after releasing this document are: + \begin{enumerate} + \item{} More examples, including non-interactive and fully on-chain tumbler for mixing the coins, cold wallets, + oracles, initial coin offering scenario, multi-state contract defined as a finite state machine, and so on. We + already have code for such the examples done, and writing documentation about them at the moment. + \item{} Detailed description of used type system, cost estimation procedure, safety guarantees, + and abstract syntax tree format. + \end{enumerate} +\bibliographystyle{alpha} +\bibliography{sigma.bib} + + -We describe these two nonstandard steps in Section \ref{sec:safety} and Appendix \ref{app:crypto}. \lnote{Should they be both in the appendix or both in the main body?} +\appendix +\ignore { \section{Safety Guarantees} -\label{sec:safety} +\label{app:safety} We need to be sure that an adversary can not produce such a statement for -which the Verifier spends more time than it is safe to spend. \knote{links to -verifier dilemma, orphan rates etc} +which the Verifier spends more time than it is safe to spend. In order to filter out malicious statements, Verifier needs to perform a -series of safety checks in particular consistency checks and cost -estimations. 
This safety checks by itself require Verifier to spend some -time. However, the idea is to strictly limit the check complexity to be +series of safety checks, in particular, consistency checks and execution cost +estimations. These safety checks also require Verifier to spend some +time. However, the idea is to strictly limit overall complexity of the checks to be linear in the size of the statement tree and input data, which are strictly limited in size. -Verification and safety checks are performed in stages. +Verification and safety checks are performed in following stages. \subsection{Verification and Safety Checks} \label{sec:safety-checks} @@ -470,10 +468,11 @@ \subsection{Verification and Safety Checks} Cost Function is the function of the same input (Context, variables, registers, etc.) as Statement function, but it computes Int value of the computation cost of the Statement function, so it answer the question "How -many operations it is required to compute the statement?". +many operations it is required to compute the statement?". 
The Cost Function is instantiated by the following steps: \begin{enumerate} + \item{} \end{enumerate} The idea is that the complexity of obtaining the Cost Function ($genCF$) is @@ -485,7 +484,7 @@ \subsection{Verification and Safety Checks} The Cost Function is applied right before evaluation of the Statement by the following steps: \begin{enumerate} - \item construct the context as it is required for Statement evaluation + \item construct the context as it is required for Statement evaluation \item invoke the interpreter to evaluate the Cost Function in the given context \item check resulting cost limit @@ -516,18 +515,18 @@ \subsection{Generation of Cost Function Tree} \begin{figure} \begin{center} -\begin{tabular}{>{$}l<{$} >{$}c<{$} >{$}l<{$}} +\begin{tabular}{>{$}l<{$} >{$}c<{$} >{$}l<{$}} \multicolumn{3}{@{}l}{\textbf{def} $genCF(D: Context, t: Value[T]): Value[Long]$} \\ D, v~@~ContextVar(\_) & \to & dataCost(v) \\ D, c~@~Constant(\_) & \to & dataCost(c) \\ D, t_1 \otimes t_2 & \to & genCF(D, t_1) + Cost_\otimes + genCF(D, t_2) \\ - arr.map(f) & \to & + arr.map(f) & \to & \begin{tabular}[t]{l} val $f_{cost}$ = $genCF(D, f)$ \\ $sum(arr.map(\lambda x \to f_{cost}(x)))$ \\ \end{tabular} \\ & \to & $\mnote{arg}$ \\ - % v @ Constant(_) & \to & + % v @ Constant(_) & \to & % \begin{tabular}[t]{l} % $v :=$ variable with $varId$ from data environment \\ % return $dataCost(v)$ \\ @@ -538,31 +537,7 @@ \subsection{Generation of Cost Function Tree} \end{center} \end{figure} -\subsection{Storing Cost Function} -\label{sec:store-cost-function} - - -\subsection{Cost Interpreter} -\label{sec:cost-interpreter} - - - -\subsection{Cost Model} -\label{sec:cost-model} - - - - - - -\bibliographystyle{alpha} -\bibliography{sigma.bib} - - - -\appendix - - +} \section{Implementation of Noninteractive $\Sigma$-protocols for an arbitrary And/Or/Threshold composition} \label{app:crypto} @@ -732,108 +707,9 @@ \subsection{Verifying} \end{enumerate} -\section{Old Intro Text -- not sure 
what's needed} - -Much like digital signatures prove that the signer knows the secret key, $\Sigma$-protocols can be used to prove knowledge of a discrete logarithm. - - -The idea behind the language is that a subset of zero-knowledge protocols known as $\Sigma$-protocols (sigma protocols) could be combined via $\sqcap$ and $\sqcup$ connectives forming complex statements like ``prove me a knowledge of discrete logarithm of (publicly known) $x_1$ or knowledge of Pedersen commitment $x_2$ preimage''. We make an observation that sigma protocol statements and their conjectures are naturally correspond to propositional logic, and we can add arbitrary boolean predicates to statements provable via a $\Sigma$-protocol, if both prover and verifier are able to evaluate the predicates in exactly the same way. This is the case if predicates are evaluated deterministically in the same way by both the prover and the verifier, and inputs for the predicates are the same on both sides. -We assume prover and verifier can be different parties, e.g. prover is a creator of transactions (wallet application) and verifier is a miner validating incoming transactions. -We use predicates over state of blockchain system during script validation~(which happens when a transaction tries to spend an output protected by the script). To avoid inefficient processing and impossibility for a light client to validate a transaction, this state is very lean but nevertheless it is richer than in Bitcoin. Like in Bitcoin, we use current height of the blockchain, but also a spending transaction with outputs it creates and inputs it tries to spend. Unlike Bitcoin, we allow outputs to contain more fields than amount and protecting script, in a form of additional registers an output can have. In addition to $\land$ and $\lor$ connectives for boolean propositions we also introduce $\sqcap$ and $\sqcup$ connectives for sigma protocol statements. 
The language also have different operations over statically typed arguments. We reject out during compilation time expressions with type errors, like $2 + 2 > true$, thanks to a type system used. - -In blockchain systems, there is a strict need to tackle the problem of denial-of-service attack carrying by crafting scripts which are too costly to validate. For example, if it is needed for more time to validate a script than average delay between blocks on commodity hardware, network could be obviously attacked, with nodes stuck in processing, and also increased number of forks as result. - -\section{Old Language Design Section} - -We assume that a {\em prover} and a {\em verifier} have a shared {\em context}. As we are designing a language for cryptocurrencies, the context is about current state of the blockchain (such as height $h$ of a best block in the blockchain, a spending transaction with outputs it spends and newly created outputs, etc). Please note that the language can be repurposed for other areas where shared context is possible, but this is out of scope of the paper. - -\subsection{Notation} - -We use $Dlog(x)$ to denote a statement ``prove a knowledge of such $w$ that $x = g^w$''. Proving is to be done by a prover in zero knowledge~(with no presenting the secret $w$ to a verifier). Statement $Dlog(x_1) \sqcap Dlog(x_2)$ means ``prove a knowledge of both $w_1$ and $w_2$, such as $x_1 = g^{w_1}, x_2 = g^{w_2}$'', similarly, $Dlog(x_1) \lor Dlog(x_2)$ is about a proof of knowledge of either $w_1$ or $w_2$. - -\subsection{UTXO model} - -We assume a transactional model close to Bitcoin's. That is, a transaction spends unspent outputs (pointed to by \emph{transaction inputs}) from previous transaction written into the blockchain, and creates new outputs. An output is associated with arbitrary amount of money, and also a protecting script. 
In Bitcoin, there is a special kind of transaction, so-called {\em coinbase transaction}, which can create some amount of money out of thin air~(to reward a block generator). In Bitcoin transaction fees money flow is not captured in outputs, a transaction fee is just a difference of amounts of outputs being spent and outputs being created. In Appendix~\ref{apx:unified} we provide a way to avoid coinbase transactions, and also express fees explicitly as outputs. In Appendix~\ref{apx:account} we discuss how to make a language for a cryptocurrency with account-based transactions~(such as Waves~\cite{Waves}). - -\subsection{Logic of $\Sigma$-protocols} - -Cryptocraphic propositions verifiable via $\Sigma$-protocols are represented by values of an algebraic data type $SigmaProp$, -which stands for \emph{sigma proposition} and can be described using the following declaration (using Idris/Agda ADT notation~\cite{Idris, Agda}) - -\begin{lstlisting} -data SigmaProp: Type where - Dlog: GroupElement -> SigmaProp - Dht: (gv, hv, uv, vv: GroupElement) -> SigmaProp - ... -\end{lstlisting} - -In the definition above, each constructor represents one of the cryptographic primitives, and the set of such constructors -can be extended (although this extensibility is out of paper's scope). - -We want to be able to construct more complex statements out of basic primitive propositions. -There are two binary operations over $SigmaProp$ values: -\begin{itemize} -\item $\sqcup: (SigmaProp, SigmaProp) -> SigmaProp$ -\item $\sqcap: (SigmaProp, SigmaProp) -> SigmaProp$ -\end{itemize} - -Any expression of $SigmaProp$ type can be efficiently verified using sigma protocol and thus mapped to a truth values. - -We implement a verification procedure $V: SigmaProp -> Boolean$, which performs such mapping, and satisfies the following -naturality conditions with respect to SigmaProp and Boolean connectives. 
- -For any two statements $sigma1, sigma2: SigmaProp$: -\begin{itemize} - \item $V(s_1 \sqcup s_2) = V(s_1) \lor V(s_2)$ - \item $V(s_1 \sqcap s_2) = V(s_1) \land V(s_2)$ -\end{itemize} - -Bacause SigmaProp connectives and logical connectives satisfy naturality conditions we can reason about complex sigma statements -the same way we reason about logical statements. -However, unlike logical statements evaluation of sigma statements is done with zero knowledge about intermediate steps. -Thus, we can verify the truth of $s_1 \sqcup s_2$ (i.e. compute $V(s_1 \sqcup s_2)$) statement but we cannot know whether $s_1$ or $s_2$ or both is true. At the same time evaluation of $V(s_1) \lor V(s_2)$ discloses results of both $V(s_1)$ and $V(s_2)$ before execution of $\lor$. - -There is a function $isValid: SigmaProp -> Boolean$ in Sigma-state language which implements the function $V$. -The type system of the language allows to explicitly delimit usages of zero knowledge evaluation -of sigma statements and classical evaluation of boolean statements. - -This allows script designer to explicitly control security (zero-knowledge) guarantees where it is necessary. - -\subsection{General Idea} - -By using a $\Sigma$-protocol, a prover can prove a knowledge of secret information corresponding to publicly known values, in zero-knowledge(i.e. without disclosing a secret value), for some relations between secret and public values. Unlike generic proof systems, $\Sigma$-protocols are efficient~(for both the prover and the verifier). For a cryptocurrency setting, the pros of this class of protocols are generic transformation from an interactive protocol to non-interactive one~(by using Fiat-Shamir transformation), and also composability: we can combine statements provable with $\Sigma$-protocols via $\land$, $\lor$ and k-out-of-N conjectures, and the compound statement is also provable via a $\Sigma$-protocol. 
An observation which is lying in the foundation of our work is that we can view a (potentially complex) statement provable via a $\Sigma$-protocol as a formula over sigma values homomorphic to logical formula. For example, the statement $dlog(x_1) \lor dlog(x_2)$ could be viewed as a formula consisting of two sigma values connected by $\lor$ and which is homomorphic to the formula $b1 or b2$. Then we can enrich the language of $\Sigma$-protocols (which describes relations between prover's secrets and their publicly known images) with deterministic predicates over a context shared between the prover and the verifier. Still, we are using only $\land$, $\lor$ and k-out-of-N conjectures. By evaluating predicates over the context into concrete boolean values and then eliminating them, both (honest) the prover and verifier do agree on the same reduction procedure output, which could be one of the following: boolean value (true or false) or a statement provable via a $\Sigma$-protocol. However, in the light of denial-of-service attacks found against scripting capabilities of Bitcoin and Ethereum \knote{todo: links}, we need to limit possible complexity of the reduction procedure. For that, we have two measures against possible overload issues. In the first place, we have to use only context which is efficiently computable. For example, we can not have predicates over transactional history, as it is linearly growing with time, and also could not be hold by a light client. In opposite, we can use height of a block which contains the transaction spending the output of interest, access to this information is constant-time and available to light clients. 
If a validation state~(which is similar to UTXO set in Bitcoin for Bitcoin-like cryptocurrencies) is authenticated~(like proposed in the paper \knote{cite AVL paper}), we can construct a predicate for existence of an unspent output (with some conditions to be met), then the prover is providing a Merkle proof for an output satisfying the predicate, and even a light client can validate the predicate efficiently. In the second place, we put a limit on size of an initial logic formula which protects an output, and also we take care that the formula will not become too big due to rewritings (as some transformations in our proposal could actually increase a size of the formula\mnote{For example? Looks like all the transforms are decreasing.}) and that the number of transformations is below an upper limit. - -\subsection{Model} - -An output to spend is protected by a logical expression, which we are also calling a {\em guarding expression}. An expression is a mix of predicates over publicly known context, and statements provable via sigma protocols. As a simplest example, consider the following statement: - -\begin{equation} -\label{eq:example1} -dlog(x_1) \lor ((h > 5) \land dlog(x_2)) -\end{equation} - -which is to be read as ``proof of knowledge of a secret with public image $x_1$ is always enough, also, if height of a block containing spending transaction is more than 5, knowledge of a secret with public image $x_2$ is also enough'' to admit spending of the output. - -Both the prover and the verifier are first reducing the statement by substituting shared context variables and evaluating parts of the expression which are possible to evaluate. Four outcomes of the reduction process are possible: {\em true}, {\em false}, failure to reduce~(if statement is invalid, or transformations of it are taking too much time or result in unreasonably big statement), which is equivalent to {\em false}; or statement which contains only cryptographic statements. 
For the example, if $h = 10$, the reduced statement is $dlog(x_1) \lor dlog(x_2)$. In this case, the prover is generating a proof, and verifier is checking validity of the proof, accepting or rejecting it. We use cryptographic statements which are provable via {\em sigma protocols}~(\knote{links}). These protocols are efficient zero-knowledge \knote{special honest verifier ZK actually} proof-of-knowledge protocols which are composable via $\land$, $\lor$ and k-out-of-N conjectures, also any sigma protocol has a standard way to be converted into a signature by using the Fiat-Shamir transformation. - -In addition to secret information, knowledge of which is to be proven in zero-knowledge, we allow prover to enhance context with custom variables. For example, for the statement: - -$$dlog(x) \land (blake2b256(c) = C)$$ - -where $C$ is some constant~\footnote{we avoid providing a value for the constant due to column size limit}, and $blake2b256$ is an operation which calculates hash value for function Blake2b256~(\knote{link}). The prover then needs to prove knowledge of discrete logarithm of $x$ and also to present value $c$ such that Blake2b256 on $c$ evaluates to $C$. Even if verifier does not know $c$ before being presented a proof, we can not count $c$ as secret, as it could be replayed by an eavesdropper. - +\ignore { - -\subsection{Language Details} -\label{sec:lang-details} - -Here we provide details on building blocks of the language. -Both the prover and the verifier are doing the same first few steps, and they both are using the same deterministic interpreter. In the first place, the interpreter is parsing incoming expression~(in typical case of validating a transaction within a block, it is encoded in a binary form), building a tree from the expression, and checking that the expression is well-formed according to typing rules. We describe types and typing rules in Section~\ref{sec:types}. 
The interpreter then is reducing the expression by applying rewriting rules to the tree as described in the Section~\ref{sec:rewriting}. The possible result of the reduction is either an abort, or a successfully reduced expression, which could be either a boolean value or a statement provable via a $\Sigma$-protocol. For the latter case, the prover and the verifier are doing different jobs: the prover is proving knowledge of secrets associated with the statement, as described in Section~\ref{sec:proving}; the verifier is checking a proof generated by the prover against the statement, as described in the Section~\ref{sec:verifying}. - -The interpreter is also checking that the expression is not exceeding some predefined limit on the number of sub-expressions and their cumulative complexity~(which is the same for the prover and the verifier, e.g. a constant of a blockchain system, or changed by miners in predictable and controllable fashion, like gas limit per block in Ethereum). If the complexity goes beyond the limit, the interpreter aborts immediately. - - -\subsection{Types} +\section{Types} \label{sec:types} All the operations as well as operands have types. For example, addition operation ``+'' may take two Int values and return an Int value. Comparison operation ``$>$'' takes two Int and returns a boolean value. Some operations may be overloaded so that the same symbol is reused for operations acting on different types, e.g. $+: (Int,Int) \to Int$, $+: (Long, Long) \to Long$ etc. 
@@ -853,32 +729,6 @@ \subsection{Types} \item{optional value} \end{itemize} -\knote{todo: improve description, also, add unsigned integer?} - -\ignore{ -\begin{center} - \begin{tabular}{| l | l | l | l | l |} - \hline - Operation & bytes & ints & prop & bool \\ \hline - $=$ & + & + & + & + \\ - $\neq$ & + & + & + & +\\ - $+$ & + & + & - & - \\ - $-$ & - & + & - & - \\ - $>$ & - & + & - & - \\ - $\ge$ & - & + & - & -\\ - $<$ & - & + & - & -\\ - $\le$ & - & + & - & -\\ - $\oplus$ & + & - & - & + \\ - $\lor$ & - & - & - & + \\ - $\land$ & - & - & - & + \\ - $blake2b256$ & + & - & - & -\\ - $dlog$ & - & - & - & -\\ - $dh$ & - & - & - & -\\ - \hline - \end{tabular} -\end{center} -} - \subsection{Rewriting a Tree} \label{sec:rewriting} @@ -898,89 +748,7 @@ \subsection{Rewriting a Tree} -\section{Old Context Section} - -Shared context of a blockchain system could be expressed in different ways, based on desired expressiveness, efficiency, planned use cases and so on. In this paper we focus on context for Ergo blockchain. The main priority for this blockchain is maximum efficiency of transaction validation process, safety, and friendliness to light clients. Considering this, we require that context should contain only spending transaction along with outputs it spends, and limited number of last block headers. Thus even a client which does not have all the headers of the blockchain~(for example, the client could be working in a light-SPV mode, where the client is holding only sublinear part of the headers-chain) is able to validate a transaction, by being shown it~(as well as outputs the transaction spends along with Merkle proofs for them). - -\knote{brief context description, link to an appendix with details} - - - - -\section{Old Examples Section} - -In this section we provide some examples of useful guarding scripts. We focus on examples which are impossible or much harder to express in Bitcoin Script. 
- -\subsection{Crowdfunding} -\label{sec:crowdfunding} - -We provide simple solution to crowdfunding here. In the example, a crowdfunding project associated with public key $x_P$ is considered successful if it can collect unspent outputs with total value not less than $to\_raise$ before height $timeout$. A project backer creates an output protected by the following statement: - -\begin{equation*} -\begin{split} -(height \ge timeout \land dlog(x_B)) \lor \\ -(height & < timeout \land dlog(x_P)\\ -& \land Exists(Outputs, 20,\\ -& \quad \quad \quad \quad ExtractAmount(TaggedBox(20)) \ge to\_raise \land \\ -& \quad \quad \quad \quad ExtractScriptBytes(TaggedBox(20)) = ToBytes(dlog(x_P)))) -\end{split} -\end{equation*} - -Then the project can collect biggest outputs with total value not less than $to\_raise$ with a single transaction~(it is possible to collect up to ~22,000 outputs in Bitcoin, which is enough even for a big crowdfunding campaign). For remaining small outputs over $to\_raise$, it is possible to construct follow-up transactions. - -Please note why such a guarding expression is not possible in Bitcoin: we use the condition on a spending transaction, namely, we require the transaction to have an output with value not less than required, and also with a particular statement protecting the output. - -\subsection{Money With Scheduled Maintenance Payments} - -\knote{description} - -\begin{equation*} -\begin{split} -user\_statement \lor \\ -(height & \ge (ExtractHeight(self) + period) \land \\ - & Exists(Outputs, 20, \\ - & \quad \quad \quad \quad ExtractAmount(TaggedBox(20)) \ge (ExtractAmount(self) - cost) \land \\ - & \quad \quad \quad \quad ExtractScriptBytes(TaggedBox(20)) = ExtractScriptBytes(self))) -\end{split} -\end{equation*} - -We highlight impossibility of such a statement in the Bitcoin Script: similarly to the statement in the previous section~\ref{sec:crowdfunding}, we use a condition on a spending transaction. 
Another feature missed in the Bitcoin Script is that we also use an output to spend in the execution context. In particular, in the example above we require a spending transaction to have an output which has the same statement as an output it spends. - -\subsection{Ring Signature} -\label{sec:ring} - -Linear-sized ring signature is very straightforward in the language. Assume a ring consists of $m$ public keys $x_1, \dots, x_m$. If one wants an output to be spent by a ring signature associated with the ring, the output is to be protected by the following statement: - -$$dlog(x_1) \lor \dots \lor dlog(x_m)$$ - -Please note that proving of the statement is to be done in zero-knowledge, so it is not possible to know which key signed, the only fact to conclude is that some key from the ring signed output spending. - -\subsection{Complex Signature Schemes} - -We can build more complex signature schemes than possible in Bitcoin. One particular example was provided in the previous Section~\ref{sec:ring}. Another example is a scheme where at least one out of (Alice, Bob), and at least one out of (Charles, Diana) are needed to sign, and it is not to be known who signed an output spending. The corresponding statement involving public keys $x_A, x_B, x_C, x_D$ of Alice, Bob, Charles and Diana respectively would be following: - -$$(dlog(x_A) \lor dlog(x_B)) \land (dlog(x_C) \lor dlog(x_D))$$ - -\subsection{Simple Tumbler} -\label{sec:tumbler} - -\knote{does the example makes sense? check other tumbler papers. also, update the scripts, now approach is more generic than using tx.outbytes} - -Privacy is a tough problem for cryptocurrency users. Bitcoin is a pseudonymous cryptocurrency, so no real identities attached to a transaction. However, it is possible to reconstruct transactional graph for all the transactions ever entered into the Bitcoin blockchain, and often restore real identities by using auxiliary databases. \knote{link} To improve privacy, tumblers are being used. 
A tumbler is a scheme which is getting some money transfers as inputs, produces output money transfers, and has a property of unlinkability: it is not possible to draw a link from an input to an output. Thus a tumbler user is hiding herself amongst a ring of users sending inputs to the scheme. Privacy then depends on a ring size. For maximum privacy, there exists a cryptocurrency ZCash with inbuilt tumbler based on zkSnarks\knote{links}, where a user is hiding among all the users in the system. However, ZCash requires trusted setup, and transaction validation is relatively slow~(10 ms). In other cryptocurrencies users are usually using external services varying in security and efficiency. - -Here we are describing a simple tumbler service. It is very efficient and requires no trusted dealer. Its disadvantage is that from observing blockchain transactions it is possible to conclude that users are using a tumbler. - -Assume Alice with public key $x_A$ and Bob with public key $x_B$ want to relocate funds to keys $y_1$ and $y_2$ respectively, with the property that an external observer looking into the blockchain is not capable of knowing beyond the fact that money flows from $x_A$ to either $y_1$ or $y_2$~(and the same for $x_B$). - -First, Alice and Bob communicate off-chain to construct collectively outputs of the final transaction~(payments to $y_1$ and $y_2$). Each of them then is calculating a hash value from outputs bytes $h$~(we assume that Blake2b-256 hash function is used). 
Then each of them is creating an output to spend~(possibly, in a dedicated transaction) with such a condition for Alice and Bob, respectively: - -$blake2b256(tx.outbytes) = h \lor dlog(x_A)$ - -$blake2b256(tx.outbytes) = h \lor dlog(x_B)$ - -Then it is possible to make a refund at any moment of time~(right condition in the $\lor$ conjectures), and before a refund any of them can construct a transaction which is spending the outputs, and it is impossible to construct an alternative transaction, for which hash of the output bytes is $h$, but bytes are different~(if chosen hash function is collision-resistant). - -\subsection{Oracle Example} +\section{Oracle Example} \knote{Text below is just copied from code comments, polish it} 
- -\item Implementation and Evaluation - -\item Further Work - -\item Conclusion -\end{itemize} +} \end{document} \ No newline at end of file diff --git a/scalanizer/src/main/scala/special/sigma/scalanizer/SigmaPlugin.scala b/scalanizer/src/main/scala/special/sigma/scalanizer/SigmaPlugin.scala index 1214083d68..fd5c45ee0c 100644 --- a/scalanizer/src/main/scala/special/sigma/scalanizer/SigmaPlugin.scala +++ b/scalanizer/src/main/scala/special/sigma/scalanizer/SigmaPlugin.scala @@ -1,11 +1,10 @@ package special.sigma.scalanizer -import spu.device.config.SigmaLibraryConfig - import scala.tools.nsc.Global import scalan.meta.{ConfMap, TargetModuleConf, SourceModuleConf} import scalan.meta.scalanizer.ScalanizerConfig import scalan.plugin.{ScalanizerPluginConfig, ScalanizerPlugin} +import special.sigma.config.SigmaLibraryConfig class SigmaPlugin(g: Global) extends ScalanizerPlugin(g) { plugin => override def createScalanizerConfig(): ScalanizerConfig = new SigmaScalanizerConfig diff --git a/sigma-api/src/main/scala/special/sigma/CostTable.scala b/sigma-api/src/main/scala/special/sigma/CostTable.scala index a68775b629..a1e7daafc9 100644 --- a/sigma-api/src/main/scala/special/sigma/CostTable.scala +++ b/sigma-api/src/main/scala/special/sigma/CostTable.scala @@ -39,6 +39,7 @@ object CostTable { ("SigmaPropBytes: SigmaProp => Array[Byte]", 0.000001), ("BinAnd: (Boolean, Boolean) => Boolean", 0.000001), ("BinOr: (Boolean, Boolean) => Boolean", 0.000001), + ("BinXor: (Boolean, Boolean) => Boolean", 0.000001), ("+: (BigInt, BigInt) => BigInt", 0.0001), ("+_per_item: (BigInt, BigInt) => BigInt", 0.000001) )) diff --git a/sigma-api/src/main/scala/special/sigma/SigmaDsl.scala b/sigma-api/src/main/scala/special/sigma/SigmaDsl.scala index b6e989c044..463a1ae75d 100644 --- a/sigma-api/src/main/scala/special/sigma/SigmaDsl.scala +++ b/sigma-api/src/main/scala/special/sigma/SigmaDsl.scala @@ -90,7 +90,7 @@ trait BigInt { */ def plusModQ(other: BigInt): BigInt - /** Subracts this number 
with `other` by module Q. + /** Subtracts this number with `other` by module Q. * @since 2.0 */ def minusModQ(other: BigInt): BigInt @@ -212,7 +212,7 @@ trait GroupElement { def negate: GroupElement /** - * Get an encoding of the point value, optionally in compressed format. + * Get an encoding of the point value. * * @return the point encoding */ @@ -223,6 +223,7 @@ trait GroupElement { @scalan.Liftable trait SigmaProp { def isValid: Boolean + /** Serialized bytes of this sigma proposition taken as ErgoTree and then serialized. */ def propBytes: Coll[Byte] /** Logical AND between this SigmaProp and other SigmaProp. @@ -335,8 +336,8 @@ trait Box { */ @scalan.Liftable trait AvlTree { - /** Returns digest of the state represent by this tree. - * Authenticated tree digest: root hash along with tree height + /** Returns digest of the state represented by this tree. + * Authenticated tree digest = root hash bytes ++ tree height * @since 2.0 */ def digest: Coll[Byte] @@ -537,7 +538,7 @@ trait Context { /** Authenticated dynamic dictionary digest representing Utxo state before current state. */ def LastBlockUtxoRootHash: AvlTree - /** + /** A fixed number of last block headers in descending order (first header is the newest one) * @since 2.0 */ def headers: Coll[Header] @@ -636,6 +637,11 @@ trait SigmaDslBuilder { def proveDlog(g: GroupElement): SigmaProp def proveDHTuple(g: GroupElement, h: GroupElement, u: GroupElement, v: GroupElement): SigmaProp + /** + * The generator g of the group is an element of the group such that, when written multiplicatively, every element + * of the group is a power of g. + * @return the generator of this Dlog group + */ def groupGenerator: GroupElement @Reified("T") @@ -650,5 +656,7 @@ trait SigmaDslBuilder { /** Construct a new authenticated dictionary with given parameters and tree root digest. 
*/ def avlTree(operationFlags: Byte, digest: Coll[Byte], keyLength: Int, valueLengthOpt: Option[Int]): AvlTree + + def xor(l: Coll[Byte], r: Coll[Byte]): Coll[Byte] } diff --git a/sigma-api/src/main/scala/special/sigma/SigmaExamples.scala b/sigma-api/src/main/scala/special/sigma/SigmaExamples.scala index 850a506873..f31905b8ec 100644 --- a/sigma-api/src/main/scala/special/sigma/SigmaExamples.scala +++ b/sigma-api/src/main/scala/special/sigma/SigmaExamples.scala @@ -72,7 +72,7 @@ trait InChainAtomicSwap extends SigmaContract { def templateForBob(ctx: Context) = verifyZK { (pkB && ctx.HEIGHT > deadline) || allOf( Collection( - ctx.OUTPUTS(1).value >= 100, + ctx.OUTPUTS(1).value >= 100L, ctx.OUTPUTS(1).propositionBytes == pkB.propBytes )) } diff --git a/sigma-api/src/main/scala/special/sigma/SigmaPredef.scala b/sigma-api/src/main/scala/special/sigma/SigmaPredef.scala index 708d75d4e0..9cc8fa19bb 100644 --- a/sigma-api/src/main/scala/special/sigma/SigmaPredef.scala +++ b/sigma-api/src/main/scala/special/sigma/SigmaPredef.scala @@ -1,31 +1,17 @@ package special.sigma -import org.bouncycastle.math.ec.ECPoint - -import scala.reflect.ClassTag - object SigmaPredef { + + // TODO cleanup: since it is not really used def dataSize[T](v: T): Long = v match { case _: Boolean => 1 case _: Byte => 1 case _: Short => 2 case _: Int => 4 case _: Long => 8 -// case b: Box => b.dataSize case _ => sys.error(s"Cannot compute dataSize($v)") } - //TODO chack how it can be implemented -// def sizeOf[T](implicit cT: ClassTag[T]): Long = cT match { -// case _: ClassTag[Boolean] => 1 -// case _: ClassTag[Byte => 1 -// case _: Short => 2 -// case _: Int => 4 -// case _: Long => 8 -// case b: Box => b.dataSize -// case p: ECPoint => p.getEncoded(true).length -// } - } diff --git a/sigma-api/src/main/scala/special/sigma/package.scala b/sigma-api/src/main/scala/special/sigma/package.scala index 04050c3612..205d96e65a 100644 --- a/sigma-api/src/main/scala/special/sigma/package.scala +++ 
b/sigma-api/src/main/scala/special/sigma/package.scala @@ -10,29 +10,23 @@ import scala.reflect.{ClassTag, classTag} package sigma { - case class WrapperType[Wrapper](cWrapper: ClassTag[Wrapper]) extends RType[Wrapper] { - override def classTag: ClassTag[Wrapper] = cWrapper - override def toString: String = cWrapper.toString - override def name: String = cWrapper.runtimeClass.getSimpleName + case class ArgType(override val name: String) extends RType[Any] { + override def classTag: ClassTag[Any] = ClassTag.Any override def isConstantSize: Boolean = false // pessimistic but safe default } - } package object sigma { - def wrapperType[W: ClassTag]: RType[W] = WrapperType(classTag[W]) - - // TODO make these types into GeneralType (same as Header and PreHeader) - implicit val BigIntRType: RType[BigInt] = new WrapperType(classTag[BigInt]) { + implicit val BigIntRType: RType[BigInt] = new GeneralType(classTag[BigInt]) { override def isConstantSize: Boolean = true } - implicit val GroupElementRType: RType[GroupElement] = new WrapperType(classTag[GroupElement]) { + implicit val GroupElementRType: RType[GroupElement] = new GeneralType(classTag[GroupElement]) { override def isConstantSize: Boolean = true } - implicit val SigmaPropRType: RType[SigmaProp] = wrapperType[SigmaProp] - implicit val BoxRType: RType[Box] = wrapperType[Box] - implicit val AvlTreeRType: RType[AvlTree] = wrapperType[AvlTree] - implicit val ContextRType: RType[Context] = wrapperType[Context] + implicit val SigmaPropRType: RType[SigmaProp] = GeneralType(classTag[SigmaProp]) + implicit val BoxRType: RType[Box] = GeneralType(classTag[Box]) + implicit val AvlTreeRType: RType[AvlTree] = GeneralType(classTag[AvlTree]) + implicit val ContextRType: RType[Context] = GeneralType(classTag[Context]) // these are not wrapper types since they are used directly in ErgoTree values (e.g. 
Constants) // and no conversion is necessary @@ -63,4 +57,6 @@ package object sigma { implicit val SizeBoxRType: RType[SizeBox] = RType.fromClassTag(classTag[SizeBox]) implicit val SizeContextRType: RType[SizeContext] = RType.fromClassTag(classTag[SizeContext]) implicit val SizeBuilderRType: RType[SizeBuilder] = RType.fromClassTag(classTag[SizeBuilder]) + + def argRType(name: String): RType[Any] = ArgType(name) } \ No newline at end of file diff --git a/sigma-api/src/main/scala/special/sigma/wrappers/WrappersSpec.scala b/sigma-api/src/main/scala/special/sigma/wrappers/WrappersSpec.scala index 64dc5499bb..2111d24bdd 100644 --- a/sigma-api/src/main/scala/special/sigma/wrappers/WrappersSpec.scala +++ b/sigma-api/src/main/scala/special/sigma/wrappers/WrappersSpec.scala @@ -10,7 +10,6 @@ import special.sigma.SigmaPredef import scala.reflect.ClassTag trait ECPointWrapSpec extends WrapSpecBase { - //todo remove compressed flag, use GroupElementSerializer def getEncoded[A](g: ECPoint, compressed: Boolean): Array[Byte] = g.getEncoded(compressed) def multiply(l: ECPoint, r: BigInteger) = l.multiply(r) def add(l: ECPoint, r: ECPoint) = l.add(r) diff --git a/sigma-conf/src/main/scala/spu/device/config/SigmaLibraryConfig.scala b/sigma-conf/src/main/scala/special/sigma/config/SigmaLibraryConfig.scala similarity index 86% rename from sigma-conf/src/main/scala/spu/device/config/SigmaLibraryConfig.scala rename to sigma-conf/src/main/scala/special/sigma/config/SigmaLibraryConfig.scala index 049af6e7ff..e81bd8322f 100644 --- a/sigma-conf/src/main/scala/spu/device/config/SigmaLibraryConfig.scala +++ b/sigma-conf/src/main/scala/special/sigma/config/SigmaLibraryConfig.scala @@ -1,8 +1,7 @@ -package spu.device.config +package special.sigma.config import special.library.config.SpecialLibraryConfig - -import scalan.{FunctorType, ContainerType, Liftable} +import scalan.Liftable import scalan.meta.ScalanAst.WrapperConf import scalan.meta.{LibraryConfig, ConfMap, TargetModuleConf, 
SourceModuleConf} @@ -33,12 +32,10 @@ class SigmaLibraryConfig extends LibraryConfig { .addUnit("special/sigma/wrappers/WrappersSpec.scala", wrapperConfigs) .addUnit("special/sigma/SigmaDsl.scala") .addUnit("special/sigma/CostedObjects.scala") - // .addUnit("special/sigma/SigmaExamples.scala") // TODO move to downstream library val ImplModule = new SourceModuleConf(baseDir, "sigma-impl") .moduleDependencies(specialLibrary.ApiModule, specialLibrary.ImplModule) .addUnit("special/sigma/SigmaDslOverArrays.scala") -// .addUnit("special/sigma/TestContracts.scala") // TODO move to downstream library .addUnit("special/sigma/SigmaDslCosted.scala") .dependsOn(ApiModule) diff --git a/sigma-impl/src/main/scala/sigma/util/Extensions.scala b/sigma-impl/src/main/scala/sigma/util/Extensions.scala index c69d3e7b76..bfecab6bdf 100644 --- a/sigma-impl/src/main/scala/sigma/util/Extensions.scala +++ b/sigma-impl/src/main/scala/sigma/util/Extensions.scala @@ -218,11 +218,4 @@ object Extensions { None } } - - implicit class NullableOps[T](val nul: Nullable[T]) { - def toOption: Option[T] = nul match { - case Nullable(v) => Some(v) - case _ => None - } - } } diff --git a/sigma-impl/src/main/scala/special/sigma/Extensions.scala b/sigma-impl/src/main/scala/special/sigma/Extensions.scala index 500f172696..20465ccb54 100644 --- a/sigma-impl/src/main/scala/special/sigma/Extensions.scala +++ b/sigma-impl/src/main/scala/special/sigma/Extensions.scala @@ -1,7 +1,6 @@ package special.sigma import org.bouncycastle.math.ec.ECPoint -import scalan.RType /** This extensions are used from SigmaDsl. * If you add something here, make sure the corresponding syntax is supported by SigmaScript. 
*/ @@ -23,9 +22,8 @@ object Extensions { s"ECPoint($rawX,$rawY,...)" } - implicit class GroupElementOps(source: GroupElement) { + implicit class GroupElementOps(val source: GroupElement) extends AnyVal { def showToString: String = showECPoint(source.value) } - def toAnyValue[A:RType](x: A) = new TestValue(x, RType[A].asInstanceOf[RType[Any]]) } diff --git a/sigma-impl/src/main/scala/special/sigma/SigmaDslOverArrays.scala b/sigma-impl/src/main/scala/special/sigma/SigmaDslOverArrays.scala index 89ffb18e19..3338d0bb3c 100644 --- a/sigma-impl/src/main/scala/special/sigma/SigmaDslOverArrays.scala +++ b/sigma-impl/src/main/scala/special/sigma/SigmaDslOverArrays.scala @@ -105,5 +105,8 @@ class TestSigmaDslBuilder extends SigmaDslBuilder { @NeverInline override def avlTree(operationFlags: Byte, digest: Coll[Byte], keyLength: Int, valueLengthOpt: Option[Int]): AvlTree = SpecialPredef.rewritableMethod + + @NeverInline + override def xor(l: Coll[Byte], r: Coll[Byte]): Coll[Byte] = Colls.xor(l, r) } diff --git a/sigma-impl/src/main/scala/special/sigma/TestBox.scala b/sigma-impl/src/main/scala/special/sigma/TestBox.scala deleted file mode 100644 index e9c63b219d..0000000000 --- a/sigma-impl/src/main/scala/special/sigma/TestBox.scala +++ /dev/null @@ -1,46 +0,0 @@ -package special.sigma - -import scala.reflect.ClassTag -import special.collection.Coll -import scalan.{NeverInline, RType, Reified} - -class TestBox( - val id: Coll[Byte], - val value: Long, - val bytes: Coll[Byte], - val bytesWithoutRef: Coll[Byte], - val propositionBytes: Coll[Byte], - val registers: Coll[AnyValue]) extends Box -{ - def builder = new TestSigmaDslBuilder - @NeverInline - def getReg[T](id: Int)(implicit cT: RType[T]): Option[T] = { - implicit val tag: ClassTag[T] = cT.classTag - if (id < 0 || id >= registers.length) return None - val value = registers(id) - if (value != null ) { - // once the value is not null it should be of the right type - value match { - case value: TestValue[_] if value.value != 
null && value.tA == cT => - Some(value.value.asInstanceOf[T]) - case _ => - throw new InvalidType(s"Cannot getReg[${cT.name}]($id): invalid type of value $value at id=$id") - } - } else None - } - - def creationInfo: (Int, Coll[Byte]) = this.getReg[(Int, Coll[Byte])](3).get - - def tokens: Coll[(Coll[Byte], Long)] = { - this.getReg[Coll[(Coll[Byte], Long)]](2).get - } - - @NeverInline - override def executeFromRegister[@Reified T](regId: Byte)(implicit cT: RType[T]): T = ??? - - override def hashCode(): Int = id.hashCode() - - override def equals(obj: Any): Boolean = (this eq obj.asInstanceOf[AnyRef]) || (obj != null && ( obj match { - case obj: Box => id == obj.id - })) -} diff --git a/src/main/scala/org/ergoplatform/ErgoAddress.scala b/src/main/scala/org/ergoplatform/ErgoAddress.scala index 54a2bc1166..f4f1c4a1a4 100644 --- a/src/main/scala/org/ergoplatform/ErgoAddress.scala +++ b/src/main/scala/org/ergoplatform/ErgoAddress.scala @@ -8,9 +8,11 @@ import scorex.crypto.hash.{Digest32, Blake2b256} import scorex.util.encode.Base58 import sigmastate.Values._ import sigmastate._ -import sigmastate.basics.DLogProtocol.ProveDlog +import sigmastate.basics.DLogProtocol.{ProveDlogProp, ProveDlog} +import sigmastate.lang.exceptions.SigmaException import sigmastate.serialization._ import sigmastate.utxo.{DeserializeContext, Slice} +import special.collection.Coll import scala.util.Try @@ -215,32 +217,40 @@ case class ErgoAddressEncoder(networkPrefix: NetworkPrefix) { case Pay2SHAddress.addressTypePrefix => new Pay2SHAddress(contentBytes) case Pay2SAddress.addressTypePrefix => - new Pay2SAddress(ErgoTreeSerializer.DefaultSerializer.deserializeErgoTree(contentBytes), contentBytes) + val tree = ErgoTreeSerializer.DefaultSerializer.deserializeErgoTree(contentBytes) + new Pay2SAddress(tree, contentBytes) case _ => throw new Exception("Unsupported address type: " + addressType) } } } + object IsPay2SHAddress { + def unapply(exp: SigmaPropValue): Option[Coll[Byte]] = exp match { + 
case SigmaAnd(Seq( + BoolToSigmaProp( + EQ( + Slice(_: CalcHash, ConstantNode(0, SInt), ConstantNode(24, SInt)), + ByteArrayConstant(scriptHash))), + DeserializeContext(Pay2SHAddress.scriptId, SSigmaProp))) => Some(scriptHash) + case _ => None + } + } + def fromProposition(proposition: ErgoTree): Try[ErgoAddress] = Try { proposition.root match { - case SigmaPropConstant(d: ProveDlog) => P2PKAddress(d) - //TODO move this pattern to PredefScripts - case SigmaAnd(Seq( - BoolToSigmaProp( - EQ( - Slice(_: CalcHash, ConstantNode(0, SInt), ConstantNode(24, SInt)), - ByteArrayConstant(scriptHash))), - DeserializeContext(Pay2SHAddress.scriptId, SSigmaProp))) => new Pay2SHAddress(scriptHash) - case b: Value[SSigmaProp.type]@unchecked if b.tpe == SSigmaProp => Pay2SAddress(proposition) - case other => - throw new RuntimeException(s"Cannot create ErgoAddress form proposition: ${proposition}") + case Right(SigmaPropConstant(ProveDlogProp(d))) => P2PKAddress(d) + case Right(IsPay2SHAddress(scriptHash)) => new Pay2SHAddress(scriptHash.toArray) + case Right(b: Value[SSigmaProp.type]@unchecked) if b.tpe == SSigmaProp => Pay2SAddress(proposition) + case Left(unparsedErgoTree) => + throw new SigmaException(s"Cannot create ErgoAddress form unparsed ergo tree: $unparsedErgoTree") + case _ => + throw new RuntimeException(s"Cannot create ErgoAddress form proposition: $proposition") } } } object ErgoAddressEncoder { - type NetworkPrefix = Byte val MainnetNetworkPrefix: NetworkPrefix = 0.toByte val TestnetNetworkPrefix: NetworkPrefix = 16.toByte diff --git a/src/main/scala/org/ergoplatform/ErgoBox.scala b/src/main/scala/org/ergoplatform/ErgoBox.scala index d1bb1ad996..9336e74a9b 100644 --- a/src/main/scala/org/ergoplatform/ErgoBox.scala +++ b/src/main/scala/org/ergoplatform/ErgoBox.scala @@ -4,15 +4,18 @@ import com.google.common.primitives.Shorts import org.ergoplatform.ErgoBox.{NonMandatoryRegisterId, TokenId} import scorex.crypto.authds.ADKey import scorex.util.encode.Base16 -import 
scorex.crypto.hash.{Blake2b256, Digest32} +import scorex.crypto.hash.{Digest32, Blake2b256} import scorex.util._ import sigmastate.Values._ import sigmastate.SType.AnyOps import sigmastate._ import sigmastate.serialization.SigmaSerializer import sigmastate.SCollection.SByteArray -import sigmastate.utils.{Helpers, SigmaByteReader, SigmaByteWriter} +import sigmastate.utils.{SigmaByteReader, SigmaByteWriter, Helpers} import sigmastate.utxo.ExtractCreationInfo +import special.collection._ +import sigmastate.eval._ +import sigmastate.eval.Extensions._ import scala.runtime.ScalaRunTime @@ -44,26 +47,24 @@ import scala.runtime.ScalaRunTime * This height is declared by user and should not exceed height of the block, * containing the transaction with this box. */ -class ErgoBox private( - override val value: Long, - override val ergoTree: ErgoTree, - override val additionalTokens: Seq[(TokenId, Long)] = Seq(), - override val additionalRegisters: Map[NonMandatoryRegisterId, _ <: EvaluatedValue[_ <: SType]] = Map(), - val transactionId: ModifierId, - val index: Short, - override val creationHeight: Int - ) extends ErgoBoxCandidate(value, ergoTree, creationHeight, additionalTokens, additionalRegisters) { +class ErgoBox( + override val value: Long, + override val ergoTree: ErgoTree, + override val additionalTokens: Coll[(TokenId, Long)] = Colls.emptyColl[(TokenId, Long)], + override val additionalRegisters: Map[NonMandatoryRegisterId, _ <: EvaluatedValue[_ <: SType]] = Map(), + val transactionId: ModifierId, + val index: Short, + override val creationHeight: Int + ) extends ErgoBoxCandidate(value, ergoTree, creationHeight, additionalTokens, additionalRegisters) { import ErgoBox._ lazy val id: BoxId = ADKey @@ Blake2b256.hash(bytes) - override def dataSize: Long = bytes.length - override def get(identifier: RegisterId): Option[Value[SType]] = { identifier match { case ReferenceRegId => - val tupleVal = Array(creationHeight, Helpers.concatArrays(Seq(transactionId.toBytes, 
Shorts.toByteArray(index)))) + val tupleVal = (creationHeight, Helpers.concatArrays(transactionId.toBytes, Shorts.toByteArray(index)).toColl) Some(Constant(tupleVal.asWrappedType, SReferenceRegType)) case _ => super.get(identifier) } @@ -98,7 +99,7 @@ object ErgoBox { val size: Short = 32 } - val MaxBoxSize: Int = 64 * 1024 + val MaxBoxSize: Int = ErgoConstants.MaxBoxSize.get val STokenType = STuple(SByteArray, SLong) val STokensRegType = SCollection(STokenType) @@ -113,7 +114,7 @@ object ErgoBox { override def toString: Idn = "R" + number } - abstract class MandatoryRegisterId(override val number: Byte, purpose: String) extends RegisterId + abstract class MandatoryRegisterId(override val number: Byte, val purpose: String) extends RegisterId abstract class NonMandatoryRegisterId(override val number: Byte) extends RegisterId object R0 extends MandatoryRegisterId(0, "Monetary value, in Ergo tokens") @@ -132,9 +133,9 @@ object ErgoBox { val TokensRegId: MandatoryRegisterId = R2 val ReferenceRegId: MandatoryRegisterId = R3 - val MaxTokens: Byte = 4 + val MaxTokens: Byte = ErgoConstants.MaxTokens.get - val maxRegisters = 10 + val maxRegisters: Int = ErgoConstants.MaxRegisters.get val mandatoryRegisters: Vector[MandatoryRegisterId] = Vector(R0, R1, R2, R3) val nonMandatoryRegisters: Vector[NonMandatoryRegisterId] = Vector(R4, R5, R6, R7, R8, R9) val startingNonMandatoryIndex: Byte = nonMandatoryRegisters.head.number @@ -158,7 +159,10 @@ object ErgoBox { additionalRegisters: Map[NonMandatoryRegisterId, _ <: EvaluatedValue[_ <: SType]] = Map(), transactionId: ModifierId = allZerosModifierId, boxIndex: Short = 0): ErgoBox = - new ErgoBox(value, ergoTree, additionalTokens, additionalRegisters, transactionId, boxIndex, creationHeight) + new ErgoBox(value, ergoTree, + Colls.fromArray(additionalTokens.toArray[(TokenId, Long)]), + additionalRegisters, + transactionId, boxIndex, creationHeight) object sigmaSerializer extends SigmaSerializer[ErgoBox, ErgoBox] { diff --git 
a/src/main/scala/org/ergoplatform/ErgoBoxCandidate.scala b/src/main/scala/org/ergoplatform/ErgoBoxCandidate.scala index eb43e6cca4..718bb9eeee 100644 --- a/src/main/scala/org/ergoplatform/ErgoBoxCandidate.scala +++ b/src/main/scala/org/ergoplatform/ErgoBoxCandidate.scala @@ -3,8 +3,8 @@ package org.ergoplatform import java.util import org.ergoplatform.ErgoBox._ -import scorex.util.encode.Base16 import scorex.crypto.hash.Digest32 +import scorex.util.encode.Base16 import scorex.util.ModifierId import sigmastate.Values._ import sigmastate._ @@ -12,9 +12,10 @@ import sigmastate.SType.AnyOps import sigmastate.lang.Terms._ import sigmastate.serialization.{ErgoTreeSerializer, SigmaSerializer} import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} -import sigmastate.utxo.CostTable.Cost +import special.collection.Coll +import sigmastate.eval._ +import sigmastate.eval.Extensions._ -import scala.collection.mutable.WrappedArray.ofByte import scala.runtime.ScalaRunTime /** @@ -34,33 +35,26 @@ import scala.runtime.ScalaRunTime class ErgoBoxCandidate(val value: Long, val ergoTree: ErgoTree, val creationHeight: Int, - val additionalTokens: Seq[(TokenId, Long)] = Seq(), + val additionalTokens: Coll[(TokenId, Long)] = Colls.emptyColl, val additionalRegisters: Map[NonMandatoryRegisterId, _ <: EvaluatedValue[_ <: SType]] = Map()) { - def proposition: BoolValue = ergoTree.proposition.asBoolValue - - def dataSize: Long = bytesWithNoRef.length.toLong - - lazy val cost: Int = (dataSize / 1024 + 1).toInt * Cost.BoxPerKilobyte + def proposition: BoolValue = ergoTree.toProposition(ergoTree.isConstantSegregation).asBoolValue lazy val propositionBytes: Array[Byte] = ErgoTreeSerializer.DefaultSerializer.serializeErgoTree(ergoTree) lazy val bytesWithNoRef: Array[Byte] = ErgoBoxCandidate.serializer.toBytes(this) def toBox(txId: ModifierId, boxIndex: Short) = - ErgoBox(value, ergoTree, creationHeight, additionalTokens, additionalRegisters, txId, boxIndex) + new ErgoBox(value, ergoTree, 
additionalTokens, additionalRegisters, txId, boxIndex, creationHeight) def get(identifier: RegisterId): Option[Value[SType]] = { identifier match { case ValueRegId => Some(LongConstant(value)) case ScriptRegId => Some(ByteArrayConstant(propositionBytes)) case TokensRegId => - val tokenTuples = additionalTokens.map { case (id, amount) => - Array(id, amount) - }.toArray - Some(Constant(tokenTuples.asWrappedType, STokensRegType)) + Some(Constant(additionalTokens.map { case (id, v) => (id.toColl, v) }.asWrappedType, STokensRegType)) // TODO optimize using mapFirst case ReferenceRegId => - val tupleVal = Array(creationHeight, Array.fill(34)(0: Byte)) + val tupleVal = (creationHeight, ErgoBoxCandidate.UndefinedBoxRef) Some(Constant(tupleVal.asWrappedType, SReferenceRegType)) case n: NonMandatoryRegisterId => additionalRegisters.get(n) @@ -78,16 +72,16 @@ class ErgoBoxCandidate(val value: Long, ScalaRunTime._hashCode((value, ergoTree, additionalTokens, additionalRegisters, creationHeight)) override def toString: Idn = s"ErgoBoxCandidate($value, $ergoTree," + - s"tokens: (${additionalTokens.map(t => Base16.encode(t._1) + ":" + t._2).mkString(", ")}), " + + s"tokens: (${additionalTokens.map(t => Base16.encode(t._1) + ":" + t._2).toArray.mkString(", ")}), " + s"$additionalRegisters, creationHeight: $creationHeight)" } object ErgoBoxCandidate { - + val UndefinedBoxRef = Array.fill(34)(0: Byte).toColl object serializer extends SigmaSerializer[ErgoBoxCandidate, ErgoBoxCandidate] { def serializeBodyWithIndexedDigests(obj: ErgoBoxCandidate, - tokensInTx: Option[Array[ofByte]], + tokensInTx: Option[Coll[TokenId]], w: SigmaByteWriter): Unit = { w.putULong(obj.value) w.putBytes(ErgoTreeSerializer.DefaultSerializer.serializeErgoTree(obj.ergoTree)) @@ -95,7 +89,7 @@ object ErgoBoxCandidate { w.putUByte(obj.additionalTokens.size) obj.additionalTokens.foreach { case (id, amount) => if (tokensInTx.isDefined) { - val tokenIndex = tokensInTx.get.indexOf(new ofByte(id)) + val tokenIndex = 
tokensInTx.get.indexWhere(v => util.Arrays.equals(v, id), 0) if (tokenIndex == -1) sys.error(s"failed to find token id ($id) in tx's digest index") w.putUInt(tokenIndex) } else { @@ -127,7 +121,7 @@ object ErgoBoxCandidate { serializeBodyWithIndexedDigests(obj, None, w) } - def parseBodyWithIndexedDigests(digestsInTx: Option[Array[Digest32]], r: SigmaByteReader): ErgoBoxCandidate = { + def parseBodyWithIndexedDigests(digestsInTx: Option[Coll[TokenId]], r: SigmaByteReader): ErgoBoxCandidate = { val value = r.getULong() val tree = ErgoTreeSerializer.DefaultSerializer.deserializeErgoTree(r) val creationHeight = r.getUInt().toInt @@ -135,13 +129,14 @@ object ErgoBoxCandidate { val addTokens = (0 until addTokensCount).map { _ => val tokenId = if (digestsInTx.isDefined) { val digestIndex = r.getUInt().toInt - if (!digestsInTx.get.isDefinedAt(digestIndex)) sys.error(s"failed to find token id with index $digestIndex") - digestsInTx.get.apply(digestIndex) + val digests = digestsInTx.get + if (!digests.isDefinedAt(digestIndex)) sys.error(s"failed to find token id with index $digestIndex") + digests(digestIndex) } else { - Digest32 @@ r.getBytes(TokenId.size) + r.getBytes(TokenId.size) } val amount = r.getULong() - tokenId -> amount + Digest32 @@ tokenId -> amount } val regsCount = r.getByte() val regs = (0 until regsCount).map { iReg => @@ -150,7 +145,7 @@ object ErgoBoxCandidate { val v = r.getValue().asInstanceOf[EvaluatedValue[SType]] (reg, v) }.toMap - new ErgoBoxCandidate(value, tree, creationHeight, addTokens, regs) + new ErgoBoxCandidate(value, tree, creationHeight, addTokens.toColl, regs) } override def parse(r: SigmaByteReader): ErgoBoxCandidate = { diff --git a/src/main/scala/org/ergoplatform/ErgoConstants.scala b/src/main/scala/org/ergoplatform/ErgoConstants.scala new file mode 100644 index 0000000000..8bdcdd0551 --- /dev/null +++ b/src/main/scala/org/ergoplatform/ErgoConstants.scala @@ -0,0 +1,66 @@ +package org.ergoplatform + +import sigmastate.{AtLeast, 
SBigInt, SPrimType} + +case class SizeConstant[T: Numeric](value: T, id: Short, description: String) { + def get: T = value +} + +/** + * Fundamental constants that are used in sigma's logic and checks + */ +object ErgoConstants { + + object MaxInputSize extends SizeConstant[Int](1024 * 1024 * 1, 1, + "Input size should not be greater then provided value") { + } + + object MaxTreeDepth extends SizeConstant[Int](110, 2, + "Max tree depth should not be greater then provided value") { + } + + object MaxByteArrayLength extends SizeConstant[Int](10000, 3, + "Max bytearray length") { + } + + object MaxTokens extends SizeConstant[Byte](4, 6, + "Tokens count should not be greater than provided value") { + } + + object MaxRegisters extends SizeConstant[Int](10, 7, + "Registers count should not be greater than provided value") { + } + + object MaxBoxSize extends SizeConstant[Int](64 * 1024, 8, + "Box size should not be greater than provided value") { + } + + object MaxBigIntSizeInBytes extends SizeConstant[Long](32L, 9, + "BigInt size in bytes should not be greater than provided value") { + } + + object MaxTupleLength extends SizeConstant[Int](255, 10, + "Tuple length should not be greater than provided value") { + } + + object MaxHeaders extends SizeConstant[Int](10, 11, + "Headers count should not be greater than provided value") { + } + + object MaxChildrenCountForAtLeastOp extends SizeConstant[Int](255, 12, + "Max children count should not be greater than provided value") { + } + + val ConstTable: Seq[SizeConstant[_]] = Seq( + MaxInputSize, + MaxTreeDepth, + MaxByteArrayLength, + MaxTokens, + MaxRegisters, + MaxBoxSize, + MaxBigIntSizeInBytes, + MaxTupleLength, + MaxHeaders, + MaxChildrenCountForAtLeastOp + ) +} diff --git a/src/main/scala/org/ergoplatform/ErgoLikeContext.scala b/src/main/scala/org/ergoplatform/ErgoLikeContext.scala index d552e846c7..abd15e913f 100644 --- a/src/main/scala/org/ergoplatform/ErgoLikeContext.scala +++ 
b/src/main/scala/org/ergoplatform/ErgoLikeContext.scala @@ -1,24 +1,24 @@ package org.ergoplatform -import java.math.BigInteger - -import org.bouncycastle.math.ec.ECPoint import org.ergoplatform.ErgoLikeContext.Height import scalan.RType -import scalan.RType.{TupleType, PairType} import sigmastate.Values._ import sigmastate._ import sigmastate.eval._ import sigmastate.eval.Extensions._ -import sigmastate.interpreter.{ContextExtension, Context => ErgoContext} +import sigmastate.interpreter.{ContextExtension, InterpreterContext} import sigmastate.serialization.OpCodes import sigmastate.serialization.OpCodes.OpCode -import special.collection.{Coll, CollType} +import special.collection.Coll import special.sigma -import special.sigma.{WrapperType, Header, Box, AnyValue, TestValue, PreHeader} +import special.sigma.{AnyValue, Box, Header, PreHeader} +import sigmastate.SType._ +import scalan.RType._ +import special.sigma.{AnyValue, Box, Header, PreHeader} +import special.sigma.{AnyValue, Box, Header, PreHeader} import SType._ import RType._ -import special.sigma.Extensions._ +import org.ergoplatform.validation.{SigmaValidationSettings, ValidationRules} import scala.util.Try @@ -49,8 +49,9 @@ class ErgoLikeContext(val currentHeight: Height, val boxesToSpend: IndexedSeq[ErgoBox], val spendingTransaction: ErgoLikeTransactionTemplate[_ <: UnsignedInput], val self: ErgoBox, - override val extension: ContextExtension = ContextExtension(Map()) - ) extends ErgoContext { + override val extension: ContextExtension = ContextExtension(Map()), + val validationSettings: SigmaValidationSettings = ValidationRules.currentSettings + ) extends InterpreterContext { assert(self == null || boxesToSpend.exists(box => box.id == self.id), s"Self box if defined should be among boxesToSpend") assert(preHeader == null || preHeader.height == currentHeight, "Incorrect preHeader height") @@ -64,12 +65,12 @@ class ErgoLikeContext(val currentHeight: Height, override def withExtension(newExtension: 
ContextExtension): ErgoLikeContext = new ErgoLikeContext( currentHeight, lastBlockUtxoRoot, minerPubkey, headers, preHeader, - dataBoxes, boxesToSpend, spendingTransaction, self, newExtension) + dataBoxes, boxesToSpend, spendingTransaction, self, newExtension, validationSettings) def withTransaction(newSpendingTransaction: ErgoLikeTransactionTemplate[_ <: UnsignedInput]): ErgoLikeContext = new ErgoLikeContext( currentHeight, lastBlockUtxoRoot, minerPubkey, headers, preHeader, - dataBoxes, boxesToSpend, newSpendingTransaction, self, extension) + dataBoxes, boxesToSpend, newSpendingTransaction, self, extension, validationSettings) import ErgoLikeContext._ import Evaluation._ @@ -84,8 +85,7 @@ class ErgoLikeContext(val currentHeight: Height, spendingTransaction.outputs.toArray.map(_.toTestBox(isCost)).toColl val varMap = extension.values.mapValues { case v: EvaluatedValue[_] => val tVal = stypeToRType[SType](v.tpe) - val dslData = Evaluation.toDslData(v.value, v.tpe, isCost) - toAnyValue(dslData.asWrappedType)(tVal) + toAnyValue(v.value.asWrappedType)(tVal) } val vars = contextVars(varMap ++ extensions) val avlTree = CAvlTree(lastBlockUtxoRoot) @@ -108,7 +108,7 @@ object ErgoLikeContext { val dummyPreHeader: PreHeader = null /** Maximimum number of headers in `headers` collection of the context. 
*/ - val MaxHeaders = 10 + val MaxHeaders = ErgoConstants.MaxHeaders.get def apply(currentHeight: Height, lastBlockUtxoRoot: AvlTreeData, @@ -116,12 +116,25 @@ object ErgoLikeContext { boxesToSpend: IndexedSeq[ErgoBox], spendingTransaction: ErgoLikeTransactionTemplate[_ <: UnsignedInput], self: ErgoBox, - extension: ContextExtension = ContextExtension(Map())) = + extension: ContextExtension = ContextExtension(Map()), + vs: SigmaValidationSettings = ValidationRules.currentSettings) = new ErgoLikeContext(currentHeight, lastBlockUtxoRoot, minerPubkey, noHeaders, dummyPreHeader, noBoxes, - boxesToSpend, spendingTransaction, self, extension) + boxesToSpend, spendingTransaction, self, extension, vs) + + def apply(currentHeight: Height, + lastBlockUtxoRoot: AvlTreeData, + minerPubkey: Array[Byte], + dataBoxes: IndexedSeq[ErgoBox], + boxesToSpend: IndexedSeq[ErgoBox], + spendingTransaction: ErgoLikeTransactionTemplate[_ <: UnsignedInput], + self: ErgoBox) = + new ErgoLikeContext(currentHeight, lastBlockUtxoRoot, minerPubkey, + noHeaders, + dummyPreHeader, + dataBoxes, boxesToSpend, spendingTransaction, self, ContextExtension(Map())) def dummy(selfDesc: ErgoBox) = ErgoLikeContext(currentHeight = 0, @@ -150,7 +163,6 @@ object ErgoLikeContext { val noOutputs: Array[Box] = Array[Box]() import special.sigma._ - import sigmastate.SType._ def contextVars(m: Map[Byte, AnyValue])(implicit IR: Evaluation): Coll[AnyValue] = { val maxKey = if (m.keys.isEmpty) 0 else m.keys.max @@ -163,54 +175,68 @@ object ErgoLikeContext { } implicit class ErgoBoxOps(val ebox: ErgoBox) extends AnyVal { - def toTestBox(isCost: Boolean)(implicit IR: Evaluation): Box = { + def toTestBox(isCost: Boolean): Box = { if (ebox == null) return null - new CostingBox(IR, isCost, ebox) + new CostingBox(isCost, ebox) } } } /** When interpreted evaluates to a ByteArrayConstant built from Context.minerPubkey */ -case object MinerPubkey extends NotReadyValueByteArray { - override val opCode: OpCode = 
OpCodes.MinerPubkeyCode +case object MinerPubkey extends NotReadyValueByteArray with ValueCompanion { + override def opCode: OpCode = OpCodes.MinerPubkeyCode def opType = SFunc(SContext, SCollection.SByteArray) + override def companion = this } /** When interpreted evaluates to a IntConstant built from Context.currentHeight */ -case object Height extends NotReadyValueInt { - override val opCode: OpCode = OpCodes.HeightCode +case object Height extends NotReadyValueInt with ValueCompanion { + override def companion = this + override def opCode: OpCode = OpCodes.HeightCode def opType = SFunc(SContext, SInt) } /** When interpreted evaluates to a collection of BoxConstant built from Context.boxesToSpend */ -case object Inputs extends LazyCollection[SBox.type] { - override val opCode: OpCode = OpCodes.InputsCode +case object Inputs extends LazyCollection[SBox.type] with ValueCompanion { + override def companion = this + override def opCode: OpCode = OpCodes.InputsCode val tpe = SCollection(SBox) def opType = SFunc(SContext, tpe) } /** When interpreted evaluates to a collection of BoxConstant built from Context.spendingTransaction.outputs */ -case object Outputs extends LazyCollection[SBox.type] { - override val opCode: OpCode = OpCodes.OutputsCode +case object Outputs extends LazyCollection[SBox.type] with ValueCompanion { + override def companion = this + override def opCode: OpCode = OpCodes.OutputsCode val tpe = SCollection(SBox) def opType = SFunc(SContext, tpe) } /** When interpreted evaluates to a AvlTreeConstant built from Context.lastBlockUtxoRoot */ -case object LastBlockUtxoRootHash extends NotReadyValueAvlTree { - override val opCode: OpCode = OpCodes.LastBlockUtxoRootHashCode +case object LastBlockUtxoRootHash extends NotReadyValueAvlTree with ValueCompanion { + override def companion = this + override def opCode: OpCode = OpCodes.LastBlockUtxoRootHashCode def opType = SFunc(SContext, tpe) } /** When interpreted evaluates to a BoxConstant built from 
Context.self */ -case object Self extends NotReadyValueBox { - override val opCode: OpCode = OpCodes.SelfCode +case object Self extends NotReadyValueBox with ValueCompanion { + override def companion = this + override def opCode: OpCode = OpCodes.SelfCode def opType = SFunc(SContext, SBox) } -case object Context extends NotReadyValue[SContext.type] { - override val opCode: OpCode = OpCodes.ContextCode +case object Context extends NotReadyValue[SContext.type] with ValueCompanion { + override def companion = this + override def opCode: OpCode = OpCodes.ContextCode override def tpe: SContext.type = SContext override def opType: SFunc = SFunc(SUnit, SContext) } + +case object Global extends NotReadyValue[SGlobal.type] with ValueCompanion { + override def companion = this + override def opCode: OpCode = OpCodes.GlobalCode + override def tpe: SGlobal.type = SGlobal + override def opType: SFunc = SFunc(SUnit, SGlobal) +} diff --git a/src/main/scala/org/ergoplatform/ErgoLikeInterpreter.scala b/src/main/scala/org/ergoplatform/ErgoLikeInterpreter.scala index 9bdc46ab6d..ffdce3292c 100644 --- a/src/main/scala/org/ergoplatform/ErgoLikeInterpreter.scala +++ b/src/main/scala/org/ergoplatform/ErgoLikeInterpreter.scala @@ -17,7 +17,7 @@ class ErgoLikeInterpreter(override val maxCost: Long = CostTable.ScriptLimit)(im context.self.get(d.reg).flatMap { v => v match { case eba: EvaluatedValue[SByteArray]@unchecked => - val outVal = ValueSerializer.deserialize(eba.value) + val outVal = ValueSerializer.deserialize(eba.value.toArray) if (outVal.tpe != d.tpe) sys.error(s"Failed deserialization of $d: expected deserialized value to have type ${d.tpe}; got ${outVal.tpe}") else diff --git a/src/main/scala/org/ergoplatform/ErgoLikeTransaction.scala b/src/main/scala/org/ergoplatform/ErgoLikeTransaction.scala index 143f6107c0..dfd8b0b88c 100644 --- a/src/main/scala/org/ergoplatform/ErgoLikeTransaction.scala +++ b/src/main/scala/org/ergoplatform/ErgoLikeTransaction.scala @@ -1,5 +1,7 @@ package 
org.ergoplatform +import java.util + import org.ergoplatform.ErgoBox.TokenId import scorex.crypto.authds.ADKey import scorex.crypto.hash.{Blake2b256, Digest32} @@ -7,10 +9,13 @@ import scorex.util._ import sigmastate.interpreter.ProverResult import sigmastate.serialization.SigmaSerializer import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} +import special.collection.ExtensionMethods._ +import sigmastate.eval.Extensions._ import scala.collection.mutable import scala.util.Try - +import sigmastate.SType._ +import sigmastate.eval._ trait ErgoBoxReader { def byId(boxId: ADKey): Try[ErgoBox] @@ -108,13 +113,14 @@ object ErgoLikeTransactionSerializer extends SigmaSerializer[ErgoLikeTransaction w.putBytes(input.boxId) } // serialize distinct ids of tokens in transaction outputs - val distinctTokenIds = tx.outputCandidates - .flatMap(_.additionalTokens.map(t => new mutable.WrappedArray.ofByte(t._1))) - .distinct - .toArray + val tokenIds = tx.outputCandidates.toColl + .flatMap(box => box.additionalTokens.map(t => t._1)) + + val distinctTokenIds = tokenIds.map(_.toColl).distinct.map(_.toArray.asInstanceOf[TokenId]) + w.putUInt(distinctTokenIds.length) distinctTokenIds.foreach { tokenId => - w.putBytes(tokenId.array) + w.putBytes(tokenId.toArray) } // serialize outputs w.putUShort(tx.outputCandidates.length) @@ -138,12 +144,13 @@ object ErgoLikeTransactionSerializer extends SigmaSerializer[ErgoLikeTransaction } // parse distinct ids of tokens in transaction outputs val tokensCount = r.getUInt().toInt - val tokensBuilder = mutable.ArrayBuilder.make[Digest32]() + val tokensBuilder = mutable.ArrayBuilder.make[TokenId]() for (_ <- 0 until tokensCount) { tokensBuilder += Digest32 @@ r.getBytes(TokenId.size) } + val tokens = tokensBuilder.result().toColl // parse outputs - val tokens = tokensBuilder.result() + val outsCount = r.getUShort() val outputCandidatesBuilder = mutable.ArrayBuilder.make[ErgoBoxCandidate]() for (_ <- 0 until outsCount) { diff --git 
a/src/main/scala/org/ergoplatform/ErgoScriptPredef.scala b/src/main/scala/org/ergoplatform/ErgoScriptPredef.scala index bfed9b0124..abb4f98874 100644 --- a/src/main/scala/org/ergoplatform/ErgoScriptPredef.scala +++ b/src/main/scala/org/ergoplatform/ErgoScriptPredef.scala @@ -9,8 +9,8 @@ import sigmastate.eval.IRContext import sigmastate.interpreter.CryptoConstants import sigmastate.lang.Terms.ValueOps import sigmastate.{SLong, _} -import sigmastate.lang.{TransformingSigmaBuilder, SigmaCompiler} -import sigmastate.serialization.ErgoTreeSerializer +import sigmastate.lang.{SigmaCompiler, TransformingSigmaBuilder} +import sigmastate.serialization.ErgoTreeSerializer.DefaultSerializer import sigmastate.utxo._ object ErgoScriptPredef { @@ -37,7 +37,7 @@ object ErgoScriptPredef { def expectedMinerOutScriptBytesVal(delta: Int, minerPkBytesVal: Value[SByteArray]): Value[SByteArray] = { val genericPk = ProveDlog(CryptoConstants.dlogGroup.generator) val genericMinerProp = rewardOutputScript(delta, genericPk) - val genericMinerPropBytes = ErgoTreeSerializer.DefaultSerializer.serializeWithSegregation(genericMinerProp) + val genericMinerPropBytes = DefaultSerializer.serializeErgoTree(genericMinerProp) // first segregated constant is delta, so key is second constant val positions = IntArrayConstant(Array[Int](1)) val minerPubkeySigmaProp = CreateProveDlog(DecodePoint(minerPkBytesVal)) @@ -48,30 +48,30 @@ object ErgoScriptPredef { /** * Required script of the box, that collects mining rewards */ - def rewardOutputScript(delta: Int, minerPk: ProveDlog): SigmaPropValue = { + def rewardOutputScript(delta: Int, minerPk: ProveDlog): ErgoTree = { SigmaAnd( GE(Height, Plus(boxCreationHeight(Self), IntConstant(delta))).toSigmaProp, SigmaPropConstant(minerPk) - ) + ).treeWithSegregation } /** * Proposition that allows to send coins to a box which is protected by the following proposition: * prove dlog of miner's public key and height is at least `delta` blocks bigger then the current one. 
*/ - def feeProposition(delta: Int = 720): SigmaPropValue = { + def feeProposition(delta: Int = 720): ErgoTree = { val out = ByIndex(Outputs, IntConstant(0)) AND( EQ(Height, boxCreationHeight(out)), EQ(ExtractScriptBytes(out), expectedMinerOutScriptBytesVal(delta, MinerPubkey)), EQ(SizeOf(Outputs), 1) - ).toSigmaProp + ).toSigmaProp.treeWithSegregation } /** * A contract that only allows to collect emission reward by a box with miner proposition. */ - def emissionBoxProp(s: MonetarySettings): SigmaPropValue = { + def emissionBoxProp(s: MonetarySettings): ErgoTree = { val rewardOut = ByIndex(Outputs, IntConstant(0)) val minerOut = ByIndex(Outputs, IntConstant(1)) @@ -97,7 +97,7 @@ object ErgoScriptPredef { heightIncreased, correctMinerOutput, OR(AND(outputsNum, sameScriptRule, correctCoinsConsumed, heightCorrect), lastCoins) - ).toSigmaProp + ).toSigmaProp.treeWithSegregation } /** @@ -115,7 +115,7 @@ object ErgoScriptPredef { * may add or remove members, or change it to something more complicated like * `tokenThresholdScript`. 
*/ - def foundationScript(s: MonetarySettings): SigmaPropValue = { + def foundationScript(s: MonetarySettings): ErgoTree = { // new output of the foundation val newFoundationBox = ByIndex(Outputs, IntConstant(0)) // calculate number of coins, that are not issued yet and should be kept in `newFoundationBox` @@ -155,7 +155,7 @@ object ErgoScriptPredef { // check, that additional rules defined by foundation members are satisfied val customProposition = DeserializeRegister(ErgoBox.R4, SSigmaProp) // combine 3 conditions above with AND conjunction - SigmaAnd(amountCorrect.toSigmaProp, sameScriptRule.toSigmaProp, customProposition) + SigmaAnd(amountCorrect.toSigmaProp, sameScriptRule.toSigmaProp, customProposition).treeWithSegregation } /** diff --git a/src/main/scala/org/ergoplatform/dsl/AvlTreeHelpers.scala b/src/main/scala/org/ergoplatform/dsl/AvlTreeHelpers.scala index 17268a9d4c..a06b32db03 100644 --- a/src/main/scala/org/ergoplatform/dsl/AvlTreeHelpers.scala +++ b/src/main/scala/org/ergoplatform/dsl/AvlTreeHelpers.scala @@ -1,6 +1,5 @@ package org.ergoplatform.dsl -import scalan.RType import special.collection.Coll import sigmastate.serialization.OperationSerializer import sigmastate.eval.{CAvlTree, CostingSigmaDslBuilder} @@ -9,9 +8,9 @@ import scorex.crypto.hash.{Digest32, Blake2b256} import sigmastate.{AvlTreeData, AvlTreeFlags} import special.sigma.AvlTree import scorex.crypto.authds.avltree.batch.{BatchAVLProver, Operation, Insert} +import CostingSigmaDslBuilder.Colls object AvlTreeHelpers { - val Colls = CostingSigmaDslBuilder.Colls /** Create authenticated dictionary with given allowed operations and key-value entries. 
*/ def createAvlTree(flags: AvlTreeFlags, entries: (ADKey, ADValue)*): (AvlTree, BatchAVLProver[Digest32, Blake2b256.type]) = { @@ -19,7 +18,8 @@ object AvlTreeHelpers { val ok = entries.forall { case (key, value) => avlProver.performOneOperation(Insert(key, value)).isSuccess } - val proof = avlProver.generateProof() + if (!ok) throw new Exception("Test tree generation failed") + val _ = avlProver.generateProof() val digest = avlProver.digest val treeData = new AvlTreeData(digest, flags, 32, None) (CAvlTree(treeData), avlProver) @@ -34,10 +34,12 @@ object AvlTreeHelpers { implicit class ADKeyArrayOps(arr: Array[ADKey]) { def toColl: Coll[Coll[Byte]] = Colls.fromArray(arr.map(x => Colls.fromArray(x))) } + implicit class ADKeyValueArrayOps(arr: Array[(ADKey, ADValue)]) { def toColl: Coll[(Coll[Byte], Coll[Byte])] = { val kvs = arr.map { case (k, v) => (Colls.fromArray(k), Colls.fromArray(v)) } Colls.fromArray(kvs) } } + } diff --git a/src/main/scala/org/ergoplatform/dsl/ContractSpec.scala b/src/main/scala/org/ergoplatform/dsl/ContractSpec.scala index 7228f92b27..c8563a5ae4 100644 --- a/src/main/scala/org/ergoplatform/dsl/ContractSpec.scala +++ b/src/main/scala/org/ergoplatform/dsl/ContractSpec.scala @@ -1,13 +1,12 @@ package org.ergoplatform.dsl -import sigmastate.SType import org.ergoplatform.ErgoBox.{NonMandatoryRegisterId, BoxId} import sigmastate.interpreter.{ProverResult, CostedProverResult} import scalan.RType import org.ergoplatform.{ErgoLikeContext, ErgoBox} import special.sigma.{SigmaDslBuilder, AnyValue, SigmaProp} -import sigmastate.Values.{ErgoTree, Constant} -import sigmastate.eval.{IRContext, CostingSigmaDslBuilder, Evaluation} +import sigmastate.Values.ErgoTree +import sigmastate.eval.{IRContext, CostingSigmaDslBuilder} import scala.util.Try import org.ergoplatform.dsl.ContractSyntax.{Token, TokenId, ErgoScript, Proposition} diff --git a/src/main/scala/org/ergoplatform/dsl/ContractSyntax.scala b/src/main/scala/org/ergoplatform/dsl/ContractSyntax.scala 
index f719164a6b..eb17ea217f 100644 --- a/src/main/scala/org/ergoplatform/dsl/ContractSyntax.scala +++ b/src/main/scala/org/ergoplatform/dsl/ContractSyntax.scala @@ -1,20 +1,14 @@ package org.ergoplatform.dsl -import org.ergoplatform.{ErgoLikeContext, ErgoBox} -import org.ergoplatform.ErgoBox.{NonMandatoryRegisterId, BoxId} import scalan.RType import sigmastate.SType import sigmastate.SType.AnyOps -import org.ergoplatform.dsl.ContractSyntax.{Token, TokenId, ErgoScript, Proposition} -import sigmastate.Values.{ErgoTree, Constant} -import sigmastate.eval.{IRContext, CSigmaProp, CostingSigmaDslBuilder, Evaluation} -import sigmastate.interpreter.{ProverResult, CostedProverResult} +import org.ergoplatform.dsl.ContractSyntax.{ErgoScript, Proposition} +import sigmastate.eval.{CostingSigmaDslBuilder, Evaluation} import sigmastate.interpreter.Interpreter.ScriptEnv import special.collection.Coll -import special.sigma.{SigmaProp, SigmaContract, AnyValue, Context, DslSyntaxExtensions, SigmaDslBuilder} - +import special.sigma.{SigmaProp, SigmaContract, Context, DslSyntaxExtensions, SigmaDslBuilder} import scala.language.implicitConversions -import scala.util.Try trait ContractSyntax { contract: SigmaContract => override def builder: SigmaDslBuilder = new CostingSigmaDslBuilder @@ -31,10 +25,8 @@ trait ContractSyntax { contract: SigmaContract => def proposition(name: String, dslSpec: Proposition, scriptCode: String) = { val env = contractEnv.mapValues { v => val tV = Evaluation.rtypeOf(v).get - val treeType = Evaluation.toErgoTreeType(tV) - val data = Evaluation.fromDslData(v, treeType) - val elemTpe = Evaluation.rtypeToSType(treeType) - spec.IR.builder.mkConstant[SType](data.asWrappedType, elemTpe) + val elemTpe = Evaluation.rtypeToSType(tV) + spec.IR.builder.mkConstant[SType](v.asWrappedType, elemTpe) } spec.mkPropositionSpec(name, dslSpec, ErgoScript(env, scriptCode)) } diff --git a/src/main/scala/org/ergoplatform/settings/MonetarySettings.scala 
b/src/main/scala/org/ergoplatform/settings/MonetarySettings.scala index f1b1a11da5..911e1c1c31 100644 --- a/src/main/scala/org/ergoplatform/settings/MonetarySettings.scala +++ b/src/main/scala/org/ergoplatform/settings/MonetarySettings.scala @@ -2,8 +2,7 @@ package org.ergoplatform.settings import org.ergoplatform.ErgoScriptPredef import org.ergoplatform.mining.emission.EmissionRules -import sigmastate.Values.{Value, SigmaPropValue} -import sigmastate.{SBoolean, Values} +import sigmastate.Values.ErgoTree /** * Configuration file for monetary settings of Ergo chain @@ -17,9 +16,9 @@ case class MonetarySettings(fixedRatePeriod: Int = 30 * 2 * 24 * 365, minerRewardDelay: Int = 720, foundersInitialReward: Long = 75L * EmissionRules.CoinsInOneErgo / 10) { - val feeProposition: SigmaPropValue = ErgoScriptPredef.feeProposition(minerRewardDelay) + val feeProposition: ErgoTree = ErgoScriptPredef.feeProposition(minerRewardDelay) val feePropositionBytes: Array[Byte] = feeProposition.bytes - val emissionBoxProposition: SigmaPropValue = ErgoScriptPredef.emissionBoxProp(this) - val foundersBoxProposition: SigmaPropValue = ErgoScriptPredef.foundationScript(this) + val emissionBoxProposition: ErgoTree = ErgoScriptPredef.emissionBoxProp(this) + val foundersBoxProposition: ErgoTree = ErgoScriptPredef.foundationScript(this) } diff --git a/src/main/scala/org/ergoplatform/validation/RuleStatus.scala b/src/main/scala/org/ergoplatform/validation/RuleStatus.scala new file mode 100644 index 0000000000..f40a916701 --- /dev/null +++ b/src/main/scala/org/ergoplatform/validation/RuleStatus.scala @@ -0,0 +1,55 @@ +package org.ergoplatform.validation + +import java.util + +/** Base trait for rule status information. 
*/ +sealed trait RuleStatus { + def statusCode: Byte +} +object RuleStatus { + val EnabledRuleCode: Byte = 1.toByte + val DisabledRuleCode: Byte = 2.toByte + val ReplacedRuleCode: Byte = 3.toByte + val ChangedRuleCode: Byte = 4.toByte +} + +/** This is a default status of a rule which is registered in the table + * and not yet altered by soft-forks. + */ +case object EnabledRule extends RuleStatus { + val statusCode: Byte = RuleStatus.EnabledRuleCode +} + +/** This is a status of a rule which is disabled in current version + * and not yet altered by soft-forks. + * The rule can be disabled via block extensions and voting process. + */ +case object DisabledRule extends RuleStatus { + val statusCode: Byte = RuleStatus.DisabledRuleCode +} + +/** The status of the rule which is replaced by a new rule via soft-fork extensions. + * This is similar to DisabledRule, but in addition require the new rule to be enabled + * at the same time (i.e. atomically) + * @see `ValidationSettings.isSoftFork` + * @param newRuleId id of a new rule which replaces the rule marked with this status + */ +case class ReplacedRule(newRuleId: Short) extends RuleStatus { + val statusCode: Byte = RuleStatus.ReplacedRuleCode +} + +/** The status of the rule whose parameters are changed via soft-fork extensions. + * The same rule can be changed many times via voting. 
+ * @param newValue new value of block extension value with key == rule.id + */ +case class ChangedRule(newValue: Array[Byte]) extends RuleStatus { + val statusCode: Byte = RuleStatus.ChangedRuleCode + + override def hashCode(): Int = util.Arrays.hashCode(newValue) + + override def canEqual(that: Any): Boolean = that.isInstanceOf[ChangedRule] + + override def equals(obj: Any): Boolean = (this eq obj.asInstanceOf[AnyRef]) || (obj match { + case that: ChangedRule => util.Arrays.equals(newValue, that.newValue) case _ => false + }) +} diff --git a/src/main/scala/org/ergoplatform/validation/RuleStatusSerializer.scala b/src/main/scala/org/ergoplatform/validation/RuleStatusSerializer.scala new file mode 100644 index 0000000000..e032f0f647 --- /dev/null +++ b/src/main/scala/org/ergoplatform/validation/RuleStatusSerializer.scala @@ -0,0 +1,61 @@ +package org.ergoplatform.validation + +import sigmastate.serialization.SigmaSerializer +import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} + +object RuleStatusSerializer extends SigmaSerializer[RuleStatus, RuleStatus] { + import RuleStatus._ + + val FirstRuleId = 1000 + + def measureWrittenBytes(block: SigmaByteWriter => Unit): Int = { + val w = SigmaSerializer.startWriter() + block(w) + val bytes = w.toBytes + bytes.length + } + + /** The general format for RuleStatuses + * field | Format | #bytes | Description + * ---------------------------------------------------------------------- + * dataSize | UShort | 1..2 bytes | number of bytes for dataBytes + * statusCode | Byte | 1 byte | code of the status type + * dataBytes | Bytes | dataSize bytes | serialized bytes of status value + */ + override def serialize(status: RuleStatus, w: SigmaByteWriter): Unit = status match { + case EnabledRule | DisabledRule => + w.putUShort(0) // zero bytes for dataBytes + w.put(status.statusCode) + case ReplacedRule(newRuleId) => + val ofs = (newRuleId - FirstRuleId) // id offset + val dataSize = measureWrittenBytes(w => w.putUShort(ofs)) // number of bytes
to store id offset + w.putUShort(dataSize) // size of dataBytes + w.put(status.statusCode) + w.putUShort(ofs) // dataBytes + case ChangedRule(data) => + w.putUShort(data.length) + w.put(status.statusCode) + w.putBytes(data) + } + + override def parse(r: SigmaByteReader): RuleStatus = { + val dataSize = r.getUShort() // read number of bytes occupied by status data + val statusType = r.getByte() + statusType match { + case EnabledRuleCode => + EnabledRule + case DisabledRuleCode => + DisabledRule // the rule is explicitly disabled + case ReplacedRuleCode => + val newRule = (r.getUShort() + FirstRuleId).toShort // store small offsets using single byte + ReplacedRule(newRule) // the rule is disabled, but we also have info about new rule + case ChangedRuleCode => + val bytes = r.getBytes(dataSize) // value bytes except statusType + ChangedRule(bytes) + case _ => + r.position += dataSize // skip status bytes which we don't understand + ReplacedRule(0) // unrecognized status code, the old code should process it as soft-fork + } + } +} + diff --git a/src/main/scala/org/ergoplatform/validation/SigmaValidationSettings.scala b/src/main/scala/org/ergoplatform/validation/SigmaValidationSettings.scala new file mode 100644 index 0000000000..b062cf501d --- /dev/null +++ b/src/main/scala/org/ergoplatform/validation/SigmaValidationSettings.scala @@ -0,0 +1,76 @@ +package org.ergoplatform.validation + +/** + * Configuration of validation. Each `ValidationRule` instance should be + * implemented as an `object` to facilitate type-safe usage. It then should be + * registered in `ValidationRules.currentSettings` to be used in the code to + * perform validation. Added to `currentSettings` the rule has EnabledRule + * status by default, but only in a current version of the code. Thus, the + * value `currentSettings` represents the validation settings of the current + * version of the code. 
The set of rules in `currentSettings` is fixed in the + * current version of the code and thus only rule status can be changed (as + * described below) + * + * Old versions of the code don't have access to the rules added in newer + * versions. The implementation of the specific rule, once released under + * specific ruleId, should never be changed, hence ruleId denotes that + * implementation. However, the behavior of rules (released with code) can be + * altered by changing their status in block extensions section via voting. + * + * The status changes are represented in ValidationSettings using the + * RuleStatus type. Each descendant class represents a particular change in the + * rule status. Rule ids are used as keys of the status values stored in the + * block extension section. RuleStatus instances are deserialized from the + * block extension values. Deserialized (ruleId, status) pairs are joined with + * the (ruleId,status) pairs in `currentSettings`, and for matching ruleIds the + * default statuses stored in `currentSettings` are replaced with the new + * statuses obtained from the blockchain. Deserialized (ruleId,status) pairs + * which don't match with `currentSettings` are ignored. + * + * Each rule has associated check of soft-fork condition by implementing + * `isSoftFork` method. If `isSoftFork` returns true, then ValidationException + * raised by the rule is interpreted as *soft-fork condition*. Depending on the + * use case, soft-fork condition allows some operations performed by an old + * code to succeed which otherwise would fail due to ValidationException raised + * by the validation rule. One notable use case is Box.ergoTree validation in + * which old code can skip ValidationExceptions under soft-fork condition (i.e. + * when isSoftFork returns true), for example when a new opCode is added in the + * newer version of the protocol, and this fact can be recognized by the old + * code.
+ * @see SoftForkWhenCodeAdded + */ +abstract class SigmaValidationSettings extends Iterable[(Short, (ValidationRule, RuleStatus))] { + def get(id: Short): Option[(ValidationRule, RuleStatus)] + def getStatus(id: Short): Option[RuleStatus] + def updated(id: Short, newStatus: RuleStatus): SigmaValidationSettings + def isSoftFork(ve: ValidationException): Boolean = isSoftFork(ve.rule.id, ve) + def isSoftFork(ruleId: Short, ve: ValidationException): Boolean = { + val infoOpt = get(ruleId) + infoOpt match { + case Some((_, ReplacedRule(newRuleId))) => true + case Some((rule, status)) => rule.isSoftFork(this, rule.id, status, ve.args) + case None => false + } + } +} + +/** Default representation of validation settings. */ +sealed class MapSigmaValidationSettings(private val map: Map[Short, (ValidationRule, RuleStatus)]) extends SigmaValidationSettings { + override def iterator: Iterator[(Short, (ValidationRule, RuleStatus))] = map.iterator + override def get(id: Short): Option[(ValidationRule, RuleStatus)] = map.get(id) + override def getStatus(id: Short): Option[RuleStatus] = map.get(id).map(_._2) + override def updated(id: Short, newStatus: RuleStatus): MapSigmaValidationSettings = { + val (rule,_) = map(id) + new MapSigmaValidationSettings(map.updated(id, (rule, newStatus))) + } + + override def canEqual(that: Any): Boolean = that.isInstanceOf[MapSigmaValidationSettings] + + override def equals(obj: Any): Boolean = (this eq obj.asInstanceOf[AnyRef]) || (obj match { + case that: MapSigmaValidationSettings => map == that.map + case _ => false + }) + + override def hashCode(): Int = map.hashCode() +} + diff --git a/src/main/scala/org/ergoplatform/validation/SigmaValidationSettingsSerializer.scala b/src/main/scala/org/ergoplatform/validation/SigmaValidationSettingsSerializer.scala new file mode 100644 index 0000000000..6d0e44253a --- /dev/null +++ b/src/main/scala/org/ergoplatform/validation/SigmaValidationSettingsSerializer.scala @@ -0,0 +1,35 @@ +package 
org.ergoplatform.validation + +import sigmastate.serialization.SigmaSerializer +import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} +import sigma.util.Extensions.{IntOps,LongOps} + +/** The rules are serialized ordered by ruleId. + * This serializer preserves roundtrip identity `deserialize(serialize(_)) = identity` + * however it may not preserve `serialize(deserialize(_)) = identity` */ +object SigmaValidationSettingsSerializer extends SigmaSerializer[SigmaValidationSettings, SigmaValidationSettings] { + + override def serialize(settings: SigmaValidationSettings, w: SigmaByteWriter): Unit = { + val rules = settings.toArray.sortBy(_._1) + w.putUInt(rules.length) + rules.foreach { r => + w.putUShort(r._1) + RuleStatusSerializer.serialize(r._2._2, w) + } + } + + override def parse(r: SigmaByteReader): SigmaValidationSettings = { + val nRules = r.getUInt().toIntExact + val parsed = (0 until nRules).map { _ => + val ruleId = r.getUShort().toShortExact + val status = RuleStatusSerializer.parse(r) + ruleId -> status + } + val initVs = ValidationRules.currentSettings + val res = parsed + .filter(pair => initVs.get(pair._1).isDefined) + .foldLeft(initVs) { (vs, rule) => vs.updated(rule._1, rule._2) } + res + } +} + diff --git a/src/main/scala/org/ergoplatform/validation/SoftForkChecker.scala b/src/main/scala/org/ergoplatform/validation/SoftForkChecker.scala new file mode 100644 index 0000000000..3af05c6f6d --- /dev/null +++ b/src/main/scala/org/ergoplatform/validation/SoftForkChecker.scala @@ -0,0 +1,42 @@ +package org.ergoplatform.validation + +/** Interface implemented by objects capable of checking soft-fork conditions. */ +trait SoftForkChecker { + /** Check soft-fork condition. 
+ * @param vs ValidationSettings actualized from blockchain extension sections + * @param ruleId id of the rule which raised ValidationException + * @param status status of the rule in the blockchain (agreed upon via voting) + * @param args arguments of Validation rule with which the rule has raised the exception + * @return true if `args` and `status` can be interpreted as valid soft-fork condition. + */ + def isSoftFork(vs: SigmaValidationSettings, ruleId: Short, status: RuleStatus, args: Seq[Any]): Boolean = false +} + +/** Checks that the failed validation rule has ReplacedRule status in block extensions section. + * This means the rule given by `ruleId` is not used in newer versions of the protocol. + * Instead it has been replaced by the new rule given by ReplacedRule status. + */ +trait SoftForkWhenReplaced extends SoftForkChecker { + override def isSoftFork(vs: SigmaValidationSettings, + ruleId: Short, + status: RuleStatus, + args: Seq[Any]): Boolean = (status, args) match { + case (ReplacedRule(_), _) => true + case _ => false + } +} + +/** Checks that the unknown `code` is however present in the ChangedRule new value + * stored in block extensions section. This is interpreted as soft-fork condition, + * i.e. the unknown `code` is not arbitrary, but explicitly added to the blockchain + * configuration and implemented in newer versions of the protocol.
 */ +trait SoftForkWhenCodeAdded extends SoftForkChecker { + override def isSoftFork(vs: SigmaValidationSettings, + ruleId: Short, + status: RuleStatus, + args: Seq[Any]): Boolean = (status, args) match { + case (ChangedRule(newValue), Seq(code: Byte)) => newValue.contains(code) + case _ => false + } +} diff --git a/src/main/scala/org/ergoplatform/validation/ValidationRules.scala b/src/main/scala/org/ergoplatform/validation/ValidationRules.scala new file mode 100644 index 0000000000..cd3bd3d4e3 --- /dev/null +++ b/src/main/scala/org/ergoplatform/validation/ValidationRules.scala @@ -0,0 +1,203 @@ +package org.ergoplatform.validation + +import sigmastate.eval.IRContext +import sigmastate.serialization.DataSerializer.CheckSerializableTypeCode +import sigmastate.serialization.OpCodes.OpCode +import sigmastate.Values.{ErgoTree, IntValue, SValue, Value} +import sigmastate.serialization.{OpCodes, ValueSerializer} +import sigmastate.utxo.DeserializeContext +import sigmastate.lang.exceptions._ +import sigmastate.serialization.TypeSerializer.{CheckPrimitiveTypeCode, CheckTypeCode} +import sigmastate.{CheckAndGetMethod, CheckTypeWithMethods, SCollection, SType} +import sigma.util.Extensions.ByteOps + +/** Base class for different validation rules registered in ValidationRules.currentSettings. + * Each rule is identified by `id` and has a description. + * Validation logic is implemented by `apply` methods of derived classes. + */ +case class ValidationRule( + id: Short, + description: String +) extends SoftForkChecker { + + /** Generic helper method to implement validation rules. + * It executes the given `block` only when this rule is disabled or `condition` is satisfied. + * Should be used in derived classes to implement validation logic.
+ * + * @tparam T type of the result produced by `block` + * @param condition executes condition to be checked and returns its result + * @param cause executed only when condition returns false, attached as `cause` parameter when Validation exception + * @param args parameters which should be attached to ValidationException + * @param block executed only when condition returns true, its result become a result of `validate` call. + * @return result produced by the `block` if condition is true + * @throws SigmaException if this rule is not found in ValidationRules.currentSettings + * @throws ValidationException if the `condition` is not true. + * + * @see ValidationRules + */ + protected def validate[T]( + condition: => Boolean, + cause: => Throwable, args: Seq[Any], block: => T): T = { + ValidationRules.currentSettings.getStatus(this.id) match { + case None => + throw new SigmaException(s"ValidationRule $this not found in validation settings") + case Some(DisabledRule) => + block // if the rule is disabled we still need to execute the block of code + case Some(_) => + if (condition) block + else throw ValidationException(s"Validation failed on $this with args $args", this, args, Option(cause)) + } + } +} + +/** Base class for all exceptions which may be thrown by validation rules. + * Instances of this class are used as messages to communicate soft-fork information, + * from the context where the soft-fork condition is detected (such as in ValidationRules), + * up the stack to the point where it is clear how to handle it. + * Some messages of this kind are not handled, in which case a new Exception is thrown + * and this instance should be attached as a `cause` parameter. + */ +case class ValidationException(message: String, rule: ValidationRule, args: Seq[Any], cause: Option[Throwable] = None) + extends Exception(message, cause.orNull) + +object ValidationRules { + /** The id of the first validation rule. Can be used as the beginning of the rules id range. 
*/ + val FirstRuleId = 1000.toShort + + object CheckDeserializedScriptType extends ValidationRule(FirstRuleId, + "Deserialized script should have expected type") { + def apply[T](d: DeserializeContext[_], script: SValue)(block: => T): T = + validate(d.tpe == script.tpe, + new InterpreterException(s"Failed context deserialization of $d: \n" + + s"expected deserialized script to have type ${d.tpe}; got ${script.tpe}"), + Seq[Any](d, script), block + ) + } + + object CheckDeserializedScriptIsSigmaProp extends ValidationRule(1001, + "Deserialized script should have SigmaProp type") { + def apply[T](root: SValue)(block: => T): T = + validate(root.tpe.isSigmaProp, + new SerializerException(s"Failed deserialization, expected deserialized script to have type SigmaProp; got ${root.tpe}"), + Seq(root), block + ) + } + + object CheckValidOpCode extends ValidationRule(1002, + "Check the opcode is supported by registered serializer or is added via soft-fork") + with SoftForkWhenCodeAdded { + def apply[T](ser: ValueSerializer[_], opCode: OpCode)(block: => T): T = { + def msg = s"Cannot find serializer for Value with opCode = LastConstantCode + ${opCode.toUByte - OpCodes.LastConstantCode}" + def args = Seq(opCode) + validate(ser != null && ser.opCode == opCode, new InvalidOpCode(msg), args, block) + } + } + + object CheckIsSupportedIndexExpression extends ValidationRule(1003, + "Check the index expression for accessing collection element is supported.") { + def apply[Ctx <: IRContext, T](ctx: Ctx)(coll: Value[SCollection[_]], i: IntValue, iSym: ctx.Rep[Int])(block: => T): T = { + def msg = s"Unsupported index expression $i when accessing collection $coll" + def args = Seq(coll, i) + validate(ctx.isSupportedIndexExpression(iSym), + new SigmaException(msg, i.sourceContext.toOption), + args, block) + } + } + + object CheckCostFunc extends ValidationRule(1004, + "Cost function should contain only operations from specified list.") { + def apply[Ctx <: IRContext, T](ctx: Ctx)(costF: 
ctx.Rep[Any => Int])(block: => T): T = { + def args = Seq(costF) + lazy val verification = ctx.verifyCostFunc(ctx.asRep[Any => Int](costF)) + validate(verification.isSuccess, + verification.toEither.left.get, + args, block) + } + } + + object CheckCalcFunc extends ValidationRule(1005, + "If SigmaProp.isProven method calls exists in the given function,\n then it is the last operation") { + def apply[Ctx <: IRContext, T](ctx: Ctx)(calcF: ctx.Rep[ctx.Context => Any])(block: => T): T = { + def args = Seq(calcF) + lazy val verification = ctx.verifyIsProven(calcF) + validate(verification.isSuccess, + verification.toEither.left.get, + args, block) + } + } + + object CheckCostWithContext extends ValidationRule(1006, + "Contract execution cost in a given context is limited by given maximum value.") { + def apply[Ctx <: IRContext, T](ctx: Ctx) + (costingCtx: ctx.Context.SContext, exp: Value[SType], + costF: ctx.Rep[((ctx.Context, (Int, ctx.Size[ctx.Context]))) => Int], maxCost: Long): Int = { + def args = Seq(costingCtx, exp, costF, maxCost) + lazy val estimatedCostTry = ctx.checkCostWithContext(costingCtx, exp, costF, maxCost) + validate(estimatedCostTry.isSuccess, + { + val t = estimatedCostTry.toEither.left.get + new CosterException(s"Script cannot be executed due to high cost $exp: ", exp.sourceContext.toList.headOption, Some(t)) + }, + args, estimatedCostTry.get) + } + } + + object CheckTupleType extends ValidationRule(1007, + "Supported tuple type.") with SoftForkWhenReplaced { + def apply[Ctx <: IRContext, T](ctx: Ctx)(e: ctx.Elem[_])(block: => T): T = { + def msg = s"Invalid tuple type $e" + lazy val condition = e match { + case _: ctx.PairElem[_,_] => true + case _ => false + } + validate(condition, new SigmaException(msg), Seq[ctx.Elem[_]](e), block) + } + } + + object CheckHeaderSizeBit extends ValidationRule(1013, + "For version greater then 0, size bit should be set.") with SoftForkWhenReplaced { + def apply(header: Byte): Unit = { + validate( + 
ErgoTree.getVersion(header) == 0 || ErgoTree.hasSize(header), + new SigmaException(s"Invalid ErgoTreeHeader $header, size bit is expected"), Seq(header), {}) + } + } + + val ruleSpecs: Seq[ValidationRule] = Seq( + CheckDeserializedScriptType, + CheckDeserializedScriptIsSigmaProp, + CheckValidOpCode, + CheckIsSupportedIndexExpression, + CheckCostFunc, + CheckCalcFunc, + CheckCostWithContext, + CheckTupleType, + CheckPrimitiveTypeCode, + CheckTypeCode, + CheckSerializableTypeCode, + CheckTypeWithMethods, + CheckAndGetMethod, + CheckHeaderSizeBit, + ) + + /** Validation settings that correspond to the current version of the ErgoScript implementation. + * Different version of the code will have a different set of rules here. + * This variable is globally available and can be use wherever checking of the rules is necessary. + * This is immutable data structure, it can be augmented with RuleStates from block extension + * sections of the blockchain, but that augmentation is only available in stateful context. 
+ */ + val currentSettings: SigmaValidationSettings = new MapSigmaValidationSettings({ + val map = ruleSpecs.map(r => r.id -> (r, EnabledRule)).toMap + assert(map.size == ruleSpecs.size, s"Duplicate ruleIds ${ruleSpecs.groupBy(_.id).filter(g => g._2.length > 1)}") + map + }) + + def trySoftForkable[T](whenSoftFork: => T)(block: => T)(implicit vs: SigmaValidationSettings): T = { + try block + catch { + case ve: ValidationException => + if (vs.isSoftFork(ve)) whenSoftFork + else throw ve + } + } +} diff --git a/src/main/scala/sigmastate/AvlTreeData.scala b/src/main/scala/sigmastate/AvlTreeData.scala index 94535264e8..ac89c070f2 100644 --- a/src/main/scala/sigmastate/AvlTreeData.scala +++ b/src/main/scala/sigmastate/AvlTreeData.scala @@ -18,6 +18,10 @@ object AvlTreeFlags { lazy val AllOperationsAllowed = AvlTreeFlags(insertAllowed = true, updateAllowed = true, removeAllowed = true) + lazy val InsertOnly = AvlTreeFlags(insertAllowed = true, updateAllowed = false, removeAllowed = false) + + lazy val RemoveOnly = AvlTreeFlags(insertAllowed = false, updateAllowed = false, removeAllowed = true) + def apply(serializedFlags: Byte): AvlTreeFlags = { val insertAllowed = (serializedFlags & 0x01) != 0 val updateAllowed = (serializedFlags & 0x02) != 0 @@ -70,8 +74,10 @@ object AvlTreeData { val DigestSize: Int = CryptoConstants.hashLength + 1 //please read class comments above for details val TreeDataSize = DigestSize + 3 + 4 + 4 - val dummy = - new AvlTreeData(ADDigest @@ Array.fill(DigestSize)(0:Byte), AvlTreeFlags.AllOperationsAllowed, keyLength = 32) + val dummy = new AvlTreeData( + ADDigest @@ Array.fill(DigestSize)(0:Byte), + AvlTreeFlags.AllOperationsAllowed, + keyLength = 32) object serializer extends SigmaSerializer[AvlTreeData, AvlTreeData] { diff --git a/src/main/scala/sigmastate/Operations.scala b/src/main/scala/sigmastate/Operations.scala new file mode 100644 index 0000000000..3771b722cd --- /dev/null +++ b/src/main/scala/sigmastate/Operations.scala @@ -0,0 +1,552 
@@ +package sigmastate + +import sigmastate.lang.SigmaPredef.PredefinedFuncRegistry +import sigmastate.lang.StdSigmaBuilder + +/** WARNING: This file is generated by GenSerializableOps tool. + * Don't edit it directly, use the tool instead to regenerate. + * The operations are alphabetically sorted. + */ +object Operations { + val predefinedOps = new PredefinedFuncRegistry(StdSigmaBuilder) + trait InfoObject { + def argInfos: Seq[ArgInfo] + } + + object ANDInfo extends InfoObject { + private val func = predefinedOps.funcs("allOf") + val conditionsArg: ArgInfo = func.argInfo("conditions") + val argInfos: Seq[ArgInfo] = Seq(conditionsArg) + } + + object AppendInfo extends InfoObject { + private val method = SMethod.fromIds(12, 9) + val thisArg: ArgInfo = method.argInfo("this") + val otherArg: ArgInfo = method.argInfo("other") + val argInfos: Seq[ArgInfo] = Seq(thisArg, otherArg) + } + + object ApplyInfo extends InfoObject { + private val func = predefinedOps.specialFuncs("apply") + val funcArg: ArgInfo = func.argInfo("func") + val argsArg: ArgInfo = func.argInfo("args") + val argInfos: Seq[ArgInfo] = Seq(funcArg, argsArg) + } + + object AtLeastInfo extends InfoObject { + private val func = predefinedOps.funcs("atLeast") + val boundArg: ArgInfo = func.argInfo("bound") + val childrenArg: ArgInfo = func.argInfo("children") + val argInfos: Seq[ArgInfo] = Seq(boundArg, childrenArg) + } + + object BinAndInfo extends InfoObject { + private val func = predefinedOps.funcs("&&") + val leftArg: ArgInfo = func.argInfo("left") + val rightArg: ArgInfo = func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object BinOrInfo extends InfoObject { + private val func = predefinedOps.funcs("||") + val leftArg: ArgInfo = func.argInfo("left") + val rightArg: ArgInfo = func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object BinXorInfo extends InfoObject { + private val func = predefinedOps.funcs("^") + val leftArg: ArgInfo = 
func.argInfo("left") + val rightArg: ArgInfo = func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object BitAndInfo extends InfoObject { + private val func = predefinedOps.funcs("bit_&") + val leftArg: ArgInfo = func.argInfo("left") + val rightArg: ArgInfo = func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object BitInversionInfo extends InfoObject { + private val func = predefinedOps.funcs("unary_~") + val inputArg: ArgInfo = func.argInfo("input") + val argInfos: Seq[ArgInfo] = Seq(inputArg) + } + + object BitOrInfo extends InfoObject { + private val func = predefinedOps.funcs("bit_|") + val leftArg: ArgInfo = func.argInfo("left") + val rightArg: ArgInfo = func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object BitShiftLeftInfo extends InfoObject { + private val func = predefinedOps.funcs("bit_<<") + val leftArg: ArgInfo = func.argInfo("left") + val rightArg: ArgInfo = func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object BitShiftRightInfo extends InfoObject { + private val func = predefinedOps.funcs("bit_>>") + val leftArg: ArgInfo = func.argInfo("left") + val rightArg: ArgInfo = func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object BitShiftRightZeroedInfo extends InfoObject { + private val func = predefinedOps.funcs("bit_>>>") + val leftArg: ArgInfo = func.argInfo("left") + val rightArg: ArgInfo = func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object BitXorInfo extends InfoObject { + private val func = predefinedOps.funcs("bit_^") + val leftArg: ArgInfo = func.argInfo("left") + val rightArg: ArgInfo = func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object BoolToSigmaPropInfo extends InfoObject { + private val func = predefinedOps.funcs("sigmaProp") + val conditionArg: ArgInfo = func.argInfo("condition") + val 
argInfos: Seq[ArgInfo] = Seq(conditionArg) + } + + object ByIndexInfo extends InfoObject { + private val method = SMethod.fromIds(12, 2) + val thisArg: ArgInfo = method.argInfo("this") + val indexArg: ArgInfo = method.argInfo("index") + val defaultArg: ArgInfo = method.argInfo("default") + val argInfos: Seq[ArgInfo] = Seq(thisArg, indexArg, defaultArg) + } + + object ByteArrayToBigIntInfo extends InfoObject { + private val func = predefinedOps.funcs("byteArrayToBigInt") + val inputArg: ArgInfo = func.argInfo("input") + val argInfos: Seq[ArgInfo] = Seq(inputArg) + } + + object ByteArrayToLongInfo extends InfoObject { + private val func = predefinedOps.funcs("byteArrayToLong") + val inputArg: ArgInfo = func.argInfo("input") + val argInfos: Seq[ArgInfo] = Seq(inputArg) + } + + object CalcBlake2b256Info extends InfoObject { + private val func = predefinedOps.funcs("blake2b256") + val inputArg: ArgInfo = func.argInfo("input") + val argInfos: Seq[ArgInfo] = Seq(inputArg) + } + + object CalcSha256Info extends InfoObject { + private val func = predefinedOps.funcs("sha256") + val inputArg: ArgInfo = func.argInfo("input") + val argInfos: Seq[ArgInfo] = Seq(inputArg) + } + + object ConstantPlaceholderInfo extends InfoObject { + private val func = predefinedOps.specialFuncs("placeholder") + val indexArg: ArgInfo = func.argInfo("index") + val argInfos: Seq[ArgInfo] = Seq(indexArg) + } + + object CreateAvlTreeInfo extends InfoObject { + private val func = predefinedOps.funcs("avlTree") + val operationFlagsArg: ArgInfo = func.argInfo("operationFlags") + val digestArg: ArgInfo = func.argInfo("digest") + val keyLengthArg: ArgInfo = func.argInfo("keyLength") + val valueLengthOptArg: ArgInfo = func.argInfo("valueLengthOpt") + val argInfos: Seq[ArgInfo] = Seq(operationFlagsArg, digestArg, keyLengthArg, valueLengthOptArg) + } + + object CreateProveDHTupleInfo extends InfoObject { + private val func = predefinedOps.funcs("proveDHTuple") + val gArg: ArgInfo = func.argInfo("g") + val 
hArg: ArgInfo = func.argInfo("h") + val uArg: ArgInfo = func.argInfo("u") + val vArg: ArgInfo = func.argInfo("v") + val argInfos: Seq[ArgInfo] = Seq(gArg, hArg, uArg, vArg) + } + + object CreateProveDlogInfo extends InfoObject { + private val func = predefinedOps.funcs("proveDlog") + val valueArg: ArgInfo = func.argInfo("value") + val argInfos: Seq[ArgInfo] = Seq(valueArg) + } + + object DecodePointInfo extends InfoObject { + private val func = predefinedOps.funcs("decodePoint") + val inputArg: ArgInfo = func.argInfo("input") + val argInfos: Seq[ArgInfo] = Seq(inputArg) + } + + object DeserializeContextInfo extends InfoObject { + private val func = predefinedOps.funcs("executeFromVar") + val idArg: ArgInfo = func.argInfo("id") + val argInfos: Seq[ArgInfo] = Seq(idArg) + } + + object DeserializeRegisterInfo extends InfoObject { + private val func = predefinedOps.funcs("executeFromSelfReg") + val idArg: ArgInfo = func.argInfo("id") + val defaultArg: ArgInfo = func.argInfo("default") + val argInfos: Seq[ArgInfo] = Seq(idArg, defaultArg) + } + + object DivisionInfo extends InfoObject { + private val func = predefinedOps.funcs("/") + val leftArg: ArgInfo = func.argInfo("left") + val rightArg: ArgInfo = func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object DowncastInfo extends InfoObject { + private val func = predefinedOps.specialFuncs("downcast") + val inputArg: ArgInfo = func.argInfo("input") + val argInfos: Seq[ArgInfo] = Seq(inputArg) + } + + object EQInfo extends InfoObject { + private val func = predefinedOps.funcs("==") + val leftArg: ArgInfo = func.argInfo("left") + val rightArg: ArgInfo = func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object ExistsInfo extends InfoObject { + private val method = SMethod.fromIds(12, 4) + val thisArg: ArgInfo = method.argInfo("this") + val pArg: ArgInfo = method.argInfo("p") + val argInfos: Seq[ArgInfo] = Seq(thisArg, pArg) + } + + object ExponentiateInfo 
extends InfoObject { + private val method = SMethod.fromIds(7, 3) + val thisArg: ArgInfo = method.argInfo("this") + val kArg: ArgInfo = method.argInfo("k") + val argInfos: Seq[ArgInfo] = Seq(thisArg, kArg) + } + + object ExtractAmountInfo extends InfoObject { + private val method = SMethod.fromIds(99, 1) + val thisArg: ArgInfo = method.argInfo("this") + val argInfos: Seq[ArgInfo] = Seq(thisArg) + } + + object ExtractBytesInfo extends InfoObject { + private val method = SMethod.fromIds(99, 3) + val thisArg: ArgInfo = method.argInfo("this") + val argInfos: Seq[ArgInfo] = Seq(thisArg) + } + + object ExtractBytesWithNoRefInfo extends InfoObject { + private val method = SMethod.fromIds(99, 4) + val thisArg: ArgInfo = method.argInfo("this") + val argInfos: Seq[ArgInfo] = Seq(thisArg) + } + + object ExtractCreationInfoInfo extends InfoObject { + private val method = SMethod.fromIds(99, 6) + val thisArg: ArgInfo = method.argInfo("this") + val argInfos: Seq[ArgInfo] = Seq(thisArg) + } + + object ExtractIdInfo extends InfoObject { + private val method = SMethod.fromIds(99, 5) + val thisArg: ArgInfo = method.argInfo("this") + val argInfos: Seq[ArgInfo] = Seq(thisArg) + } + + object ExtractRegisterAsInfo extends InfoObject { + private val method = SMethod.fromIds(99, 7) + val thisArg: ArgInfo = method.argInfo("this") + val regIdArg: ArgInfo = method.argInfo("regId") + val argInfos: Seq[ArgInfo] = Seq(thisArg, regIdArg) + } + + object ExtractScriptBytesInfo extends InfoObject { + private val method = SMethod.fromIds(99, 2) + val thisArg: ArgInfo = method.argInfo("this") + val argInfos: Seq[ArgInfo] = Seq(thisArg) + } + + object FilterInfo extends InfoObject { + private val method = SMethod.fromIds(12, 8) + val thisArg: ArgInfo = method.argInfo("this") + val pArg: ArgInfo = method.argInfo("p") + val argInfos: Seq[ArgInfo] = Seq(thisArg, pArg) + } + + object FoldInfo extends InfoObject { + private val method = SMethod.fromIds(12, 5) + val thisArg: ArgInfo = method.argInfo("this") 
+ val zeroArg: ArgInfo = method.argInfo("zero") + val opArg: ArgInfo = method.argInfo("op") + val argInfos: Seq[ArgInfo] = Seq(thisArg, zeroArg, opArg) + } + + object ForAllInfo extends InfoObject { + private val method = SMethod.fromIds(12, 6) + val thisArg: ArgInfo = method.argInfo("this") + val pArg: ArgInfo = method.argInfo("p") + val argInfos: Seq[ArgInfo] = Seq(thisArg, pArg) + } + + object GEInfo extends InfoObject { + private val func = predefinedOps.funcs(">=") + val leftArg: ArgInfo = func.argInfo("left") + val rightArg: ArgInfo = func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object GTInfo extends InfoObject { + private val func = predefinedOps.funcs(">") + val leftArg: ArgInfo = func.argInfo("left") + val rightArg: ArgInfo = func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object GetVarInfo extends InfoObject { + private val func = predefinedOps.funcs("getVar") + val varIdArg: ArgInfo = func.argInfo("varId") + val argInfos: Seq[ArgInfo] = Seq(varIdArg) + } + + object GroupGeneratorInfo extends InfoObject { + private val method = SMethod.fromIds(106, 1) + val thisArg: ArgInfo = method.argInfo("this") + val argInfos: Seq[ArgInfo] = Seq(thisArg) + } + + object IfInfo extends InfoObject { + private val func = predefinedOps.specialFuncs("if") + val conditionArg: ArgInfo = func.argInfo("condition") + val trueBranchArg: ArgInfo = func.argInfo("trueBranch") + val falseBranchArg: ArgInfo = func.argInfo("falseBranch") + val argInfos: Seq[ArgInfo] = Seq(conditionArg, trueBranchArg, falseBranchArg) + } + + object LEInfo extends InfoObject { + private val func = predefinedOps.funcs("<=") + val leftArg: ArgInfo = func.argInfo("left") + val rightArg: ArgInfo = func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object LTInfo extends InfoObject { + private val func = predefinedOps.funcs("<") + val leftArg: ArgInfo = func.argInfo("left") + val rightArg: ArgInfo = 
func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object LogicalNotInfo extends InfoObject { + private val func = predefinedOps.funcs("unary_!") + val inputArg: ArgInfo = func.argInfo("input") + val argInfos: Seq[ArgInfo] = Seq(inputArg) + } + + object LongToByteArrayInfo extends InfoObject { + private val func = predefinedOps.funcs("longToByteArray") + val inputArg: ArgInfo = func.argInfo("input") + val argInfos: Seq[ArgInfo] = Seq(inputArg) + } + + object MapCollectionInfo extends InfoObject { + private val method = SMethod.fromIds(12, 3) + val thisArg: ArgInfo = method.argInfo("this") + val fArg: ArgInfo = method.argInfo("f") + val argInfos: Seq[ArgInfo] = Seq(thisArg, fArg) + } + + object MaxInfo extends InfoObject { + private val func = predefinedOps.funcs("max") + val leftArg: ArgInfo = func.argInfo("left") + val rightArg: ArgInfo = func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object MinInfo extends InfoObject { + private val func = predefinedOps.funcs("min") + val leftArg: ArgInfo = func.argInfo("left") + val rightArg: ArgInfo = func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object MinusInfo extends InfoObject { + private val func = predefinedOps.funcs("-") + val leftArg: ArgInfo = func.argInfo("left") + val rightArg: ArgInfo = func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object MinusModQInfo extends InfoObject { + private val method = SMethod.fromIds(6, 3) + val thisArg: ArgInfo = method.argInfo("this") + val otherArg: ArgInfo = method.argInfo("other") + val argInfos: Seq[ArgInfo] = Seq(thisArg, otherArg) + } + + object ModQInfo extends InfoObject { + private val method = SMethod.fromIds(6, 1) + val thisArg: ArgInfo = method.argInfo("this") + val argInfos: Seq[ArgInfo] = Seq(thisArg) + } + + object ModuloInfo extends InfoObject { + private val func = predefinedOps.funcs("%") + val leftArg: ArgInfo = 
func.argInfo("left") + val rightArg: ArgInfo = func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object MultiplyInfo extends InfoObject { + private val func = predefinedOps.funcs("*") + val leftArg: ArgInfo = func.argInfo("left") + val rightArg: ArgInfo = func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object MultiplyGroupInfo extends InfoObject { + private val method = SMethod.fromIds(7, 4) + val thisArg: ArgInfo = method.argInfo("this") + val otherArg: ArgInfo = method.argInfo("other") + val argInfos: Seq[ArgInfo] = Seq(thisArg, otherArg) + } + + object NEQInfo extends InfoObject { + private val func = predefinedOps.funcs("!=") + val leftArg: ArgInfo = func.argInfo("left") + val rightArg: ArgInfo = func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object NegationInfo extends InfoObject { + private val func = predefinedOps.funcs("unary_-") + val inputArg: ArgInfo = func.argInfo("input") + val argInfos: Seq[ArgInfo] = Seq(inputArg) + } + + object ORInfo extends InfoObject { + private val func = predefinedOps.funcs("anyOf") + val conditionsArg: ArgInfo = func.argInfo("conditions") + val argInfos: Seq[ArgInfo] = Seq(conditionsArg) + } + + object OptionGetInfo extends InfoObject { + private val method = SMethod.fromIds(36, 3) + val thisArg: ArgInfo = method.argInfo("this") + val argInfos: Seq[ArgInfo] = Seq(thisArg) + } + + object OptionGetOrElseInfo extends InfoObject { + private val method = SMethod.fromIds(36, 4) + val thisArg: ArgInfo = method.argInfo("this") + val defaultArg: ArgInfo = method.argInfo("default") + val argInfos: Seq[ArgInfo] = Seq(thisArg, defaultArg) + } + + object OptionIsDefinedInfo extends InfoObject { + private val method = SMethod.fromIds(36, 2) + val thisArg: ArgInfo = method.argInfo("this") + val argInfos: Seq[ArgInfo] = Seq(thisArg) + } + + object PlusInfo extends InfoObject { + private val func = predefinedOps.funcs("+") + val leftArg: 
ArgInfo = func.argInfo("left") + val rightArg: ArgInfo = func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object PlusModQInfo extends InfoObject { + private val method = SMethod.fromIds(6, 2) + val thisArg: ArgInfo = method.argInfo("this") + val otherArg: ArgInfo = method.argInfo("other") + val argInfos: Seq[ArgInfo] = Seq(thisArg, otherArg) + } + + object PropertyCallInfo extends InfoObject { + private val method = SMethod.fromIds(1, 1) + val thisArg: ArgInfo = method.argInfo("this") + val argInfos: Seq[ArgInfo] = Seq(thisArg) + } + + object SelectFieldInfo extends InfoObject { + private val func = predefinedOps.specialFuncs("selectField") + val inputArg: ArgInfo = func.argInfo("input") + val fieldIndexArg: ArgInfo = func.argInfo("fieldIndex") + val argInfos: Seq[ArgInfo] = Seq(inputArg, fieldIndexArg) + } + + object SigmaAndInfo extends InfoObject { + private val func = predefinedOps.funcs("allZK") + val propositionsArg: ArgInfo = func.argInfo("propositions") + val argInfos: Seq[ArgInfo] = Seq(propositionsArg) + } + + object SigmaOrInfo extends InfoObject { + private val func = predefinedOps.funcs("anyZK") + val propositionsArg: ArgInfo = func.argInfo("propositions") + val argInfos: Seq[ArgInfo] = Seq(propositionsArg) + } + + object SigmaPropBytesInfo extends InfoObject { + private val method = SMethod.fromIds(8, 1) + val thisArg: ArgInfo = method.argInfo("this") + val argInfos: Seq[ArgInfo] = Seq(thisArg) + } + + object SizeOfInfo extends InfoObject { + private val method = SMethod.fromIds(12, 1) + val thisArg: ArgInfo = method.argInfo("this") + val argInfos: Seq[ArgInfo] = Seq(thisArg) + } + + object SliceInfo extends InfoObject { + private val method = SMethod.fromIds(12, 7) + val thisArg: ArgInfo = method.argInfo("this") + val fromArg: ArgInfo = method.argInfo("from") + val untilArg: ArgInfo = method.argInfo("until") + val argInfos: Seq[ArgInfo] = Seq(thisArg, fromArg, untilArg) + } + + object SubstConstantsInfo extends 
InfoObject { + private val func = predefinedOps.funcs("substConstants") + val scriptBytesArg: ArgInfo = func.argInfo("scriptBytes") + val positionsArg: ArgInfo = func.argInfo("positions") + val newValuesArg: ArgInfo = func.argInfo("newValues") + val argInfos: Seq[ArgInfo] = Seq(scriptBytesArg, positionsArg, newValuesArg) + } + + object TreeLookupInfo extends InfoObject { + private val func = predefinedOps.specialFuncs("treeLookup") + val treeArg: ArgInfo = func.argInfo("tree") + val keyArg: ArgInfo = func.argInfo("key") + val proofArg: ArgInfo = func.argInfo("proof") + val argInfos: Seq[ArgInfo] = Seq(treeArg, keyArg, proofArg) + } + + object UpcastInfo extends InfoObject { + private val func = predefinedOps.specialFuncs("upcast") + val inputArg: ArgInfo = func.argInfo("input") + val argInfos: Seq[ArgInfo] = Seq(inputArg) + } + + object XorInfo extends InfoObject { + private val func = predefinedOps.funcs("binary_|") + val leftArg: ArgInfo = func.argInfo("left") + val rightArg: ArgInfo = func.argInfo("right") + val argInfos: Seq[ArgInfo] = Seq(leftArg, rightArg) + } + + object XorOfInfo extends InfoObject { + private val func = predefinedOps.funcs("xorOf") + val conditionsArg: ArgInfo = func.argInfo("conditions") + val argInfos: Seq[ArgInfo] = Seq(conditionsArg) + } + +} diff --git a/src/main/scala/sigmastate/SigSerializer.scala b/src/main/scala/sigmastate/SigSerializer.scala index 3f45073702..65f5c87c09 100644 --- a/src/main/scala/sigmastate/SigSerializer.scala +++ b/src/main/scala/sigmastate/SigSerializer.scala @@ -3,7 +3,7 @@ package sigmastate import org.bouncycastle.util.BigIntegers import sigmastate.basics.DLogProtocol.{SecondDLogProverMessage, ProveDlog} import sigmastate.basics.VerifierMessage.Challenge -import sigmastate.Values.{Value, SigmaBoolean} +import sigmastate.Values.SigmaBoolean import sigmastate.interpreter.CryptoConstants import sigmastate.utils.Helpers import Helpers.xor diff --git a/src/main/scala/sigmastate/UncheckedTree.scala 
b/src/main/scala/sigmastate/UncheckedTree.scala index 4faabd9f71..59a970f30f 100644 --- a/src/main/scala/sigmastate/UncheckedTree.scala +++ b/src/main/scala/sigmastate/UncheckedTree.scala @@ -20,8 +20,7 @@ trait UncheckedConjecture extends UncheckedSigmaTree with ProofTreeConjecture { override def equals(obj: Any): Boolean = obj match { case x: UncheckedConjecture => - util.Arrays.equals(challenge, x.challenge) && // todo: why does this code mix .equals and == ? - children == x.children + util.Arrays.equals(challenge, x.challenge) && children == x.children } } @@ -37,7 +36,7 @@ case class UncheckedSchnorr(override val proposition: ProveDlog, override def equals(obj: Any): Boolean = obj match { case x: UncheckedSchnorr => - util.Arrays.equals(challenge, x.challenge) && // todo: why does this code mix .equals and == ? + util.Arrays.equals(challenge, x.challenge) && commitmentOpt == x.commitmentOpt && secondMessage == x.secondMessage case _ => false @@ -55,7 +54,7 @@ case class UncheckedDiffieHellmanTuple(override val proposition: ProveDHTuple, case x: UncheckedDiffieHellmanTuple => proposition == x.proposition && commitmentOpt == x.commitmentOpt && - util.Arrays.equals(challenge, x.challenge) && // todo: why does this code mix .equals and == ? 
+ util.Arrays.equals(challenge, x.challenge) && secondMessage == x.secondMessage } } diff --git a/src/main/scala/sigmastate/UnprovenTree.scala b/src/main/scala/sigmastate/UnprovenTree.scala index 59acd61967..4e816470ec 100644 --- a/src/main/scala/sigmastate/UnprovenTree.scala +++ b/src/main/scala/sigmastate/UnprovenTree.scala @@ -6,9 +6,8 @@ import com.google.common.primitives.Shorts import gf2t.GF2_192_Poly import sigmastate.basics.DLogProtocol.{FirstDLogProverMessage, ProveDlog} import sigmastate.basics.VerifierMessage.Challenge -import sigmastate.Values.{SigmaBoolean, SigmaPropConstant} -import sigmastate.basics.{FirstProverMessage, ProveDHTuple, FirstDiffieHellmanTupleProverMessage} -import sigmastate.serialization.ErgoTreeSerializer +import sigmastate.Values.{ErgoTree, SigmaBoolean, SigmaPropConstant} +import sigmastate.basics.{FirstDiffieHellmanTupleProverMessage, FirstProverMessage, ProveDHTuple} import sigmastate.serialization.ErgoTreeSerializer.DefaultSerializer import scala.language.existentials @@ -129,8 +128,8 @@ case class UnprovenDiffieHellmanTuple(override val proposition: ProveDHTuple, * and should not contain challenges, responses, or the real/simulated flag for any node. 
* */ -// todo: write a test that restores the tree from this string and check that the result is equal, -// todo: in order to make sure this conversion is unambiguous +// TODO coverage: write a test that restores the tree from this string and check that the result is equal, +// in order to make sure this conversion is unambiguous object FiatShamirTree { val internalNodePrefix = 0: Byte val leafPrefix = 1: Byte @@ -139,7 +138,8 @@ object FiatShamirTree { def traverseNode(node: ProofTree): Array[Byte] = node match { case l: ProofTreeLeaf => - val propBytes = DefaultSerializer.serializeWithSegregation(SigmaPropConstant(l.proposition)) + val propTree = ErgoTree.withSegregation(SigmaPropConstant(l.proposition)) + val propBytes = DefaultSerializer.serializeErgoTree(propTree) val commitmentBytes = l.commitmentOpt.get.bytes leafPrefix +: ((Shorts.toByteArray(propBytes.length.toShort) ++ propBytes) ++ diff --git a/src/main/scala/sigmastate/Values.scala b/src/main/scala/sigmastate/Values.scala index 588cb4fbe7..fc980a8345 100644 --- a/src/main/scala/sigmastate/Values.scala +++ b/src/main/scala/sigmastate/Values.scala @@ -1,32 +1,33 @@ package sigmastate import java.math.BigInteger -import java.util.{Objects, Arrays} +import java.util +import java.util.Objects import org.bitbucket.inkytonik.kiama.relation.Tree import org.bitbucket.inkytonik.kiama.rewriting.Rewriter.{strategy, everywherebu} -import org.bouncycastle.math.ec.ECPoint -import org.ergoplatform.{ErgoLikeContext, ErgoBox} -import scalan.Nullable -import scorex.crypto.authds.SerializedAdProof +import org.ergoplatform.ErgoLikeContext +import org.ergoplatform.validation.ValidationException +import scalan.{Nullable, RType} +import scorex.crypto.authds.{ADDigest, SerializedAdProof} import scorex.crypto.authds.avltree.batch.BatchAVLVerifier import scorex.crypto.hash.{Digest32, Blake2b256} import scalan.util.CollectionUtil._ -import scorex.util.serialization.Serializer import sigmastate.SCollection.SByteArray import 
sigmastate.interpreter.CryptoConstants.EcPointType -import sigmastate.interpreter.{Context, CryptoConstants, CryptoFunctions} -import sigmastate.serialization._ -import sigmastate.serialization.{ErgoTreeSerializer, OpCodes, ConstantStore} +import sigmastate.interpreter.CryptoConstants +import sigmastate.serialization.{OpCodes, ConstantStore, _} import sigmastate.serialization.OpCodes._ -import sigmastate.utxo.CostTable.Cost -import sigma.util.Extensions._ import sigmastate.TrivialProp.{FalseProp, TrueProp} +import sigmastate.Values.ErgoTree.substConstants import sigmastate.basics.DLogProtocol.ProveDlog import sigmastate.basics.ProveDHTuple import sigmastate.lang.Terms._ import sigmastate.utxo._ import special.sigma.Extensions._ +import sigmastate.eval._ +import sigmastate.eval.Extensions._ +import sigma.util.Extensions.ByteOps import scala.language.implicitConversions import scala.reflect.ClassTag @@ -34,9 +35,11 @@ import sigmastate.lang.DefaultSigmaBuilder._ import sigmastate.serialization.ErgoTreeSerializer.DefaultSerializer import sigmastate.serialization.transformers.ProveDHTupleSerializer import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} -import special.sigma.{Header, Extensions, AnyValue, TestValue, PreHeader} +import special.sigma.{AnyValue, AvlTree, PreHeader, Header, _} import sigmastate.lang.SourceContext +import special.collection.Coll +import scala.collection.mutable object Values { @@ -45,18 +48,16 @@ object Values { type Idn = String trait Value[+S <: SType] extends SigmaNode { - def companion: ValueCompanion = - sys.error(s"Companion object is not defined for AST node ${this.getClass}") + def companion: ValueCompanion /*= + sys.error(s"Companion object is not defined for AST node ${this.getClass}")*/ /** Unique id of the node class used in serialization of ErgoTree. */ - val opCode: OpCode + def opCode: OpCode = companion.opCode /** The type of the value represented by this node. 
If the value is an operation it is * the type of operation result. */ def tpe: S - lazy val bytes = DefaultSerializer.serializeWithSegregation(this) - /** Every value represents an operation and that operation can be associated with a function type, * describing functional meaning of the operation, kind of operation signature. * Thus we can obtain global operation identifiers by combining Value.opName with Value.opType, @@ -97,9 +98,6 @@ object Values { } } - trait ValueCompanion extends SigmaNodeCompanion { - } - object Value { type PropositionCode = Byte @@ -110,11 +108,13 @@ object Values { implicit def liftByteArray(arr: Array[Byte]): Value[SByteArray] = ByteArrayConstant(arr) - implicit def liftBigInt(arr: BigInteger): Value[SBigInt.type] = BigIntConstant(arr) + implicit def liftBigInt(arr: BigInt): Value[SBigInt.type] = BigIntConstant(arr) - implicit def liftGroupElement(g: CryptoConstants.EcPointType): Value[SGroupElement.type] = GroupElementConstant(g) + implicit def liftGroupElement(g: GroupElement): Value[SGroupElement.type] = GroupElementConstant(g) + implicit def liftECPoint(g: EcPointType): Value[SGroupElement.type] = GroupElementConstant(g) - implicit def liftSigmaProp(g: SigmaBoolean): Value[SSigmaProp.type] = SigmaPropConstant(g) + implicit def liftSigmaProp(g: SigmaProp): Value[SSigmaProp.type] = SigmaPropConstant(g) + implicit def liftSigmaBoolean(sb: SigmaBoolean): Value[SSigmaProp.type] = SigmaPropConstant(SigmaDsl.SigmaProp(sb)) def apply[S <: SType](tS: S)(const: tS.WrappedType): Value[S] = tS.mkConstant(const) @@ -125,13 +125,36 @@ object Values { throw new IllegalArgumentException(s"Method $opName is not supported for node $v") } + trait ValueCompanion extends SigmaNodeCompanion { + import ValueCompanion._ + /** Unique id of the node class used in serialization of ErgoTree. 
*/ + def opCode: OpCode + + override def toString: Idn = s"${this.getClass.getSimpleName}(${opCode.toUByte})" + + def typeName: String = this.getClass.getSimpleName.replace("$", "") + + def init() { + if (this.opCode != 0 && _allOperations.contains(this.opCode)) + throw sys.error(s"Operation $this already defined") + _allOperations += (this.opCode -> this) + } + + init() + + } + object ValueCompanion { + private val _allOperations: mutable.HashMap[Byte, ValueCompanion] = mutable.HashMap.empty + lazy val allOperations = _allOperations.toMap + } + trait EvaluatedValue[+S <: SType] extends Value[S] { val value: S#WrappedType def opType: SFunc = { val resType = tpe match { case ct : SCollection[_] => SCollection(ct.typeParams.head.ident) - case ft @ SFunc(tD, tR, _) => + case ft @ SFunc(_, _, _) => ft.getGenericType case _ => tpe } @@ -142,21 +165,21 @@ object Values { trait Constant[+S <: SType] extends EvaluatedValue[S] {} case class ConstantNode[S <: SType](value: S#WrappedType, tpe: S) extends Constant[S] { + assert(Constant.isCorrectType(value, tpe), s"Invalid type of constant value $value, expected type $tpe") override def companion: ValueCompanion = Constant - - override val opCode: OpCode = ConstantCode + override def opCode: OpCode = companion.opCode override def opName: String = s"Const" - override def equals(obj: scala.Any): Boolean = obj match { - case c: Constant[_] => Objects.deepEquals(value, c.value) && tpe == c.tpe + override def equals(obj: scala.Any): Boolean = (obj != null) && (this.eq(obj.asInstanceOf[AnyRef]) || (obj match { + case c: Constant[_] => tpe == c.tpe && Objects.deepEquals(value, c.value) case _ => false - } + })) - override def hashCode(): Int = Arrays.deepHashCode(Array(value.asInstanceOf[AnyRef], tpe)) + override def hashCode(): Int = util.Arrays.deepHashCode(Array(value.asInstanceOf[AnyRef], tpe)) override def toString: String = tpe.asInstanceOf[SType] match { - case SGroupElement if value.isInstanceOf[ECPoint] => - 
s"ConstantNode(${showECPoint(value.asInstanceOf[ECPoint])},$tpe)" + case SGroupElement if value.isInstanceOf[GroupElement] => + s"ConstantNode(${showECPoint(value.asInstanceOf[GroupElement])},$tpe)" case SGroupElement => sys.error(s"Invalid value in Constant($value, $tpe)") case SInt => s"IntConstant($value)" @@ -168,17 +191,53 @@ object Values { } object Constant extends ValueCompanion { + override def opCode: OpCode = ConstantCode def apply[S <: SType](value: S#WrappedType, tpe: S): Constant[S] = ConstantNode(value, tpe) def unapply[S <: SType](v: EvaluatedValue[S]): Option[(S#WrappedType, S)] = v match { case ConstantNode(value, tpe) => Some((value, tpe)) case _ => None } + + /** Checks that the type of the value corresponds to the descriptor `tpe`. + * If the value has complex structure only root type constructor is checked. + * NOTE, this is surface check with possible false positives, but it is ok + * when used in assertions, like `assert(isCorrestType(...))`, see `ConstantNode`. + */ + def isCorrectType[T <: SType](value: Any, tpe: T): Boolean = value match { + case c: Coll[_] => tpe match { + case STuple(items) => c.tItem == RType.AnyType && c.length == items.length + case tpeColl: SCollection[_] => true + case _ => sys.error(s"Collection value $c has unexpected type $tpe") + } + case _: Option[_] => tpe.isOption + case _: Tuple2[_,_] => tpe.isTuple && tpe.asTuple.items.length == 2 + case _: Boolean => tpe == SBoolean + case _: Byte => tpe == SByte + case _: Short => tpe == SShort + case _: Int => tpe == SInt + case _: Long => tpe == SLong + case _: BigInt => tpe == SBigInt + case _: String => tpe == SString + case _: GroupElement => tpe.isGroupElement + case _: SigmaProp => tpe.isSigmaProp + case _: AvlTree => tpe.isAvlTree + case _: Box => tpe.isBox + case _: PreHeader => tpe == SPreHeader + case _: Header => tpe == SHeader + case _: Context => tpe == SContext + case _: Function1[_,_] => tpe.isFunc + case _: Unit => tpe == SUnit + case _ => false + } } 
/** Placeholder for a constant in ErgoTree. Zero based index in ErgoTree.constants array. */ case class ConstantPlaceholder[S <: SType](id: Int, override val tpe: S) extends Value[S] { def opType = SFunc(SInt, tpe) - override val opCode: OpCode = ConstantPlaceholderIndexCode + override def companion: ValueCompanion = ConstantPlaceholder + } + object ConstantPlaceholder extends ValueCompanion { + override def opCode: OpCode = ConstantPlaceholderCode } trait NotReadyValue[S <: SType] extends Value[S] { @@ -195,19 +254,23 @@ object Values { case class TaggedVariableNode[T <: SType](varId: Byte, override val tpe: T) extends TaggedVariable[T] { - override val opCode: OpCode = TaggedVariableCode + override def companion = TaggedVariable def opType: SFunc = Value.notSupportedError(this, "opType") } - object TaggedVariable { + object TaggedVariable extends ValueCompanion { + override def opCode: OpCode = TaggedVariableCode def apply[T <: SType](varId: Byte, tpe: T): TaggedVariable[T] = TaggedVariableNode(varId, tpe) } case class UnitConstant() extends EvaluatedValue[SUnit.type] { - override val opCode = UnitConstantCode override def tpe = SUnit val value = () + override def companion: ValueCompanion = UnitConstant + } + object UnitConstant extends ValueCompanion { + override val opCode = UnitConstantCode } type BoolValue = Value[SBoolean.type] @@ -265,10 +328,11 @@ object Values { } } object BigIntConstant { - def apply(value: BigInteger): Constant[SBigInt.type] = Constant[SBigInt.type](value, SBigInt) - def apply(value: Long): Constant[SBigInt.type] = Constant[SBigInt.type](BigInt(value).underlying(), SBigInt) - def unapply(v: SValue): Option[BigInteger] = v match { - case Constant(value: BigInteger, SBigInt) => Some(value) + def apply(value: BigInt): Constant[SBigInt.type] = Constant[SBigInt.type](value, SBigInt) + def apply(value: BigInteger): Constant[SBigInt.type] = Constant[SBigInt.type](SigmaDsl.BigInt(value), SBigInt) + def apply(value: Long): 
Constant[SBigInt.type] = Constant[SBigInt.type](SigmaDsl.BigInt(BigInteger.valueOf(value)), SBigInt) + def unapply(v: SValue): Option[BigInt] = v match { + case Constant(value: BigInt, SBigInt) => Some(value) case _ => None } } @@ -283,38 +347,39 @@ object Values { } object BoxConstant { - def apply(value: ErgoBox): Constant[SBox.type] = Constant[SBox.type](value, SBox) - def unapply(v: SValue): Option[ErgoBox] = v match { - case Constant(value: ErgoBox, SBox) => Some(value) + def apply(value: Box): Constant[SBox.type] = Constant[SBox.type](value, SBox) + def unapply(v: SValue): Option[Box] = v match { + case Constant(value: Box, SBox) => Some(value) case _ => None } } object GroupElementConstant { - def apply(value: EcPointType): Constant[SGroupElement.type] = Constant[SGroupElement.type](value, SGroupElement) - def unapply(v: SValue): Option[EcPointType] = v match { - case Constant(value: EcPointType, SGroupElement) => Some(value) + def apply(value: GroupElement): Constant[SGroupElement.type] = Constant[SGroupElement.type](value, SGroupElement) + def unapply(v: SValue): Option[GroupElement] = v match { + case Constant(value: GroupElement, SGroupElement) => Some(value) case _ => None } } - val FalseSigmaProp = SigmaPropConstant(TrivialProp.FalseProp) - val TrueSigmaProp = SigmaPropConstant(TrivialProp.TrueProp) + val FalseSigmaProp = SigmaPropConstant(SigmaDsl.SigmaProp(TrivialProp.FalseProp)) + val TrueSigmaProp = SigmaPropConstant(SigmaDsl.SigmaProp(TrivialProp.TrueProp)) implicit def boolToSigmaProp(b: BoolValue): SigmaPropValue = BoolToSigmaProp(b) object SigmaPropConstant { - def apply(value: SigmaBoolean): Constant[SSigmaProp.type] = Constant[SSigmaProp.type](value, SSigmaProp) - def unapply(v: SValue): Option[SigmaBoolean] = v match { - case Constant(value: SigmaBoolean, SSigmaProp) => Some(value) + def apply(value: SigmaProp): Constant[SSigmaProp.type] = Constant[SSigmaProp.type](value, SSigmaProp) + def apply(value: SigmaBoolean): 
Constant[SSigmaProp.type] = Constant[SSigmaProp.type](SigmaDsl.SigmaProp(value), SSigmaProp) + def unapply(v: SValue): Option[SigmaProp] = v match { + case Constant(value: SigmaProp, SSigmaProp) => Some(value) case _ => None } } object AvlTreeConstant { - def apply(value: AvlTreeData): Constant[SAvlTree.type] = Constant[SAvlTree.type](value, SAvlTree) - def unapply(v: SValue): Option[AvlTreeData] = v match { - case Constant(value: AvlTreeData, SAvlTree) => Some(value) + def apply(value: AvlTree): Constant[SAvlTree.type] = Constant[SAvlTree.type](value, SAvlTree) + def unapply(v: SValue): Option[AvlTree] = v match { + case Constant(value: AvlTree, SAvlTree) => Some(value) case _ => None } } @@ -322,7 +387,7 @@ object Values { implicit class AvlTreeConstantOps(val c: AvlTreeConstant) extends AnyVal { def createVerifier(proof: SerializedAdProof) = new BatchAVLVerifier[Digest32, Blake2b256.type]( - c.value.digest, + ADDigest @@ c.value.digest.toArray, proof, c.value.keyLength, c.value.valueLengthOpt) @@ -418,12 +483,12 @@ object Values { type CollectionConstant[T <: SType] = Constant[SCollection[T]] object CollectionConstant { - def apply[T <: SType](value: Array[T#WrappedType], elementType: T): Constant[SCollection[T]] = + def apply[T <: SType](value: Coll[T#WrappedType], elementType: T): Constant[SCollection[T]] = Constant[SCollection[T]](value, SCollection(elementType)) - def unapply[T <: SType](node: Value[SCollection[T]]): Option[(Array[T#WrappedType], T)] = node match { - case arr: Constant[SCollection[a]] @unchecked if arr.tpe.isCollection => - val v = arr.value.asInstanceOf[Array[T#WrappedType]] - val t = arr.tpe.elemType.asInstanceOf[T] + def unapply[T <: SType](node: Value[SCollection[T]]): Option[(Coll[T#WrappedType], T)] = node match { + case c: Constant[SCollection[a]] @unchecked if c.tpe.isCollection => + val v = c.value.asInstanceOf[Coll[T#WrappedType]] + val t = c.tpe.elemType Some((v, t)) case _ => None } @@ -432,7 +497,7 @@ object Values { implicit 
class CollectionConstantOps[T <: SType](val c: CollectionConstant[T]) extends AnyVal { def toConcreteCollection: ConcreteCollection[T] = { val tElem = c.tpe.elemType - val items = c.value.map(v => tElem.mkConstant(v.asInstanceOf[tElem.WrappedType])) + val items = c.value.toArray.map(v => tElem.mkConstant(v.asInstanceOf[tElem.WrappedType])) ConcreteCollection(items, tElem) } } @@ -440,8 +505,9 @@ object Values { val ByteArrayTypeCode = (SCollectionType.CollectionTypeCode + SByte.typeCode).toByte object ByteArrayConstant { - def apply(value: Array[Byte]): CollectionConstant[SByte.type] = CollectionConstant[SByte.type](value, SByte) - def unapply(node: SValue): Option[Array[Byte]] = node match { + def apply(value: Coll[Byte]): CollectionConstant[SByte.type] = CollectionConstant[SByte.type](value, SByte) + def apply(value: Array[Byte]): CollectionConstant[SByte.type] = CollectionConstant[SByte.type](value.toColl, SByte) + def unapply(node: SValue): Option[Coll[Byte]] = node match { case coll: CollectionConstant[SByte.type] @unchecked => coll match { case CollectionConstant(arr, SByte) => Some(arr) case _ => None @@ -451,8 +517,9 @@ object Values { } object ShortArrayConstant { - def apply(value: Array[Short]): CollectionConstant[SShort.type] = CollectionConstant[SShort.type](value, SShort) - def unapply(node: SValue): Option[Array[Short]] = node match { + def apply(value: Coll[Short]): CollectionConstant[SShort.type] = CollectionConstant[SShort.type](value, SShort) + def apply(value: Array[Short]): CollectionConstant[SShort.type] = CollectionConstant[SShort.type](value.toColl, SShort) + def unapply(node: SValue): Option[Coll[Short]] = node match { case coll: CollectionConstant[SShort.type] @unchecked => coll match { case CollectionConstant(arr, SShort) => Some(arr) case _ => None @@ -462,8 +529,9 @@ object Values { } object IntArrayConstant { - def apply(value: Array[Int]): CollectionConstant[SInt.type] = CollectionConstant[SInt.type](value, SInt) - def unapply(node: 
SValue): Option[Array[Int]] = node match { + def apply(value: Coll[Int]): CollectionConstant[SInt.type] = CollectionConstant[SInt.type](value, SInt) + def apply(value: Array[Int]): CollectionConstant[SInt.type] = CollectionConstant[SInt.type](value.toColl, SInt) + def unapply(node: SValue): Option[Coll[Int]] = node match { case coll: CollectionConstant[SInt.type] @unchecked => coll match { case CollectionConstant(arr, SInt) => Some(arr) case _ => None @@ -473,8 +541,9 @@ object Values { } object LongArrayConstant { - def apply(value: Array[Long]): CollectionConstant[SLong.type] = CollectionConstant[SLong.type](value, SLong) - def unapply(node: SValue): Option[Array[Long]] = node match { + def apply(value: Coll[Long]): CollectionConstant[SLong.type] = CollectionConstant[SLong.type](value, SLong) + def apply(value: Array[Long]): CollectionConstant[SLong.type] = CollectionConstant[SLong.type](value.toColl, SLong) + def unapply(node: SValue): Option[Coll[Long]] = node match { case coll: CollectionConstant[SLong.type] @unchecked => coll match { case CollectionConstant(arr, SLong) => Some(arr) case _ => None @@ -484,8 +553,9 @@ object Values { } object BigIntArrayConstant { - def apply(value: Array[BigInteger]): CollectionConstant[SBigInt.type] = CollectionConstant[SBigInt.type](value, SBigInt) - def unapply(node: SValue): Option[Array[BigInteger]] = node match { + def apply(value: Coll[BigInt]): CollectionConstant[SBigInt.type] = CollectionConstant[SBigInt.type](value, SBigInt) + def apply(value: Array[BigInt]): CollectionConstant[SBigInt.type] = CollectionConstant[SBigInt.type](value.toColl, SBigInt) + def unapply(node: SValue): Option[Coll[BigInt]] = node match { case coll: CollectionConstant[SBigInt.type] @unchecked => coll match { case CollectionConstant(arr, SBigInt) => Some(arr) case _ => None @@ -497,8 +567,9 @@ object Values { val BoolArrayTypeCode = (SCollectionType.CollectionTypeCode + SBoolean.typeCode).toByte object BoolArrayConstant { - def apply(value: 
Array[Boolean]): CollectionConstant[SBoolean.type] = CollectionConstant[SBoolean.type](value, SBoolean) - def unapply(node: SValue): Option[Array[Boolean]] = node match { + def apply(value: Coll[Boolean]): CollectionConstant[SBoolean.type] = CollectionConstant[SBoolean.type](value, SBoolean) + def apply(value: Array[Boolean]): CollectionConstant[SBoolean.type] = CollectionConstant[SBoolean.type](value.toColl, SBoolean) + def unapply(node: SValue): Option[Coll[Boolean]] = node match { case coll: CollectionConstant[SBoolean.type] @unchecked => coll match { case CollectionConstant(arr, SBoolean) => Some(arr) case _ => None @@ -515,17 +586,14 @@ object Values { override def tpe = SAvlTree } - case object GroupGenerator extends EvaluatedValue[SGroupElement.type] { - - override val opCode: OpCode = OpCodes.GroupGeneratorCode - - import CryptoConstants.dlogGroup - + case object GroupGenerator extends EvaluatedValue[SGroupElement.type] with ValueCompanion { + override def opCode: OpCode = OpCodes.GroupGeneratorCode override def tpe = SGroupElement - - override val value: CryptoConstants.EcPointType = dlogGroup.generator + override val value = SigmaDsl.GroupElement(CryptoConstants.dlogGroup.generator) + override def companion = this } + trait NotReadyValueGroupElement extends NotReadyValue[SGroupElement.type] { override def tpe = SGroupElement } @@ -541,8 +609,15 @@ object Values { } } - val TrueLeaf: Constant[SBoolean.type] = Constant[SBoolean.type](true, SBoolean) - val FalseLeaf: Constant[SBoolean.type] = Constant[SBoolean.type](false, SBoolean) + object TrueLeaf extends ConstantNode[SBoolean.type](true, SBoolean) with ValueCompanion { + override def companion = this + override def opCode: OpCode = TrueCode + } + + object FalseLeaf extends ConstantNode[SBoolean.type](false, SBoolean) with ValueCompanion { + override def companion = this + override def opCode: OpCode = FalseCode + } trait NotReadyValueBoolean extends NotReadyValue[SBoolean.type] { override def tpe = 
SBoolean @@ -551,12 +626,7 @@ object Values { /** Algebraic data type of sigma proposition expressions. * Values of this type are used as values of SigmaProp type of SigmaScript and SigmaDsl */ - trait SigmaBoolean /*extends NotReadyValue[SBoolean.type]*/ { - def tpe = SBoolean - - /** This is not used as operation, but rather as data value of SigmaProp type. */ - def opType: SFunc = Value.notSupportedError(this, "opType") - + trait SigmaBoolean { /** Unique id of the node class used in serialization of SigmaBoolean. */ val opCode: OpCode } @@ -591,12 +661,14 @@ object Values { } override def parse(r: SigmaByteReader): SigmaBoolean = { + val depth = r.level + r.level = depth + 1 val opCode = r.getByte() val res = opCode match { case FalseProp.opCode => FalseProp case TrueProp.opCode => TrueProp case ProveDlogCode => dlogSerializer.parse(r) - case ProveDiffieHellmanTupleCode => dhtSerializer.parse(r) + case ProveDHTupleCode => dhtSerializer.parse(r) case AndCode => val n = r.getUShort() val children = (0 until n).map(_ => serializer.parse(r)) @@ -611,6 +683,7 @@ object Values { val children = (0 until n).map(_ => serializer.parse(r)) CTHRESHOLD(k, children) } + r.level = r.level - 1 res } } @@ -621,16 +694,17 @@ object Values { } case class Tuple(items: IndexedSeq[Value[SType]]) extends EvaluatedValue[STuple] with EvaluatedCollection[SAny.type, STuple] { - override val opCode: OpCode = TupleCode + override def companion = Tuple override def elementType = SAny lazy val tpe = STuple(items.map(_.tpe)) lazy val value = { val xs = items.cast[EvaluatedValue[SAny.type]].map(_.value) - xs.toArray(SAny.classTag.asInstanceOf[ClassTag[SAny.WrappedType]]) + Colls.fromArray(xs.toArray(SAny.classTag.asInstanceOf[ClassTag[SAny.WrappedType]]))(RType.AnyType) } } - object Tuple { + object Tuple extends ValueCompanion { + override def opCode: OpCode = TupleCode def apply(items: Value[SType]*): Tuple = Tuple(items.toIndexedSeq) } @@ -638,39 +712,49 @@ object Values { } case class 
SomeValue[T <: SType](x: Value[T]) extends OptionValue[T] { - override val opCode = SomeValueCode + override def companion = SomeValue val tpe = SOption(x.tpe) def opType = SFunc(x.tpe, tpe) } + object SomeValue extends ValueCompanion { + override val opCode = SomeValueCode + } case class NoneValue[T <: SType](elemType: T) extends OptionValue[T] { - override val opCode = NoneValueCode + override def companion = NoneValue val tpe = SOption(elemType) def opType = SFunc(elemType, tpe) } + object NoneValue extends ValueCompanion { + override val opCode = NoneValueCode + } case class ConcreteCollection[V <: SType](items: IndexedSeq[Value[V]], elementType: V) extends EvaluatedCollection[V, SCollection[V]] { - override val opCode: OpCode = - if (elementType == SBoolean && items.forall(_.isInstanceOf[Constant[_]])) - ConcreteCollectionBooleanConstantCode - else - ConcreteCollectionCode + private val isBooleanConstants = elementType == SBoolean && items.forall(_.isInstanceOf[Constant[_]]) + override def companion = + if (isBooleanConstants) ConcreteCollectionBooleanConstant + else ConcreteCollection val tpe = SCollection[V](elementType) lazy val value = { val xs = items.cast[EvaluatedValue[V]].map(_.value) - xs.toArray(elementType.classTag.asInstanceOf[ClassTag[V#WrappedType]]) + val tElement = Evaluation.stypeToRType(elementType) + Colls.fromArray(xs.toArray(elementType.classTag.asInstanceOf[ClassTag[V#WrappedType]]))(tElement) } } - object ConcreteCollection { + object ConcreteCollection extends ValueCompanion { + override def opCode: OpCode = ConcreteCollectionCode def apply[V <: SType](items: Value[V]*)(implicit tV: V): ConcreteCollection[V] = ConcreteCollection(items.toIndexedSeq, tV) def apply[V <: SType](items: => Seq[Value[V]])(implicit tV: V): ConcreteCollection[V] = ConcreteCollection(items.toIndexedSeq, tV) } + object ConcreteCollectionBooleanConstant extends ValueCompanion { + override def opCode: OpCode = ConcreteCollectionBooleanConstantCode + } trait 
LazyCollection[V <: SType] extends NotReadyValue[SCollection[V]] @@ -700,13 +784,14 @@ object Values { implicit class SigmaPropValueOps(val p: Value[SSigmaProp.type]) extends AnyVal { def isProven: Value[SBoolean.type] = SigmaPropIsProven(p) def propBytes: Value[SByteArray] = SigmaPropBytes(p) + def treeWithSegregation: ErgoTree = ErgoTree.withSegregation(p) } implicit class SigmaBooleanOps(val sb: SigmaBoolean) extends AnyVal { def toSigmaProp: SigmaPropValue = SigmaPropConstant(sb) def isProven: Value[SBoolean.type] = SigmaPropIsProven(SigmaPropConstant(sb)) def propBytes: Value[SByteArray] = SigmaPropBytes(SigmaPropConstant(sb)) - def toAnyValue: AnyValue = Extensions.toAnyValue(sb)(SType.SigmaBooleanRType) + def toAnyValue: AnyValue = eval.Extensions.toAnyValue(sb)(SType.SigmaBooleanRType) def showToString: String = sb match { case ProveDlog(v) => s"ProveDlog(${showECPoint(v)})" @@ -735,20 +820,22 @@ object Values { * This representation is more compact in serialized form. * @param id unique identifier of the variable in the current scope. 
*/ case class ValDef(override val id: Int, - tpeArgs: Seq[STypeIdent], + tpeArgs: Seq[STypeVar], override val rhs: SValue) extends BlockItem { require(id >= 0, "id must be >= 0") - val opCode: OpCode = if (tpeArgs.isEmpty) ValDefCode else FunDefCode + override def companion = if (tpeArgs.isEmpty) ValDef else FunDef def tpe: SType = rhs.tpe def isValDef: Boolean = tpeArgs.isEmpty /** This is not used as operation, but rather to form a program structure */ def opType: SFunc = Value.notSupportedError(this, "opType") } - object ValDef { + object ValDef extends ValueCompanion { + def opCode: OpCode = ValDefCode def apply(id: Int, rhs: SValue): ValDef = ValDef(id, Nil, rhs) } - object FunDef { - def unapply(d: BlockItem): Option[(Int, Seq[STypeIdent], SValue)] = d match { + object FunDef extends ValueCompanion { + def opCode: OpCode = FunDefCode + def unapply(d: BlockItem): Option[(Int, Seq[STypeVar], SValue)] = d match { case ValDef(id, targs, rhs) if !d.isValDef => Some((id, targs, rhs)) case _ => None } @@ -756,10 +843,13 @@ object Values { /** Special node which represents a reference to ValDef in was introduced as result of CSE. */ case class ValUse[T <: SType](valId: Int, tpe: T) extends NotReadyValue[T] { - override val opCode: OpCode = ValUseCode + override def companion = ValUse /** This is not used as operation, but rather to form a program structure */ def opType: SFunc = Value.notSupportedError(this, "opType") } + object ValUse extends ValueCompanion { + override def opCode: OpCode = ValUseCode + } /** The order of ValDefs in the block is used to assign ids to ValUse(id) nodes * For all i: items(i).id == {number of ValDefs preceded in a graph} with respect to topological order. @@ -769,23 +859,26 @@ object Values { * in a fixed well defined order. 
*/ case class BlockValue(items: IndexedSeq[BlockItem], result: SValue) extends NotReadyValue[SType] { - val opCode: OpCode = BlockValueCode + override def companion = BlockValue def tpe: SType = result.tpe /** This is not used as operation, but rather to form a program structure */ def opType: SFunc = Value.notSupportedError(this, "opType") } - + object BlockValue extends ValueCompanion { + override def opCode: OpCode = BlockValueCode + } /** * @param args parameters list, where each parameter has an id and a type. * @param body expression, which refers function parameters with ValUse. */ case class FuncValue(args: IndexedSeq[(Int,SType)], body: Value[SType]) extends NotReadyValue[SFunc] { + override def companion = FuncValue lazy val tpe: SFunc = SFunc(args.map(_._2), body.tpe) - val opCode: OpCode = FuncValueCode /** This is not used as operation, but rather to form a program structure */ override def opType: SFunc = SFunc(Vector(), tpe) } - object FuncValue { + object FuncValue extends ValueCompanion { + override def opCode: OpCode = FuncValueCode def apply(argId: Int, tArg: SType, body: SValue): FuncValue = FuncValue(IndexedSeq((argId,tArg)), body) } @@ -806,6 +899,11 @@ object Values { def GetVarSigmaProp(varId: Byte): GetVar[SSigmaProp.type] = GetVar(varId, SSigmaProp) def GetVarByteArray(varId: Byte): GetVar[SCollection[SByte.type]] = GetVar(varId, SByteArray) + /** This is alternative representation of ErgoTree expression when it cannot be parsed + * due to `error`. This is used by the nodes running old versions of code to recognize + * soft-fork conditions and skip validation of box propositions which are unparsable. */ + case class UnparsedErgoTree(bytes: mutable.WrappedArray[Byte], error: ValidationException) + /** The root of ErgoScript IR. Serialized instances of this class are self sufficient and can be passed around. * ErgoTreeSerializer defines top-level serialization format of the scripts. 
* The interpretation of the byte array depend on the first `header` byte, which uses VLQ encoding up to 30 bits. @@ -819,7 +917,7 @@ object Values { * Bit 5 == 1 - reserved for context dependent costing (should be = 0) * Bit 4 == 1 if constant segregation is used for this ErgoTree (default = 0) * (see https://github.com/ScorexFoundation/sigmastate-interpreter/issues/264) - * Bit 3 - reserved (should be 0) + * Bit 3 == 1 if size of the whole tree is serialized after the header byte (default = 0) * Bits 2-0 - language version (current version == 0) * * Currently we don't specify interpretation for the second and other bytes of the header. @@ -836,27 +934,58 @@ object Values { * consistency. In case of any inconsistency the serializer throws exception. * * @param header the first byte of serialized byte array which determines interpretation of the rest of the array + * * @param constants If isConstantSegregation == true contains the constants for which there may be * ConstantPlaceholders in the tree. * If isConstantSegregation == false this array should be empty and any placeholder in * the tree will lead to exception. - * @param root if isConstantSegregation == true contains ConstantPlaceholder instead of some Constant nodes. - * Otherwise may not contain placeholders. + * + * @param root On the right side it has valid expression of `SigmaProp` type. Or alternatively, + * on the left side, it has unparsed bytes along with the ValidationException, + * which caused the deserializer to fail. + * `Right(tree)` if isConstantSegregation == true contains ConstantPlaceholder + * instead of some Constant nodes. Otherwise, it may not contain placeholders. * It is possible to have both constants and placeholders in the tree, but for every placeholder * there should be a constant in `constants` array. - * @param proposition When isConstantSegregation == false this is the same as root. 
- * Otherwise, it is equivalent to `root` where all placeholders are replaced by Constants - * */ + */ case class ErgoTree private( header: Byte, constants: IndexedSeq[Constant[SType]], - root: SigmaPropValue, - proposition: SigmaPropValue + root: Either[UnparsedErgoTree, SigmaPropValue] ) { - assert(isConstantSegregation || constants.isEmpty) + require(isConstantSegregation || constants.isEmpty) + require(version == 0 || hasSize, s"For newer version the size bit is required: $this") + /** Then it throws the error from UnparsedErgoTree. + * It does so on every usage of `proposition` because the lazy value remains uninitialized. + */ + @deprecated("Use toProposition instead", "v2.1") + lazy val proposition: SigmaPropValue = toProposition(isConstantSegregation) + + @inline def version: Byte = ErgoTree.getVersion(header) + @inline def isRightParsed: Boolean = root.isRight @inline def isConstantSegregation: Boolean = ErgoTree.isConstantSegregation(header) + @inline def hasSize: Boolean = ErgoTree.hasSize(header) @inline def bytes: Array[Byte] = DefaultSerializer.serializeErgoTree(this) + + /** Get proposition expression from this contract. + * When root.isRight then + * if replaceConstants == false this is the same as `root.right.get`. + * Otherwise, it is equivalent to `root.right.get` where all placeholders are replaced by Constants. + * When root.isLeft then + * throws the error from UnparsedErgoTree. + * It does so on every usage of `proposition` because the lazy value remains uninitialized. + */ + def toProposition(replaceConstants: Boolean): SigmaPropValue = root match { + case Right(tree) => + val prop = if (replaceConstants) + substConstants(tree, constants).asSigmaProp + else + tree + prop + case Left(UnparsedErgoTree(_, error)) => + throw error + } } object ErgoTree { @@ -869,10 +998,18 @@ object Values { /** Header flag to indicate that constant segregation should be applied. 
*/ val ConstantSegregationFlag: Byte = 0x10 + /** Header flag to indicate that whole size of ErgoTree should be saved before tree content. */ + val SizeFlag: Byte = 0x08 + + /** Header mask to extract version bits. */ + val VersionMask: Byte = 0x07 + /** Default header with constant segregation enabled. */ - val ConstantSegregationHeader = (DefaultHeader | ConstantSegregationFlag).toByte + val ConstantSegregationHeader: Byte = (DefaultHeader | ConstantSegregationFlag).toByte @inline def isConstantSegregation(header: Byte): Boolean = (header & ConstantSegregationFlag) != 0 + @inline def hasSize(header: Byte): Boolean = (header & SizeFlag) != 0 + @inline def getVersion(header: Byte): Byte = (header & VersionMask).toByte def substConstants(root: SValue, constants: IndexedSeq[Constant[SType]]): SValue = { val store = new ConstantStore(constants) @@ -884,33 +1021,51 @@ object Values { everywherebu(substRule)(root).fold(root)(_.asInstanceOf[SValue]) } - def apply(header: Byte, constants: IndexedSeq[Constant[SType]], root: SigmaPropValue) = { - if (isConstantSegregation(header)) { - val prop = substConstants(root, constants).asSigmaProp - new ErgoTree(header, constants, root, prop) - } else - new ErgoTree(header, constants, root, root) + def apply(header: Byte, constants: IndexedSeq[Constant[SType]], root: SigmaPropValue): ErgoTree = { + new ErgoTree(header, constants, Right(root)) } - val EmptyConstants = IndexedSeq.empty[Constant[SType]] + val EmptyConstants: IndexedSeq[Constant[SType]] = IndexedSeq.empty[Constant[SType]] - def withoutSegregation(root: SigmaPropValue) = { + def withoutSegregation(root: SigmaPropValue): ErgoTree = ErgoTree(ErgoTree.DefaultHeader, EmptyConstants, root) - } implicit def fromProposition(prop: SigmaPropValue): ErgoTree = { prop match { case SigmaPropConstant(_) => withoutSegregation(prop) - case _ => - // get ErgoTree with segregated constants - // todo rewrite with everywherebu? 
- DefaultSerializer.deserializeErgoTree(DefaultSerializer.serializeWithSegregation(prop)) + case _ => withSegregation(prop) } } implicit def fromSigmaBoolean(pk: SigmaBoolean): ErgoTree = { withoutSegregation(pk.toSigmaProp) } + + /** Build ErgoTree via serialization of the value with ConstantSegregationHeader, constants segregated + * from the tree and ConstantPlaceholders referring to the segregated constants. + * + * This method uses single traverse of the tree to: + * 1) find and segregate all constants; + * 2) replace constants with ConstantPlaceholders in the `tree`; + * 3) write the `tree` to the Writer's buffer obtaining `treeBytes`; + * 4) deserialize `tree` with ConstantPlaceholders. + **/ + def withSegregation(headerFlags: Byte, value: SigmaPropValue): ErgoTree = { + val constantStore = new ConstantStore() + val byteWriter = SigmaSerializer.startWriter(constantStore) + // serialize value and segregate constants into constantStore + ValueSerializer.serialize(value, byteWriter) + val extractedConstants = constantStore.getAll + val r = SigmaSerializer.startReader(byteWriter.toBytes) + r.constantStore = new ConstantStore(extractedConstants) + // deserialize value with placeholders + val valueWithPlaceholders = ValueSerializer.deserialize(r).asSigmaProp + val header = (ErgoTree.ConstantSegregationHeader | headerFlags).toByte + new ErgoTree(header, extractedConstants, Right(valueWithPlaceholders)) + } + + def withSegregation(value: SigmaPropValue): ErgoTree = + withSegregation(0, value) } } diff --git a/src/main/scala/sigmastate/basics/DLogProtocol.scala b/src/main/scala/sigmastate/basics/DLogProtocol.scala index 31769fe14a..1474ca9c25 100644 --- a/src/main/scala/sigmastate/basics/DLogProtocol.scala +++ b/src/main/scala/sigmastate/basics/DLogProtocol.scala @@ -6,11 +6,13 @@ import org.bouncycastle.util.BigIntegers import sigmastate.Values._ import Value.PropositionCode import sigmastate._ +import sigmastate.eval._ import 
sigmastate.basics.VerifierMessage.Challenge import sigmastate.interpreter.CryptoConstants.{EcPointType, dlogGroup} import sigmastate.interpreter.CryptoConstants -import sigmastate.serialization.{GroupElementSerializer, OpCodes} +import sigmastate.serialization.{OpCodes, GroupElementSerializer} import sigmastate.serialization.OpCodes.OpCode +import special.sigma.SigmaProp object DLogProtocol { @@ -22,9 +24,7 @@ object DLogProtocol { /** Construct a new SigmaBoolean value representing public key of discrete logarithm signature protocol. */ case class ProveDlog(value: EcPointType) extends SigmaProofOfKnowledgeTree[DLogSigmaProtocol, DLogProverInput] { - override val opCode: OpCode = OpCodes.ProveDlogCode - //todo: fix, we should consider that class parameter could be not evaluated lazy val h: EcPointType = value lazy val pkBytes: Array[Byte] = GroupElementSerializer.toBytes(h) } @@ -33,6 +33,14 @@ object DLogProtocol { val Code: PropositionCode = 102: Byte } + /** Helper extractor to match SigmaProp values and extract ProveDlog out of it. 
*/ + object ProveDlogProp { + def unapply(p: SigmaProp): Option[ProveDlog] = SigmaDsl.toSigmaBoolean(p) match { + case d: ProveDlog => Some(d) + case _ => None + } + } + case class DLogProverInput(w: BigInteger) extends SigmaProtocolPrivateInput[DLogSigmaProtocol, ProveDlog] { diff --git a/src/main/scala/sigmastate/basics/DiffieHellmanTupleProtocol.scala b/src/main/scala/sigmastate/basics/DiffieHellmanTupleProtocol.scala index 2ebbc8a0c9..d2d23de5d6 100644 --- a/src/main/scala/sigmastate/basics/DiffieHellmanTupleProtocol.scala +++ b/src/main/scala/sigmastate/basics/DiffieHellmanTupleProtocol.scala @@ -4,13 +4,14 @@ import java.math.BigInteger import org.bouncycastle.util.BigIntegers import sigmastate.Values.Value.PropositionCode -import sigmastate.Values._ import sigmastate._ import sigmastate.basics.VerifierMessage.Challenge +import sigmastate.eval.SigmaDsl import sigmastate.interpreter.CryptoConstants.EcPointType import sigmastate.interpreter.CryptoConstants -import sigmastate.serialization.{GroupElementSerializer, OpCodes} +import sigmastate.serialization.{OpCodes, GroupElementSerializer} import sigmastate.serialization.OpCodes.OpCode +import special.sigma.SigmaProp trait DiffieHellmanTupleProtocol extends SigmaProtocol[DiffieHellmanTupleProtocol] { @@ -59,7 +60,7 @@ case class SecondDiffieHellmanTupleProverMessage(z: BigInteger) case class ProveDHTuple(gv: EcPointType, hv: EcPointType, uv: EcPointType, vv: EcPointType) extends SigmaProtocolCommonInput[DiffieHellmanTupleProtocol] with SigmaProofOfKnowledgeTree[DiffieHellmanTupleProtocol, DiffieHellmanTupleProverInput] { - override val opCode: OpCode = OpCodes.ProveDiffieHellmanTupleCode + override val opCode: OpCode = OpCodes.ProveDHTupleCode lazy val g = gv lazy val h = hv lazy val u = uv @@ -70,6 +71,13 @@ object ProveDHTuple { val Code: PropositionCode = 103: Byte } +/** Helper extractor to match SigmaProp values and extract ProveDHTuple out of it. 
*/ +object ProveDHTupleProp { + def unapply(p: SigmaProp): Option[ProveDHTuple] = SigmaDsl.toSigmaBoolean(p) match { + case d: ProveDHTuple => Some(d) + case _ => None + } +} class DiffieHellmanTupleInteractiveProver(override val publicInput: ProveDHTuple, override val privateInputOpt: Option[DiffieHellmanTupleProverInput]) diff --git a/src/main/scala/sigmastate/eval/CompiletimeCosting.scala b/src/main/scala/sigmastate/eval/CompiletimeCosting.scala index 8610d35099..2ac84d114c 100644 --- a/src/main/scala/sigmastate/eval/CompiletimeCosting.scala +++ b/src/main/scala/sigmastate/eval/CompiletimeCosting.scala @@ -14,7 +14,7 @@ import sigmastate.Values.Value.Typed import sigmastate.lang.Terms import sigma.util.Extensions._ -trait CompiletimeCosting extends RuntimeCosting { IR: Evaluation => +trait CompiletimeCosting extends RuntimeCosting { IR: IRContext => import builder._ override def evalNode[T <: SType](ctx: RCosted[Context], env: CostingEnv, node: Value[T]): RCosted[T#WrappedType] = { @@ -108,6 +108,11 @@ trait CompiletimeCosting extends RuntimeCosting { IR: Evaluation => case Select(input, ModQMethod.name, _) => eval(mkModQ(input.asBigInt)) + case Terms.Apply(Select(l, PlusModQMethod.name, _), Seq(r)) => + eval(mkPlusModQ(l.asBigInt, r.asBigInt)) + case Terms.Apply(Select(l, MinusModQMethod.name, _), Seq(r)) => + eval(mkMinusModQ(l.asBigInt, r.asBigInt)) + case _ => super.evalNode(ctx, env, node) } diff --git a/src/main/scala/sigmastate/eval/CostingDataContext.scala b/src/main/scala/sigmastate/eval/CostingDataContext.scala index bb5506911c..d0849bd017 100644 --- a/src/main/scala/sigmastate/eval/CostingDataContext.scala +++ b/src/main/scala/sigmastate/eval/CostingDataContext.scala @@ -1,29 +1,31 @@ package sigmastate.eval import java.math.BigInteger +import java.util import org.bouncycastle.math.ec.ECPoint import org.ergoplatform.ErgoBox +import org.ergoplatform.validation.ValidationRules import scorex.crypto.authds.avltree.batch._ import 
scorex.crypto.authds.{ADDigest, ADKey, SerializedAdProof, ADValue} import sigmastate.SCollection.SByteArray import sigmastate.{TrivialProp, _} -import sigmastate.Values.{Constant, SValue, ConstantNode, Value, IntConstant, ErgoTree, SigmaBoolean} +import sigmastate.Values.{Constant, EvaluatedValue, SValue, ConstantNode, Value, ErgoTree, SigmaBoolean} import sigmastate.interpreter.CryptoConstants.EcPointType import sigmastate.interpreter.{CryptoConstants, Interpreter} -import special.collection.{CSizePrim, Builder, Size, CSizeOption, SizeColl, CCostedBuilder, CollType, SizeOption, CostedBuilder, Coll} -import special.sigma._ -import special.sigma.Extensions._ +import special.collection.{Size, CSizeOption, SizeColl, CCostedBuilder, CollType, SizeOption, CostedBuilder, Coll} +import special.sigma.{Box, _} +import sigmastate.eval.Extensions._ import scala.util.{Success, Failure} -import scalan.{NeverInline, RType} +import scalan.RType import scorex.crypto.hash.{Digest32, Sha256, Blake2b256} import sigmastate.basics.DLogProtocol.ProveDlog import sigmastate.basics.ProveDHTuple -import sigmastate.interpreter.Interpreter.emptyEnv import sigmastate.lang.Terms.OperationId import sigmastate.serialization.ErgoTreeSerializer.DefaultSerializer -import sigmastate.serialization.{GroupElementSerializer, SigmaSerializer} +import sigmastate.serialization.{SigmaSerializer, GroupElementSerializer} +import special.Types.TupleType import scala.reflect.ClassTag @@ -55,7 +57,7 @@ case class CSigmaProp(sigmaTree: SigmaBoolean) extends SigmaProp with WrapperOf[ // the same serialization method is used in both cases val ergoTree = ErgoTree.fromSigmaBoolean(sigmaTree) val bytes = DefaultSerializer.serializeErgoTree(ergoTree) - Builder.DefaultCollBuilder.fromArray(bytes) + Colls.fromArray(bytes) } override def &&(other: SigmaProp): SigmaProp = other match { @@ -221,7 +223,7 @@ class EvalSizeBox( val foundSize = varSize.asInstanceOf[SizeOption[AnyValue]].sizeOpt val regSize = foundSize match { 
case Some(varSize: SizeAnyValue) => - assert(varSize.tVal == tT, s"Unexpected register type found ${varSize.tVal}: expected $tT") + assert(varSize.tVal == tT, s"Unexpected register type found at register #$id: ${varSize.tVal}, expected $tT") val regSize = varSize.valueSize.asInstanceOf[Size[T]] regSize case _ => @@ -240,18 +242,16 @@ class EvalSizeBuilder extends CSizeBuilder { new EvalSizeBox(propositionBytes, bytes, bytesWithoutRef, registers, tokens) } } -case class CostingBox(val IR: Evaluation, - isCost: Boolean, - val ebox: ErgoBox) - extends TestBox( - colBytes(ebox.id)(IR), - ebox.value, - colBytes(ebox.bytes)(IR), - colBytes(ebox.bytesWithNoRef)(IR), - colBytes(ebox.propositionBytes)(IR), - regs(ebox, isCost)(IR) - ) with WrapperOf[ErgoBox] { - override val builder = new CostingSigmaDslBuilder() + +case class CostingBox(isCost: Boolean, val ebox: ErgoBox) extends Box with WrapperOf[ErgoBox] { + val builder = CostingSigmaDslBuilder + + val value = ebox.value + lazy val id: Coll[Byte] = Colls.fromArray(ebox.id) + lazy val bytes: Coll[Byte] = Colls.fromArray(ebox.bytes) + lazy val bytesWithoutRef: Coll[Byte] = Colls.fromArray(ebox.bytesWithNoRef) + lazy val propositionBytes: Coll[Byte] = Colls.fromArray(ebox.propositionBytes) + lazy val registers: Coll[AnyValue] = regs(ebox, isCost) override def wrappedValue: ErgoBox = ebox @@ -277,8 +277,20 @@ case class CostingBox(val IR: Evaluation, val default = builder.Costing.defaultValue(tT).asInstanceOf[SType#WrappedType] Some(Constant[SType](default, tpe).asInstanceOf[T]) } - } else - super.getReg(i)(tT) + } else { + if (i < 0 || i >= registers.length) return None + val value = registers(i) + if (value != null ) { + // once the value is not null it should be of the right type + value match { + case value: TestValue[_] if value.value != null && value.tA == tT => + Some(value.value.asInstanceOf[T]) + case _ => + throw new InvalidType(s"Cannot getReg[${tT.name}]($i): invalid type of value $value at id=$i") + } + } else 
None + } + override def creationInfo: (Int, Coll[Byte]) = { this.getReg[(Int, Coll[Byte])](3).get.asInstanceOf[Any] match { @@ -288,37 +300,45 @@ case class CostingBox(val IR: Evaluation, case v => sys.error(s"Invalid value $v of creationInfo register R3") } + } + override def tokens: Coll[(Coll[Byte], Long)] = { + this.getReg[Coll[(Coll[Byte], Long)]](ErgoBox.R2.asIndex).get } + + override def executeFromRegister[T](regId: Byte)(implicit cT: RType[T]): T = ??? // TODO implement + + override def hashCode(): Int = id.toArray.hashCode() // TODO optimize using just 4 bytes of id (since it is already hash) + + override def equals(obj: Any): Boolean = (this eq obj.asInstanceOf[AnyRef]) || (obj != null && ( obj match { + case obj: Box => util.Arrays.equals(id.toArray, obj.id.toArray) + })) } object CostingBox { import Evaluation._ - import sigmastate.SType._ def colBytes(b: Array[Byte])(implicit IR: Evaluation): Coll[Byte] = IR.sigmaDslBuilderValue.Colls.fromArray(b) - def regs(ebox: ErgoBox, isCost: Boolean)(implicit IR: Evaluation): Coll[AnyValue] = { + def regs(ebox: ErgoBox, isCost: Boolean): Coll[AnyValue] = { val res = new Array[AnyValue](ErgoBox.maxRegisters) def checkNotYetDefined(id: Int, newValue: SValue) = require(res(id) == null, s"register $id is defined more then once: previous value ${res(id)}, new value $newValue") - for ((k, v: Value[t]) <- ebox.additionalRegisters) { + for ((k, v: EvaluatedValue[t]) <- ebox.additionalRegisters) { checkNotYetDefined(k.number, v) - val dslData = toDslData(v, v.tpe, isCost) - res(k.number) = toAnyValue(dslData.asWrappedType)(stypeToRType(v.tpe)) + res(k.number) = toAnyValue(v.value)(stypeToRType(v.tpe)) } for (r <- ErgoBox.mandatoryRegisters) { val regId = r.number - val v = ebox.get(r).get + val v = ebox.get(r).get.asInstanceOf[EvaluatedValue[SType]] checkNotYetDefined(regId, v) - val dslData = Evaluation.toDslData(v, v.tpe, isCost) - res(regId) = toAnyValue(dslData.asWrappedType)(stypeToRType(v.tpe)) + res(regId) = 
toAnyValue(v.value)(stypeToRType(v.tpe)) } - IR.sigmaDslBuilderValue.Colls.fromArray(res) + Colls.fromArray(res) } } @@ -393,7 +413,7 @@ case class CHeader( version: Byte, parentId: Coll[Byte], ADProofsRoot: Coll[Byte], - stateRoot: CAvlTree, + stateRoot: AvlTree, transactionsRoot: Coll[Byte], timestamp: Long, nBits: Long, @@ -432,7 +452,7 @@ class CCostModel extends CostModel { def SelectField: Int = costOf("SelectField", SFunc(IndexedSeq(), SUnit)) - def CollectionConst: Int = costOf("Const", SFunc(IndexedSeq(), SCollection(STypeIdent("IV")))) + def CollectionConst: Int = costOf("Const", SFunc(IndexedSeq(), SCollection(STypeVar("IV")))) def AccessKiloByteOfData: Int = costOf("AccessKiloByteOfData", SFunc(IndexedSeq(), SUnit)) @@ -443,6 +463,8 @@ class CCostModel extends CostModel { class CostingSigmaDslBuilder extends TestSigmaDslBuilder { dsl => + implicit val validationSettings = ValidationRules.currentSettings + override val Costing: CostedBuilder = new CCostedBuilder { import RType._ @@ -495,6 +517,13 @@ class CostingSigmaDslBuilder extends TestSigmaDslBuilder { CAvlTree(treeData) } + def avlTree(treeData: AvlTreeData): AvlTree = { + CAvlTree(treeData) + } + + def Box(ebox: ErgoBox): Box = CostingBox(false, ebox) + def toErgoBox(b: Box): ErgoBox = b.asInstanceOf[CostingBox].ebox + private def toSigmaTrees(props: Array[SigmaProp]): Array[SigmaBoolean] = { props.map { case csp: CSigmaProp => csp.sigmaTree @@ -612,7 +641,6 @@ case class CostingDataContext( override def getVar[T](id: Byte)(implicit tT: RType[T]): Option[T] = { if (isCost) { - // implicit val tag: ClassTag[T] = cT.classTag val optV = if (id < 0 || id >= vars.length) None else { diff --git a/src/main/scala/sigmastate/eval/CostingRules.scala b/src/main/scala/sigmastate/eval/CostingRules.scala index 14c9ff62d6..e2d7e3577b 100644 --- a/src/main/scala/sigmastate/eval/CostingRules.scala +++ b/src/main/scala/sigmastate/eval/CostingRules.scala @@ -1,13 +1,12 @@ package sigmastate.eval -import 
org.ergoplatform.{ErgoLikeContext, ErgoBox} -import scalan.{SigmaLibrary, RType} +import org.ergoplatform.ErgoLikeContext +import scalan.{SigmaLibrary, MutableLazy} import sigmastate._ import sigmastate.Values._ import sigmastate.SType.AnyOps import sigmastate.interpreter.CryptoConstants import sigmastate.utxo.CostTable -import special.collection.Coll trait CostingRules extends SigmaLibrary { IR: RuntimeCosting => import Coll._ @@ -16,11 +15,9 @@ trait CostingRules extends SigmaLibrary { IR: RuntimeCosting => import AvlTree._ import GroupElement._ import CollBuilder._ - import SizeBuilder._ import CostedBuilder._ import Costed._ import Size._ - import SizePrim._ import SizeColl._ import SizeOption._ import SizePair._ @@ -28,6 +25,7 @@ trait CostingRules extends SigmaLibrary { IR: RuntimeCosting => import SizeBox._ import SizeContext._ import CCostedPrim._ + import CostedPair._ import CCostedPair._ import CCostedOption._ import CostedFunc._ @@ -41,62 +39,88 @@ trait CostingRules extends SigmaLibrary { IR: RuntimeCosting => import WOption._ import Box._ - abstract class CostingHandler[T](createCoster: (RCosted[T], SMethod, Seq[RCosted[_]]) => Coster[T]) { - def apply(obj: RCosted[_], method: SMethod, args: Seq[RCosted[_]]): RCosted[_] = { - val coster = createCoster(asCosted[T](obj), method, args) + /** Implements basic costing rule invocation mechanism. + * Each MethodCall node of ErgoTree is costed using the same mechanism. + * When MethodCall is matched during traverse of ErgoTree in `RuntimeCosting.evalNode`: + * 1) the type of the receiver object is used to lookup the corresponding CostingHandler + * 2) The apply method of CostingHandler is called to create the Coster + * 3) When Coster is created, the costing-rule-method is looked up using reflection and then invoked. + * 4) The result of costing-rule-method is returned as the result of MethodCall node costing. + * + * Instances of this class are typically singleton objects (see below). 
+ * @param createCoster constructor of Coster for given parameters of MethodCall: obj, method, costedArgs, args. + * @see Coster + */ + abstract class CostingHandler[T](createCoster: (RCosted[T], SMethod, Seq[RCosted[_]], Seq[Sym]) => Coster[T]) { + def apply(obj: RCosted[_], method: SMethod, costedArgs: Seq[RCosted[_]], args: Seq[Sym] = Nil): RCosted[_] = { + val coster = createCoster(asCosted[T](obj), method, costedArgs, args) val costerClass = coster.getClass - val costerMethod = costerClass.getMethod(method.name, Array.fill(args.length)(classOf[Sym]):_*) - val res = costerMethod.invoke(coster, args:_*) + val parameterTypes = Array.fill(costedArgs.length + args.length)(classOf[Sym]) + val costerMethod = costerClass.getMethod(method.name, parameterTypes:_*) + val res = costerMethod.invoke(coster, costedArgs ++ args:_*) res.asInstanceOf[RCosted[_]] } } - /** Special graph node to represent accumulation of the operation costs. - * In general, due to node sharing it is incorrect to just sum up all the `args` costs - * and add `resCost` to that value. - * Example:
- * - * val x = .. - * val y = op1(x) - * val z = op2(x) - * val res = op3(y, z) - * - * The naive summation will lead to the cost of x` is accumulated both into `cost of y` - * and into `cost of z`, so in the `cost of res` it is accumulated twice. - * To avoid this problem OpCost nodes require special handling in during evaluation. - * - * @param args costs of the arguments, which are here represent dependency information. - * @param opCost operation cost, which should be added to the currently accumulated cost - * @see `Evaluation` - */ - case class OpCost(args: Seq[Rep[Int]], opCost: Rep[Int]) extends BaseDef[Int] { - override def transform(t: Transformer) = OpCost(t(args), t(opCost)) - } - def opCost(args: Seq[Rep[Int]], opCost: Rep[Int]): Rep[Int] = OpCost(args, opCost) - - def selectFieldCost = sigmaDslBuilder.CostModel.SelectField - - // TODO move initialization to init() to support resetContext - lazy val SizeUnit: RSize[Unit] = costedBuilder.mkSizePrim(0L, UnitElement) - lazy val SizeBoolean: RSize[Boolean] = costedBuilder.mkSizePrim(1L, BooleanElement) - lazy val SizeByte: RSize[Byte] = costedBuilder.mkSizePrim(1L, ByteElement) - lazy val SizeShort: RSize[Short] = costedBuilder.mkSizePrim(2L, ShortElement) - lazy val SizeInt: RSize[Int] = costedBuilder.mkSizePrim(4L, IntElement) - lazy val SizeLong: RSize[Long] = costedBuilder.mkSizePrim(8L, LongElement) - lazy val SizeBigInt: RSize[BigInt] = costedBuilder.mkSizePrim(SBigInt.MaxSizeInBytes, element[BigInt]) - lazy val SizeString: RSize[String] = costedBuilder.mkSizePrim(256L, StringElement) - lazy val SizeAvlTree: RSize[AvlTree] = costedBuilder.mkSizePrim(AvlTreeData.TreeDataSize.toLong, element[AvlTree]) - lazy val SizeGroupElement: RSize[GroupElement] = costedBuilder.mkSizePrim(CryptoConstants.EncodedGroupElementLength.toLong, element[GroupElement]) - - lazy val SizeHashBytes: RSize[Coll[Byte]] = { + /** Lazy values, which are immutable, but can be reset, so that the next time they are accessed + * the 
expression is re-evaluated. Each value should be reset in onReset() method. */ + private val _selectFieldCost = MutableLazy(sigmaDslBuilder.CostModel.SelectField) + @inline def selectFieldCost = _selectFieldCost.value + + private val _getRegisterCost = MutableLazy(sigmaDslBuilder.CostModel.GetRegister) + @inline def getRegisterCost = _getRegisterCost.value + + private val _sizeUnit: LazyRep[Size[Unit]] = MutableLazy(costedBuilder.mkSizePrim(0L, UnitElement)) + @inline def SizeUnit: RSize[Unit] = _sizeUnit.value + + private val _sizeBoolean: LazyRep[Size[Boolean]] = MutableLazy(costedBuilder.mkSizePrim(1L, BooleanElement)) + @inline def SizeBoolean: RSize[Boolean] = _sizeBoolean.value + + private val _sizeByte: LazyRep[Size[Byte]] = MutableLazy(costedBuilder.mkSizePrim(1L, ByteElement)) + @inline def SizeByte: RSize[Byte] = _sizeByte.value + + private val _sizeShort: LazyRep[Size[Short]] = MutableLazy(costedBuilder.mkSizePrim(2L, ShortElement)) + @inline def SizeShort: RSize[Short] = _sizeShort.value + + private val _sizeInt: LazyRep[Size[Int]] = MutableLazy(costedBuilder.mkSizePrim(4L, IntElement)) + @inline def SizeInt: RSize[Int] = _sizeInt.value + + private val _sizeLong: LazyRep[Size[Long]] = MutableLazy(costedBuilder.mkSizePrim(8L, LongElement)) + @inline def SizeLong: RSize[Long] = _sizeLong.value + + private val _sizeBigInt: LazyRep[Size[BigInt]] = MutableLazy(costedBuilder.mkSizePrim(SBigInt.MaxSizeInBytes, element[BigInt])) + @inline def SizeBigInt: RSize[BigInt] = _sizeBigInt.value + + private val _sizeString: LazyRep[Size[String]] = MutableLazy(costedBuilder.mkSizePrim(256L, StringElement)) + @inline def SizeString: RSize[String] = _sizeString.value + + private val _sizeAvlTree: LazyRep[Size[AvlTree]] = MutableLazy(costedBuilder.mkSizePrim(AvlTreeData.TreeDataSize.toLong, element[AvlTree])) + @inline def SizeAvlTree: RSize[AvlTree] = _sizeAvlTree.value + + private val _sizeGroupElement: LazyRep[Size[GroupElement]] = 
MutableLazy(costedBuilder.mkSizePrim(CryptoConstants.EncodedGroupElementLength.toLong, element[GroupElement])) + @inline def SizeGroupElement: RSize[GroupElement] = _sizeGroupElement.value + + private val _wRTypeSigmaProp: LazyRep[WRType[SigmaProp]] = MutableLazy(liftElem(element[SigmaProp])) + @inline def WRTypeSigmaProp: Rep[WRType[SigmaProp]] = _wRTypeSigmaProp.value + + private val _sizeHashBytes: LazyRep[Size[Coll[Byte]]] = MutableLazy { val len: Rep[Int] = CryptoConstants.hashLength val sizes = colBuilder.replicate(len, SizeByte) costedBuilder.mkSizeColl(sizes) } + @inline def SizeHashBytes: RSize[Coll[Byte]] = _sizeHashBytes.value + + protected override def onReset(): Unit = { + super.onReset() + // WARNING: every lazy value should be listed here, otherwise behavior after resetContext is undefined and may throw. + Array(_selectFieldCost, _getRegisterCost, _sizeUnit, _sizeBoolean, _sizeByte, _sizeShort, + _sizeInt, _sizeLong, _sizeBigInt, _sizeString, _sizeAvlTree, _sizeGroupElement, _wRTypeSigmaProp, _sizeHashBytes) + .foreach(_.reset()) + } - def mkSizeSigmaProp(size: Rep[Long]): RSize[SigmaProp] = costedBuilder.mkSizePrim(size, element[SigmaProp]) + def mkSizeSigmaProp(size: Rep[Long]): RSize[SigmaProp] = costedBuilder.mkSizePrim(size, WRTypeSigmaProp) - def SizeOfSigmaBoolean(sb: SigmaBoolean): RSize[SigmaProp] = mkSizeSigmaProp(SSigmaProp.dataSize(sb.asWrappedType)) + def SizeOfSigmaProp(p: SSigmaProp): RSize[SigmaProp] = mkSizeSigmaProp(SSigmaProp.dataSize(p.asWrappedType)) case class Cast[To](eTo: Elem[To], x: Rep[Def[_]]) extends BaseDef[To]()(eTo) { override def transform(t: Transformer) = Cast(eTo, t(x)) @@ -113,6 +137,11 @@ trait CostingRules extends SigmaLibrary { IR: RuntimeCosting => implicit val eT = collC.elem.eVal.eItem tryCast[CostedColl[T]](collC) } + def asCostedPair[A,B](pC: RCosted[(A,B)]): Rep[CostedPair[A,B]] = { + implicit val eA = pC.elem.eVal.eFst + implicit val eB = pC.elem.eVal.eSnd + tryCast[CostedPair[A,B]](pC) + } def 
asCostedFunc[A,B](fC: RCosted[A => B]): Rep[CostedFunc[Unit,A,B]] = { implicit val eA = fC.elem.eVal.eDom implicit val eB = fC.elem.eVal.eRange @@ -134,7 +163,7 @@ trait CostingRules extends SigmaLibrary { IR: RuntimeCosting => def asSizeBox(ctx: RSize[Box]): Rep[SizeBox] = tryCast[SizeBox](ctx) def asSizeContext(ctx: RSize[Context]): Rep[SizeContext] = tryCast[SizeContext](ctx) - def SOME[A](x: Rep[A]): Rep[WOption[A]] = RWSpecialPredef.some(x) + def SOME[A](x: Rep[A]): Rep[WOption[A]] = specialPredef.some(x) def mkSizeColl[T:Elem](len: Rep[Int]): Rep[Size[Coll[T]]] = { val sizes = colBuilder.replicate(len, costedBuilder.mkSizePrim(typeSize[T], element[T]): RSize[T]) @@ -161,102 +190,112 @@ trait CostingRules extends SigmaLibrary { IR: RuntimeCosting => RCCostedFunc(envC, f, cost, sFunc) } - abstract class Coster[T](obj: RCosted[T], method: SMethod, args: Seq[RCosted[_]]) { - def costOfArgs = (obj +: args).map(_.cost) - def sizeOfArgs = args.foldLeft(obj.size.dataSize)({ case (s, e) => s + e.size.dataSize }) + /** For each Sigma type there should be one Coster class (derived from this). + * Each coster object implements a set of costing rules, one rule for each method of the corresponding Sigma type. + * For example, BoxCoster is coster for Box type, it contains rules for all methods registered in SBox type descriptor. + * This class defines generic costing helpers, to unify and simplify costing rules of individual methods. 
+ */ + abstract class Coster[T](obj: RCosted[T], method: SMethod, costedArgs: Seq[RCosted[_]], args: Seq[Sym]) { + def costOfArgs: Seq[Rep[Int]] = (obj +: costedArgs).map(_.cost) + def sizeOfArgs: Rep[Long] = costedArgs.foldLeft(obj.size.dataSize)({ case (s, e) => s + e.size.dataSize }) - def constantSizeProperyAccess[R](prop: Rep[T] => Rep[R]): RCosted[R] = - withConstantSize(prop(obj.value), opCost(costOfArgs, selectFieldCost)) + def constantSizePropertyAccess[R](prop: Rep[T] => Rep[R]): RCosted[R] = { + val value = prop(obj.value) + withConstantSize(value, opCost(value, costOfArgs, selectFieldCost)) + } - def knownSizeProperyAccess[R](prop: Rep[T] => Rep[R], size: RSize[R]): RCosted[R] = - RCCostedPrim(prop(obj.value), opCost(costOfArgs, selectFieldCost), size) + def knownSizePropertyAccess[R](prop: Rep[T] => Rep[R], size: RSize[R]): RCosted[R] = { + val value = prop(obj.value) + RCCostedPrim(value, opCost(value, costOfArgs, selectFieldCost), size) + } - def knownLengthCollProperyAccess[R](prop: Rep[T] => Rep[Coll[R]], len: Rep[Int]): Rep[CostedColl[R]] = - mkCostedColl(prop(obj.value), len, opCost(costOfArgs, selectFieldCost)) + def knownLengthCollPropertyAccess[R](prop: Rep[T] => Rep[Coll[R]], len: Rep[Int]): Rep[CostedColl[R]] = { + val value = prop(obj.value) + mkCostedColl(value, len, opCost(value, costOfArgs, selectFieldCost)) + } - def digest32ProperyAccess(prop: Rep[T] => Rep[Coll[Byte]]): Rep[CostedColl[Byte]] = - knownLengthCollProperyAccess(prop, CryptoConstants.hashLength) + def digest32PropertyAccess(prop: Rep[T] => Rep[Coll[Byte]]): Rep[CostedColl[Byte]] = + knownLengthCollPropertyAccess(prop, CryptoConstants.hashLength) - def groupElementProperyAccess(prop: Rep[T] => Rep[GroupElement]): RCosted[GroupElement] = - knownSizeProperyAccess(prop, SizeGroupElement) + def groupElementPropertyAccess(prop: Rep[T] => Rep[GroupElement]): RCosted[GroupElement] = + knownSizePropertyAccess(prop, SizeGroupElement) - def bigIntProperyAccess(prop: Rep[T] => 
Rep[BigInt]): RCosted[BigInt] = - knownSizeProperyAccess(prop, SizeBigInt) + def bigIntPropertyAccess(prop: Rep[T] => Rep[BigInt]): RCosted[BigInt] = + knownSizePropertyAccess(prop, SizeBigInt) - def defaultProperyAccess[R](prop: Rep[T] => Rep[R], propSize: RSize[T] => RSize[R]): RCosted[R] = - RCCostedPrim(prop(obj.value), opCost(costOfArgs, selectFieldCost), propSize(obj.size)) + def defaultPropertyAccess[R](prop: Rep[T] => Rep[R], propSize: RSize[T] => RSize[R]): RCosted[R] = { + val value = prop(obj.value) + RCCostedPrim(value, opCost(value, costOfArgs, selectFieldCost), propSize(obj.size)) + } - def defaultOptionProperyAccess[R: Elem](prop: Rep[T] => ROption[R], propSize: RSize[T] => RSize[WOption[R]], itemCost: Rep[Int]): RCostedOption[R] = { + def defaultOptionPropertyAccess[R: Elem](prop: Rep[T] => ROption[R], propSize: RSize[T] => RSize[WOption[R]], itemCost: Rep[Int]): RCostedOption[R] = { val v = prop(obj.value) val s = propSize(obj.size) - RCCostedOption(v, SOME(itemCost), asSizeOption(s).sizeOpt, opCost(costOfArgs, selectFieldCost)) + RCCostedOption(v, SOME(itemCost), asSizeOption(s).sizeOpt, opCost(v, costOfArgs, selectFieldCost)) } - def defaultCollProperyAccess[R: Elem](prop: Rep[T] => RColl[R], propSize: RSize[T] => RSize[Coll[R]], itemCost: Rep[Int]): RCostedColl[R] = { + def defaultCollPropertyAccess[R: Elem](prop: Rep[T] => RColl[R], propSize: RSize[T] => RSize[Coll[R]], itemCost: Rep[Int]): RCostedColl[R] = { val v = prop(obj.value) val s = propSize(obj.size) val sizes = asSizeColl(s).sizes val costs = colBuilder.replicate(sizes.length, itemCost) - RCCostedColl(v, costs, sizes, opCost(costOfArgs, selectFieldCost)) + RCCostedColl(v, costs, sizes, opCost(v, costOfArgs, selectFieldCost)) } def boxPropertyAccess(prop: Rep[T] => Rep[Box], propSize: RSize[T] => RSize[Box]): RCosted[Box] = { val v = prop(obj.value) - val c = opCost(costOfArgs, sigmaDslBuilder.CostModel.AccessBox) + val c = opCost(v, costOfArgs, sigmaDslBuilder.CostModel.AccessBox) val 
s = propSize(obj.size) RCCostedPrim(v, c, s) } -// /** Cost of collection with static size elements. */ -// def costColWithConstSizedItem[T](xs: Coll[T], len: Int, itemSize: Long): CostedColl[T] = { -// val perItemCost = (len.toLong * itemSize / 1024L + 1L) * this.CostModel.AccessKiloByteOfData.toLong -// val costs = this.Colls.replicate(len, perItemCost.toInt) -// val sizes = this.Colls.replicate(len, itemSize) -// val valueCost = this.CostModel.CollectionConst -// this.Costing.mkCostedColl(xs, costs, sizes, valueCost) -// } -// -// def costOption[T](opt: Option[T], opCost: Int)(implicit cT: RType[T]): CostedOption[T] = { -// val none = this.Costing.mkCostedNone[T](opCost) -// opt.fold[CostedOption[T]](none)(x => this.Costing.mkCostedSome(this.Costing.costedValue(x, SpecialPredef.some(opCost)))) -// } } - class GroupElementCoster(obj: RCosted[GroupElement], method: SMethod, args: Seq[RCosted[_]]) extends Coster[GroupElement](obj, method, args){ + /** Costing rules for SGroupElement methods */ + class GroupElementCoster(obj: RCosted[GroupElement], method: SMethod, costedArgs: Seq[RCosted[_]], args: Seq[Sym]) extends Coster[GroupElement](obj, method, costedArgs, args){ import GroupElement._ def getEncoded: RCosted[Coll[Byte]] = - knownLengthCollProperyAccess(_.getEncoded, CryptoConstants.EncodedGroupElementLength.toInt) + knownLengthCollPropertyAccess(_.getEncoded, CryptoConstants.EncodedGroupElementLength.toInt) def negate: RCosted[GroupElement] = { - RCCostedPrim(obj.value.negate, opCost(costOfArgs, costOf(method)), SizeGroupElement) + val value = obj.value.negate + RCCostedPrim(value, opCost(value, costOfArgs, costOf(method)), SizeGroupElement) } } - object GroupElementCoster extends CostingHandler[GroupElement]((obj, m, args) => new GroupElementCoster(obj, m, args)) + /** CostingHandler for SGroupElement, see SGroupElement.coster */ + object GroupElementCoster extends CostingHandler[GroupElement]((obj, m, costedArgs, args) => new GroupElementCoster(obj, m, 
costedArgs, args)) - class AvlTreeCoster(obj: RCosted[AvlTree], method: SMethod, args: Seq[RCosted[_]]) extends Coster[AvlTree](obj, method, args){ + /** Costing rules for SAvlTree methods */ + class AvlTreeCoster(obj: RCosted[AvlTree], method: SMethod, costedArgs: Seq[RCosted[_]], args: Seq[Sym]) extends Coster[AvlTree](obj, method, costedArgs, args){ import AvlTree._ - def digest() = knownLengthCollProperyAccess(_.digest, AvlTreeData.DigestSize) - def enabledOperations() = constantSizeProperyAccess(_.enabledOperations) - def keyLength() = constantSizeProperyAccess(_.keyLength) - def valueLengthOpt() = defaultOptionProperyAccess(_.valueLengthOpt, _ => mkSizeOption(SizeInt), 0) - def isInsertAllowed() = constantSizeProperyAccess(_.isInsertAllowed) - def isUpdateAllowed() = constantSizeProperyAccess(_.isUpdateAllowed) - def isRemoveAllowed() = constantSizeProperyAccess(_.isRemoveAllowed) - + def digest() = knownLengthCollPropertyAccess(_.digest, AvlTreeData.DigestSize) + def enabledOperations() = constantSizePropertyAccess(_.enabledOperations) + def keyLength() = constantSizePropertyAccess(_.keyLength) + def valueLengthOpt() = defaultOptionPropertyAccess(_.valueLengthOpt, _ => mkSizeOption(SizeInt), 0) + def isInsertAllowed() = constantSizePropertyAccess(_.isInsertAllowed) + def isUpdateAllowed() = constantSizePropertyAccess(_.isUpdateAllowed) + def isRemoveAllowed() = constantSizePropertyAccess(_.isRemoveAllowed) + + def updateDigest(newDigest: RCosted[Coll[Byte]]) = { + val value = obj.value.updateDigest(newDigest.value) + RCCostedPrim(value, opCost(value, costOfArgs, costOf(method)), obj.size) + } def updateOperations(flags: RCosted[Byte]) = { - RCCostedPrim(obj.value.updateOperations(flags.value), opCost(costOfArgs, costOf(method)), obj.size) + val value = obj.value.updateOperations(flags.value) + RCCostedPrim(value, opCost(value, costOfArgs, costOf(method)), obj.size) } def contains(key: RCosted[Coll[Byte]], proof: RCosted[Coll[Byte]]): RCosted[Boolean] = { - 
withConstantSize(obj.value.contains(key.value, proof.value), opCost(costOfArgs, perKbCostOf(method, sizeOfArgs))) + val value = obj.value.contains(key.value, proof.value) + withConstantSize(value, opCost(value, costOfArgs, perKbCostOf(method, sizeOfArgs))) } def get(key: RCosted[Coll[Byte]], proof: RCosted[Coll[Byte]]): RCosted[WOption[Coll[Byte]]] = { val value = obj.value.get(key.value, proof.value) val size = sizeOfArgs - val c = opCost(costOfArgs, perKbCostOf(method, size)) + val c = opCost(value, costOfArgs, perKbCostOf(method, size)) val res = RCCostedOption(value, - RWSpecialPredef.some(0), - RWSpecialPredef.some(proof.size), + specialPredef.some(0), + specialPredef.some(proof.size), c) res } @@ -267,7 +306,7 @@ trait CostingRules extends SigmaLibrary { IR: RuntimeCosting => val value = obj.value.getMany(keysC.value, proofC.value) val costs = colBuilder.replicate(nKeys, 0) - val valuesCost = opCost(costOfArgs, perKbCostOf(method, sizeOfArgs)) + val valuesCost = opCost(value, costOfArgs, perKbCostOf(method, sizeOfArgs)) val treeValueLengthPerKey = proofC.sizes.length div nKeys val treeValueS = mkSizeColl[Byte](treeValueLengthPerKey) @@ -281,8 +320,8 @@ trait CostingRules extends SigmaLibrary { IR: RuntimeCosting => val value = meth(obj.value) val size = sizeOfArgs RCCostedOption(value, - RWSpecialPredef.some(0), - SOME(obj.size), opCost(costOfArgs, perKbCostOf(method, size))) + specialPredef.some(0), + SOME(obj.size), opCost(value, costOfArgs, perKbCostOf(method, size))) } def insert(kvs: RCosted[Coll[(Coll[Byte], Coll[Byte])]], proof: RCosted[Coll[Byte]]): RCosted[WOption[AvlTree]] = { @@ -296,17 +335,18 @@ trait CostingRules extends SigmaLibrary { IR: RuntimeCosting => } } - object AvlTreeCoster extends CostingHandler[AvlTree]((obj, m, args) => new AvlTreeCoster(obj, m, args)) + object AvlTreeCoster extends CostingHandler[AvlTree]((obj, m, costedArgs, args) => new AvlTreeCoster(obj, m, costedArgs, args)) - class ContextCoster(obj: RCosted[Context], method: 
SMethod, args: Seq[RCosted[_]]) extends Coster[Context](obj, method, args){ + /** Costing rules for SContext methods */ + class ContextCoster(obj: RCosted[Context], method: SMethod, costedArgs: Seq[RCosted[_]], args: Seq[Sym]) extends Coster[Context](obj, method, costedArgs, args){ import Context._ def boxCollProperty(prop: Rep[Context] => Rep[Coll[Box]], propSize: Rep[SizeContext] => RSize[Coll[Box]]) = { - defaultCollProperyAccess(prop, ctxS => propSize(asSizeContext(ctxS)), sigmaDslBuilder.CostModel.AccessBox) + defaultCollPropertyAccess(prop, ctxS => propSize(asSizeContext(ctxS)), sigmaDslBuilder.CostModel.AccessBox) } def headers() = { - knownLengthCollProperyAccess(_.headers, ErgoLikeContext.MaxHeaders) + knownLengthCollPropertyAccess(_.headers, ErgoLikeContext.MaxHeaders) } - def preHeader() = constantSizeProperyAccess(_.preHeader) + def preHeader() = constantSizePropertyAccess(_.preHeader) def dataInputs(): RCostedColl[Box] = { boxCollProperty(_.dataInputs, _.dataInputs) @@ -320,37 +360,37 @@ trait CostingRules extends SigmaLibrary { IR: RuntimeCosting => boxCollProperty(_.INPUTS, _.inputs) } - def HEIGHT: RCosted[Int] = constantSizeProperyAccess(_.HEIGHT) + def HEIGHT: RCosted[Int] = constantSizePropertyAccess(_.HEIGHT) def SELF: RCosted[Box] = boxPropertyAccess(_.SELF, asSizeContext(_).selfBox) def LastBlockUtxoRootHash: RCosted[AvlTree] = - knownSizeProperyAccess(_.LastBlockUtxoRootHash, SizeAvlTree) + knownSizePropertyAccess(_.LastBlockUtxoRootHash, SizeAvlTree) def minerPubKey: RCostedColl[Byte] = - knownLengthCollProperyAccess(_.minerPubKey, CryptoConstants.EncodedGroupElementLength.toInt) + knownLengthCollPropertyAccess(_.minerPubKey, CryptoConstants.EncodedGroupElementLength.toInt) def getVar[T](id: RCosted[Byte])(implicit tT: Rep[WRType[T]]): RCostedOption[T] = { ??? 
-// defaultOptionProperyAccess(_.getVar(id.value)(tT.eA), asSizeContext(_).reg) +// defaultOptionPropertyAccess(_.getVar(id.value)(tT.eA), asSizeContext(_).reg) // val opt = ctx.getVar(id)(cT) // dsl.costOption(opt, dsl.CostModel.GetVar) } - def selfBoxIndex: RCosted[Int] = constantSizeProperyAccess(_.selfBoxIndex) + def selfBoxIndex: RCosted[Int] = constantSizePropertyAccess(_.selfBoxIndex) } - object ContextCoster extends CostingHandler[Context]((obj, m, args) => new ContextCoster(obj, m, args)) + object ContextCoster extends CostingHandler[Context]((obj, m, costedArgs, args) => new ContextCoster(obj, m, costedArgs, args)) - class BoxCoster(obj: RCosted[Box], method: SMethod, args: Seq[RCosted[_]]) extends Coster[Box](obj, method, args){ + /** Costing rules for SBox methods */ + class BoxCoster(obj: RCosted[Box], method: SMethod, costedArgs: Seq[RCosted[_]], args: Seq[Sym]) extends Coster[Box](obj, method, costedArgs, args){ import Box._ - import ErgoBox._ def creationInfo: RCosted[(Int, Coll[Byte])] = { val info = obj.value.creationInfo - val cost = opCost(Seq(obj.cost), sigmaDslBuilder.CostModel.SelectField) val l = RCCostedPrim(info._1, 0, SizeInt) val r = mkCostedColl(info._2, CryptoConstants.hashLength, 0) + val cost = opCost(Pair(l, r), Seq(obj.cost), getRegisterCost) RCCostedPair(l, r, cost) } @@ -363,160 +403,170 @@ trait CostingRules extends SigmaLibrary { IR: RuntimeCosting => val sInfo = mkSizeColl(len, sToken) val costs = colBuilder.replicate(len, 0) val sizes = colBuilder.replicate(len, sToken) - RCCostedColl(tokens, costs, sizes, opCost(Seq(obj.cost), costOf(method))) - } - -// def id: CostedColl[Byte] = dsl.costColWithConstSizedItem(box.id, box.id.length, 1) -// def valueCosted: Costed[Long] = { -// val cost = dsl.CostModel.SelectField -// new CCostedPrim(box.value, cost, 8L) -// } -// def bytes: CostedColl[Byte] = dsl.costColWithConstSizedItem(box.bytes, box.bytes.length, 1) -// def bytesWithoutRef: CostedColl[Byte] = 
dsl.costColWithConstSizedItem(box.bytesWithoutRef, box.bytesWithoutRef.length, 1) -// def propositionBytes: CostedColl[Byte] = dsl.costColWithConstSizedItem(box.propositionBytes, box.propositionBytes.length, 1) -// def registers: CostedColl[AnyValue] = { -// val len = box.registers.length -// val costs = dsl.Colls.replicate(len, dsl.CostModel.AccessBox) -// val sizes = box.registers.map(o => o.dataSize) -// new CCostedColl(box.registers, costs, sizes, dsl.CostModel.CollectionConst) -// } -// def getReg[@Reified T](id: Int)(implicit cT:RType[T]): CostedOption[T] = { -// val opt = box.getReg(id)(cT) -// dsl.costOption(opt, dsl.CostModel.GetRegister) -// } -// -// @NeverInline -// def creationInfo: Costed[(Int, Coll[Byte])] = SpecialPredef.rewritableMethod + RCCostedColl(tokens, costs, sizes, opCost(tokens, Seq(obj.cost), costOf(method))) + } + + def getReg[T](i: RCosted[Int])(implicit tT: Rep[WRType[T]]): RCosted[WOption[T]] = { + val sBox = asSizeBox(obj.size) + implicit val elem = tT.eA + val valueOpt = obj.value.getReg(i.value)(elem) + val sReg = asSizeOption(sBox.getReg(downcast[Byte](i.value))(elem)) + RCCostedOption(valueOpt, SOME(0), sReg.sizeOpt, opCost(valueOpt, Seq(obj.cost), getRegisterCost)) + } } - object BoxCoster extends CostingHandler[Box]((obj, m, args) => new BoxCoster(obj, m, args)) + object BoxCoster extends CostingHandler[Box]((obj, m, costedArgs, args) => new BoxCoster(obj, m, costedArgs, args)) - class HeaderCoster(obj: RCosted[Header], method: SMethod, args: Seq[RCosted[_]]) extends Coster[Header](obj, method, args){ + /** Costing rules for SHeader methods */ + class HeaderCoster(obj: RCosted[Header], method: SMethod, costedArgs: Seq[RCosted[_]], args: Seq[Sym]) extends Coster[Header](obj, method, costedArgs, args){ import Header._ - def version() = constantSizeProperyAccess(_.version) + def id() = digest32PropertyAccess(_.id) - def parentId() = digest32ProperyAccess(_.parentId) + def version() = constantSizePropertyAccess(_.version) - def 
ADProofsRoot() = digest32ProperyAccess(_.ADProofsRoot) + def parentId() = digest32PropertyAccess(_.parentId) - def stateRoot() = knownSizeProperyAccess(_.stateRoot, SizeAvlTree) + def ADProofsRoot() = digest32PropertyAccess(_.ADProofsRoot) - def transactionsRoot() = digest32ProperyAccess(_.transactionsRoot) + def stateRoot() = knownSizePropertyAccess(_.stateRoot, SizeAvlTree) - def timestamp() = constantSizeProperyAccess(_.timestamp) + def transactionsRoot() = digest32PropertyAccess(_.transactionsRoot) - def nBits() = constantSizeProperyAccess(_.nBits) + def timestamp() = constantSizePropertyAccess(_.timestamp) - def height() = constantSizeProperyAccess(_.height) + def nBits() = constantSizePropertyAccess(_.nBits) - def extensionRoot() = digest32ProperyAccess(_.extensionRoot) + def height() = constantSizePropertyAccess(_.height) - def minerPk() = groupElementProperyAccess(_.minerPk) + def extensionRoot() = digest32PropertyAccess(_.extensionRoot) - def powOnetimePk() = groupElementProperyAccess(_.powOnetimePk) + def minerPk() = groupElementPropertyAccess(_.minerPk) - def powNonce() = knownLengthCollProperyAccess(_.powNonce, 8) + def powOnetimePk() = groupElementPropertyAccess(_.powOnetimePk) - def powDistance() = bigIntProperyAccess(_.powDistance) + def powNonce() = knownLengthCollPropertyAccess(_.powNonce, 8) - def votes() = knownLengthCollProperyAccess(_.votes, 3) + def powDistance() = bigIntPropertyAccess(_.powDistance) + + def votes() = knownLengthCollPropertyAccess(_.votes, 3) } - object HeaderCoster extends CostingHandler[Header]((obj, m, args) => new HeaderCoster(obj, m, args)) + object HeaderCoster extends CostingHandler[Header]((obj, m, costedArgs, args) => new HeaderCoster(obj, m, costedArgs, args)) - class PreHeaderCoster(obj: RCosted[PreHeader], method: SMethod, args: Seq[RCosted[_]]) extends Coster[PreHeader](obj, method, args){ + /** Costing rules for SPreHeader methods */ + class PreHeaderCoster(obj: RCosted[PreHeader], method: SMethod, costedArgs: 
Seq[RCosted[_]], args: Seq[Sym]) extends Coster[PreHeader](obj, method, costedArgs, args){ import PreHeader._ -// def id() = digest32ProperyAccess(_.id) - - def version() = constantSizeProperyAccess(_.version) + def version() = constantSizePropertyAccess(_.version) - def parentId() = digest32ProperyAccess(_.parentId) + def parentId() = digest32PropertyAccess(_.parentId) - def timestamp() = constantSizeProperyAccess(_.timestamp) + def timestamp() = constantSizePropertyAccess(_.timestamp) - def nBits() = constantSizeProperyAccess(_.nBits) + def nBits() = constantSizePropertyAccess(_.nBits) - def height() = constantSizeProperyAccess(_.height) + def height() = constantSizePropertyAccess(_.height) - def minerPk() = groupElementProperyAccess(_.minerPk) + def minerPk() = groupElementPropertyAccess(_.minerPk) - def votes() = knownLengthCollProperyAccess(_.votes, 3) + def votes() = knownLengthCollPropertyAccess(_.votes, 3) } - object PreHeaderCoster extends CostingHandler[PreHeader]((obj, m, args) => new PreHeaderCoster(obj, m, args)) + object PreHeaderCoster extends CostingHandler[PreHeader]((obj, m, costedArgs, args) => new PreHeaderCoster(obj, m, costedArgs, args)) - class OptionCoster[T](obj: RCosted[WOption[T]], method: SMethod, args: Seq[RCosted[_]]) extends Coster[WOption[T]](obj, method, args){ + /** Costing rules for SOption methods (see object SOption) */ + class OptionCoster[T](obj: RCosted[WOption[T]], method: SMethod, costedArgs: Seq[RCosted[_]], args: Seq[Sym]) extends Coster[WOption[T]](obj, method, costedArgs, args){ import WOption._ implicit val eT = obj.elem.eVal.eItem - def get(): RCosted[T] = defaultProperyAccess(_.get, asSizeOption(_).sizeOpt.get) + def get(): RCosted[T] = defaultPropertyAccess(_.get, asSizeOption(_).sizeOpt.get) def getOrElse(default: RCosted[T]): RCosted[T] = { val v = obj.value.getOrElse(default.value) - val c = opCost(costOfArgs, selectFieldCost) + val c = opCost(v, costOfArgs, selectFieldCost) val s = 
asSizeOption(obj.size).sizeOpt.getOrElse(default.size) RCCostedPrim(v, c, s) } - def isDefined: RCosted[Boolean] = constantSizeProperyAccess(_.isDefined) - def isEmpty: RCosted[Boolean] = constantSizeProperyAccess(_.isEmpty) - } + def isDefined: RCosted[Boolean] = constantSizePropertyAccess(_.isDefined) + def isEmpty: RCosted[Boolean] = constantSizePropertyAccess(_.isEmpty) + + def map[B](_f: RCosted[T => B]): RCosted[WOption[B]] = { + val f = asCostedFunc[T,B](_f) + val calcF = f.sliceCalc + val costF = f.sliceCost + val sizeF = f.sliceSize + val v = obj.value.map(calcF) + val sizeOpt = asSizeOption(obj.size).sizeOpt + val c = costF(Pair(obj.cost, sizeOpt.get)) + val s = sizeOpt.map(sizeF) + RCCostedOption(v, SOME(c), s, opCost(v, costOfArgs, costOf(method))) + } - object OptionCoster extends CostingHandler[WOption[Any]]((obj, m, args) => new OptionCoster[Any](obj, m, args)) + def filter(_f: RCosted[T => Boolean]): RCosted[WOption[T]] = { + val f = asCostedFunc[T,Boolean](_f) + val calcF = f.sliceCalc + val costF = f.sliceCost + val v = obj.value.filter(calcF) + val sizeOpt = asSizeOption(obj.size).sizeOpt + val c = costF(Pair(obj.cost, sizeOpt.get)) + RCCostedOption(v, SOME(c), sizeOpt, opCost(v, costOfArgs, costOf(method))) + } + } + object OptionCoster extends CostingHandler[WOption[Any]]((obj, m, costedArgs, args) => new OptionCoster[Any](obj, m, costedArgs, args)) - class CollCoster[T](obj: RCosted[Coll[T]], method: SMethod, args: Seq[RCosted[_]]) extends Coster[Coll[T]](obj, method, args) { + /** Costing rules for SCollection methods (see object SCollection) */ + class CollCoster[T](obj: RCosted[Coll[T]], method: SMethod, costedArgs: Seq[RCosted[_]], args: Seq[Sym]) extends Coster[Coll[T]](obj, method, costedArgs, args) { import Coll._ implicit val eT = obj.elem.eVal.eItem def indices(): RCostedColl[Int] = - knownLengthCollProperyAccess(_.indices, asSizeColl(obj.size).sizes.length) - - def getSizePropertyMethod[B](mc: MethodCall): RSize[T] => RColl[Size[B]] 
= { - ??? + knownLengthCollPropertyAccess(_.indices, asSizeColl(obj.size).sizes.length) + + def map[B](_f: RCosted[T => B]): RCosted[Coll[B]] = { + val xs = asCostedColl(obj) + val f = asCostedFunc[T,B](_f) + val calcF = f.sliceCalc + val costF = f.sliceCost + val sizeF = f.sliceSize + val vals = xs.values.map(calcF) + implicit val eT = xs.elem.eItem + implicit val eB = f.elem.eVal.eRange + val costs = xs.costs.zip(xs.sizes).map(costF) + val sizes = if (eB.isConstantSize) { + colBuilder.replicate(xs.sizes.length, constantTypeSize(eB): RSize[B]) + } else { + xs.sizes.map(sizeF) + } + RCCostedColl(vals, costs, sizes, opCost(vals, costOfArgs, costOf(method))) } def flatMap[B](fC: RCosted[T => Coll[B]]): RCostedColl[B] = { - val f = fC.value - f match { + val fV = fC.value + fV match { // Pattern: xs.flatMap(x => x.property) case Def(Lambda(l,_,_,Def(mc @ MethodCall(x, m, Nil, _)))) if x == l.x => - val sObj = asSizeColl(obj.size) - val sizes: RColl[Size[B]] = sObj.sizes.flatMap(fun { s: RSize[T] => - val sizeProp = getSizePropertyMethod[B](mc) - sizeProp(s) + val cfC = asCostedFunc[T, Coll[B]](fC) + val calcF = cfC.sliceCalc + val sizeF = cfC.sliceSize + val costF = cfC.sliceCost + val xs = asCostedColl(obj) + val vals = xs.values.flatMap(calcF) + val sizes: RColl[Size[B]] = xs.sizes.flatMap(fun { s: RSize[T] => + asSizeColl(sizeF(s)).sizes }) - val values = obj.value.flatMap(f) - val costs = colBuilder.replicate(sizes.length, 0) - RCCostedColl(values, costs, sizes, opCost(costOfArgs, costOf(method))) + val costs = xs.costs.zip(xs.sizes).map(costF) + RCCostedColl(vals, costs, sizes, opCost(vals, costOfArgs, costOf(method))) case _ => !!!(s"Unsupported lambda in flatMap: allowed usage `xs.flatMap(x => x.property)`") } } def indexOf(elem: RCosted[T], from: RCosted[Int]): RCosted[Int] = { - val c = opCost(costOfArgs, perKbCostOf(method, obj.size.dataSize)) - RCCostedPrim(obj.value.indexOf(elem.value, from.value), c, SizeInt) - } - - def segmentLength(p: RCosted[T => 
Boolean], from: RCosted[Int]): RCosted[Int] = { -// val pCost: Rep[((Int, Size[A])) => Int] = asCostedFunc(p).func.sliceCost - // TODO costing rule should be more accurate - val c = opCost(costOfArgs, costOf(method)) - RCCostedPrim(obj.value.segmentLength(p.value, from.value), c, SizeInt) - } - - def indexWhere(p: RCosted[T => Boolean], from: RCosted[Int]): RCosted[Int] = { - // TODO costing rule should be more accurate - val c = opCost(costOfArgs, costOf(method)) - RCCostedPrim(obj.value.indexWhere(p.value, from.value), c, SizeInt) - } - - def lastIndexWhere(p: RCosted[T => Boolean], end: RCosted[Int]): RCosted[Int] = { - // TODO costing rule should be more accurate - val c = opCost(costOfArgs, costOf(method)) - RCCostedPrim(obj.value.lastIndexWhere(p.value, end.value), c, SizeInt) + val value = obj.value.indexOf(elem.value, from.value) + val c = opCost(value, costOfArgs, perKbCostOf(method, obj.size.dataSize)) + RCCostedPrim(value, c, SizeInt) } def zip[B](ys: RCosted[Coll[B]]): RCosted[Coll[(T, B)]] = { @@ -527,44 +577,54 @@ trait CostingRules extends SigmaLibrary { IR: RuntimeCosting => // TODO optimize: it make sence to add more high level operations to avoid building large graphs val costs = xsC.costs.zip(ysC.costs).map(fun { in: Rep[(Int,Int)] => in._1 + in._2 }) val sizes = xsC.sizes.zip(ysC.sizes).map(fun { in: Rep[(Size[T],Size[B])] => RCSizePair(in._1, in._2): RSize[(T,B)] }) - val c = opCost(costOfArgs, costOf(method)) + val c = opCost(values, costOfArgs, costOf(method)) RCCostedColl(values, costs, sizes, c) } - def partition(pred: RCosted[T => Boolean]): RCosted[(Coll[T], Coll[T])] = { - // TODO costing rule should be more accurate - val xsC = asCostedColl(obj) - val Pair(lvalues, rvalues) = xsC.value.partition(pred.value) - val costs = xsC.costs - val sizes = xsC.sizes - val c = opCost(costOfArgs, costOf(method)) - RCCostedPair( - RCCostedColl(lvalues, costs, sizes, CostTable.newCollValueCost), - RCCostedColl(rvalues, costs, sizes, 
CostTable.newCollValueCost), c) - } - def patch(from: RCosted[Int], patch: RCosted[Coll[T]], replaced: RCosted[Int]): RCosted[Coll[T]] = { val xsC = asCostedColl(obj) val patchC = asCostedColl(patch) val values = xsC.value.patch(from.value, patch.value, replaced.value) val sizes = xsC.sizes.append(patchC.sizes) val costs = xsC.costs.append(patchC.costs) - val c = opCost(costOfArgs, costOf(method)) // TODO costing rule should be more accurate + val c = opCost(values, costOfArgs, costOf(method)) RCCostedColl(values, costs, sizes, c) } def updated(index: RCosted[Int], elem: RCosted[T]): RCosted[Coll[T]] = { val xsC = asCostedColl(obj) - val c = opCost(costOfArgs, costOf(method)) - RCCostedColl(xsC.value.updated(index.value, elem.value), xsC.costs, xsC.sizes, c) + val v = xsC.value.updated(index.value, elem.value) + val c = opCost(v, costOfArgs, costOf(method)) + RCCostedColl(v, xsC.costs, xsC.sizes, c) } def updateMany(indexes: RCosted[Coll[Int]], values: RCosted[Coll[T]]): RCosted[Coll[T]] = { val xsC = asCostedColl(obj) - val c = opCost(costOfArgs, perKbCostOf(method, values.size.dataSize)) // TODO costing rule should be more accurate with sizes - RCCostedColl(xsC.value.updateMany(indexes.value, values.value), xsC.costs, xsC.sizes, c) + val v = xsC.value.updateMany(indexes.value, values.value) + val c = opCost(v, costOfArgs, perKbCostOf(method, values.size.dataSize)) + RCCostedColl(v, xsC.costs, xsC.sizes, c) } + + def filter(_f: RCosted[T => Boolean]): RCosted[Coll[T]] = { + val xs = asCostedColl(obj) + val f = asCostedFunc[T,Boolean](_f) + val calcF = f.sliceCalc + val costF = f.sliceCost + val vals = xs.values.filter(calcF) + val costs = xs.costs.zip(xs.sizes).map(costF) + val zeros = colBuilder.replicate(xs.costs.length, 0) + RCCostedColl(vals, zeros, xs.sizes, opCost(vals, costOfArgs, costOf(method) + costs.sum(intPlusMonoid))) + } + } + + object CollCoster extends CostingHandler[Coll[Any]]((obj, m, costedArgs, args) => new CollCoster[Any](obj, m, costedArgs, 
args)) + + /** Costing rules for SGlobal methods */ + class SigmaDslBuilderCoster(obj: RCosted[SigmaDslBuilder], method: SMethod, costedArgs: Seq[RCosted[_]], args: Seq[Sym]) extends Coster[SigmaDslBuilder](obj, method, costedArgs, args){ + + def groupGenerator() = groupElementPropertyAccess(_.groupGenerator) } - object CollCoster extends CostingHandler[Coll[Any]]((obj, m, args) => new CollCoster[Any](obj, m, args)) + object SigmaDslBuilderCoster extends CostingHandler[SigmaDslBuilder]((obj, m, costedArgs, args) => new SigmaDslBuilderCoster(obj, m, costedArgs, args)) + } diff --git a/src/main/scala/sigmastate/eval/Evaluation.scala b/src/main/scala/sigmastate/eval/Evaluation.scala index c49c242914..0bd02f2ff7 100644 --- a/src/main/scala/sigmastate/eval/Evaluation.scala +++ b/src/main/scala/sigmastate/eval/Evaluation.scala @@ -13,19 +13,19 @@ import scala.reflect.ClassTag import scala.util.Try import sigmastate.SType._ import sigmastate.interpreter.CryptoConstants.EcPointType -import special.sigma.InvalidType import scalan.{Nullable, RType} import scalan.RType._ import sigma.types.PrimViewType import sigmastate.basics.DLogProtocol.ProveDlog import sigmastate.basics.{ProveDHTuple, DLogProtocol} import special.sigma.Extensions._ -import scorex.util.Extensions._ import sigmastate.lang.exceptions.CosterException import special.SpecialPredef -import special.collection.Coll +import special.Types._ -trait Evaluation extends RuntimeCosting { IR => +/** This is a slice in IRContext cake which implements evaluation of graphs. 
+ */ +trait Evaluation extends RuntimeCosting { IR: IRContext => import Context._ import SigmaProp._ import Coll._ @@ -36,8 +36,6 @@ trait Evaluation extends RuntimeCosting { IR => import AvlTree._ import CollBuilder._ import SigmaDslBuilder._ - import CostedBuilder._ - import MonoidBuilder._ import WBigInteger._ import WArray._ import WOption._ @@ -86,66 +84,74 @@ trait Evaluation extends RuntimeCosting { IR => private val BIM = WBigIntegerMethods private val SPCM = WSpecialPredefCompanionMethods - def isValidCostPrimitive(d: Def[_]): Unit = d match { - case _: Const[_] => - case _: OpCost | _: Cast[_] => - case _: Tup[_,_] | _: First[_,_] | _: Second[_,_] => - case _: FieldApply[_] => - case _: IntPlusMonoid => - case _: Lambda[_,_] => - case _: ThunkDef[_] => - case ApplyUnOp(_: NumericToLong[_] | _: NumericToInt[_], _) => - case ApplyBinOp(_: NumericPlus[_] | _: NumericTimes[_] | _: OrderingMax[_] | _: IntegralDivide[_] ,_,_) => + /** Checks is the operation is among the allowed in costF graph, created by costing. + * @throws StagingException if the given graph node `d` is not matched. 
+ */ + def isValidCostPrimitive(d: Def[_]): Boolean = d match { + case _: Const[_] => true + case _: OpCost | _: PerKbCostOf | _: Cast[_] => true + case _: Tup[_,_] | _: First[_,_] | _: Second[_,_] => true + case _: FieldApply[_] => true + case _: IntPlusMonoid => true + case _: Lambda[_,_] => true + case _: ThunkDef[_] => true + case ApplyUnOp(_: NumericToLong[_] | _: NumericToInt[_], _) => true + case ApplyBinOp(_: NumericPlus[_] | _: NumericTimes[_] | _: OrderingMax[_] | _: IntegralDivide[_] ,_,_) => true case SCM.inputs(_) | SCM.outputs(_) | SCM.dataInputs(_) | SCM.selfBox(_) | SCM.lastBlockUtxoRootHash(_) | SCM.headers(_) | - SCM.preHeader(_) | SCM.getVar(_,_,_) => + SCM.preHeader(_) | SCM.getVar(_,_,_) => true case SBM.propositionBytes(_) | SBM.bytes(_) | SBM.bytesWithoutRef(_) | SBM.registers(_) | SBM.getReg(_,_,_) | - SBM.tokens(_) => - case SSPM.propBytes(_) => - case SAVM.tVal(_) | SAVM.valueSize(_) => - case SizeM.dataSize(_) => - case SPairM.l(_) | SPairM.r(_) => - case SCollM.sizes(_) => - case SOptM.sizeOpt(_) => - case SFuncM.sizeEnv(_) => + SBM.tokens(_) => true + case SSPM.propBytes(_) => true + case SAVM.tVal(_) | SAVM.valueSize(_) => true + case SizeM.dataSize(_) => true + case SPairM.l(_) | SPairM.r(_) => true + case SCollM.sizes(_) => true + case SOptM.sizeOpt(_) => true + case SFuncM.sizeEnv(_) => true case _: CSizePairCtor[_,_] | _: CSizeFuncCtor[_,_,_] | _: CSizeOptionCtor[_] | _: CSizeCollCtor[_] | - _: CSizeBoxCtor | _: CSizeContextCtor | _: CSizeAnyValueCtor => + _: CSizeBoxCtor | _: CSizeContextCtor | _: CSizeAnyValueCtor => true case ContextM.SELF(_) | ContextM.OUTPUTS(_) | ContextM.INPUTS(_) | ContextM.dataInputs(_) | ContextM.LastBlockUtxoRootHash(_) | - ContextM.getVar(_,_,_) /*| ContextM.cost(_) | ContextM.dataSize(_)*/ => - case SigmaM.propBytes(_) => - case _: CReplCollCtor[_] | _: PairOfColsCtor[_,_] => + ContextM.getVar(_,_,_) => true + case SigmaM.propBytes(_) => true + case _: CReplCollCtor[_] | _: PairOfColsCtor[_,_] => true 
case CollM.length(_) | CollM.map(_,_) | CollM.sum(_,_) | CollM.zip(_,_) | CollM.slice(_,_,_) | CollM.apply(_,_) | - CollM.append(_,_) | CollM.foldLeft(_,_,_) => - case CBM.replicate(_,_,_) | CBM.fromItems(_,_,_) => - case BoxM.propositionBytes(_) | BoxM.bytesWithoutRef(_) /*| BoxM.cost(_) | BoxM.dataSize(_)*/ | BoxM.getReg(_,_,_) => -// case AvlM.dataSize(_) => - case OM.get(_) | OM.getOrElse(_,_) | OM.fold(_,_,_) | OM.isDefined(_) => - case _: CostOf | _: SizeOf[_] => - case _: Upcast[_,_] => - case _: Apply[_,_] => - case SPCM.some(_) => - case _ => !!!(s"Invalid primitive in Cost function: $d") - } - - def verifyCalcFunc[A](f: Rep[Context => A], eA: Elem[A]) = { - if (f.elem.eRange != eA) - !!!(s"Expected function of type ${f.elem.eDom.name} => ${eA.name}, but was $f: ${f.elem.name}") + CollM.append(_,_) | CollM.foldLeft(_,_,_) => true + case CBM.replicate(_,_,_) | CBM.fromItems(_,_,_) => true + case BoxM.propositionBytes(_) | BoxM.bytesWithoutRef(_) | BoxM.getReg(_,_,_) => true + case OM.get(_) | OM.getOrElse(_,_) | OM.fold(_,_,_) | OM.isDefined(_) => true + case _: CostOf | _: SizeOf[_] => true + case _: Upcast[_,_] => true + case _: Apply[_,_] => true + case SPCM.some(_) => true + case _ => false } + /** Checks if the function (Lambda node) given by the symbol `costF` contains only allowed operations + * in the schedule. 
*/ def verifyCostFunc(costF: Rep[Any => Int]): Try[Unit] = { val Def(Lambda(lam,_,_,_)) = costF - Try { lam.scheduleAll.foreach(te => isValidCostPrimitive(te.rhs)) } + Try { + lam.scheduleAll.forall { te => + val ok = isValidCostPrimitive(te.rhs) + if (!ok) !!!(s"Invalid primitive in Cost function: ${te.rhs}") + ok + } + } } + /** Finds SigmaProp.isProven method calls in the given Lambda `f` */ def findIsProven[T](f: Rep[Context => T]): Option[Sym] = { val Def(Lambda(lam,_,_,_)) = f - val ok = lam.scheduleAll.find(te => te.rhs match { + val s = lam.scheduleAll.find(te => te.rhs match { case SigmaM.isValid(_) => true case _ => false }).map(_.sym) - ok + s } + /** Checks that if SigmaProp.isProven method calls exists in the given Lambda's schedule, + * then it is the last operation. */ def verifyIsProven[T](f: Rep[Context => T]): Try[Unit] = { val isProvenOpt = findIsProven(f) Try { @@ -156,12 +162,14 @@ trait Evaluation extends RuntimeCosting { IR => } } } + object IsTupleFN { def unapply(fn: String): Nullable[Byte] = { if (fn.startsWith("_")) Nullable[Byte](fn.substring(1).toByte) else Nullable.None.asInstanceOf[Nullable[Byte]] } } + import sigmastate._ import special.sigma.{Context => SigmaContext} @@ -171,31 +179,33 @@ trait Evaluation extends RuntimeCosting { IR => val costedBuilderValue: special.collection.CostedBuilder val monoidBuilderValue: special.collection.MonoidBuilder + /** Constructs a new data environment for evaluation of graphs using `compile` method. + * This environment contains global variables. 
*/ def getDataEnv: DataEnv = { val env = Map[Sym, AnyRef]( - RWSpecialPredef -> SpecialPredef, + specialPredef -> SpecialPredef, sigmaDslBuilder -> sigmaDslBuilderValue, - sigmaDslBuilder.Colls -> sigmaDslBuilderValue.Colls, - costedBuilder -> costedBuilderValue, - costedBuilder.monoidBuilder -> monoidBuilderValue, - costedBuilder.monoidBuilder.intPlusMonoid -> monoidBuilderValue.intPlusMonoid, - costedBuilder.monoidBuilder.longPlusMonoid -> monoidBuilderValue.longPlusMonoid + colBuilder -> sigmaDslBuilderValue.Colls, + costedBuilder -> costedBuilderValue, + monoidBuilder -> monoidBuilderValue, + intPlusMonoid -> monoidBuilderValue.intPlusMonoid, + longPlusMonoid -> monoidBuilderValue.longPlusMonoid ) env } case class EvaluatedEntry(env: DataEnv, sym: Sym, value: AnyRef) - def printEnvEntry(sym: Sym, value: AnyRef) = { + protected def printEnvEntry(sym: Sym, value: AnyRef) = { def trim[A](arr: Array[A]) = arr.take(arr.length min 100) def show(x: Any) = x match { case arr: Array[_] => s"Array(${trim(arr).mkString(",")})" case col: special.collection.Coll[_] => s"Coll(${trim(col.toArray).mkString(",")})" case p: SGroupElement => p.showToString - case ProveDlog(GroupElementConstant(g)) => s"ProveDlog(${showECPoint(g)})" + case ProveDlog(GroupElementConstant(g)) => s"ProveDlog(${g.showToString})" case ProveDHTuple( GroupElementConstant(g), GroupElementConstant(h), GroupElementConstant(u), GroupElementConstant(v)) => - s"ProveDHT(${showECPoint(g)},${showECPoint(h)},${showECPoint(u)},${showECPoint(v)})" + s"ProveDHT(${g.showToString},${h.showToString},${u.showToString},${v.showToString})" case _ => x.toString } sym match { @@ -218,17 +228,19 @@ trait Evaluation extends RuntimeCosting { IR => def msgCostLimitError(cost: Int, limit: Long) = s"Estimated expression complexity $cost exceeds the limit $limit" /** Incapsulate simple monotonic (add only) counter with reset. 
*/ - class CostCounter(initialCost: Int) { + class CostCounter(val initialCost: Int) { private var _currentCost: Int = initialCost @inline def += (n: Int) = { + // println(s"${_currentCost} + $n") this._currentCost += n } @inline def currentCost: Int = _currentCost @inline def resetCost() = { _currentCost = initialCost } } - /** Implements finite state machine with stack of graph blocks (lambdas and thunks). + /** Implements finite state machine with stack of graph blocks (scopes), + * which correspond to lambdas and thunks. * It accepts messages: startScope(), endScope(), add(), reset() * At any time `totalCost` is the currently accumulated cost. */ class CostAccumulator(initialCost: Int, costLimit: Option[Long]) { @@ -240,28 +252,37 @@ trait Evaluation extends RuntimeCosting { IR => @inline def currentScope: Scope = _scopeStack.head @inline private def getCostFromEnv(dataEnv: DataEnv, s: Sym): Int = getFromEnv(dataEnv, s).asInstanceOf[Int] + /** Represents a single scope during execution of the graph. + * The lifetime of each instance is bound to scope execution. + * When the evaluation enters a new scope (e.g. calling a lambda) a new Scope instance is created and pushed + * to _scopeStack, then is starts receiving `add` method calls. + * When the evaluation leaves the scope, the top is popped off the stack. 
*/ class Scope(visitiedOnEntry: Set[Sym], initialCost: Int) extends CostCounter(initialCost) { private var _visited: Set[Sym] = visitiedOnEntry - @inline def visited = _visited - @inline def add(op: OpCost, opCost: Int, dataEnv: DataEnv) = { + @inline def visited: Set[Sym] = _visited + @inline def add(s: Sym, op: OpCost, opCost: Int, dataEnv: DataEnv): Unit = { for (arg <- op.args) { if (!_visited.contains(arg)) { val argCost = getCostFromEnv(dataEnv, arg) +// println(s"${this.currentCost} + $argCost ($arg <- $op)") this += argCost _visited += arg } } - this += opCost - _visited += op.opCost + if (!_visited.contains(op.opCost)) { +// println(s"${this.currentCost} + $opCost (${op.opCost} <- $op)") + this += opCost + } + _visited += s } } /** Called once for each operation of a scope (lambda or thunk). - * if isCosting then delegate to the currentScope */ - def add(op: OpCost, dataEnv: DataEnv) = { + * if costLimit is defined then delegates to currentScope. */ + def add(s: Sym, op: OpCost, dataEnv: DataEnv): Int = { val opCost = getFromEnv(dataEnv, op.opCost).asInstanceOf[Int] if (costLimit.isDefined) { - currentScope.add(op, opCost, dataEnv) + currentScope.add(s, op, opCost, dataEnv) // check that we are still withing the limit val cost = currentScope.currentCost val limit = costLimit.get @@ -278,9 +299,9 @@ trait Evaluation extends RuntimeCosting { IR => /** Called after all operations of a scope are executed (lambda or thunk)*/ def endScope() = { - val cost = currentScope.currentCost + val deltaCost = currentScope.currentCost - currentScope.initialCost _scopeStack = _scopeStack.tail - _scopeStack.head += cost + _scopeStack.head += deltaCost } /** Resets this accumulator into initial state to be ready for new graph execution. 
*/ @@ -305,34 +326,18 @@ trait Evaluation extends RuntimeCosting { IR => object In { def unapply(s: Sym): Option[Any] = Some(getFromEnv(dataEnv, s)) } def out(v: Any): (DataEnv, Sym) = { val vBoxed = v.asInstanceOf[AnyRef]; (dataEnv + (te.sym -> vBoxed), te.sym) } try { - var startTime = if (okMeasureOperationTime) System.nanoTime() else 0L + val startTime = if (okMeasureOperationTime) System.nanoTime() else 0L val res: (DataEnv, Sym) = te.rhs match { case d @ ContextM.getVar(ctx @ In(ctxObj: CostingDataContext), _, elem) => val mc = d.asInstanceOf[MethodCall] val declaredTpe = elemToSType(elem) val valueInCtx = invokeUnlifted(ctx.elem, mc, dataEnv) - val data = valueInCtx match { - case Some(Constant(v, `declaredTpe`)) => - Some(Evaluation.toDslData(v, declaredTpe, ctxObj.isCost)(IR)) - case opt @ Some(v) => opt - case None => None - case _ => throw new InvalidType(s"Expected Constant($declaredTpe) but found $valueInCtx") - } - out(data) + out(valueInCtx) case d @ BoxM.getReg(box, _, elem) => val mc = d.asInstanceOf[MethodCall] val declaredTpe = elemToSType(elem) val valueInReg = invokeUnlifted(box.elem, mc, dataEnv) - val data = valueInReg match { - case Some(Constant(v, `declaredTpe`)) => - Some(Evaluation.toDslData(v, declaredTpe, false)(IR)) - case Some(v) => - valueInReg - case None => None - case _ => throw new InvalidType( - s"Expected Some(Constant($declaredTpe)) but found $valueInReg value of register: $d") - } - out(data) + out(valueInReg) case Const(x) => out(x.asInstanceOf[AnyRef]) case Tup(In(a), In(b)) => out((a,b)) case First(In(p: Tuple2[_,_])) => out(p._1) @@ -361,7 +366,7 @@ trait Evaluation extends RuntimeCosting { IR => case Nullable(v) => v case _ => sys.error(s"Cannot evaluate substConstants($input, $positions, $newVals): cannot lift value $v") }) - val byteArray = SubstConstants.eval(input.toArray, positions.toArray, typedNewVals) + val byteArray = SubstConstants.eval(input.toArray, positions.toArray, 
typedNewVals)(sigmaDslBuilderValue.validationSettings) out(sigmaDslBuilderValue.Colls.fromArray(byteArray)) case AM.length(In(arr: Array[_])) => out(arr.length) @@ -376,7 +381,7 @@ trait Evaluation extends RuntimeCosting { IR => val dataRes = obj.elem match { case _: CollElem[_, _] => mc match { case CollMethods.flatMap(xs, f) => - val newMC = mc.copy(args = mc.args :+ f.elem.eRange)(mc.selfType, mc.isAdapterCall) + val newMC = mc.copy(args = mc.args :+ f.elem.eRange.eItem)(mc.selfType, mc.isAdapterCall) invokeUnlifted(obj.elem, newMC, dataEnv) case _ => invokeUnlifted(obj.elem, mc, dataEnv) @@ -491,8 +496,10 @@ trait Evaluation extends RuntimeCosting { IR => case costOp: CostOf => out(costOp.eval) + case op @ PerKbCostOf(_,In(size: Long)) => + out(op.eval(size)) case op: OpCost => - val c = costAccumulator.add(op, dataEnv) + val c = costAccumulator.add(te.sym, op, dataEnv) out(c) case SizeOf(sym @ In(data)) => val tpe = elemToSType(sym.elem) @@ -519,13 +526,13 @@ trait Evaluation extends RuntimeCosting { IR => case Downcast(In(from), eTo) => val tpe = elemToSType(eTo).asNumType if (tpe == SBigInt) - out(sigmaDslBuilderValue.BigInt(SBigInt.downcast(from.asInstanceOf[AnyVal]))) + out(SBigInt.downcast(from.asInstanceOf[AnyVal])) else out(tpe.downcast(from.asInstanceOf[AnyVal])) case Upcast(In(from), eTo) => val tpe = elemToSType(eTo).asNumType if (tpe == SBigInt) - out(sigmaDslBuilderValue.BigInt(SBigInt.upcast(from.asInstanceOf[AnyVal]))) + out(SBigInt.upcast(from.asInstanceOf[AnyVal])) else out(tpe.upcast(from.asInstanceOf[AnyVal])) @@ -579,9 +586,11 @@ trait Evaluation extends RuntimeCosting { IR => object Evaluation { import special.sigma._ import special.collection._ - import ErgoLikeContext._ - - def stypeToRType[T <: SType](t: T): RType[T#WrappedType] = (t match { + + /** Transforms a serializable ErgoTree type descriptor to the corresponding RType descriptor of SigmaDsl, + * which is used during evaluation. 
+ */ + def stypeToRType[T <: SType](t: T): RType[T#WrappedType] = (t match { // TODO optimize using memoization case SBoolean => BooleanType case SByte => ByteType case SShort => ShortType @@ -589,9 +598,11 @@ object Evaluation { case SLong => LongType case SString => StringType case SAny => AnyType + case SUnit => UnitType case SBigInt => BigIntRType case SBox => BoxRType case SContext => ContextRType + case SGlobal => SigmaDslBuilderRType case SHeader => HeaderRType case SPreHeader => PreHeaderRType case SGroupElement => GroupElementRType @@ -608,7 +619,10 @@ object Evaluation { case _ => sys.error(s"Don't know how to convert SType $t to RType") }).asInstanceOf[RType[T#WrappedType]] - def rtypeToSType[T](t: RType[T]): SType = t match { + /** Transforms RType descriptor of SigmaDsl, which is used during evaluation, + * to the corresponding serializable ErgoTree type descriptor, + */ + def rtypeToSType[T](t: RType[T]): SType = t match { // TODO optimize using memoization case BooleanType => SBoolean case ByteType => SByte case ShortType => SShort @@ -616,6 +630,7 @@ object Evaluation { case LongType => SLong case StringType => SString case AnyType => SAny + case UnitType => SUnit case BigIntegerRType => SBigInt case BigIntRType => SBigInt @@ -627,6 +642,7 @@ object Evaluation { case ot: OptionType[_] => sigmastate.SOption(rtypeToSType(ot.tA)) case BoxRType => SBox case ContextRType => SContext + case SigmaDslBuilderRType => SGlobal case HeaderRType => SHeader case PreHeaderRType => SPreHeader case SigmaPropRType => SSigmaProp @@ -654,123 +670,48 @@ object Evaluation { case coll: Coll[_] => collRType(coll.tItem) // all primitive types - case v: Boolean => BooleanType - case v: Byte => ByteType - case v: Short => ShortType - case v: Int => IntType - case v: Long => LongType - case v: Char => CharType - case v: Float => FloatType - case v: Double => DoubleType - case v: String => StringType - case v: Unit => UnitType - - case v: BigInteger => BigIntegerRType - case n: 
special.sigma.BigInt => BigIntRType - - case v: ECPoint => ECPointRType - case ge: GroupElement => GroupElementRType - - case b: ErgoBox => ErgoBoxRType - case b: Box => BoxRType - - case avl: AvlTreeData => AvlTreeDataRType - case avl: AvlTree => AvlTreeRType - - case sb: SigmaBoolean => SigmaBooleanRType - case p: SigmaProp => SigmaPropRType - + case _: Boolean => BooleanType + case _: Byte => ByteType + case _: Short => ShortType + case _: Int => IntType + case _: Long => LongType + case _: Char => CharType + case _: Float => FloatType + case _: Double => DoubleType + case _: String => StringType + case _: Unit => UnitType + + case _: BigInteger => BigIntegerRType + case _: special.sigma.BigInt => BigIntRType + + case _: ECPoint => ECPointRType + case _: GroupElement => GroupElementRType + + case _: ErgoBox => ErgoBoxRType + case _: Box => BoxRType + + case _: AvlTreeData => AvlTreeDataRType + case _: AvlTree => AvlTreeRType + + case _: SigmaBoolean => SigmaBooleanRType + case _: SigmaProp => SigmaPropRType + case _: Context => ContextRType case _ => sys.error(s"Don't know how to compute typeOf($value)") }} - /** Generic translation of any ErgoDsl type to the corresponding type used in ErgoTree. 
*/ - def toErgoTreeType(dslType: RType[_]): RType[_] = dslType match { - case p: PrimitiveType[_] => p - case w: WrapperType[_] => - w match { - case BigIntRType => BigIntegerRType - case GroupElementRType => ECPointRType - case SigmaPropRType => SigmaBooleanRType - case BoxRType => ErgoBoxRType - case AvlTreeRType => AvlTreeDataRType - case ContextRType => ErgoLikeContextRType - case _ => sys.error(s"Unknown WrapperType: $w") - } - case p: ArrayType[_] => arrayRType(toErgoTreeType(p.tA)) - case p: OptionType[_] => optionRType(toErgoTreeType(p.tA)) - case p: CollType[_] => arrayRType(toErgoTreeType(p.tItem)) - case p: PairType[_,_] => tupleRType(Array(toErgoTreeType(p.tFst), toErgoTreeType(p.tSnd))) - case p: EitherType[_,_] => eitherRType(toErgoTreeType(p.tLeft), toErgoTreeType(p.tRight)) - case p: FuncType[_,_] => funcRType(toErgoTreeType(p.tDom), toErgoTreeType(p.tRange)) - case t: TupleType => tupleRType(t.items.map(x => toErgoTreeType(x))) - case HeaderRType | PreHeaderRType => dslType - case AnyType | AnyRefType | NothingType | StringType => dslType - case _ => - sys.error(s"Don't know how to toErgoTreeType($dslType)") - } - - /** Generic converter from types used in SigmaDsl to types used in ErgoTree values. - * @param tRes should describe ErgoTree type (i.e. it can be obtained using toErgoTreeType method)*/ - def fromDslData[T](value: Any, tRes: RType[T]): T = { - val res = (value, tRes) match { - case (w: WrapperOf[_], _) => w.wrappedValue - case (coll: Coll[a], tarr: ArrayType[a1]) => - val tItem = tarr.tA - coll.map[a1](x => fromDslData(x, tItem))(tItem).toArray - case (tup: Tuple2[a,b], tTup: TupleType) => - val x = fromDslData(tup._1, tTup.items(0)) - val y = fromDslData(tup._2, tTup.items(1)) - Array[Any](x, y) - case _ => value - } - res.asInstanceOf[T] - } - - /** Convert SigmaDsl representation of tuple to ErgoTree representation. 
*/ - def fromDslTuple(value: Any, tupleTpe: STuple): Array[Any] = value match { - case t: Tuple2[_,_] => Array[Any](t._1, t._2) - case a: Array[Any] => a + /** Convert SigmaDsl representation of tuple to ErgoTree serializable representation. */ + def fromDslTuple(value: Any, tupleTpe: STuple): Coll[Any] = value match { + case t: Tuple2[_,_] => TupleColl(t._1, t._2) + case a: Coll[Any]@unchecked if a.tItem == RType.AnyType => a case _ => sys.error(s"Cannot execute fromDslTuple($value, $tupleTpe)") } - /** Generic converter from types used in ErgoTree values to types used in ErgoDsl. */ - def toDslData(value: Any, tpe: SType, isCost: Boolean)(implicit IR: Evaluation): Any = { - val dsl = IR.sigmaDslBuilderValue - (value, tpe) match { - case (c: Constant[_], tpe) => toDslData(c.value, c.tpe, isCost) - case (_, STuple(Seq(tpeA, tpeB))) => - value match { - case tup: Tuple2[_,_] => - val valA = toDslData(tup._1, tpeA, isCost) - val valB = toDslData(tup._2, tpeB, isCost) - (valA, valB) - case arr: Array[Any] => - val valA = toDslData(arr(0), tpeA, isCost) - val valB = toDslData(arr(1), tpeB, isCost) - (valA, valB) - } - case (arr: Array[a], STuple(items)) => - val res = arr.zip(items).map { case (x, t) => toDslData(x, t, isCost)} - dsl.Colls.fromArray(res)(RType.AnyType) - case (arr: Array[a], SCollectionType(elemType)) => - implicit val elemRType: RType[SType#WrappedType] = Evaluation.stypeToRType(elemType) - elemRType.asInstanceOf[RType[_]] match { - case _: CollType[_] | _: TupleType | _: PairType[_,_] | _: WrapperType[_] => - val testArr = arr.map(x => toDslData(x, elemType, isCost).asWrappedType).toArray(elemRType.classTag) - dsl.Colls.fromArray(testArr.asInstanceOf[Array[SType#WrappedType]]) - case _ => - dsl.Colls.fromArray(arr.asInstanceOf[Array[SType#WrappedType]]) - } - case (b: ErgoBox, SBox) => b.toTestBox(isCost) - case (n: BigInteger, SBigInt) => - dsl.BigInt(n) - case (p: ECPoint, SGroupElement) => dsl.GroupElement(p) - case (t: SigmaBoolean, SSigmaProp) 
=> dsl.SigmaProp(t) - case (t: AvlTreeData, SAvlTree) => CAvlTree(t) - case (x, _) => x - } + /** Convert ErgoTree serializable representation of tuple to SigmaDsl representation. */ + def toDslTuple(value: Coll[Any], tupleTpe: STuple): Any = tupleTpe match { + case t if t.items.length == 2 => (value(0), value(1)) + case _ => value } } diff --git a/src/main/scala/sigmastate/eval/Extensions.scala b/src/main/scala/sigmastate/eval/Extensions.scala index 803a15f69d..4f343bbc79 100644 --- a/src/main/scala/sigmastate/eval/Extensions.scala +++ b/src/main/scala/sigmastate/eval/Extensions.scala @@ -3,13 +3,13 @@ package sigmastate.eval import java.math.BigInteger import scalan.RType -import scalan.RType._ -import sigmastate.{SHeader, SType, SByte, SPreHeader} +import sigmastate.SType import sigmastate.Values.Constant import sigmastate.lang.DefaultSigmaBuilder -import special.collection.{CSizePrim, Size, CSizeOption, Coll, CSizeColl} +import special.collection.Coll import special.sigma._ import SType.AnyOps +import spire.syntax.all._ object Extensions { private val Colls = CostingSigmaDslBuilder.Colls @@ -27,13 +27,35 @@ object Extensions { @inline def toColl: Coll[T] = Colls.fromArray(arr) } + implicit class EvalIterableOps[T: RType](seq: Iterable[T]) { + @inline def toColl: Coll[T] = Colls.fromArray[T](seq.toArray(RType[T].classTag)) + } + + implicit class EvalCollOps[T](val coll: Coll[T]) extends AnyVal { + def foreach(f: T => Unit) = { + val limit = coll.length + cfor(0)(_ < limit, _ + 1) { i => + f(coll(i)) + } + } + } + + // NOTE: it cannot extend AnyVal because of compiler error: type parameter of value class may not be specialized + implicit class PairCollOps[@specialized A, @specialized B](val coll: Coll[(A,B)]) { + def foreach(f: (A, B) => Unit) = { + val (as, bs) = Colls.unzip(coll) + val limit = coll.length + cfor(0)(_ < limit, _ + 1) { i => + f(as(i), bs(i)) + } + } + } + implicit class DslDataOps[A](data: A)(implicit tA: RType[A]) { def toTreeData: 
Constant[SType] = { - val treeType = Evaluation.toErgoTreeType(tA) - val treeData = Evaluation.fromDslData(data, tRes = treeType) - DefaultSigmaBuilder.mkConstant(treeData.asWrappedType, Evaluation.rtypeToSType(tA)) + DefaultSigmaBuilder.mkConstant(data.asWrappedType, Evaluation.rtypeToSType(tA)) } } - + def toAnyValue[A:RType](x: A) = new TestValue(x, RType[A].asInstanceOf[RType[Any]]) } diff --git a/src/main/scala/sigmastate/eval/IRContext.scala b/src/main/scala/sigmastate/eval/IRContext.scala index 6a13b8521a..e24ed5dfe3 100644 --- a/src/main/scala/sigmastate/eval/IRContext.scala +++ b/src/main/scala/sigmastate/eval/IRContext.scala @@ -1,11 +1,11 @@ package sigmastate.eval -import java.lang.reflect.Method - +import org.ergoplatform.validation.ValidationRules import sigmastate.SType -import sigmastate.Values.{Value, SValue} +import sigmastate.Values.{Value, SValue, TrueSigmaProp} import sigmastate.interpreter.Interpreter.ScriptEnv import sigmastate.lang.TransformingSigmaBuilder +import sigmastate.interpreter.Interpreter import scala.util.Try @@ -44,10 +44,13 @@ trait IRContext extends Evaluation with TreeBuilding { } def doCostingEx(env: ScriptEnv, typed: SValue, okRemoveIsProven: Boolean): RCostingResultEx[Any] = { - val costed = buildCostedGraph[SType](env.map { case (k, v) => (k: Any, builder.liftAny(v).get) }, typed) - val f = asRep[Costed[Context] => Costed[Any]](costed) - val calcF = f.sliceCalc(okRemoveIsProven) - val costF = f.sliceCostEx + def buildGraph(env: ScriptEnv, exp: SValue) = { + val costed = buildCostedGraph[SType](env.map { case (k, v) => (k: Any, builder.liftAny(v).get) }, exp) + asRep[Costed[Context] => Costed[Any]](costed) + } + val g = buildGraph(env, typed) + val calcF = g.sliceCalc(okRemoveIsProven) + val costF = g.sliceCostEx Pair(calcF, costF) } @@ -92,14 +95,9 @@ trait IRContext extends Evaluation with TreeBuilding { /** IR context to be used by blockchain nodes to validate transactions. 
*/ class RuntimeIRContext extends IRContext with CompiletimeCosting { -// override def isInvokeEnabled(d: Def[_], m: Method): Boolean = invokeAll -// override def shouldUnpack(e: Elem[_]): Boolean = true } /** IR context to be used by script development tools to compile ErgoScript into ErgoTree bytecode. */ class CompiletimeIRContext extends IRContext with CompiletimeCosting { -// override def invokeAll: Boolean = true -// override def isInvokeEnabled(d: Def[_], m: Method): Boolean = invokeAll -// override def shouldUnpack(e: Elem[_]): Boolean = true } diff --git a/src/main/scala/sigmastate/eval/RuntimeCosting.scala b/src/main/scala/sigmastate/eval/RuntimeCosting.scala index 67755c86e0..c2980b7f83 100644 --- a/src/main/scala/sigmastate/eval/RuntimeCosting.scala +++ b/src/main/scala/sigmastate/eval/RuntimeCosting.scala @@ -1,39 +1,37 @@ package sigmastate.eval -import java.math.BigInteger - import scala.language.implicitConversions import scala.language.existentials -import org.bouncycastle.math.ec.ECPoint -import scalan.{Lazy, SigmaLibrary, Nullable, RType} +import scalan.{Nullable, MutableLazy, Lazy, RType} import scalan.util.CollectionUtil.TraversableOps import org.ergoplatform._ import sigmastate._ import sigmastate.Values._ -import sigmastate.interpreter.{CryptoConstants, CryptoFunctions} +import sigmastate.interpreter.CryptoConstants import sigmastate.lang.Terms._ import sigmastate.lang.exceptions.CosterException import sigmastate.serialization.OpCodes import sigmastate.utxo.CostTable.Cost import sigmastate.utxo._ -import sigma.util.Extensions._ -import ErgoLikeContext._ import scalan.compilation.GraphVizConfig import SType._ -import scalan.RType.{StringType, AnyType, LongType, IntType, ArrayType, OptionType, TupleType, BooleanType, PairType, FuncType, ByteType, ShortType} +import scalan.RType._ import scorex.crypto.hash.{Sha256, Blake2b256} import sigmastate.interpreter.Interpreter.ScriptEnv import sigmastate.lang.{Terms, SourceContext} import 
scalan.staged.Slicing import sigma.types.PrimViewType import sigmastate.basics.DLogProtocol.ProveDlog -import sigmastate.basics.{ProveDHTuple, DLogProtocol} -import sigmastate.eval.Evaluation.rtypeToSType +import sigmastate.basics.ProveDHTuple +import sigmastate.interpreter.CryptoConstants.EcPointType import special.collection.CollType -import special.sigma.{GroupElementRType, TestGroupElement, AvlTreeRType, BigIntegerRType, BoxRType, ECPointRType, BigIntRType, SigmaPropRType} +import special.Types._ +import special.sigma.{GroupElementRType, AvlTreeRType, BigIntegerRType, BoxRType, ECPointRType, BigIntRType, SigmaPropRType} import special.sigma.Extensions._ +import org.ergoplatform.validation.ValidationRules._ + -trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Evaluation => +trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: IRContext => import Context._; import Header._; import PreHeader._; @@ -48,8 +46,6 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev import SigmaProp._; import Box._ import CollOverArrayBuilder._; - import CostedBuilder._ - import SizeBuilder._ import CCostedBuilder._ import CSizeBuilder._ import Size._; @@ -58,13 +54,10 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev import SizeOption._; import SizePair._; import SizeContext._ - import CSizeContext._ import CSizePrim._ import CSizePair._ import CSizeColl._ - import CSizeOption._ import Costed._; - import CostedPrim._; import CCostedPrim._; import CostedPair._; import CCostedPair._; @@ -77,28 +70,28 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev import CCostedOption._ import SigmaDslBuilder._ import MonoidBuilder._ - import MonoidBuilderInst._ import AvlTree._ - import Monoid._ import IntPlusMonoid._ import LongPlusMonoid._ import WSpecialPredef._ import TestSigmaDslBuilder._ import CostModel._ - import Liftables._ override val 
performViewsLifting = false val okMeasureOperationTime: Boolean = false this.isInlineThunksOnForce = true // this required for splitting of cost graph this.keepOriginalFunc = false // original lambda of Lambda node contains invocations of evalNode and we don't want that -// this.useAlphaEquality = false + this.useAlphaEquality = false // unfoldWithOriginalFunc = unfoldWithOrig /** Whether to create CostOf nodes or substutute costs from CostTable as constants in the graph. * true - substitute; false - create CostOf nodes */ var substFromCostTable: Boolean = true + /** Whether to save calcF and costF graphs in the file given by ScriptNameProp environment variable */ + var saveGraphsInFile: Boolean = true + // /** Pass configuration which is used by default in IRContext. */ // val calcPass = new DefaultPass("calcPass", Pass.defaultPassConfig.copy(constantPropagation = true)) // @@ -141,16 +134,6 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev super.createAllMarking(e) } - def zeroSize[V](eVal: Elem[V]): RSize[V] = asRep[Size[V]](eVal match { - case pe: PairElem[a,b] => costedBuilder.mkSizePair(zeroSize[a](pe.eFst), zeroSize[b](pe.eSnd)) - case ce: CollElem[_,_] => - implicit val eItem = ce.eItem - costedBuilder.mkSizeColl(colBuilder.fromItems(zeroSize(eItem))) - case oe: WOptionElem[_,_] => costedBuilder.mkSizeOption(RWSpecialPredef.some(zeroSize(oe.eItem))) - case _: BaseElem[_] | _: EntityElem[_] => costedBuilder.mkSizePrim(0L, eVal) - case _ => error(s"Cannot create zeroSize($eVal)") - }) - case class CostOf(opName: String, opType: SFunc) extends BaseDef[Int] { override def transform(t: Transformer): Def[IntPlusMonoidData] = this def eval: Int = { @@ -160,38 +143,60 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev } } - def costOf(opName: String, opType: SFunc, doEval: Boolean): Rep[Int] = { - val costOp = CostOf(opName, opType) + /** Graph node which represents cost of operation, which depends 
on size of the data. + * @param operId id of the operation in CostTable + * @param size size of the data which is used to compute operation cost + */ + case class PerKbCostOf(operId: OperationId, size: Rep[Long]) extends BaseDef[Int] { + override def transform(t: Transformer): Def[IntPlusMonoidData] = PerKbCostOf(operId, t(size)) + /** Cost rule which is used to compute operation cost, depending on dataSize. + * Per kilobite cost of the oparation is obtained from CostTable and multiplied on + * the data size in Kb. */ + def eval(dataSize: Long): Int = { + val cost = CostTable.DefaultCosts(operId) + ((dataSize / 1024L).toInt + 1) * cost + } + } + + def costOf(costOp: CostOf, doEval: Boolean): Rep[Int] = { val res = if (doEval) toRep(costOp.eval) else (costOp: Rep[Int]) res } + def costOf(opName: String, opType: SFunc, doEval: Boolean): Rep[Int] = { + val costOp = CostOf(opName, opType) + costOf(costOp, doEval) + } + def costOf(opName: String, opType: SFunc): Rep[Int] = { costOf(opName, opType, substFromCostTable) } def costOf(method: SMethod): Rep[Int] = { - val methodTemplate = method.objType.getMethodById(method.methodId) + val methodTemplate = method.objType.methodById(method.methodId) val opId = methodTemplate.opId costOf(opId.name, opId.opType.copy(tpeParams = Nil), substFromCostTable) } def perKbCostOf(method: SMethod, dataSize: Rep[Long]): Rep[Int] = { - val methodTemplate = method.objType.getMethodById(method.methodId) + val methodTemplate = method.objType.methodById(method.methodId) val opId = methodTemplate.opId perKbCostOf(opId.name, opId.opType.copy(tpeParams = Nil), dataSize) } - def costOfProveDlog: Rep[Int] = costOf("ProveDlogEval", SFunc(SUnit, SSigmaProp)) - def costOfDHTuple: Rep[Int] = costOf("ProveDHTuple", SFunc(SUnit, SSigmaProp)) * 2 // cost ??? 
+ val _costOfProveDlogEval = CostOf("ProveDlogEval", SFunc(SUnit, SSigmaProp)) + val _costOfProveDHTuple = CostOf("ProveDHTuple", SFunc(SUnit, SSigmaProp)) + + def costOfProveDlog: Rep[Int] = costOf(_costOfProveDlogEval, substFromCostTable) + def costOfDHTuple: Rep[Int] = costOf(_costOfProveDHTuple, substFromCostTable) // see CostTable for how it relate to ProveDlogEval def costOfSigmaTree(sigmaTree: SigmaBoolean): Int = sigmaTree match { - case dlog: ProveDlog => CostOf("ProveDlogEval", SFunc(SUnit, SSigmaProp)).eval - case dlog: ProveDHTuple => CostOf("ProveDHTuple", SFunc(SUnit, SSigmaProp)).eval * 2 + case _: ProveDlog => _costOfProveDlogEval.eval + case _: ProveDHTuple => _costOfProveDHTuple.eval case CAND(children) => children.map(costOfSigmaTree(_)).sum case COR(children) => children.map(costOfSigmaTree(_)).sum - case CTHRESHOLD(k, children) => children.map(costOfSigmaTree(_)).sum + case CTHRESHOLD(_, children) => children.map(costOfSigmaTree(_)).sum case _ => CostTable.MinimalCost } @@ -203,7 +208,7 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev def perKbCostOf(opName: String, opType: SFunc, dataSize: Rep[Long]): Rep[Int] = { val opNamePerKb = s"${opName}_per_kb" - (dataSize.div(1024L).toInt + 1) * costOf(opNamePerKb, opType) + PerKbCostOf(OperationId(opNamePerKb, opType), dataSize) } def perKbCostOf(node: SValue, dataSize: Rep[Long]): Rep[Int] = { @@ -216,8 +221,8 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev } def constCost(tpe: SType): Rep[Int] = tpe match { - case f: SFunc => - costOf(s"Lambda", Constant[SType](SType.DummyValue, tpe).opType) + case _: SFunc => + costOf(s"Lambda", Constant[SType](SFunc.identity.asWrappedType, tpe).opType) case _ => costOf(s"Const", Constant[SType](SType.DummyValue, tpe).opType) } @@ -273,7 +278,7 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev def cost: Rep[Int] = { val costs = costedFields.fields.map { case (_, 
cf: RCosted[a]@unchecked) => cf.cost } - opCost(costs, structCost) + opCost(value, costs, structCost) } override def size: Rep[Size[Struct]] = { @@ -523,7 +528,7 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev def unapply(d: Def[_]): Nullable[Rep[Costed[(A, B)]] forSome {type A; type B}] = d.selfType match { case ce: CostedElem[_,_] if !ce.isInstanceOf[CostedPairElem[_, _, _]] => ce.eVal match { - case pE: PairElem[a,b] => + case _: PairElem[a,b] => val res = d.self.asInstanceOf[Rep[Costed[(A, B)]] forSome {type A; type B}] Nullable(res) case _ => Nullable.None @@ -600,21 +605,6 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev case SDBM.sigmaProp(_, SigmaM.isValid(p)) => p - case CCM.mapCosted(xs: RCostedColl[a], _f: RFuncCosted[_, b]) => - val f = asRep[Costed[a] => Costed[b]](_f) - val (calcF, costF, sizeF) = splitCostedFunc[a, b](f) - val vals = xs.values.map(calcF) - implicit val eA = xs.elem.eItem - implicit val eB = f.elem.eRange.eVal - - val costs = xs.costs.zip(xs.sizes).map(costF) - val sizes = if (eB.isConstantSize) { - colBuilder.replicate(xs.sizes.length, constantTypeSize(eB): RSize[b]) - } else { - xs.sizes.map(sizeF) - } - RCCostedColl(vals, costs, sizes, xs.valuesCost) // TODO add cost of map node - case CCM.foldCosted(xs: RCostedColl[a], zero: RCosted[b], _f) => val f = asRep[Costed[(b,a)] => Costed[b]](_f) val (calcF/*: Rep[((b,a)) => b]*/, @@ -635,19 +625,13 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev } ) RCCostedPrim(resV, resC, resS) - -// case CCM.filterCosted(xs: RCostedColl[a], _f: RCostedFunc[_,_]) => -// val f = asRep[Costed[a] => Costed[Boolean]](_f) -// val (calcF, costF, _) = splitCostedFunc[a, Boolean](f) -// val vals = xs.values.filter(calcF) -// val costs = xs.costs.zip(xs.sizes).map(costF) // TODO how to filter our sizes and costs -// val sizes = colBuilder.replicate(xs.sizes.length, 1L) -// RCostedColl(vals, costs, sizes, 
xs.valuesCost) - - case CostedM.cost(Def(CCostedCollCtor(_, costs, _, accCost))) => opCost(Seq(accCost), costs.sum(intPlusMonoid)) - case CostedM.cost(Def(CCostedOptionCtor(_, costOpt, _, accCost))) => opCost(Seq(accCost), costOpt.getOrElse(Thunk(0))) - case CostedM.cost(Def(CCostedPairCtor(l, r, accCost))) => opCost(Seq(accCost), l.cost + r.cost) + case CostedM.cost(Def(CCostedCollCtor(values, costs, _, accCost))) => + opCost(values, Seq(accCost), costs.sum(intPlusMonoid)) + case CostedM.cost(Def(CCostedOptionCtor(v, costOpt, _, accCost))) => + opCost(v, Seq(accCost), costOpt.getOrElse(Thunk(0))) + case CostedM.cost(Def(CCostedPairCtor(l, r, accCost))) => + opCost(Pair(l.value, r.value), Seq(accCost), l.cost + r.cost) case CostedM.value(Def(CCostedFuncCtor(_, func: RFuncCosted[a,b], _,_))) => func.sliceCalc @@ -676,22 +660,19 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev case CCostedPrimCtor(v, c, s) => val res = v.elem.asInstanceOf[Elem[_]] match { - case pe: PairElem[a,b] /*if s.elem.isInstanceOf[CSizePairElem[_,_]]*/ => + case pe: PairElem[a,b] => val p = asRep[(a,b)](v) costedPrimToPair(p, c, asRep[Size[(a,b)]](s)) - case ce: CollElem[a,_] /*if s.elem.isInstanceOf[CSizeCollElem[_]]*/ => + case ce: CollElem[a,_] => val col = asRep[Coll[a]](v) costedPrimToColl(col, c, asRep[Size[Coll[a]]](s)) - case oe: WOptionElem[a,_] /*if s.elem.isInstanceOf[CSizeOptionElem[_]]*/ => + case oe: WOptionElem[a,_] => val opt = asRep[WOption[a]](v) costedPrimToOption(opt, c, asRep[Size[WOption[a]]](s)) case _ => super.rewriteDef(d) } res -// case CostedBuilderM.costedValue(b, x, SPCM.some(cost)) => -// dataCost(x, Some(asRep[Int](cost))) - case IsConstSizeCostedColl(col) if !d.isInstanceOf[MethodCall] => // see also rewriteNonInvokableMethodCall mkCostedColl(col.value, col.value.length, col.cost) @@ -727,7 +708,10 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev def costedPrimToPair[A,B](p: Rep[(A,B)], c: 
Rep[Int], s: RSize[(A,B)]) = s.elem.asInstanceOf[Any] match { case se: SizeElem[_,_] if se.eVal.isInstanceOf[PairElem[_,_]] => val sPair = asSizePair(s) - RCCostedPair(RCCostedPrim(p._1, 0, sPair.l), RCCostedPrim(p._2, 0, sPair.r), c) + val l = RCCostedPrim(p._1, 0, sPair.l) + val r = RCCostedPrim(p._2, 0, sPair.r) + val newCost = opCost(Pair(l, r), Seq(c), 0) + RCCostedPair(l, r, newCost) case _ => !!!(s"Expected RCSizePair node but was $s -> ${s.rhs}") } @@ -753,69 +737,42 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev val builder: sigmastate.lang.SigmaBuilder import builder._ - private var _colBuilder: Rep[CollBuilder] = _ - private var _sizeBuilder: Rep[SizeBuilder] = _ - private var _costedBuilder: Rep[CostedBuilder] = _ - private var _intPlusMonoid: Rep[Monoid[Int]] = _ - private var _longPlusMonoid: Rep[Monoid[Long]] = _ - private var _sigmaDslBuilder: Rep[SigmaDslBuilder] = _ - - init() // initialize global context state - - def colBuilder: Rep[CollBuilder] = _colBuilder - def sizeBuilder: Rep[SizeBuilder] = _sizeBuilder - def costedBuilder: Rep[CostedBuilder] = _costedBuilder - def intPlusMonoid: Rep[Monoid[Int]] = _intPlusMonoid - def longPlusMonoid: Rep[Monoid[Long]] = _longPlusMonoid - def sigmaDslBuilder: Rep[SigmaDslBuilder] = _sigmaDslBuilder - - protected def init(): Unit = { - _colBuilder = RCollOverArrayBuilder() - _sizeBuilder = RCSizeBuilder() - _costedBuilder = RCCostedBuilder() - _intPlusMonoid = costedBuilder.monoidBuilder.intPlusMonoid - _longPlusMonoid = costedBuilder.monoidBuilder.longPlusMonoid - _sigmaDslBuilder = RTestSigmaDslBuilder() - } + /** Lazy values, which are immutable, but can be reset, so that the next time they are accessed + * the expression is re-evaluated. Each value should be reset in onReset() method. 
*/ + private val _sigmaDslBuilder: LazyRep[SigmaDslBuilder] = MutableLazy(RTestSigmaDslBuilder()) + def sigmaDslBuilder: Rep[SigmaDslBuilder] = _sigmaDslBuilder.value + + private val _colBuilder: LazyRep[CollBuilder] = MutableLazy(sigmaDslBuilder.Colls) + def colBuilder: Rep[CollBuilder] = _colBuilder.value + + private val _sizeBuilder: LazyRep[SizeBuilder] = MutableLazy(RCSizeBuilder()) + def sizeBuilder: Rep[SizeBuilder] = _sizeBuilder.value + + private val _costedBuilder: LazyRep[CostedBuilder] = MutableLazy(RCCostedBuilder()) + def costedBuilder: Rep[CostedBuilder] = _costedBuilder.value + + private val _monoidBuilder: LazyRep[MonoidBuilder] = MutableLazy(costedBuilder.monoidBuilder) + def monoidBuilder: Rep[MonoidBuilder] = _monoidBuilder.value + + private val _intPlusMonoid: LazyRep[Monoid[Int]] = MutableLazy(monoidBuilder.intPlusMonoid) + def intPlusMonoid: Rep[Monoid[Int]] = _intPlusMonoid.value + + private val _longPlusMonoid: LazyRep[Monoid[Long]] = MutableLazy(monoidBuilder.longPlusMonoid) + def longPlusMonoid: Rep[Monoid[Long]] = _longPlusMonoid.value + + private val _costedGlobal: LazyRep[Costed[SigmaDslBuilder]] = + MutableLazy(RCCostedPrim(sigmaDslBuilder, 0, costedBuilder.mkSizePrim(1L, sigmaDslBuilderElement))) + def costedGlobal: RCosted[SigmaDslBuilder] = _costedGlobal.value protected override def onReset(): Unit = { super.onReset() - init() + // WARNING: every lazy value should be listed here, otherwise bevavior after resetContext is undefined and may throw. + Seq(_sigmaDslBuilder, _colBuilder, _sizeBuilder, _costedBuilder, + _monoidBuilder, _intPlusMonoid, _longPlusMonoid, _costedGlobal) + .foreach(_.reset()) + _contextDependantNodes = debox.Set.ofSize[Int](InitDependantNodes) } -// TODO This is experimental alternative which is 10x faster in MeasureIRContext benchmark -// However it is not fully correct. -// It can be used if current implementation is not fast enough. 
-// def colBuilder: Rep[CollBuilder] = { -// if (_colBuilder == null) _colBuilder = RCollOverArrayBuilder() -// _colBuilder -// } -// def costedBuilder: Rep[CostedBuilder] = { -// if (_costedBuilder == null) _costedBuilder = RCCostedBuilder() -// _costedBuilder -// } -// def intPlusMonoid: Rep[Monoid[Int]] = { -// if (_intPlusMonoid == null) _intPlusMonoid = costedBuilder.monoidBuilder.intPlusMonoid -// _intPlusMonoid -// } -// def longPlusMonoid: Rep[Monoid[Long]] = { -// if (_longPlusMonoid == null) _longPlusMonoid = costedBuilder.monoidBuilder.longPlusMonoid -// _longPlusMonoid -// } -// def sigmaDslBuilder: Rep[SigmaDslBuilder] = { -// if (_sigmaDslBuilder == null) _sigmaDslBuilder = RTestSigmaDslBuilder() -// _sigmaDslBuilder -// } -// -// protected override def onReset(): Unit = { -// super.onReset() -// _colBuilder = null -// _costedBuilder = null -// _intPlusMonoid = null -// _longPlusMonoid = null -// _sigmaDslBuilder = null -// } - import Cost._ def removeIsProven[T,R](f: Rep[T] => Rep[Any]): Rep[T] => Rep[Any] = { x: Rep[T] => @@ -830,41 +787,6 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev private[sigmastate] var funUnderCosting: Sym = null def isCostingProcess: Boolean = funUnderCosting != null -// def costingOf[T,R](f: Rep[T => Costed[R]]): Rep[T] => Rep[Int] = { x: Rep[T] => -// funUnderCosting = f -// val c = f(x).cost; -// funUnderCosting = null -// c -// } - -// def sizingOf[T,R](f: Rep[T => Costed[R]]): Rep[T] => Rep[Long] = { x: Rep[T] => -// funUnderCosting = f -// val c = f(x).dataSize; -// funUnderCosting = null -// c -// } - -// def split2[T,R](f: Rep[((T, Size[T])) => Costed[R]]): Rep[(T => Any, T => Int)] = { -// implicit val eT = f.elem.eDom -// val calc = fun(removeIsProven { x: Rep[T] => -// val y = f(x); -// y.value -// }) -// val cost = fun(costingOf(f)) -// Pair(calc, cost) -// } - -// def split3[T,R](f: Rep[T => Costed[R]]): Rep[(T => Any, (T => Int, T => Long))] = { -// implicit val eT = 
f.elem.eDom -// val calc = fun(removeIsProven { x: Rep[T] => -// val y = f(x); -// y.value -// }) -// val cost = fun(costingOf(f)) -// val size = fun(sizingOf(f)) -// Tuple(calc, cost, size) -// } - def stypeToElem[T <: SType](t: T): Elem[T#WrappedType] = (t match { case SBoolean => BooleanElement case SByte => ByteElement @@ -876,6 +798,7 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev case SBigInt => bigIntElement case SBox => boxElement case SContext => contextElement + case SGlobal => sigmaDslBuilderElement case SHeader => headerElement case SPreHeader => preHeaderElement case SGroupElement => groupElementElement @@ -903,6 +826,7 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev case oe: WOptionElem[_, _] => sigmastate.SOption(elemToSType(oe.eItem)) case _: BoxElem[_] => SBox case _: ContextElem[_] => SContext + case _: SigmaDslBuilderElem[_] => SGlobal case _: HeaderElem[_] => SHeader case _: PreHeaderElem[_] => SPreHeader case _: SigmaPropElem[_] => SSigmaProp @@ -943,7 +867,6 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev case _ => sys.error(s"Don't know how to convert RType $t to Elem") } - /** For a given data type returns the corresponding specific descendant of CostedElem[T] */ def elemToCostedElem[T](implicit e: Elem[T]): Elem[Costed[T]] = (e match { case oe: WOptionElem[a,_] => costedOptionElement(oe.eItem) @@ -1056,31 +979,12 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev /** Helper to create costed collection of some constant size type T */ def mkCostedColl[T](col: Rep[Coll[T]], len: Rep[Int], cost: Rep[Int]): Rep[CostedColl[T]] = { - // TODO optimization: the method should be specialized on T so that mkSizePrim is not used + // TODO optimize: the method should be specialized on T so that mkSizePrim is not used val eT = col.elem.eItem val costs = colBuilder.replicate(len, 0) val sizes = 
colBuilder.replicate(len, costedBuilder.mkSizePrim(typeSize(eT), eT): RSize[T]) RCCostedColl(col, costs, sizes, cost) } -// def mkCostedColl[T](col: Rep[Coll[T]], cost: Rep[Int]): Rep[CostedColl[T]] = { -// mkCostedColl(col, col.length, cost) -// } - -// def mkCosted[T](v: Rep[T], cost: Rep[Int], size: Rep[Long]): Rep[Costed[T]] = { -// val res = v.elem match { -// case colE: CollElem[a,_] => -// val xs = asRep[Coll[a]](v) -// costedPrimToColl(xs, cost, size) -// case _ => -// RCCostedPrim(v, cost, size) -// } -// asRep[Costed[T]](res) -// } - -// def mkCostedOption[T](opt: Rep[WOption[T]], sizeOpt: Rep[WOption[Long]], cost: Rep[Int]): Rep[CostedOption[T]] = { -// val costOpt = RWSpecialPredef.some(0) -// RCCostedOption(opt, costOpt, sizeOpt, cost) -// } @inline final def asCosted[T](x: Rep[_]): Rep[Costed[T]] = x.asInstanceOf[Rep[Costed[T]]] @@ -1112,6 +1016,8 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev @inline def SigmaDsl = sigmaDslBuilderValue @inline def Colls = sigmaDslBuilderValue.Colls + protected implicit def groupElementToECPoint(g: special.sigma.GroupElement): EcPointType = SigmaDsl.toECPoint(g).asInstanceOf[EcPointType] + def constantTypeSize[T](implicit eT: Elem[T]): RSize[T] = RCSizePrim(typeSize(eT), eT) def withConstantSize[T](v: Rep[T], cost: Rep[Int]): RCosted[T] = RCCostedPrim(v, cost, constantTypeSize(v.elem)) @@ -1145,6 +1051,73 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev liftConst(Sized.sizeOf(x.asInstanceOf[(a,b)])) }) + /** Build a new costed value with the given cost in a dependency list. + * This is required to correctly handle tuple field accesses like `v._1` + * and not to lose the cost of `v` in the cost of resulting value. 
*/ + def attachCost[T](source: RCosted[T], accCost: Rep[Int], cost: Rep[Int]): RCosted[T] = asRep[Costed[T]] { + def newCost(v: Sym, c: Rep[Int]) = opCost(v, Seq(accCost, c), cost) // put cost in dependency list + + source.elem.eVal match { + case e: CollElem[a, _] => + val xsC = asCostedColl[a](asCosted[Coll[a]](source)) + val v = xsC.values + val c = xsC.cost + RCCostedColl(v, xsC.costs, xsC.sizes, newCost(v, c)) + case e: PairElem[a,b] => + val pC = asCostedPair[a,b](asCosted[(a,b)](source)) + RCCostedPair(pC.l, pC.r, newCost(Pair(pC.l, pC.r), pC.cost)) + case e => + val c = source.cost // this is a current cost of the value + val v = source.value + RCCostedPrim(v, newCost(v, c), source.size) + } + } + + /** Initial capacity of the hash set, large enough to avoid many rebuidings + * and small enough to not consume too much memory. */ + private val InitDependantNodes = 10000 + + /** Mutable IR context state, make sure it is reset in onReset() to its initial state. */ + private[this] var _contextDependantNodes = debox.Set.ofSize[Int](InitDependantNodes) + + def isContextDependant(sym: Sym): Boolean = + if (sym.isConst) true + else { + _contextDependantNodes(sym.rhs.nodeId) + } + + /** Here we hook into graph building process at the point where each new graph node is added to the graph. + * First, we call `super.createDefinition`, which adds the new node `d` to the graph (`s` is the node's symbol). 
+ * Next, we update context dependence analysis information (see isSupportedIndexExpression) + * The graph node is `context-dependent` if: + * 1) it is the node of Context type + * 2) all nodes it depends on are `context-dependent` + * + * @see super.createDefinition, isSupportedIndexExpression + */ + override protected def createDefinition[T](optScope: Nullable[ThunkScope], s: Rep[T], d: Def[T]): TableEntry[T] = { + val res = super.createDefinition(optScope, s, d) + res.rhs match { + case d if d.selfType.isInstanceOf[ContextElem[_]] => + // the node is of Context type => `context-dependent` + _contextDependantNodes += (d.nodeId) + case d => + val allArgs = d.getDeps.forall(isContextDependant) + if (allArgs) { + // all arguments are `context-dependent` => d is `context-dependent` + _contextDependantNodes += (d.nodeId) + } + } + res + } + + /** Checks that index expression sub-graph (which root is `i`) consists of `context-dependent` nodes. + * This is used in the validation rule for the costing of ByIndex operation. 
+ * @see RuntimeCosting, CheckIsSupportedIndexExpression */ + def isSupportedIndexExpression(i: Rep[Int]): Boolean = { + isContextDependant(i) + } + protected def evalNode[T <: SType](ctx: RCosted[Context], env: CostingEnv, node: Value[T]): RCosted[T#WrappedType] = { import WOption._ def eval[T <: SType](node: Value[T]): RCosted[T#WrappedType] = evalNode(ctx, env, node) @@ -1174,22 +1147,19 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev env.getOrElse(id, !!!(s"TaggedVariable $id not found in environment $env")) case c @ Constant(v, tpe) => v match { - case st: SigmaBoolean => + case p: SSigmaProp => assert(tpe == SSigmaProp) - val p = SigmaDsl.SigmaProp(st) val resV = liftConst(p) - RCCostedPrim(resV, costOfSigmaTree(st), SizeOfSigmaBoolean(st)) - case bi: BigInteger => + RCCostedPrim(resV, costOfSigmaTree(p), SizeOfSigmaProp(p)) + case bi: SBigInt => assert(tpe == SBigInt) - val resV = liftConst(sigmaDslBuilderValue.BigInt(bi)) + val resV = liftConst(bi) withConstantSize(resV, costOf(c)) - case p: ECPoint => + case p: SGroupElement => assert(tpe == SGroupElement) - val resV = liftConst(sigmaDslBuilderValue.GroupElement(p): SGroupElement) -// val size = SGroupElement.dataSize(ge.asWrappedType) + val resV = liftConst(p) withConstantSize(resV, costOf(c)) - case arr: Array[a] => - val coll = Evaluation.toDslData(arr, tpe, false)(IR).asInstanceOf[SColl[a]] + case coll: SColl[a] => val tpeA = tpe.asCollection[SType].elemType stypeToElem(tpeA) match { case eWA: Elem[wa] => @@ -1205,12 +1175,10 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev } RCCostedColl(resVals, resCosts, resSizes, costOf(c)) } - case ergoBox: ErgoBox => - val box = ergoBox.toTestBox(false)(IR) + case box: SBox => val boxV = liftConst(box) RCCostedPrim(boxV, costOf(c), sizeOfData(box)) - case treeData: AvlTreeData => - val tree: special.sigma.AvlTree = CAvlTree(treeData) + case tree: special.sigma.AvlTree => val treeV = 
liftConst(tree) RCCostedPrim(treeV, costOf(c), SizeAvlTree) case s: String => @@ -1221,7 +1189,8 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev withConstantSize(resV, costOf(c)) } - case org.ergoplatform.Context => ctx + case org.ergoplatform.Context => ctx + case Global => costedGlobal case Height => ContextCoster(ctx, SContext.heightMethod, Nil) case Inputs => ContextCoster(ctx, SContext.inputsMethod, Nil) case Outputs => ContextCoster(ctx, SContext.outputsMethod, Nil) @@ -1233,7 +1202,7 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev stypeToElem(optTpe.elemType) match { case e: Elem[t] => val v = ctx.value.getVar[t](id)(e) val s = tryCast[SizeContext](ctx.size).getVar(id)(e) - RCCostedPrim(v, sigmaDslBuilder.CostModel.GetVar, s) + RCCostedPrim(v, opCost(v, Nil, sigmaDslBuilder.CostModel.GetVar), s) } case Terms.Block(binds, res) => @@ -1262,7 +1231,7 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev case CreateProveDlog(In(_v)) => val vC = asRep[Costed[GroupElement]](_v) val resV: Rep[SigmaProp] = sigmaDslBuilder.proveDlog(vC.value) - val cost = opCost(Seq(vC.cost), costOfDHTuple) + val cost = opCost(resV, Seq(vC.cost), costOfDHTuple) RCCostedPrim(resV, cost, mkSizeSigmaProp(vC.size.dataSize)) case CreateProveDHTuple(In(_gv), In(_hv), In(_uv), In(_vv)) => @@ -1271,39 +1240,38 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev val uvC = asRep[Costed[GroupElement]](_uv) val vvC = asRep[Costed[GroupElement]](_vv) val resV: Rep[SigmaProp] = sigmaDslBuilder.proveDHTuple(gvC.value, hvC.value, uvC.value, vvC.value) - val cost = opCost(Seq(gvC.cost, hvC.cost, uvC.cost, vvC.cost), costOfDHTuple) + val cost = opCost(resV, Seq(gvC.cost, hvC.cost, uvC.cost, vvC.cost), costOfDHTuple) RCCostedPrim(resV, cost, mkSizeSigmaProp(gvC.size.dataSize * 4L)) case sigmastate.Exponentiate(In(_l), In(_r)) => val l = asRep[Costed[GroupElement]](_l) 
val r = asRep[Costed[BigInt]](_r) val value = l.value.exp(r.value) - val cost = opCost(Seq(l.cost, r.cost), costOf(node)) + val cost = opCost(value, Seq(l.cost, r.cost), costOf(node)) RCCostedPrim(value, cost, SizeGroupElement) case sigmastate.MultiplyGroup(In(_l), In(_r)) => val l = asRep[Costed[GroupElement]](_l) val r = asRep[Costed[GroupElement]](_r) val value = l.value.multiply(r.value) - val cost = opCost(Seq(l.cost, r.cost), costOf(node)) + val cost = opCost(value, Seq(l.cost, r.cost), costOf(node)) RCCostedPrim(value, cost, SizeGroupElement) case Values.GroupGenerator => - val value = sigmaDslBuilder.groupGenerator - RCCostedPrim(value, opCost(Nil, costOf(node)), SizeGroupElement) + SigmaDslBuilderCoster(costedGlobal, SGlobal.groupGeneratorMethod, Nil) case sigmastate.ByteArrayToBigInt(In(_arr)) => val arrC = asRep[Costed[Coll[Byte]]](_arr) val arr = arrC.value val value = sigmaDslBuilder.byteArrayToBigInt(arr) val size = arrC.size.dataSize - val cost = opCost(Seq(arrC.cost), costOf(node) + costOf("new_BigInteger_per_item", node.opType) * size.toInt) + val cost = opCost(value, Seq(arrC.cost), costOf(node) + costOf("new_BigInteger_per_item", node.opType) * size.toInt) RCCostedPrim(value, cost, SizeBigInt) case sigmastate.LongToByteArray(In(_x)) => val xC = asRep[Costed[Long]](_x) val col = sigmaDslBuilder.longToByteArray(xC.value) // below we assume col.length == typeSize[Long] - val cost = opCost(Seq(xC.cost), costOf(node)) + val cost = opCost(col, Seq(xC.cost), costOf(node)) val len = SizeLong.dataSize.toInt mkCostedColl(col, len, cost) @@ -1320,30 +1288,37 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev OptionCoster(_opt, SOption.GetOrElseMethod, Seq(_default)) case SelectField(In(_tup), fieldIndex) => - _tup.elem.eVal.asInstanceOf[Elem[_]] match { - case se: StructElem[_] => - val tup = asRep[Costed[Struct]](_tup) - val fn = STuple.componentNameByIndex(fieldIndex - 1) - val v = tup.value.getUntyped(fn) - val c = 
opCost(Seq(tup.cost), costedBuilder.SelectFieldCost) - val s: RSize[Any] = ??? //asRep[SizeStruct](tup.size).sizeFi.getUntyped(fn) - RCCostedPrim(v, c, s) + val eTuple = _tup.elem.eVal.asInstanceOf[Elem[_]] + CheckTupleType(IR)(eTuple) {} + eTuple match { case pe: PairElem[a,b] => assert(fieldIndex == 1 || fieldIndex == 2, s"Invalid field index $fieldIndex of the pair ${_tup}: $pe") implicit val ea = pe.eFst implicit val eb = pe.eSnd val pair = tryCast[CostedPair[a,b]](_tup) - val res = if (fieldIndex == 1) pair.l else pair.r + val res = if (fieldIndex == 1) + attachCost(pair.l, pair.accCost, selectFieldCost) + else + attachCost(pair.r, pair.accCost, selectFieldCost) res +// TODO soft-fork: implement similar to Pair case +// case se: StructElem[_] => +// val tup = asRep[Costed[Struct]](_tup) +// val fn = STuple.componentNameByIndex(fieldIndex - 1) +// val v = tup.value.getUntyped(fn) +// val c = opCost(v, Seq(tup.cost), costedBuilder.SelectFieldCost) +// val s: RSize[Any] = ??? +// RCCostedPrim(v, c, s) } case Values.Tuple(InSeq(Seq(x, y))) => - RCCostedPair(x, y, opCost(Seq(x.cost, y.cost), CostTable.newPairValueCost)) + RCCostedPair(x, y, opCost(Pair(x, y), Seq(x.cost, y.cost), CostTable.newPairValueCost)) case Values.Tuple(InSeq(items)) => val fields = items.zipWithIndex.map { case (x, i) => (s"_${i+1}", x)} - val cost = opCost(items.map(_.cost), costedBuilder.ConstructTupleCost) - RCostedStruct(struct(fields), cost) + val value = struct(fields) + val cost = opCost(value, items.map(_.cost), costedBuilder.ConstructTupleCost) + RCostedStruct(value, cost) case node: BooleanTransformer[_] => val eIn = stypeToElem(node.input.tpe.elemType) @@ -1356,15 +1331,7 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev val (calcF, costF) = splitCostedFunc2(condC, okRemoveIsValid = true) val sizeF = condC.sliceSize val values = xs.values.map(calcF) -// val mRes = AllMarking(element[Int]) -// val mCostF = sliceAnalyzer.analyzeFunc(costF, mRes) -// 
val cost = mCostF.mDom match { -// case PairMarking(markA,_) if markA.isEmpty => // no dependency on values -// val slicedCostF = fun { in: Rep[(Int, Long)] => costF(Pair(variable[Any](Lazy(eAny)), in)) } val cost = xs.costs.zip(xs.sizes).map(costF).sum(intPlusMonoid) -// case _ => -// xs.values.zip(xs.costs.zip(xs.sizes)).map(costF).sum(intPlusMonoid) -// } val res = calcF.elem.eRange match { case e if e == BooleanElement => node match { @@ -1390,12 +1357,9 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev res case MapCollection(input, sfunc) => - val eIn = stypeToElem(input.tpe.elemType) - val inputC = asRep[CostedColl[Any]](evalNode(ctx, env, input)) - implicit val eAny = inputC.elem.asInstanceOf[CostedElem[Coll[Any], _]].eVal.eA - assert(eIn == eAny, s"Types should be equal: but $eIn != $eAny") - val mapperC = asRep[CostedFunc[Unit, Any, SType#WrappedType]](evalNode(ctx, env, sfunc)).func - val res = inputC.mapCosted(mapperC) + val inputC = evalNode(ctx, env, input) + val mapper = evalNode(ctx, env, sfunc) + val res = CollCoster(inputC, SCollection.MapMethod, Seq(mapper)) res case Fold(input, zero, sfunc) => @@ -1433,7 +1397,7 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev val vals = inputC.values.slice(f, u) val costs = inputC.costs val sizes = inputC.sizes - RCCostedColl(vals, costs, sizes, opCost(Seq(inputC.valuesCost), costOf(op))) + RCCostedColl(vals, costs, sizes, opCost(vals, Seq(inputC.valuesCost), costOf(op))) case Append(In(_col1), In(_col2)) => val col1 = asRep[CostedColl[Any]](_col1) @@ -1441,27 +1405,13 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev val values = col1.values.append(col2.values) val costs = col1.costs.append(col2.costs) val sizes = col1.sizes.append(col2.sizes) - RCCostedColl(values, costs, sizes, opCost(Seq(col1.cost, col2.cost), costOf(node))) - - case Terms.Apply(Select(col, "where", _), Seq(Terms.Lambda(_, Seq((n, t)), _, 
Some(body)))) => - val input = col.asValue[SCollection[SType]] - val cond = body.asValue[SBoolean.type] - val eIn = stypeToElem(input.tpe.elemType) - val inputC = asRep[CostedColl[Any]](evalNode(ctx, env, input)) - implicit val eAny = inputC.elem.asInstanceOf[CostedElem[Coll[Any],_]].eVal.eA - assert(eIn == eAny, s"Types should be equal: but $eIn != $eAny") - val condC = fun { x: Rep[Costed[Any]] => - evalNode(ctx, env + (n -> x), cond) - } - val res = inputC.filterCosted(condC) - res - -// case Terms.Apply(Select(col,"fold", _), Seq(zero, Terms.Lambda(Seq((zeroArg, tZero), (opArg, tOp)), _, Some(body)))) => -// val taggedZero = mkTaggedVariable(21, tZero) -// val taggedOp = mkTaggedVariable(22, tOp) -// val env1 = env ++ Seq(zeroArg -> taggedZero, opArg -> taggedOp) -// some(mkFold(col.asValue[SCollection[SType]], taggedZero.varId, zero, taggedOp.varId, body))(env1) + RCCostedColl(values, costs, sizes, opCost(values, Seq(col1.cost, col2.cost), costOf(node))) + case Filter(input, p) => + val inputC = evalNode(ctx, env, input) + val pC = evalNode(ctx, env, p) + val res = CollCoster(inputC, SCollection.FilterMethod, Seq(pC)) + res case Terms.Apply(f, Seq(x)) if f.tpe.isFunc => val fC = asRep[CostedFunc[Unit, Any, Any]](evalNode(ctx, env, f)) @@ -1482,10 +1432,12 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev // val sizes: Rep[WOption[Long]]= Apply(sizeF, Pair(value, xC.dataSize), false) // RCCostedOption(values, costRes._1, sizes, costRes._2) case _ => - val (calcF, costF, sizeF) = splitCostedFunc(fC.func) + val calcF = fC.sliceCalc + val costF = fC.sliceCost + val sizeF = fC.sliceSize val value = xC.value val y: Rep[Any] = Apply(calcF, value, false) - val c: Rep[Int] = opCost(Seq(fC.cost, xC.cost), Apply(costF, Pair(xC.cost, xC.size), false)) + val c: Rep[Int] = opCost(y, Seq(fC.cost, xC.cost), Apply(costF, Pair(xC.cost, xC.size), false)) val s: Rep[Size[Any]]= Apply(sizeF, xC.size, false) RCCostedPrim(y, c, s) } @@ -1496,87 
+1448,97 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev case CalcBlake2b256(In(input)) => val bytesC = asRep[Costed[Coll[Byte]]](input) val res = sigmaDslBuilder.blake2b256(bytesC.value) - val cost = opCost(Seq(bytesC.cost), perKbCostOf(node, bytesC.size.dataSize)) + val cost = opCost(res, Seq(bytesC.cost), perKbCostOf(node, bytesC.size.dataSize)) mkCostedColl(res, Blake2b256.DigestSize, cost) case CalcSha256(In(input)) => val bytesC = asRep[Costed[Coll[Byte]]](input) val res = sigmaDslBuilder.sha256(bytesC.value) - val cost = opCost(Seq(bytesC.cost), perKbCostOf(node, bytesC.size.dataSize)) + val cost = opCost(res, Seq(bytesC.cost), perKbCostOf(node, bytesC.size.dataSize)) mkCostedColl(res, Sha256.DigestSize, cost) case utxo.SizeOf(In(xs)) => - xs.elem.eVal match { + xs.elem.eVal.asInstanceOf[Any] match { case ce: CollElem[a,_] => val xsC = asRep[Costed[Coll[a]]](xs) val v = xsC.value.length - RCCostedPrim(v, opCost(Seq(xsC.cost), costOf(node)), SizeInt) + RCCostedPrim(v, opCost(v, Seq(xsC.cost), costOf(node)), SizeInt) case se: StructElem[_] => val xsC = asRep[Costed[Struct]](xs) - RCCostedPrim(se.fields.length, opCost(Seq(xsC.cost), costOf(node)), SizeInt) + val v = se.fields.length + RCCostedPrim(v, opCost(v, Seq(xsC.cost), costOf(node)), SizeInt) + case pe: PairElem[a,b] => + val xsC = asRep[Costed[(a,b)]](xs) + val v: Rep[Int] = 2 + RCCostedPrim(v, opCost(v, Seq(xsC.cost), costOf(node)), SizeInt) } - case ByIndex(xs, i, default) => + case ByIndex(xs, i, defaultOpt) => val xsC = asRep[CostedColl[Any]](eval(xs)) val iC = asRep[Costed[Int]](eval(i)) val iV = iC.value - val size = xsC.sizes(iV) // TO - default match { + val size = if (xs.tpe.elemType.isConstantSize) + constantTypeSize(xsC.elem.eItem) + else + CheckIsSupportedIndexExpression(IR)(xs, i, iV) { + xsC.sizes(iV) + } + defaultOpt match { case Some(defaultValue) => val defaultC = asRep[Costed[Any]](eval(defaultValue)) val default = defaultC.value val value = 
xsC.value.getOrElse(iV, default) - val cost = opCost(Seq(xsC.cost, iC.cost, defaultC.cost), costOf(node)) + val cost = opCost(value, Seq(xsC.cost, iC.cost, defaultC.cost), costOf(node)) RCCostedPrim(value, cost, size) case None => - RCCostedPrim(xsC.value(iV), opCost(Seq(xsC.cost, iC.cost), costOf(node)), size) + val value = xsC.value(iV) + RCCostedPrim(value, opCost(value, Seq(xsC.cost, iC.cost), costOf(node)), size) } case SigmaPropIsProven(p) => val pC = asRep[Costed[SigmaProp]](eval(p)) val v = pC.value.isValid - val c = opCost(Seq(pC.cost), costOf(node)) + val c = opCost(v, Seq(pC.cost), costOf(node)) // val s = pC.size // NOTE: we pass SigmaProp's size, this is handled in buildCostedGraph RCCostedPrim(v, c, SizeBoolean) case SigmaPropBytes(p) => val pC = asRep[Costed[SigmaProp]](eval(p)) val v = pC.value.propBytes - mkCostedColl(v, pC.size.dataSize.toInt, opCost(Seq(pC.cost), costOf(node))) + mkCostedColl(v, pC.size.dataSize.toInt, opCost(v, Seq(pC.cost), costOf(node))) case utxo.ExtractId(In(box)) => // TODO costing: use special CostedCollFixed for fixed-size collections val boxC = asRep[Costed[Box]](box) val id = boxC.value.id - mkCostedColl(id, Blake2b256.DigestSize, opCost(Seq(boxC.cost), costOf(node))) + mkCostedColl(id, Blake2b256.DigestSize, opCost(id, Seq(boxC.cost), costOf(node))) case utxo.ExtractBytesWithNoRef(In(box)) => val boxC = asRep[Costed[Box]](box) val sBox = tryCast[SizeBox](boxC.size) - mkCostedColl(boxC.value.bytesWithoutRef, sBox.bytesWithoutRef.dataSize.toInt, opCost(Seq(boxC.cost), costOf(node))) + val v = boxC.value.bytesWithoutRef + mkCostedColl(v, sBox.bytesWithoutRef.dataSize.toInt, opCost(v, Seq(boxC.cost), costOf(node))) case utxo.ExtractAmount(In(box)) => val boxC = asRep[Costed[Box]](box) - withConstantSize(boxC.value.value, opCost(Seq(boxC.cost), costOf(node))) + val v = boxC.value.value + withConstantSize(v, opCost(v, Seq(boxC.cost), costOf(node))) case utxo.ExtractScriptBytes(In(box)) => val boxC = asRep[Costed[Box]](box) 
val sBox = tryCast[SizeBox](boxC.size) val bytes = boxC.value.propositionBytes - mkCostedColl(bytes, sBox.propositionBytes.dataSize.toInt, opCost(Seq(boxC.cost), costOf(node))) + mkCostedColl(bytes, sBox.propositionBytes.dataSize.toInt, opCost(bytes, Seq(boxC.cost), costOf(node))) case utxo.ExtractBytes(In(box)) => val boxC = asRep[Costed[Box]](box) val sBox = tryCast[SizeBox](boxC.size) val bytes = boxC.value.bytes - mkCostedColl(bytes, sBox.bytes.dataSize.toInt, opCost(Seq(boxC.cost), costOf(node))) + mkCostedColl(bytes, sBox.bytes.dataSize.toInt, opCost(bytes, Seq(boxC.cost), costOf(node))) case utxo.ExtractCreationInfo(In(box)) => BoxCoster(box, SBox.creationInfoMethod, Nil) case utxo.ExtractRegisterAs(In(box), regId, optTpe) => - val boxC = asRep[Costed[Box]](box) - val sBox = asSizeBox(boxC.size) implicit val elem = stypeToElem(optTpe.elemType).asElem[Any] - val valueOpt = boxC.value.getReg(regId.number.toInt)(elem) - val sReg = asSizeOption(sBox.getReg(regId.number)(elem)) - RCCostedOption(valueOpt, SOME(0), sReg.sizeOpt, opCost(Seq(boxC.cost), sigmaDslBuilder.CostModel.GetRegister)) + val i: RCosted[Int] = RCCostedPrim(regId.number.toInt, 0, SizeInt) + BoxCoster(box, SBox.getRegMethod, Seq(i), Seq(liftElem(elem))) case BoolToSigmaProp(bool) => val boolC = eval(bool) val value = sigmaDslBuilder.sigmaProp(boolC.value) - RCCostedPrim(value, opCost(Seq(boolC.cost), costOf(node)), mkSizeSigmaProp(1L)) + RCCostedPrim(value, opCost(value, Seq(boolC.cost), costOf(node)), mkSizeSigmaProp(1L)) case AtLeast(bound, input) => val inputC = asRep[CostedColl[SigmaProp]](evalNode(ctx, env, input)) @@ -1587,7 +1549,7 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev } val boundC = eval(bound) val res = sigmaDslBuilder.atLeast(boundC.value, inputC.values) - val cost = opCost(Seq(boundC.cost, inputC.cost), costOf(node)) + val cost = opCost(res, Seq(boundC.cost, inputC.cost), costOf(node)) val sInput = tryCast[SizeColl[SigmaProp]](inputC.size) 
RCCostedPrim(res, cost, mkSizeSigmaProp(sInput.dataSize)) @@ -1614,7 +1576,7 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev v = xC.value.max(yC.value) case code => error(s"Cannot perform Costing.evalNode($op): unknown opCode ${code}", op.sourceContext.toOption) } - val c = opCost(Seq(xC.cost, yC.cost), costOf(op)) + val c = opCost(v, Seq(xC.cost, yC.cost), costOf(op)) RCCostedPrim(v, c, SizeBigInt) case op: ArithOp[t] => @@ -1624,29 +1586,43 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev val x = evalNode(ctx, env, op.left) val y = evalNode(ctx, env, op.right) (x, y) match { case (x: RCosted[a], y: RCosted[b]) => - withConstantSize(ApplyBinOp(binop, x.value, y.value), opCost(Seq(x.cost, y.cost), costOf(op))) + val v = ApplyBinOp(binop, x.value, y.value) + withConstantSize(v, opCost(v, Seq(x.cost, y.cost), costOf(op))) } case LogicalNot(input) => val inputC = evalNode(ctx, env, input) - withConstantSize(ApplyUnOp(Not, inputC.value), opCost(Seq(inputC.cost), costOf(node))) + val v = ApplyUnOp(Not, inputC.value) + withConstantSize(v, opCost(v, Seq(inputC.cost), costOf(node))) case ModQ(input) => val inputC = asRep[Costed[BigInt]](eval(input)) val v = inputC.value.modQ - RCCostedPrim(v, opCost(Seq(inputC.cost), costOf(node)), SizeBigInt) + RCCostedPrim(v, opCost(v, Seq(inputC.cost), costOf(node)), SizeBigInt) + + case ModQArithOp(l, r, code) => + val lC = asRep[Costed[BigInt]](eval(l)) + val rC = asRep[Costed[BigInt]](eval(r)) + val v = code match { + case OpCodes.PlusModQCode => lC.value.plusModQ(rC.value) + case OpCodes.MinusModQCode => lC.value.minusModQ(rC.value) + case code => error(s"unknown code for modular arithmetic op: $code") + } + RCCostedPrim(v, opCost(v, Seq(lC.cost, rC.cost), costOf(node)), SizeBigInt) case OR(input) => input match { case ConcreteCollection(items, tpe) => val itemsC = items.map(item => eval(adaptSigmaBoolean(item))) val res = 
sigmaDslBuilder.anyOf(colBuilder.fromItems(itemsC.map(_.value): _*)) val costs = itemsC.map(_.cost) - val cost = opCost(costs, perItemCostOf(node, costs.length)) + val nOps = costs.length - 1 + val cost = opCost(res, costs, perItemCostOf(node, nOps)) withConstantSize(res, cost) case _ => val inputC = asRep[CostedColl[Boolean]](eval(input)) val res = sigmaDslBuilder.anyOf(inputC.value) - val cost = opCost(Seq(inputC.cost), perItemCostOf(node, inputC.sizes.length)) + val nOps = inputC.sizes.length - 1 + val cost = opCost(res, Seq(inputC.cost), perItemCostOf(node, nOps)) withConstantSize(res, cost) } @@ -1655,12 +1631,14 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev val itemsC = items.map(item => eval(adaptSigmaBoolean(item))) val res = sigmaDslBuilder.allOf(colBuilder.fromItems(itemsC.map(_.value): _*)) val costs = itemsC.map(_.cost) - val cost = opCost(costs, perItemCostOf(node, costs.length)) + val nOps = costs.length - 1 + val cost = opCost(res, costs, perItemCostOf(node, nOps)) withConstantSize(res, cost) case _ => val inputC = tryCast[CostedColl[Boolean]](eval(input)) val res = sigmaDslBuilder.allOf(inputC.value) - val cost = opCost(Seq(inputC.cost), perItemCostOf(node, inputC.sizes.length)) + val nOps = inputC.sizes.length - 1 + val cost = opCost(res, Seq(inputC.cost), perItemCostOf(node, nOps)) withConstantSize(res, cost) } @@ -1669,12 +1647,14 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev val itemsC = items.map(item => eval(item)) val res = sigmaDslBuilder.xorOf(colBuilder.fromItems(itemsC.map(_.value): _*)) val costs = itemsC.map(_.cost) - val cost = opCost(costs, perItemCostOf(node, costs.length)) + val nOps = costs.length - 1 + val cost = opCost(res, costs, perItemCostOf(node, nOps)) withConstantSize(res, cost) case _ => val inputC = tryCast[CostedColl[Boolean]](eval(input)) val res = sigmaDslBuilder.xorOf(inputC.value) - val cost = opCost(Seq(inputC.cost), perItemCostOf(node, 
inputC.sizes.length)) + val nOps = inputC.sizes.length - 1 + val cost = opCost(res, Seq(inputC.cost), perItemCostOf(node, nOps)) withConstantSize(res, cost) } @@ -1682,7 +1662,7 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev val lC = evalNode(ctx, env, l) val rC = RCostedThunk(Thunk(evalNode(ctx, env, r)), 0) val v = Or.applyLazy(lC.value, rC.value) - val c = opCost(Seq(lC.cost, rC.cost), costOf(node)) + val c = opCost(v, Seq(lC.cost, rC.cost), costOf(node)) withConstantSize(v, c) @@ -1690,30 +1670,31 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev val lC = evalNode(ctx, env, l) val rC = RCostedThunk(Thunk(evalNode(ctx, env, r)), 0) val v = And.applyLazy(lC.value, rC.value) - val c = opCost(Seq(lC.cost, rC.cost), costOf(node)) + val c = opCost(v, Seq(lC.cost, rC.cost), costOf(node)) withConstantSize(v, c) -// case BinXor(l, r) => -// val lC = evalNode(ctx, env, l) -// val rC = RCostedThunk(Thunk(evalNode(ctx, env, r)), 0) -// val v = sigmaDslBuilder.binXor(lC.value, rC.value) -// val c = lC.cost + rC.cost + costOf(node) -// withDefaultSize(v, c) + case BinXor(l, r) => + val lC = evalNode(ctx, env, l) + val rC = evalNode(ctx, env, r) + val v = BinaryXorOp.apply(lC.value, rC.value) + val c = opCost(v, Seq(lC.cost, rC.cost), costOf(node)) + withConstantSize(v, c) - case neg: Negation[t] => + case neg: Negation[SNumericType]@unchecked => val tpe = neg.input.tpe val et = stypeToElem(tpe) val op = NumericNegate(elemToNumeric(et))(et) val inputC = evalNode(ctx, env, neg.input) inputC match { case x: RCosted[a] => - withConstantSize(ApplyUnOp(op, x.value), opCost(Seq(x.cost), costOf(neg))) + val v = ApplyUnOp(op, x.value) + withConstantSize(v, opCost(v, Seq(x.cost), costOf(neg))) } case SigmaAnd(items) => val itemsC = items.map(eval) val res = sigmaDslBuilder.allZK(colBuilder.fromItems(itemsC.map(s => asRep[SigmaProp](s.value)): _*)) val costs = itemsC.map(_.cost) - val cost = opCost(costs, 
perItemCostOf(node, costs.length)) + val cost = opCost(res, costs, perItemCostOf(node, costs.length)) val size = colBuilder.fromItems(itemsC.map(_.size.dataSize): _*).sum(longPlusMonoid) RCCostedPrim(res, cost, mkSizeSigmaProp(size)) @@ -1721,7 +1702,7 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev val itemsC = items.map(eval) val res = sigmaDslBuilder.anyZK(colBuilder.fromItems(itemsC.map(s => asRep[SigmaProp](s.value)): _*)) val costs = itemsC.map(_.cost) - val cost = opCost(costs, perItemCostOf(node, costs.length)) + val cost = opCost(res, costs, perItemCostOf(node, costs.length)) val size = colBuilder.fromItems(itemsC.map(_.size.dataSize): _*).sum(longPlusMonoid) RCCostedPrim(res, cost, mkSizeSigmaProp(size)) @@ -1738,8 +1719,8 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev def tC = evalNode(ctx, env, t) def eC = evalNode(ctx, env, e) val resV = IF (cC.value) THEN tC.value ELSE eC.value - val resCost = opCost(Seq(cC.cost, tC.cost, eC.cost), costOf("If", SFunc(Vector(SBoolean, If.tT, If.tT), If.tT))) - RCCostedPrim(resV, resCost, tC.size) // TODO implement tC.size max eC.size + val resCost = opCost(resV, Seq(cC.cost, tC.cost, eC.cost), costOf("If", SFunc(Vector(SBoolean, If.tT, If.tT), If.tT))) + RCCostedPrim(resV, resCost, tC.size) // TODO costing: implement tC.size max eC.size case rel: Relation[t, _] => val tpe = rel.left.tpe @@ -1755,9 +1736,9 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev costOf(rel.opName, SBigInt.RelationOpType) } else costOf(rel) - opCost(Seq(x.cost, y.cost), opcost) + opCost(value, Seq(x.cost, y.cost), opcost) } - else opCost(Seq(x.cost, y.cost), perKbCostOf(node, x.size.dataSize + y.size.dataSize)) + else opCost(value, Seq(x.cost, y.cost), perKbCostOf(node, x.size.dataSize + y.size.dataSize)) val res = withConstantSize(value, cost) res } @@ -1769,7 +1750,7 @@ trait RuntimeCosting extends CostingRules with DataCosting with 
Slicing { IR: Ev evalNode(ctx, env + (n -> x), body) }(Lazy(eCostedArg)) val eRes = f.elem.eRange.eVal - mkCostedFunc(f, opCost(Nil, costOf(node)), l.tpe.dataSize(SType.DummyValue), eArg, eRes) + mkCostedFunc(f, opCost(f, Nil, costOf(node)), l.tpe.dataSize(SType.DummyValue), eArg, eRes) case l @ FuncValue(Seq((n, argTpe)), body) => val eArg = stypeToElem(argTpe).asElem[Any] @@ -1778,35 +1759,35 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev evalNode(ctx, env + (n -> x), body) }(Lazy(xElem)) val eRes = f.elem.eRange.eVal - mkCostedFunc(f, opCost(Nil, costOf(node)), l.tpe.dataSize(SType.DummyValue), eArg, eRes) + mkCostedFunc(f, opCost(f, Nil, costOf(node)), l.tpe.dataSize(SType.DummyValue), eArg, eRes) case col @ ConcreteCollection(InSeqUnzipped(vs, cs, ss), elemType) => implicit val eAny = stypeToElem(elemType).asElem[Any] val values = colBuilder.fromItems(vs: _*)(eAny) val costs = colBuilder.fromItems(cs: _*) val sizes = colBuilder.fromItems(ss: _*)(sizeElement(eAny)) - RCCostedColl(values, costs, sizes, opCost(cs, costOf(col))) + RCCostedColl(values, costs, sizes, opCost(values, cs, costOf(col))) case sigmastate.Upcast(In(inputC), tpe) => val elem = stypeToElem(tpe.asNumType) val res = upcast(inputC.value)(elem) - withConstantSize(res, opCost(Seq(inputC.cost), costOf(node))) + withConstantSize(res, opCost(res, Seq(inputC.cost), costOf(node))) case sigmastate.Downcast(In(inputC), tpe) => val elem = stypeToElem(tpe.asNumType) val res = downcast(inputC.value)(elem) - withConstantSize(res, opCost(Seq(inputC.cost), costOf(node))) + withConstantSize(res, opCost(res, Seq(inputC.cost), costOf(node))) case LongToByteArray(In(input)) => val inputC = asRep[Costed[Long]](input) val res = sigmaDslBuilder.longToByteArray(inputC.value) - val cost = opCost(Seq(inputC.cost), costOf(node)) + val cost = opCost(res, Seq(inputC.cost), costOf(node)) mkCostedColl(res, 8, cost) case ByteArrayToLong(In(arr)) => val arrC = 
asRep[Costed[Coll[Byte]]](arr) val value = sigmaDslBuilder.byteArrayToLong(arrC.value) - val cost = opCost(Seq(arrC.cost), costOf(node)) + val cost = opCost(value, Seq(arrC.cost), costOf(node)) withConstantSize(value, cost) case Xor(InCollByte(l), InCollByte(r)) => @@ -1814,94 +1795,25 @@ trait RuntimeCosting extends CostingRules with DataCosting with Slicing { IR: Ev val sizes = r.sizes val len = sizes.length val costs = colBuilder.replicate(len, 0) - val cost = opCost(Seq(l.cost, r.cost), perKbCostOf(node, len.toLong)) + val cost = opCost(values, Seq(l.cost, r.cost), perKbCostOf(node, len.toLong)) RCCostedColl(values, costs, sizes, cost) -// TODO should be -// case ErgoAddressToSigmaProp(input) => -// val inputC = evalNode(ctx, env, input) -// withDefaultSize(inputC.value, inputC.cost + costOf(node)) - -// TODO why we need this here? - case sigmastate.Values.ConstantPlaceholder(index, tpe) => - val elem = toLazyElem(stypeToElem(tpe)) - val res = constantPlaceholder(index)(elem) - withConstantSize(res, costOf(node)) - case SubstConstants(InCollByte(bytes), InCollInt(positions), InCollAny(newValues)) => val values = sigmaDslBuilder.substConstants(bytes.values, positions.values, newValues.values)(AnyElement) val len = bytes.size.dataSize + newValues.size.dataSize - val cost = opCost(Seq(bytes.cost, positions.cost, newValues.cost), perKbCostOf(node, len)) + val cost = opCost(values, Seq(bytes.cost, positions.cost, newValues.cost), perKbCostOf(node, len)) mkCostedColl(values, len.toInt, cost) case DecodePoint(InCollByte(bytes)) => val res = sigmaDslBuilder.decodePoint(bytes.values) - RCCostedPrim(res, opCost(Seq(bytes.cost), costOf(node)), SizeGroupElement) - -// case Terms.MethodCall(obj, method, args, _) if obj.tpe.isCollectionLike => -// val xsC = asRep[CostedColl[Any]](evalNode(ctx, env, obj)) -// val (argsVals, argsCosts) = args.map { -// case sfunc: Value[SFunc]@unchecked if sfunc.tpe.isFunc => -// val funC = asRep[CostedFunc[Unit, Any, Any]](evalNode(ctx, env, 
sfunc)).func -// val (calcF, costF) = splitCostedFunc2(funC, okRemoveIsValid = true) -// val cost = xsC.values.zip(xsC.costs.zip(xsC.sizes)).map(costF).sum(intPlusMonoid) -// (calcF, cost) -// case a => -// val aC = eval(a) -// (aC.value, aC.cost) -// }.unzip -// // todo add costOf(node) -// val cost = argsCosts.foldLeft(xsC.cost)({ case (s, e) => s + e }) // + costOf(node) -// val xsV = xsC.value -// val value = (method.name, argsVals) match { -// case (SCollection.IndexOfMethod.name, Seq(e, from)) => xsV.indexOf(e, asRep[Int](from)) -// case (SCollection.IndicesMethod.name, _) => xsV.indices -// case (SCollection.FlatMapMethod.name, Seq(f)) => xsV.flatMap(asRep[Any => Coll[Any]](f)) -// case (SCollection.SegmentLengthMethod.name, Seq(f, from)) => -// xsV.segmentLength(asRep[Any => Boolean](f), asRep[Int](from)) -// case (SCollection.IndexWhereMethod.name, Seq(f, from)) => -// xsV.indexWhere(asRep[Any => Boolean](f), asRep[Int](from)) -// case (SCollection.LastIndexWhereMethod.name, Seq(f, end)) => -// xsV.lastIndexWhere(asRep[Any => Boolean](f), asRep[Int](end)) -// case (SCollection.ZipMethod.name, Seq(col2)) => xsV.zip(asRep[Coll[Any]](col2)) -// case (SCollection.PartitionMethod.name, Seq(f)) => xsV.partition(asRep[Any => Boolean](f)) -// case (SCollection.PatchMethod.name, Seq(from, col, repl)) => -// xsV.patch(asRep[Int](from), asRep[Coll[Any]](col), asRep[Int](repl)) -// case (SCollection.UpdatedMethod.name, Seq(index, elem)) => -// xsV.updated(asRep[Int](index), asRep[Any](elem)) -// case (SCollection.UpdateManyMethod.name, Seq(indexCol, elemCol)) => -// xsV.updateMany(asRep[Coll[Int]](indexCol), asRep[Coll[Any]](elemCol)) -// case _ => error(s"method $method is not supported") -// } -// withConstantSize(value, cost) -// -// case Terms.MethodCall(obj, method, args, _) if obj.tpe.isOption => -// val optC = asRep[CostedOption[Any]](eval(obj)) -// val argsC = args.map(eval) -// (method.name, argsC) match { -// case (SOption.MapMethod.name, Seq(f)) => 
optC.map(asRep[Costed[Any => Any]](f)) -// case (SOption.FilterMethod.name, Seq(f)) => optC.filter(asRep[Costed[Any => Boolean]](f)) -// case _ => error(s"method $method is not supported in object $obj") -// } -// -// case Terms.MethodCall(obj, method, args, typeSubst) if obj.tpe.isBox => -// val boxC = asRep[CostedBox](eval(obj)) -// val argsC = args.map(eval) -// (method.name, argsC) match { -// case (SBox.getRegMethod.name, Seq(index)) => -// val tpe = typeSubst(SBox.tT) -// implicit val elem = stypeToElem(tpe).asElem[Any] -// boxC.getReg(asRep[Int](index.value))(elem) -// case _ if method.objType.coster.isDefined => -// method.objType.coster.get(IR)(boxC, method, argsC) -// case _ => error(s"method $method is not supported in object $obj") -// } + RCCostedPrim(res, opCost(res, Seq(bytes.cost), costOf(node)), SizeGroupElement) // fallback rule for MethodCall, should be the last case in the list - case Terms.MethodCall(obj, method, args, _) if method.objType.coster.isDefined => + case Terms.MethodCall(obj, method, args, typeSubst) if method.objType.coster.isDefined => val objC = eval(obj) val argsC = args.map(eval) - method.objType.coster.get(IR)(objC, method, argsC) + val elems = typeSubst.values.toSeq.map(tpe => liftElem(stypeToElem(tpe).asElem[Any])) + method.objType.coster.get(IR)(objC, method, argsC, elems) case _ => error(s"Don't know how to evalNode($node)", node.sourceContext.toOption) diff --git a/src/main/scala/sigmastate/eval/Sized.scala b/src/main/scala/sigmastate/eval/Sized.scala index 2b51f9e927..f8dedd8808 100644 --- a/src/main/scala/sigmastate/eval/Sized.scala +++ b/src/main/scala/sigmastate/eval/Sized.scala @@ -1,7 +1,7 @@ package sigmastate.eval -import scalan.{Nullable, RType} -import special.collection.{CSizePrim, CSizePair, Size, CSizeOption, CollType, Coll, CSizeColl} +import scalan.RType +import special.collection._ import scalan.RType._ import sigmastate._ import sigmastate.SBigInt.MaxSizeInBytes @@ -9,11 +9,27 @@ import special.sigma._ 
import SType.AnyOps import sigmastate.interpreter.CryptoConstants +/** Type-class to give types a capability to build a Size structure. */ trait Sized[T] { + /** Given data value `x` returns it's size descriptor `Size[T]` */ def size(x: T): Size[T] } + trait SizedLowPriority { - implicit def collIsSized[T: Sized: RType]: Sized[Coll[T]] = (xs: Coll[T]) => new CSizeColl(xs.map(Sized[T].size)) + /** Sized instance for Coll[T]. + * Takes advantage of RType.isConstantSize to use ReplColl representation of Coll when all items are the same. + * When all elements of T are of the same size, then only single Size[T] value is created and replicated + * to the length of source collection `xs`. */ + implicit def collIsSized[T: Sized]: Sized[Coll[T]] = (xs: Coll[T]) => { + implicit val tT = xs.tItem + val sizes = + if (xs.isEmpty) Colls.emptyColl[Size[T]] + else if (xs.tItem.isConstantSize) + Colls.replicate(xs.length, Sized.sizeOf(xs(0))) + else + new CViewColl(xs, Sized[T].size) + new CSizeColl(sizes) + } implicit def optionIsSized[T: Sized]: Sized[Option[T]] = (xs: Option[T]) => new CSizeOption(xs.map(Sized[T].size)) implicit def pairIsSized[A: Sized, B: Sized]: Sized[(A,B)] = (in: (A,B)) => new CSizePair(Sized[A].size(in._1), Sized[B].size(in._2)) } @@ -30,14 +46,14 @@ object Sized extends SizedLowPriority { val SizeGroupElement: Size[GroupElement] = new CSizePrim(CryptoConstants.EncodedGroupElementLength, GroupElementRType) val SizeAvlTree: Size[AvlTree] = new CSizePrim(AvlTreeData.TreeDataSize, AvlTreeRType) - implicit val BooleanIsSized: Sized[Boolean] = (x: Boolean) => SizeBoolean - implicit val ByteIsSized: Sized[Byte] = (x: Byte) => SizeByte - implicit val ShortIsSized: Sized[Short] = (x: Short) => SizeShort - implicit val IntIsSized: Sized[Int] = (x: Int) => SizeInt - implicit val LongIsSized: Sized[Long] = (x: Long) => SizeLong - implicit val BigIntIsSized: Sized[BigInt] = (x: BigInt) => SizeBigInt - implicit val GroupElementIsSized: Sized[GroupElement] = (x: 
GroupElement) => SizeGroupElement - implicit val AvlTreeIsSized: Sized[AvlTree] = (x: AvlTree) => SizeAvlTree + implicit val BooleanIsSized: Sized[Boolean] = (_: Boolean) => SizeBoolean + implicit val ByteIsSized: Sized[Byte] = (_: Byte) => SizeByte + implicit val ShortIsSized: Sized[Short] = (_: Short) => SizeShort + implicit val IntIsSized: Sized[Int] = (_: Int) => SizeInt + implicit val LongIsSized: Sized[Long] = (_: Long) => SizeLong + implicit val BigIntIsSized: Sized[BigInt] = (_: BigInt) => SizeBigInt + implicit val GroupElementIsSized: Sized[GroupElement] = (_: GroupElement) => SizeGroupElement + implicit val AvlTreeIsSized: Sized[AvlTree] = (_: AvlTree) => SizeAvlTree def typeToSized[T](t: RType[T]): Sized[T] = (t match { case BooleanType => Sized[Boolean] @@ -54,7 +70,7 @@ object Sized extends SizedLowPriority { case HeaderRType => headerIsSized case PreHeaderRType => preHeaderIsSized case ContextRType => contextIsSized - case ct: CollType[a] => collIsSized(typeToSized(ct.tItem), ct.tItem) + case ct: CollType[a] => collIsSized(typeToSized(ct.tItem)) case ct: OptionType[a] => optionIsSized(typeToSized(ct.tA)) case ct: PairType[a, b] => pairIsSized(typeToSized(ct.tFst), typeToSized(ct.tSnd)) case _ => sys.error(s"Don't know how to compute Sized for type $t") @@ -104,8 +120,8 @@ object Sized extends SizedLowPriority { sizeOfTokens(b) ) } - implicit val headerIsSized: Sized[Header] = (b: Header) => new CSizePrim(SHeader.dataSize(0.asWrappedType), HeaderRType) - implicit val preHeaderIsSized: Sized[PreHeader] = (b: PreHeader) => new CSizePrim(SPreHeader.dataSize(0.asWrappedType), PreHeaderRType) + implicit val headerIsSized: Sized[Header] = (_: Header) => new CSizePrim(SHeader.dataSize(0.asWrappedType), HeaderRType) + implicit val preHeaderIsSized: Sized[PreHeader] = (_: PreHeader) => new CSizePrim(SPreHeader.dataSize(0.asWrappedType), PreHeaderRType) implicit val contextIsSized: Sized[Context] = (ctx: Context) => { val outputs = sizeOf(ctx.OUTPUTS) val inputs 
= sizeOf(ctx.INPUTS) diff --git a/src/main/scala/sigmastate/eval/TreeBuilding.scala b/src/main/scala/sigmastate/eval/TreeBuilding.scala index c74693d4b3..a87f5eecc8 100644 --- a/src/main/scala/sigmastate/eval/TreeBuilding.scala +++ b/src/main/scala/sigmastate/eval/TreeBuilding.scala @@ -1,32 +1,22 @@ package sigmastate.eval -import scala.collection.mutable.ArrayBuffer -import sigmastate._ -import sigmastate.Values.{BlockValue, BoolValue, BooleanConstant, ConcreteCollection, Constant, ConstantNode, EvaluatedCollection, FalseLeaf, FuncValue, GroupElementConstant, SValue, SigmaBoolean, SigmaPropConstant, ValDef, ValUse, Value} -import sigmastate.serialization.OpCodes._ + +import sigmastate.Values.{BlockValue, BoolValue, Constant, ConstantNode, EvaluatedCollection, SValue, SigmaPropConstant, ValDef, ValUse, Value} import org.ergoplatform._ -import java.math.BigInteger import org.ergoplatform.{Height, Inputs, Outputs, Self} import sigmastate._ -import sigmastate.lang.Terms.{OperationId, ValueOps} +import sigmastate.lang.Terms.ValueOps import sigmastate.serialization.OpCodes._ -import sigmastate.serialization.{ConstantStore, ValueSerializer} -import sigmastate.utxo.{CostTable, ExtractAmount, SizeOf} -import ErgoLikeContext._ +import sigmastate.serialization.ConstantStore -import scala.collection.mutable import scala.collection.mutable.ArrayBuffer -import scala.reflect.{ClassTag, classTag} -import scala.util.Try import SType._ -import org.bouncycastle.math.ec.ECPoint import sigmastate.basics.DLogProtocol.ProveDlog import sigmastate.basics.ProveDHTuple -import sigmastate.interpreter.CryptoConstants.EcPointType -import sigmastate.lang.{SigmaBuilder, SigmaTyper} +import sigmastate.lang.SigmaTyper -trait TreeBuilding extends RuntimeCosting { IR: Evaluation => +trait TreeBuilding extends RuntimeCosting { IR: IRContext => import Liftables._ import Context._ import SigmaProp._ @@ -86,6 +76,7 @@ trait TreeBuilding extends RuntimeCosting { IR: Evaluation => def unapply(op: 
BinOp[_,_]): Option[(BoolValue, BoolValue) => Value[SBoolean.type]] = op match { case And => Some(builder.mkBinAnd) case Or => Some(builder.mkBinOr) + case BinaryXorOp => Some(builder.mkBinXor) case _ => None } } @@ -141,6 +132,7 @@ trait TreeBuilding extends RuntimeCosting { IR: Evaluation => defId: Int, constantsProcessing: Option[ConstantStore]): SValue = { import builder._ + import TestSigmaDslBuilder._ def recurse[T <: SType](s: Sym) = buildValue(ctx, mainG, env, s, defId, constantsProcessing).asValue[T] object In { def unapply(s: Sym): Option[SValue] = Some(buildValue(ctx, mainG, env, s, defId, constantsProcessing)) } s match { @@ -172,12 +164,10 @@ trait TreeBuilding extends RuntimeCosting { IR: Evaluation => } case Def(wc: LiftedConst[a,_]) => val tpe = elemToSType(s.elem) - val t = Evaluation.stypeToRType(tpe) - val tRes = Evaluation.toErgoTreeType(t) - val v = Evaluation.fromDslData(wc.constValue, tRes) - mkConstant[tpe.type](v.asInstanceOf[tpe.WrappedType], tpe) + mkConstant[tpe.type](wc.constValue.asInstanceOf[tpe.WrappedType], tpe) case Def(IsContextProperty(v)) => v + case Def(TestSigmaDslBuilderCtor()) => Global case Def(ApplyBinOp(IsArithOp(opCode), xSym, ySym)) => val Seq(x, y) = Seq(xSym, ySym).map(recurse) @@ -185,6 +175,9 @@ trait TreeBuilding extends RuntimeCosting { IR: Evaluation => case Def(ApplyBinOp(IsRelationOp(mkNode), xSym, ySym)) => val Seq(x, y) = Seq(xSym, ySym).map(recurse) mkNode(x, y) + case Def(ApplyBinOp(IsLogicalBinOp(mkNode), xSym, ySym)) => + val Seq(x, y) = Seq(xSym, ySym).map(recurse) + mkNode(x, y) case Def(ApplyBinOpLazy(IsLogicalBinOp(mkNode), xSym, ySym)) => val Seq(x, y) = Seq(xSym, ySym).map(recurse) mkNode(x, y) @@ -221,6 +214,10 @@ trait TreeBuilding extends RuntimeCosting { IR: Evaluation => mkArith(x.asNumValue, y.asNumValue, MaxCode) case BIM.modQ(In(x)) => mkModQ(x.asBigInt) + case BIM.plusModQ(In(l), In(r)) => + mkPlusModQ(l.asBigInt, r.asBigInt) + case BIM.minusModQ(In(l), In(r)) => + mkMinusModQ(l.asBigInt, 
r.asBigInt) case Def(ApplyBinOp(IsArithOp(opCode), xSym, ySym)) => val Seq(x, y) = Seq(xSym, ySym).map(recurse) mkArith(x.asNumValue, y.asNumValue, opCode) @@ -261,19 +258,22 @@ trait TreeBuilding extends RuntimeCosting { IR: Evaluation => case CollM.foldLeft(colSym, zeroSym, pSym) => val Seq(col, zero, p) = Seq(colSym, zeroSym, pSym).map(recurse) mkFold(col, zero, p.asFunc) + case CollM.filter(colSym, pSym) => + val Seq(col, p) = Seq(colSym, pSym).map(recurse) + mkFilter(col.asCollection[SType], p.asFunc) case Def(MethodCall(receiver, m, argsSyms, _)) if receiver.elem.isInstanceOf[CollElem[_, _]] => val colSym = receiver.asInstanceOf[Rep[Coll[Any]]] val args = argsSyms.map(_.asInstanceOf[Sym]).map(recurse) val col = recurse(colSym).asCollection[SType] - val colTpe = col.tpe // elemToSType(colSym.elem).asCollection + val colTpe = col.tpe val method = SCollection.methods.find(_.name == m.getName).getOrElse(error(s"unknown method Coll.${m.getName}")) val typeSubst = (method, args) match { case (mth @ SCollection.FlatMapMethod, Seq(f)) => val typeSubst = Map(SCollection.tOV -> f.asFunc.tpe.tRange.asCollection.elemType) typeSubst - case (mth @ SCollection.ZipMethod, Seq(coll: EvaluatedCollection[_, _])) => - val typeSubst = Map(SCollection.tOV -> coll.elementType) + case (mth @ SCollection.ZipMethod, Seq(coll)) => + val typeSubst = Map(SCollection.tOV -> coll.asCollection[SType].tpe.elemType) typeSubst case (mth, _) => SigmaTyper.emptySubst } @@ -289,8 +289,7 @@ trait TreeBuilding extends RuntimeCosting { IR: Evaluation => if (regId.isConst) mkExtractRegisterAs(box.asBox, ErgoBox.allRegisters(regId.asValue), tpe) else - builder.mkMethodCall(box, SBox.getRegMethod, IndexedSeq(recurse(regId)), - Map(SBox.tT -> tpe.elemType)) + error(s"Non constant expressions (${regId.rhs}) are not supported in getReg") case BoxM.creationInfo(In(box)) => mkExtractCreationInfo(box.asBox) case BoxM.id(In(box)) => @@ -380,7 +379,7 @@ trait TreeBuilding extends RuntimeCosting { IR: 
Evaluation => mkIf(cond, thenP, elseP) case Def(Tup(In(x), In(y))) => - mkTuple(Seq(x, y)) + mkTuple(Seq(x, y)) case Def(First(pair)) => mkSelectField(recurse(pair), 1) case Def(Second(pair)) => diff --git a/src/main/scala/sigmastate/eval/package.scala b/src/main/scala/sigmastate/eval/package.scala index bd6c563ddd..d5abcedf35 100644 --- a/src/main/scala/sigmastate/eval/package.scala +++ b/src/main/scala/sigmastate/eval/package.scala @@ -1,6 +1,64 @@ package sigmastate +import java.math.BigInteger + +import org.ergoplatform.ErgoBox +import scalan.RType +import scorex.crypto.hash.Digest32 +import sigmastate.Values.SigmaBoolean +import sigmastate.interpreter.CryptoConstants.EcPointType +import special.collection.{Coll, CollBuilder} +import special.sigma._ +import supertagged.TaggedType + +import scala.language.implicitConversions + package object eval { + /** The primary reference to Global instance of SigmaDsl. + * Besides operations of SigmaDslBuilder class, this instance also contains methods, + * which are not available in Dsl code, and which are not in SigmaDslBuilder interface. + * For example methods like `Box`, `toErgoBox` are available here, but not available in Dsl. + * @see SigmaDslBuilder + */ val SigmaDsl = CostingSigmaDslBuilder - val Colls = SigmaDsl.Colls + + /** The primary reference to global Coll operations. Can be used to create collections from Array etc. + * @see CollBuilder + */ + val Colls: CollBuilder = SigmaDsl.Colls + + /** Constructor of tuple value with more than 2 items. + * Such long tuples are represented as Coll[Any]. + * This representaion of tuples is different from representation of pairs (x, y), + * where Tuple2 type is used instead of Coll. 
*/ + def TupleColl(items: Any*): Coll[Any] = Colls.fromItems(items:_*)(RType.AnyType) + + trait BaseDigestColl extends TaggedType[Coll[Byte]] + + type DigestColl = BaseDigestColl#Type + + object Digest32Coll extends BaseDigestColl + + type Digest32Coll = Digest32Coll.Type + implicit val Digest32CollRType: RType[Digest32Coll] = RType[Coll[Byte]].asInstanceOf[RType[Digest32Coll] ] + implicit val Digest32RType: RType[Digest32] = RType[Array[Byte]].asInstanceOf[RType[Digest32] ] + + /** Implicit conversions between Dsl type and the type wrapped by the corresponding type Dsl type. + * Here BigInt is Dsl type and BigInteger is wrapped type. + * @see `special.sigma.CBigInt` + */ + implicit def bigIntegerToBigInt(bi: BigInteger): BigInt = SigmaDsl.BigInt(bi) + implicit def bigIntToBigInteger(bi: BigInt): BigInteger = SigmaDsl.toBigInteger(bi) + + implicit def ecPointToGroupElement(p: EcPointType): GroupElement = SigmaDsl.GroupElement(p) + implicit def groupElementToECPoint(p: GroupElement): EcPointType = SigmaDsl.toECPoint(p).asInstanceOf[EcPointType] + + implicit def sigmaBooleanToSigmaProp(p: SigmaBoolean): SigmaProp = SigmaDsl.SigmaProp(p) + implicit def sigmaPropToSigmaBoolean(p: SigmaProp): SigmaBoolean = SigmaDsl.toSigmaBoolean(p) + + implicit def avlTreeDataToAvlTree(p: AvlTreeData): AvlTree = SigmaDsl.avlTree(p) + implicit def avlTreeToAvlTreeData(p: AvlTree): AvlTreeData = SigmaDsl.toAvlTreeData(p) + + implicit def ergoBoxToBox(p: ErgoBox): Box = SigmaDsl.Box(p) + implicit def boxToErgoBox(p: Box): ErgoBox = SigmaDsl.toErgoBox(p) } diff --git a/src/main/scala/sigmastate/interpreter/Interpreter.scala b/src/main/scala/sigmastate/interpreter/Interpreter.scala index 381e9da977..aab62c3310 100644 --- a/src/main/scala/sigmastate/interpreter/Interpreter.scala +++ b/src/main/scala/sigmastate/interpreter/Interpreter.scala @@ -2,33 +2,34 @@ package sigmastate.interpreter import java.util -import org.bitbucket.inkytonik.kiama.rewriting.Rewriter.{strategy, rule, everywherebu} 
+import org.bitbucket.inkytonik.kiama.rewriting.Rewriter.{everywherebu, rule, strategy} import org.bitbucket.inkytonik.kiama.rewriting.Strategy -import sigmastate.basics.DLogProtocol.{FirstDLogProverMessage, DLogInteractiveProver} +import org.ergoplatform.ErgoConstants +import sigmastate.basics.DLogProtocol.{DLogInteractiveProver, FirstDLogProverMessage} import scorex.util.ScorexLogging import sigmastate.SCollection.SByteArray import sigmastate.Values._ import sigmastate.eval.{IRContext, Sized} import sigmastate.lang.Terms.ValueOps import sigmastate.basics._ -import sigmastate.interpreter.Interpreter.{VerificationResult, ScriptEnv} -import sigmastate.lang.exceptions.{InterpreterException, CosterException} +import sigmastate.interpreter.Interpreter.{ScriptEnv, VerificationResult} +import sigmastate.lang.exceptions.{CosterException, InterpreterException} import sigmastate.serialization.ValueSerializer import sigmastate.utxo.DeserializeContext import sigmastate.{SType, _} +import org.ergoplatform.validation.ValidationRules._ import scala.util.Try - trait Interpreter extends ScorexLogging { import Interpreter.ReductionResult - type CTX <: Context + type CTX <: InterpreterContext - type ProofT = UncheckedTree //todo: ProofT <: UncheckedTree ? 
+ type ProofT = UncheckedTree - final val MaxByteArrayLength = 10000 + final val MaxByteArrayLength = ErgoConstants.MaxByteArrayLength.get /** * Max cost of a script interpreter can accept @@ -40,7 +41,10 @@ trait Interpreter extends ScorexLogging { if (context.extension.values.contains(d.id)) context.extension.values(d.id) match { case eba: EvaluatedValue[SByteArray]@unchecked if eba.tpe == SByteArray => - Some(ValueSerializer.deserialize(eba.value)) + val script = ValueSerializer.deserialize(eba.value.toArray) + CheckDeserializedScriptType(d, script) { + Some(script) + } case _ => None } else @@ -80,7 +84,7 @@ trait Interpreter extends ScorexLogging { def calcResult(context: special.sigma.Context, calcF: Rep[IR.Context => Any]): special.sigma.SigmaProp = { import IR._; import Context._; import SigmaProp._ val res = calcF.elem.eRange.asInstanceOf[Any] match { - case sp: SigmaPropElem[_] => + case _: SigmaPropElem[_] => val valueFun = compile[SContext, SSigmaProp, Context, SigmaProp](getDataEnv, asRep[Context => SigmaProp](calcF)) val (sp, _) = valueFun(context) sp @@ -101,63 +105,78 @@ trait Interpreter extends ScorexLogging { * @return */ def reduceToCrypto(context: CTX, env: ScriptEnv, exp: Value[SType]): Try[ReductionResult] = Try { - import IR._; import Size._; import Context._; import SigmaProp._ - val costingRes @ Pair(calcF, costF) = doCostingEx(env, exp, true) - IR.onCostingResult(env, exp, costingRes) + import IR._ + implicit val vs = context.validationSettings + trySoftForkable[ReductionResult](whenSoftFork = TrivialProp.TrueProp -> 0) { + val costingRes @ Pair(calcF, costF) = doCostingEx(env, exp, true) + IR.onCostingResult(env, exp, costingRes) - verifyCostFunc(asRep[Any => Int](costF)).fold(t => throw t, x => x) + CheckCostFunc(IR)(asRep[Any => Int](costF)) { } - verifyIsProven(calcF).fold(t => throw t, x => x) + CheckCalcFunc(IR)(calcF) { } - val costingCtx = context.toSigmaContext(IR, isCost = true) - val estimatedCost = 
checkCostWithContext(costingCtx, exp, costF, maxCost) - .fold(t => throw new CosterException( - s"Script cannot be executed $exp: ", exp.sourceContext.toList.headOption, Some(t)), identity) - -// println(s"reduceToCrypto: estimatedCost: $estimatedCost") - - // check calc - val calcCtx = context.toSigmaContext(IR, isCost = false) - val res = calcResult(calcCtx, calcF) - SigmaDsl.toSigmaBoolean(res) -> estimatedCost + val costingCtx = context.toSigmaContext(IR, isCost = true) + val estimatedCost = CheckCostWithContext(IR)(costingCtx, exp, costF, maxCost) + + // check calc + val calcCtx = context.toSigmaContext(IR, isCost = false) + val res = calcResult(calcCtx, calcF) + SigmaDsl.toSigmaBoolean(res) -> estimatedCost + } } def reduceToCrypto(context: CTX, exp: Value[SType]): Try[ReductionResult] = reduceToCrypto(context, Interpreter.emptyEnv, exp) - def verify(env: ScriptEnv, exp: ErgoTree, + /** Extracts proposition for ErgoTree handing soft-fork condition. + * @note soft-fork handler */ + def propositionFromErgoTree(tree: ErgoTree, ctx: CTX): SigmaPropValue = { + val prop = tree.root match { + case Right(_) => + tree.toProposition(tree.isConstantSegregation) + case Left(UnparsedErgoTree(_, error)) if ctx.validationSettings.isSoftFork(error) => + TrueSigmaProp + case Left(UnparsedErgoTree(_, error)) => + throw new InterpreterException( + "Script has not been recognized due to ValidationException, and it cannot be accepted as soft-fork.", None, Some(error)) + } + prop + } + + def verify(env: ScriptEnv, tree: ErgoTree, context: CTX, proof: Array[Byte], message: Array[Byte]): Try[VerificationResult] = Try { - val propTree = applyDeserializeContext(context, exp.proposition) + val prop = propositionFromErgoTree(tree, context) + implicit val vs = context.validationSettings + val propTree = trySoftForkable[BoolValue](whenSoftFork = TrueLeaf) { + applyDeserializeContext(context, prop) + } + + // here we assume that when `propTree` is TrueProp then `reduceToCrypto` always 
succeeds + // and the rest of the verification is also trivial val (cProp, cost) = reduceToCrypto(context, env, propTree).get val checkingResult = cProp match { - case TrueLeaf => true - case FalseLeaf => false - case cProp: SigmaBoolean => - cProp match { - case TrivialProp.TrueProp => true - case TrivialProp.FalseProp => false - case _ => - // Perform Verifier Steps 1-3 - SigSerializer.parseAndComputeChallenges(cProp, proof) match { - case NoProof => false - case sp: UncheckedSigmaTree => - // Perform Verifier Step 4 - val newRoot = computeCommitments(sp).get.asInstanceOf[UncheckedSigmaTree] // todo: is this "asInstanceOf" necessary? - - /** - * Verifier Steps 5-6: Convert the tree to a string s for input to the Fiat-Shamir hash function, - * using the same conversion as the prover in 7 - * Accept the proof if the challenge at the root of the tree is equal to the Fiat-Shamir hash of s - * (and, if applicable, the associated data). Reject otherwise. - */ - val expectedChallenge = CryptoFunctions.hashFn(FiatShamirTree.toBytes(newRoot) ++ message) - util.Arrays.equals(newRoot.challenge, expectedChallenge) - } + case TrivialProp.TrueProp => true + case TrivialProp.FalseProp => false + case _ => + // Perform Verifier Steps 1-3 + SigSerializer.parseAndComputeChallenges(cProp, proof) match { + case NoProof => false + case sp: UncheckedSigmaTree => + // Perform Verifier Step 4 + val newRoot = computeCommitments(sp).get.asInstanceOf[UncheckedSigmaTree] + + /** + * Verifier Steps 5-6: Convert the tree to a string s for input to the Fiat-Shamir hash function, + * using the same conversion as the prover in 7 + * Accept the proof if the challenge at the root of the tree is equal to the Fiat-Shamir hash of s + * (and, if applicable, the associated data). Reject otherwise. 
+ */ + val expectedChallenge = CryptoFunctions.hashFn(FiatShamirTree.toBytes(newRoot) ++ message) + util.Arrays.equals(newRoot.challenge, expectedChallenge) } -// case _: Value[_] => false } checkingResult -> cost } @@ -189,7 +208,8 @@ trait Interpreter extends ScorexLogging { verify(Interpreter.emptyEnv, exp, ctxv, proverResult.proof, message) } - def verify(env: ScriptEnv, exp: ErgoTree, + def verify(env: ScriptEnv, + exp: ErgoTree, context: CTX, proverResult: ProverResult, message: Array[Byte]): Try[VerificationResult] = { @@ -198,7 +218,6 @@ trait Interpreter extends ScorexLogging { } - //todo: do we need the method below? def verify(exp: ErgoTree, context: CTX, proof: ProofT, diff --git a/src/main/scala/sigmastate/interpreter/Context.scala b/src/main/scala/sigmastate/interpreter/InterpreterContext.scala similarity index 86% rename from src/main/scala/sigmastate/interpreter/Context.scala rename to src/main/scala/sigmastate/interpreter/InterpreterContext.scala index eb2099be7f..b7efbe325c 100644 --- a/src/main/scala/sigmastate/interpreter/Context.scala +++ b/src/main/scala/sigmastate/interpreter/InterpreterContext.scala @@ -1,5 +1,6 @@ package sigmastate.interpreter +import org.ergoplatform.validation.SigmaValidationSettings import sigmastate.SType import sigmastate.Values.EvaluatedValue import sigmastate.eval.Evaluation @@ -40,12 +41,13 @@ object ContextExtension { } -trait Context { +trait InterpreterContext { val extension: ContextExtension + val validationSettings: SigmaValidationSettings - def withExtension(newExtension: ContextExtension): Context + def withExtension(newExtension: ContextExtension): InterpreterContext - def withBindings(bindings: (Byte, EvaluatedValue[_ <: SType])*): Context = { + def withBindings(bindings: (Byte, EvaluatedValue[_ <: SType])*): InterpreterContext = { val ext = extension.add(bindings: _*) withExtension(ext) } diff --git a/src/main/scala/sigmastate/interpreter/ProverInterpreter.scala 
b/src/main/scala/sigmastate/interpreter/ProverInterpreter.scala index 438edf07b8..37a4176422 100644 --- a/src/main/scala/sigmastate/interpreter/ProverInterpreter.scala +++ b/src/main/scala/sigmastate/interpreter/ProverInterpreter.scala @@ -4,13 +4,7 @@ import java.util import gf2t.{GF2_192, GF2_192_Poly} import org.bitbucket.inkytonik.kiama.attribution.AttributionCore -import sigmastate.basics.DLogProtocol._ -import sigmastate._ -import sigmastate.utils.{Helpers, SigmaByteReader, SigmaByteWriter} -import Values._ -import scalan.util.CollectionUtil._ -import scala.util.Try -import org.bitbucket.inkytonik.kiama.rewriting.Rewriter.{everywherebu, everywheretd, rule} +import org.bitbucket.inkytonik.kiama.rewriting.Rewriter.{rule, everywheretd, everywherebu} import org.bitbucket.inkytonik.kiama.rewriting.Strategy import scalan.util.CollectionUtil._ import scorex.util.encode.Base16 @@ -18,9 +12,9 @@ import sigmastate.Values._ import sigmastate._ import sigmastate.basics.DLogProtocol._ import sigmastate.basics.VerifierMessage.Challenge -import sigmastate.basics.{DiffieHellmanTupleInteractiveProver, DiffieHellmanTupleProverInput, ProveDHTuple, SigmaProtocolPrivateInput} +import sigmastate.basics.{ProveDHTuple, SigmaProtocolPrivateInput, DiffieHellmanTupleInteractiveProver, DiffieHellmanTupleProverInput} import sigmastate.serialization.SigmaSerializer -import sigmastate.utils.{Helpers, SigmaByteReader, SigmaByteWriter} +import sigmastate.utils.{SigmaByteReader, SigmaByteWriter, Helpers} import scala.util.Try @@ -130,34 +124,21 @@ trait ProverInterpreter extends Interpreter with AttributionCore { def prove(exp: ErgoTree, context: CTX, message: Array[Byte]): Try[CostedProverResult] = prove(emptyEnv, exp, context, message) - def prove(env: ScriptEnv, exp: ErgoTree, ctx: CTX, message: Array[Byte]): Try[CostedProverResult] = Try { + def prove(env: ScriptEnv, tree: ErgoTree, ctx: CTX, message: Array[Byte]): Try[CostedProverResult] = Try { import TrivialProp._ - val propTree = 
applyDeserializeContext(ctx, exp.proposition) + val prop = propositionFromErgoTree(tree, ctx) + val propTree = applyDeserializeContext(ctx, prop) val tried = reduceToCrypto(ctx, env, propTree) val (reducedProp, cost) = tried.fold(t => throw t, identity) def errorReducedToFalse = error("Script reduced to false") val proofTree = reducedProp match { - case BooleanConstant(boolResult) => - if (boolResult) NoProof - else errorReducedToFalse - case sigmaBoolean: SigmaBoolean => - sigmaBoolean match { - case TrueProp => NoProof - case FalseProp => errorReducedToFalse - case _ => - val unprovenTree = convertToUnproven(sigmaBoolean) - prove(unprovenTree, message) - } - case _ => - error(s"Unexpected result of reduceToCrypto($ctx, $env, $propTree)") - // TODO this case should be removed, because above cases should cover all possible variants - // val sigmaBoolean = Try { reducedProp.asInstanceOf[SigmaBoolean] } - // .recover { case _ => throw new InterpreterException(s"failed to cast to SigmaBoolean: $reducedProp") } - // .get - // val ct = convertToUnproven(sigmaBoolean) - // prove(ct, message) + case TrueProp => NoProof + case FalseProp => errorReducedToFalse + case sigmaTree => + val unprovenTree = convertToUnproven(sigmaTree) + prove(unprovenTree, message) } // Prover Step 10: output the right information into the proof val proof = SigSerializer.toBytes(proofTree) diff --git a/src/main/scala/sigmastate/lang/SigmaBinder.scala b/src/main/scala/sigmastate/lang/SigmaBinder.scala index 777d424b5f..7b2b9aeb62 100644 --- a/src/main/scala/sigmastate/lang/SigmaBinder.scala +++ b/src/main/scala/sigmastate/lang/SigmaBinder.scala @@ -12,7 +12,6 @@ import sigmastate.interpreter.Interpreter.ScriptEnv import sigmastate.lang.SigmaPredef.PredefinedFuncRegistry import sigmastate.lang.Terms._ import sigmastate.lang.exceptions.{BinderException, InvalidArguments} -import sigma.util.Extensions._ object SrcCtxCallbackRewriter extends CallbackRewriter { override def rewriting[T](oldTerm: T, 
newTerm: T): T = (oldTerm, newTerm) match { @@ -50,18 +49,14 @@ class SigmaBinder(env: ScriptEnv, builder: SigmaBuilder, case "EmptyByteArray" => Some(ByteArrayConstant(Array.emptyByteArray)) case "SELF" => Some(Self) case "CONTEXT" => Some(Context) + case "Global" => Some(Global) case "None" => Some(mkNoneValue(NoType)) case _ => None } } // Rule: Coll[Int](...) --> - case e @ Apply(ApplyTypes(Ident("Coll", _), Seq(tpe)), args) => -// args.foreach{ e => -// if (e.tpe != tpe) -// error(s"Invalid construction of collection $e: expected type $tpe, actual type ${e.tpe}", -// e.sourceContext) -// } + case _ @ Apply(ApplyTypes(Ident("Coll", _), Seq(tpe)), args) => Some(mkConcreteCollection(args, tpe)) // Rule: Coll(...) --> @@ -117,7 +112,7 @@ class SigmaBinder(env: ScriptEnv, builder: SigmaBuilder, None case a @ Apply(PKFunc.symNoType, args) => - Some(PKFunc.irBuilder(PKFunc.sym, args).withPropagatedSrcCtx(a.sourceContext)) + Some(PKFunc.irInfo.irBuilder(PKFunc.sym, args).withPropagatedSrcCtx(a.sourceContext)) })))(e) diff --git a/src/main/scala/sigmastate/lang/SigmaBuilder.scala b/src/main/scala/sigmastate/lang/SigmaBuilder.scala index 2184663534..7e492e7cf9 100644 --- a/src/main/scala/sigmastate/lang/SigmaBuilder.scala +++ b/src/main/scala/sigmastate/lang/SigmaBuilder.scala @@ -9,9 +9,9 @@ import sigmastate.Values.{StringConstant, FuncValue, FalseLeaf, Constant, SValue import sigmastate.Values._ import sigmastate._ import sigmastate.interpreter.CryptoConstants -import sigmastate.lang.Constraints.{TypeConstraint2, onlyNumeric2, sameType2} +import sigmastate.lang.Constraints.{TypeConstraint2, sameType2, onlyNumeric2} import sigmastate.basics.DLogProtocol.ProveDlog -import sigmastate.lang.Constraints.{TypeConstraint2, onlyNumeric2, sameType2} +import sigmastate.lang.Constraints.{TypeConstraint2, sameType2, onlyNumeric2} import sigmastate.lang.Terms._ import sigmastate.lang.exceptions.ConstraintFailed import sigmastate.serialization.OpCodes @@ -20,12 +20,13 @@ import 
scalan.Nullable import sigmastate.SOption.SIntOption import sigmastate.basics.ProveDHTuple import sigmastate.eval.{CostingSigmaDslBuilder, Evaluation} +import sigmastate.eval._ import sigmastate.eval.Extensions._ import sigmastate.interpreter.CryptoConstants.EcPointType import special.collection.Coll import special.sigma.{AvlTree, SigmaProp, GroupElement} import sigmastate.lang.SigmaTyper.STypeSubst -import special.sigma.{GroupElement, SigmaProp} +import special.sigma.{SigmaProp, GroupElement} import scala.util.DynamicVariable @@ -97,8 +98,7 @@ trait SigmaBuilder { mapper: Value[SFunc]): Value[SCollection[OV]] def mkFilter[IV <: SType](input: Value[SCollection[IV]], - id: Byte, - condition: Value[SBoolean.type]): Value[SCollection[IV]] + condition: Value[SFunc]): Value[SCollection[IV]] def mkExists[IV <: SType](input: Value[SCollection[IV]], condition: Value[SFunc]): Value[SBoolean.type] @@ -195,6 +195,8 @@ trait SigmaBuilder { def mkConstantPlaceholder[T <: SType](id: Int, tpe: T): Value[SType] def mkCollectionConstant[T <: SType](values: Array[T#WrappedType], elementType: T): Constant[SCollection[T]] + def mkCollectionConstant[T <: SType](values: Coll[T#WrappedType], + elementType: T): Constant[SCollection[T]] def mkStringConcat(left: Constant[SString.type], right: Constant[SString.type]): Value[SString.type] def mkGetVar[T <: SType](varId: Byte, tpe: T): Value[SOption[T]] @@ -219,44 +221,50 @@ trait SigmaBuilder { def mkUnitConstant: Value[SUnit.type] - def liftAny(v: Any): Nullable[SValue] = v match { + /** Created a new Value instance with an appropriate type derived from the given data `obj`. + * If `obj` is already Value, then it is returned as result. + * Uses scalan.Nullable instead of scala.Option to avoid allocation on consensus hot path. 
+ */ + def liftAny(obj: Any): Nullable[SValue] = obj match { + case v: SValue => Nullable(v) case arr: Array[Boolean] => Nullable(mkCollectionConstant[SBoolean.type](arr, SBoolean)) case arr: Array[Byte] => Nullable(mkCollectionConstant[SByte.type](arr, SByte)) case arr: Array[Short] => Nullable(mkCollectionConstant[SShort.type](arr, SShort)) case arr: Array[Int] => Nullable(mkCollectionConstant[SInt.type](arr, SInt)) case arr: Array[Long] => Nullable(mkCollectionConstant[SLong.type](arr, SLong)) - case arr: Array[BigInteger] => Nullable(mkCollectionConstant[SBigInt.type](arr, SBigInt)) + case arr: Array[BigInteger] => Nullable(mkCollectionConstant[SBigInt.type](arr.map(SigmaDsl.BigInt(_)), SBigInt)) case arr: Array[String] => Nullable(mkCollectionConstant[SString.type](arr, SString)) case v: Byte => Nullable(mkConstant[SByte.type](v, SByte)) case v: Short => Nullable(mkConstant[SShort.type](v, SShort)) case v: Int => Nullable(mkConstant[SInt.type](v, SInt)) case v: Long => Nullable(mkConstant[SLong.type](v, SLong)) - case v: BigInteger => Nullable(mkConstant[SBigInt.type](v, SBigInt)) - case n: special.sigma.BigInt => Nullable(mkConstant[SBigInt.type](CostingSigmaDslBuilder.toBigInteger(n), SBigInt)) + case v: BigInteger => Nullable(mkConstant[SBigInt.type](SigmaDsl.BigInt(v), SBigInt)) + case n: special.sigma.BigInt => Nullable(mkConstant[SBigInt.type](n, SBigInt)) - case v: EcPointType => Nullable(mkConstant[SGroupElement.type](v, SGroupElement)) - case ge: GroupElement => Nullable(mkConstant[SGroupElement.type](CostingSigmaDslBuilder.toECPoint(ge).asInstanceOf[EcPointType], SGroupElement)) + case v: EcPointType => Nullable(mkConstant[SGroupElement.type](SigmaDsl.GroupElement(v), SGroupElement)) + case ge: GroupElement => Nullable(mkConstant[SGroupElement.type](ge, SGroupElement)) case b: Boolean => Nullable(if(b) TrueLeaf else FalseLeaf) case v: String => Nullable(mkConstant[SString.type](v, SString)) case b: ErgoBox => Nullable(mkConstant[SBox.type](b, SBox)) 
- case avl: AvlTreeData => Nullable(mkConstant[SAvlTree.type](avl, SAvlTree)) - case avl: AvlTree => Nullable(mkConstant[SAvlTree.type](CostingSigmaDslBuilder.toAvlTreeData(avl), SAvlTree)) + case avl: AvlTreeData => Nullable(mkConstant[SAvlTree.type](SigmaDsl.avlTree(avl), SAvlTree)) + case avl: AvlTree => Nullable(mkConstant[SAvlTree.type](avl, SAvlTree)) - case sb: SigmaBoolean => Nullable(mkConstant[SSigmaProp.type](sb, SSigmaProp)) - case p: SigmaProp => Nullable(mkConstant[SSigmaProp.type](CostingSigmaDslBuilder.toSigmaBoolean(p), SSigmaProp)) + case sb: SigmaBoolean => Nullable(mkConstant[SSigmaProp.type](SigmaDsl.SigmaProp(sb), SSigmaProp)) + case p: SigmaProp => Nullable(mkConstant[SSigmaProp.type](p, SSigmaProp)) case coll: Coll[a] => - implicit val tA = coll.tItem - Nullable(coll.toTreeData) - case v: SValue => Nullable(v) - case _ => Nullable.None + val tpeItem = Evaluation.rtypeToSType(coll.tItem) + Nullable(mkCollectionConstant(coll.asInstanceOf[SCollection[SType]#WrappedType], tpeItem)) + + case _ => + Nullable.None } - def unliftAny(v: SValue): Nullable[Any] = v match { - case Constant(v, t) => Nullable(v) + def unliftAny(value: SValue): Nullable[Any] = value match { + case Constant(v, _) => Nullable(v) case _ => Nullable.None } } @@ -404,9 +412,8 @@ class StdSigmaBuilder extends SigmaBuilder { Slice(input, from, until).withSrcCtx(currentSrcCtx.value) override def mkFilter[IV <: SType](input: Value[SCollection[IV]], - id: Byte, - condition: Value[SBoolean.type]): Value[SCollection[IV]] = - Filter(input, id, condition).withSrcCtx(currentSrcCtx.value) + condition: Value[SFunc]): Value[SCollection[IV]] = + Filter(input, condition).withSrcCtx(currentSrcCtx.value) override def mkExists[IV <: SType](input: Value[SCollection[IV]], condition: Value[SFunc]): Value[SBoolean.type] = @@ -575,9 +582,17 @@ class StdSigmaBuilder extends SigmaBuilder { ConstantPlaceholder[T](id, tpe).withSrcCtx(currentSrcCtx.value) override def mkCollectionConstant[T <: 
SType](values: Array[T#WrappedType], - elementType: T): Constant[SCollection[T]] = + elementType: T): Constant[SCollection[T]] = { + implicit val tElement = Evaluation.stypeToRType(elementType) + ConstantNode[SCollection[T]](Colls.fromArray(values), SCollection(elementType)) + .withSrcCtx(currentSrcCtx.value).asInstanceOf[ConstantNode[SCollection[T]]] + } + + override def mkCollectionConstant[T <: SType](values: Coll[T#WrappedType], + elementType: T): Constant[SCollection[T]] = { ConstantNode[SCollection[T]](values, SCollection(elementType)) - .withSrcCtx(currentSrcCtx.value).asInstanceOf[ConstantNode[SCollection[T]]] + .withSrcCtx(currentSrcCtx.value).asInstanceOf[ConstantNode[SCollection[T]]] + } override def mkStringConcat(left: Constant[SString.type], right: Constant[SString.type]): Value[SString.type] = StringConstant(left.value + right.value).withSrcCtx(currentSrcCtx.value) diff --git a/src/main/scala/sigmastate/lang/SigmaCompiler.scala b/src/main/scala/sigmastate/lang/SigmaCompiler.scala index 3468ec010c..7ce2e4a6e0 100644 --- a/src/main/scala/sigmastate/lang/SigmaCompiler.scala +++ b/src/main/scala/sigmastate/lang/SigmaCompiler.scala @@ -21,7 +21,7 @@ class SigmaCompiler(networkPrefix: NetworkPrefix, builder: SigmaBuilder) { def parse(x: String): SValue = { SigmaParser(x, builder) match { - case Success(v, i) => v + case Success(v, _) => v case f: Parsed.Failure[_,String] => throw new ParserException(s"Syntax error: $f", Some(SourceContext.fromParserFailure(f))) } diff --git a/src/main/scala/sigmastate/lang/SigmaPredef.scala b/src/main/scala/sigmastate/lang/SigmaPredef.scala index 57c231e464..401393b016 100644 --- a/src/main/scala/sigmastate/lang/SigmaPredef.scala +++ b/src/main/scala/sigmastate/lang/SigmaPredef.scala @@ -3,30 +3,40 @@ package sigmastate.lang import org.ergoplatform.ErgoAddressEncoder.NetworkPrefix import org.ergoplatform.{ErgoAddressEncoder, P2PKAddress} import scalan.Nullable -import scorex.util.encode.{Base58, Base64} -import 
sigmastate.SCollection.{SByteArray, SIntArray} +import scorex.util.encode.{Base64, Base58} +import sigmastate.SCollection.{SIntArray, SByteArray} import sigmastate.SOption._ -import sigmastate.Values.{BoolValue, ByteArrayConstant, Constant, EvaluatedValue, IntValue, SValue, SigmaPropConstant, SigmaPropValue, StringConstant, Value} +import sigmastate.Values.{StringConstant, Constant, EvaluatedValue, SValue, IntValue, SigmaPropConstant, ConstantPlaceholder, BoolValue, Value, ByteArrayConstant, SigmaPropValue, ValueCompanion} import sigmastate._ import sigmastate.lang.Terms._ import sigmastate.lang.exceptions.InvalidArguments import sigmastate.serialization.ValueSerializer +import sigmastate.utxo.{GetVar, DeserializeContext, DeserializeRegister, SelectField} object SigmaPredef { type IrBuilderFunc = PartialFunction[(SValue, Seq[SValue]), SValue] + /** Metadata for predefined function. + * @param irBuilder Builder of SigmaIR node which is equivalent to function application + * Rule: Apply(f, args) --> irBuilder(f, args) + */ + case class PredefFuncInfo(irBuilder: IrBuilderFunc) + case class PredefinedFunc( /** A name which is used in scripts */ name: String, /** Function declaration without body */ declaration: Lambda, - /** Builder of SigmaIR node which is equivalent to function application - * Rule: Apply(f, args) --> irBuilder(f, args) */ - irBuilder: IrBuilderFunc) { + /** Metadata for this function */ + irInfo: PredefFuncInfo, + docInfo: OperationInfo) { val sym: Ident = Ident(name, declaration.tpe) val symNoType: Ident = Ident(name, NoType) + + def argInfo(argName: String): ArgInfo = + docInfo.args.find(_.name == argName).get } class PredefinedFuncRegistry(builder: SigmaBuilder) { @@ -34,30 +44,63 @@ object SigmaPredef { import builder._ /** Type variable used in the signatures of global functions below. 
*/ - private val tT = STypeIdent("T") - private val tK = STypeIdent("K") - private val tL = STypeIdent("L") - private val tR = STypeIdent("R") - private val tO = STypeIdent("O") + private val tT = STypeVar("T") + private val tK = STypeVar("K") + private val tL = STypeVar("L") + private val tR = STypeVar("R") + private val tO = STypeVar("O") private val undefined: IrBuilderFunc = PartialFunction.empty[(SValue, Seq[SValue]), SValue] val AllOfFunc = PredefinedFunc("allOf", Lambda(IndexedSeq("conditions" -> SCollection(SBoolean)), SBoolean, None), - { case (_, Seq(col: Value[SCollection[SBoolean.type]]@unchecked)) => mkAND(col) } + PredefFuncInfo({ case (_, Seq(col: Value[SCollection[SBoolean.type]]@unchecked)) => mkAND(col) }), + OperationInfo(AND, "Returns true if \\emph{all} the elements in collection are \\lst{true}.", + Seq(ArgInfo("conditions", "a collection of conditions"))) ) val AnyOfFunc = PredefinedFunc("anyOf", Lambda(Vector("conditions" -> SCollection(SBoolean)), SBoolean, None), - { case (_, Seq(col: Value[SCollection[SBoolean.type]]@unchecked)) => mkOR(col) } + PredefFuncInfo( { case (_, Seq(col: Value[SCollection[SBoolean.type]]@unchecked)) => mkOR(col) }), + OperationInfo(OR, "Returns true if \\emph{any} of the elements in collection are \\lst{true}.", + Seq(ArgInfo("conditions", "a collection of conditions"))) + ) + + val XorOfFunc = PredefinedFunc("xorOf", + Lambda(Vector("conditions" -> SCollection(SBoolean)), SBoolean, None), + PredefFuncInfo({ case (_, Seq(col: Value[SCollection[SBoolean.type]]@unchecked)) => mkXorOf(col) }), + OperationInfo(XorOf, "Similar to \\lst{allOf}, but performing logical XOR operation between all conditions instead of \\lst{&&}", + Seq(ArgInfo("conditions", "a collection of conditions"))) + ) + + val AllZKFunc = PredefinedFunc("allZK", + Lambda(IndexedSeq("propositions" -> SCollection(SSigmaProp)), SSigmaProp, None), + PredefFuncInfo(undefined), + OperationInfo(SigmaAnd, "Returns sigma proposition which is proven when 
\\emph{all} the elements in collection are proven.", + Seq(ArgInfo("propositions", "a collection of propositions"))) + ) + + val AnyZKFunc = PredefinedFunc("anyZK", + Lambda(IndexedSeq("propositions" -> SCollection(SSigmaProp)), SSigmaProp, None), + PredefFuncInfo(undefined), + OperationInfo(SigmaOr, "Returns sigma proposition which is proven when \\emph{any} of the elements in collection is proven.", + Seq(ArgInfo("propositions", "a collection of propositions"))) ) val AtLeastFunc = PredefinedFunc("atLeast", Lambda(Vector("k" -> SInt, "conditions" -> SCollection(SSigmaProp)), SSigmaProp, None), - { case (_, Seq(bound: IntValue@unchecked, arr: Value[SCollection[SSigmaProp.type]]@unchecked)) => - mkAtLeast(bound, arr) - } + PredefFuncInfo( + { case (_, Seq(bound: IntValue@unchecked, arr: Value[SCollection[SSigmaProp.type]]@unchecked)) => + mkAtLeast(bound, arr) + }), + OperationInfo(AtLeast, + """ Logical threshold. + | AtLeast has two inputs: integer \lst{bound} and \lst{children} same as in AND/OR. + | The result is true if at least \lst{bound} children are proven. 
+ """.stripMargin, Seq( + ArgInfo("bound", "required minimum of proven children"), + ArgInfo("children", "proposition to be proven/validated"))) ) val OuterJoinFunc = PredefinedFunc( @@ -72,38 +115,57 @@ object SigmaPredef { "inner" -> SFunc(IndexedSeq(tK, tL, tR), tO), ), SCollection(STuple(tK, tO)), None), - undefined + PredefFuncInfo(undefined), + OperationInfo(MethodCall, "", + Seq(ArgInfo("", ""))) ) val ZKProofFunc = PredefinedFunc("ZKProof", Lambda(Vector("block" -> SSigmaProp), SBoolean, None), - { case (_, Seq(block: SigmaPropValue@unchecked)) => mkZKProofBlock(block) } + PredefFuncInfo({ case (_, Seq(block: SigmaPropValue@unchecked)) => mkZKProofBlock(block) }), + OperationInfo(ZKProofBlock, "", + Seq(ArgInfo("", ""))) ) val SigmaPropFunc = PredefinedFunc("sigmaProp", Lambda(Vector("condition" -> SBoolean), SSigmaProp, None), - { case (_, Seq(b: BoolValue@unchecked)) => mkBoolToSigmaProp(b) } + PredefFuncInfo({ case (_, Seq(b: BoolValue@unchecked)) => mkBoolToSigmaProp(b) }), + OperationInfo(BoolToSigmaProp, + """Embedding of \lst{Boolean} values to \lst{SigmaProp} values. + | As an example, this operation allows boolean experessions + | to be used as arguments of \lst{atLeast(..., sigmaProp(boolExpr), ...)} operation. + | During execution results to either \lst{TrueProp} or \lst{FalseProp} values of \lst{SigmaProp} type. 
+ """.stripMargin, + Seq(ArgInfo("condition", "boolean value to embed in SigmaProp value"))) ) val GetVarFunc = PredefinedFunc("getVar", Lambda(Seq(STypeParam(tT)), Vector("varId" -> SByte), SOption(tT), None), - { case (Ident(_, SFunc(_, SOption(rtpe), _)), Seq(id: Constant[SNumericType]@unchecked)) => - mkGetVar(SByte.downcast(id.value.asInstanceOf[AnyVal]), rtpe) - } + PredefFuncInfo( + { case (Ident(_, SFunc(_, SOption(rtpe), _)), Seq(id: Constant[SNumericType]@unchecked)) => + mkGetVar(SByte.downcast(id.value.asInstanceOf[AnyVal]), rtpe) + }), + OperationInfo(GetVar, + "Get context variable with given \\lst{varId} and type.", + Seq(ArgInfo("varId", "\\lst{Byte} identifier of context variable"))) ) def PKFunc(networkPrefix: NetworkPrefix) = PredefinedFunc("PK", Lambda(Vector("input" -> SString), SSigmaProp, None), - { case (_, Seq(arg: EvaluatedValue[SString.type]@unchecked)) => - ErgoAddressEncoder(networkPrefix).fromString(arg.value).get match { - case a: P2PKAddress => mkConstant[SSigmaProp.type](a.pubkey, SSigmaProp) - case a@_ => sys.error(s"unsupported address $a") - } - } + PredefFuncInfo( + { case (_, Seq(arg: EvaluatedValue[SString.type]@unchecked)) => + ErgoAddressEncoder(networkPrefix).fromString(arg.value).get match { + case a: P2PKAddress => SigmaPropConstant(a.pubkey) + case a@_ => sys.error(s"unsupported address $a") + } + }), + OperationInfo(Constant, "", + Seq(ArgInfo("", ""))) ) val DeserializeFunc = PredefinedFunc("deserialize", Lambda(Seq(STypeParam(tT)), Vector("str" -> SString), tT, None), + PredefFuncInfo( { case (Ident(_, SFunc(_, tpe, _)), args) => if (args.length != 1) throw new InvalidArguments(s"Wrong number of arguments in $args: expected one argument") @@ -117,129 +179,134 @@ object SigmaPredef { if (res.tpe != tpe) throw new InvalidArguments(s"Wrong type after deserialization, expected $tpe, got ${res.tpe}") res - } + }), + OperationInfo(Constant, "", + Seq(ArgInfo("", ""))) ) val FromBase58Func = PredefinedFunc("fromBase58", 
Lambda(Vector("input" -> SString), SByteArray, None), - { case (_, Seq(arg: EvaluatedValue[SString.type]@unchecked)) => - ByteArrayConstant(Base58.decode(arg.value).get) - } + PredefFuncInfo( + { case (_, Seq(arg: EvaluatedValue[SString.type]@unchecked)) => + ByteArrayConstant(Base58.decode(arg.value).get) + }), + OperationInfo(Constant, "", + Seq(ArgInfo("", ""))) ) val FromBase64Func = PredefinedFunc("fromBase64", Lambda(Vector("input" -> SString), SByteArray, None), - { case (_, Seq(arg: EvaluatedValue[SString.type]@unchecked)) => - ByteArrayConstant(Base64.decode(arg.value).get) - } + PredefFuncInfo( + { case (_, Seq(arg: EvaluatedValue[SString.type]@unchecked)) => + ByteArrayConstant(Base64.decode(arg.value).get) + }), + OperationInfo(Constant, "", + Seq(ArgInfo("", ""))) ) val Blake2b256Func = PredefinedFunc("blake2b256", Lambda(Vector("input" -> SByteArray), SByteArray, None), - { case (_, Seq(arg: Value[SByteArray]@unchecked)) => - mkCalcBlake2b256(arg) - } + PredefFuncInfo( + { case (_, Seq(arg: Value[SByteArray]@unchecked)) => + mkCalcBlake2b256(arg) + }), + OperationInfo(CalcBlake2b256, "Calculate Blake2b hash from \\lst{input} bytes.", + Seq(ArgInfo("input", "collection of bytes"))) ) val Sha256Func = PredefinedFunc("sha256", Lambda(Vector("input" -> SByteArray), SByteArray, None), - { case (_, Seq(arg: Value[SByteArray]@unchecked)) => - mkCalcSha256(arg) - } + PredefFuncInfo( + { case (_, Seq(arg: Value[SByteArray]@unchecked)) => + mkCalcSha256(arg) + }), + OperationInfo(CalcSha256, "Calculate Sha256 hash from \\lst{input} bytes.", + Seq(ArgInfo("input", "collection of bytes"))) ) val ByteArrayToBigIntFunc = PredefinedFunc("byteArrayToBigInt", Lambda(Vector("input" -> SByteArray), SBigInt, None), - { case (_, Seq(arg: Value[SByteArray]@unchecked)) => - mkByteArrayToBigInt(arg) - } + PredefFuncInfo( + { case (_, Seq(arg: Value[SByteArray]@unchecked)) => + mkByteArrayToBigInt(arg) + }), + OperationInfo(ByteArrayToBigInt, + "Convert big-endian bytes 
representation (Coll[Byte]) to BigInt value.", + Seq(ArgInfo("input", "collection of bytes in big-endian format"))) ) val ByteArrayToLongFunc = PredefinedFunc("byteArrayToLong", Lambda(Vector("input" -> SByteArray), SLong, None), - { case (_, Seq(arg: Value[SByteArray]@unchecked)) => - mkByteArrayToLong(arg) - } + PredefFuncInfo( + { case (_, Seq(arg: Value[SByteArray]@unchecked)) => + mkByteArrayToLong(arg) + }), + OperationInfo(ByteArrayToLong, "Convert big-endian bytes representation (Coll[Byte]) to Long value.", + Seq(ArgInfo("input", "collection of bytes in big-endian format"))) ) val DecodePointFunc = PredefinedFunc("decodePoint", Lambda(Vector("input" -> SByteArray), SGroupElement, None), - { case (_, Seq(arg: Value[SByteArray]@unchecked)) => - mkDecodePoint(arg) - } + PredefFuncInfo( + { case (_, Seq(arg: Value[SByteArray]@unchecked)) => + mkDecodePoint(arg) + }), + OperationInfo(DecodePoint, + "Convert \\lst{Coll[Byte]} to \\lst{GroupElement} using \\lst{GroupElementSerializer}", + Seq(ArgInfo("input", "serialized bytes of some \\lst{GroupElement} value"))) ) val LongToByteArrayFunc = PredefinedFunc("longToByteArray", Lambda(Vector("input" -> SLong), SByteArray, None), - { case (_, Seq(arg: Value[SLong.type]@unchecked)) => - mkLongToByteArray(arg) - } + PredefFuncInfo( + { case (_, Seq(arg: Value[SLong.type]@unchecked)) => + mkLongToByteArray(arg) + }), + OperationInfo(LongToByteArray, + "Converts \\lst{Long} value to big-endian bytes representation.", + Seq(ArgInfo("input", "value to convert"))) ) val ProveDHTupleFunc = PredefinedFunc("proveDHTuple", Lambda(Vector("g" -> SGroupElement, "h" -> SGroupElement, "u" -> SGroupElement, "v" -> SGroupElement), SSigmaProp, None), - { case (_, Seq(g, h, u, v)) => - mkCreateProveDHTuple(g.asGroupElement, h.asGroupElement, u.asGroupElement, v.asGroupElement) - } + PredefFuncInfo( + { case (_, Seq(g, h, u, v)) => + mkCreateProveDHTuple(g.asGroupElement, h.asGroupElement, u.asGroupElement, v.asGroupElement) + }), + 
OperationInfo(CreateProveDHTuple, + """ ErgoTree operation to create a new SigmaProp value representing public key + | of Diffie Hellman signature protocol. + | Common input: (g,h,u,v) + """.stripMargin, + Seq(ArgInfo("g", ""),ArgInfo("h", ""),ArgInfo("u", ""),ArgInfo("v", ""))) ) val ProveDlogFunc = PredefinedFunc("proveDlog", Lambda(Vector("value" -> SGroupElement), SSigmaProp, None), - { case (_, Seq(arg: Value[SGroupElement.type]@unchecked)) => - mkCreateProveDlog(arg) - } + PredefFuncInfo( + { case (_, Seq(arg: Value[SGroupElement.type]@unchecked)) => + mkCreateProveDlog(arg) + }), + OperationInfo(CreateProveDlog, + """ErgoTree operation to create a new \lst{SigmaProp} value representing public key + | of discrete logarithm signature protocol. + """.stripMargin, + Seq(ArgInfo("value", "element of elliptic curve group"))) ) + val AvlTreeFunc = PredefinedFunc("avlTree", Lambda(Vector("operationFlags" -> SByte, "digest" -> SByteArray, "keyLength" -> SInt, "valueLengthOpt" -> SIntOption), SAvlTree, None), - { case (_, Seq(arg: Value[SGroupElement.type]@unchecked)) => - mkCreateProveDlog(arg) - } - ) - -// val IsMemberFunc = PredefinedFunc("isMember", -// Lambda(Vector("tree" -> SAvlTree, "key" -> SByteArray, "proof" -> SByteArray), SBoolean, None), -// { case (_, Seq(tree: Value[SAvlTree.type]@unchecked, key: Value[SByteArray]@unchecked, -// proof: Value[SByteArray]@unchecked)) => -// mkIsMember(tree, key, proof) -// } -// ) -// -// val TreeLookupFunc = PredefinedFunc("treeLookup", -// Lambda(Vector("tree" -> SAvlTree, "key" -> SByteArray, "proof" -> SByteArray), SOption[SByteArray], None), -// { case (_, Seq(tree: Value[SAvlTree.type]@unchecked, key: Value[SByteArray]@unchecked, -// proof: Value[SByteArray]@unchecked)) => -// mkTreeLookup(tree, key, proof) -// } -// ) -// -// val TreeModificationsFunc = PredefinedFunc("treeModifications", -// Lambda(Vector("tree" -> SAvlTree, "ops" -> SByteArray, "proof" -> SByteArray), SOption[SByteArray], None), -// { case (_, 
Seq(tree: Value[SAvlTree.type]@unchecked, operations: Value[SByteArray]@unchecked, -// proof: Value[SByteArray]@unchecked)) => -// mkTreeModifications(tree, operations, proof) -// } -// ) -// -// val TreeInsertsFunc = PredefinedFunc("treeInserts", -// Lambda(Vector("tree" -> SAvlTree, "ops" -> SCollection(STuple(IndexedSeq(SByteArray, SByteArray))), "proof" -> SByteArray), -// SOption[SByteArray], None), -// { case (_, Seq(tree: Value[SAvlTree.type]@unchecked, operations: Value[SCollection[STuple]]@unchecked, -// proof: Value[SByteArray]@unchecked)) => -// mkTreeInserts(tree, operations, proof) -// } -// ) -// -// val TreeRemovalsFunc = PredefinedFunc("treeRemovals", -// Lambda(Vector("tree" -> SAvlTree, "ops" -> SCollection[SByteArray], "proof" -> SByteArray), SOption[SByteArray], None), -// { case (_, Seq(tree: Value[SAvlTree.type]@unchecked, operations: Value[SCollection[SByteArray]]@unchecked, -// proof: Value[SByteArray]@unchecked)) => -// mkTreeRemovals(tree, operations, proof) -// } -// ) - - val XorOfFunc = PredefinedFunc("xorOf", - Lambda(Vector("conditions" -> SCollection(SBoolean)), SBoolean, None), - { case (_, Seq(col: Value[SCollection[SBoolean.type]]@unchecked)) => mkXorOf(col) } + PredefFuncInfo( + { case (_, Seq(flags, digest, keyLength, valueLength)) => + mkCreateAvlTree(flags.asByteValue, digest.asByteArray, keyLength.asIntValue, valueLength.asOption[SInt.type]) + }), + OperationInfo(CreateAvlTree, + "Construct a new authenticated dictionary with given parameters and tree root digest.", + Seq( + ArgInfo("operationFlags", "flags of available operations"), + ArgInfo("digest", "hash of merkle tree root"), + ArgInfo("keyLength", "length of dictionary keys in bytes"), + ArgInfo("valueLengthOpt", "optional width of dictionary values in bytes"))) ) val SubstConstantsFunc = PredefinedFunc("substConstants", @@ -248,7 +315,22 @@ object SigmaPredef { Vector("scriptBytes" -> SByteArray, "positions" -> SIntArray, "newValues" -> SCollection(tT)), SByteArray, 
None ), - undefined + PredefFuncInfo( undefined), + OperationInfo(SubstConstants, + """Transforms serialized bytes of ErgoTree with segregated constants by replacing constants + | at given positions with new values. This operation allow to use serialized scripts as + | pre-defined templates. + | The typical usage is "check that output box have proposition equal to given script bytes, + | where minerPk (constants(0)) is replaced with currentMinerPk". + | Each constant in original scriptBytes have SType serialized before actual data (see ConstantSerializer). + | During substitution each value from newValues is checked to be an instance of the corresponding type. + | This means, the constants during substitution cannot change their types. + | + | Returns original scriptBytes array where only specified constants are replaced and all other bytes remain exactly the same. + """.stripMargin, Seq( + ArgInfo("scriptBytes", "serialized ErgoTree with ConstantSegregationFlag set to 1."), + ArgInfo("positions", "zero based indexes in ErgoTree.constants array which should be replaced with new values"), + ArgInfo("newValues", "new values to be injected into the corresponding positions in ErgoTree.constants array"))) ) val ExecuteFromVarFunc = PredefinedFunc("executeFromVar", @@ -257,12 +339,43 @@ object SigmaPredef { Vector("id" -> SByte), tT, None ), - undefined + PredefFuncInfo(undefined), + OperationInfo(DeserializeContext, + """Extracts context variable as \lst{Coll[Byte]}, deserializes it to script + | and then executes this script in the current context. + | The original \lst{Coll[Byte]} of the script is available as \lst{getVar[Coll[Byte]](id)}. + | Type parameter \lst{V} result type of the deserialized script. + | Throws an exception if the actual script type doesn't conform to T. 
+ | Returns a result of the script execution in the current context + """.stripMargin, + Seq(ArgInfo("id", "identifier of the context variable"))) + ) + + val ExecuteFromSelfRegFunc = PredefinedFunc("executeFromSelfReg", + Lambda( + Seq(STypeParam(tT)), + Vector("id" -> SByte, "default" -> SOption(tT)), + tT, None + ), + PredefFuncInfo(undefined), + OperationInfo(DeserializeRegister, + """Extracts SELF register as \lst{Coll[Byte]}, deserializes it to script + | and then executes this script in the current context. + | The original \lst{Coll[Byte]} of the script is available as \lst{SELF.getReg[Coll[Byte]](id)}. + | Type parameter \lst{T} result type of the deserialized script. + | Throws an exception if the actual script type doesn't conform to \lst{T}. + | Returns a result of the script execution in the current context + """.stripMargin, + Seq(ArgInfo("id", "identifier of the register"), + ArgInfo("default", "optional default value, if register is not available"))) ) - val funcs: Seq[PredefinedFunc] = Seq( + val globalFuncs: Map[String, PredefinedFunc] = Seq( AllOfFunc, AnyOfFunc, + XorOfFunc, + AllZKFunc, + AnyZKFunc, AtLeastFunc, OuterJoinFunc, ZKProofFunc, @@ -280,17 +393,186 @@ object SigmaPredef { ProveDHTupleFunc, ProveDlogFunc, AvlTreeFunc, - XorOfFunc, SubstConstantsFunc, ExecuteFromVarFunc, - ) + ExecuteFromSelfRegFunc, + ).map(f => f.name -> f).toMap + + def comparisonOp(symbolName: String, opDesc: ValueCompanion, desc: String, args: Seq[ArgInfo]) = { + PredefinedFunc(symbolName, + Lambda(Seq(STypeParam(tT)), Vector("left" -> tT, "right" -> tT), SBoolean, None), + PredefFuncInfo(undefined), + OperationInfo(opDesc, desc, args) + ) + } + def binaryOp(symbolName: String, opDesc: ValueCompanion, desc: String, args: Seq[ArgInfo]) = { + PredefinedFunc(symbolName, + Lambda(Seq(STypeParam(tT)), Vector("left" -> tT, "right" -> tT), tT, None), + PredefFuncInfo(undefined), + OperationInfo(opDesc, desc, args) + ) + } + def logicalOp(symbolName: String, opDesc: 
ValueCompanion, desc: String, args: Seq[ArgInfo]) = { + PredefinedFunc(symbolName, + Lambda(Vector("left" -> SBoolean, "right" -> SBoolean), SBoolean, None), + PredefFuncInfo(undefined), + OperationInfo(opDesc, desc, args) + ) + } + + val infixFuncs: Map[String, PredefinedFunc] = Seq( + comparisonOp("==", EQ, "Compare equality of \\lst{left} and \\lst{right} arguments", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", "right operand"))), + comparisonOp("!=", NEQ, "Compare inequality of \\lst{left} and \\lst{right} arguments", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", "right operand"))), + + comparisonOp("<", LT, + "Returns \\lst{true} if the left operand is less than the right operand, \\lst{false} otherwise.", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", "right operand"))), + comparisonOp("<=", LE, + "Returns \\lst{true} if the left operand is less than or equal to the right operand, \\lst{false} otherwise.", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", "right operand"))), + comparisonOp(">", GT, + "Returns \\lst{true} if the left operand is greater than the right operand, \\lst{false} otherwise.", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", "right operand"))), + comparisonOp(">=", GE, + "Returns \\lst{true} if the left operand is greater than or equal to the right operand, \\lst{false} otherwise.", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", "right operand"))), + + binaryOp("+", ArithOp.Plus, "Returns a sum of two numeric operands", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", "right operand"))), + binaryOp("-", ArithOp.Minus, "Returns a result of subtracting second numeric operand from the first.", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", "right operand"))), + binaryOp("*", ArithOp.Multiply, "Returns a multiplication of two numeric operands", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", "right operand"))), + binaryOp("/", ArithOp.Division, "Integer division of 
the first operand by the second operand.", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", "right operand"))), + binaryOp("%", ArithOp.Modulo, "Remainder from division of the first operand by the second operand.", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", "right operand"))), + binaryOp("min", ArithOp.Min, "Minimum value of two operands.", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", "right operand"))), + binaryOp("max", ArithOp.Max, "Maximum value of two operands.", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", "right operand"))), + + binaryOp("bit_|", BitOp.BitOr, "Bitwise OR of two numeric operands.", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", "right operand"))), + binaryOp("bit_&", BitOp.BitAnd, "Bitwise AND of two numeric operands.", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", "right operand"))), + binaryOp("bit_^", BitOp.BitXor, "Bitwise XOR of two numeric operands.", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", "right operand"))), + + binaryOp("bit_>>", BitOp.BitShiftRight, "Right shift of bits.", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", "right operand"))), + binaryOp("bit_<<", BitOp.BitShiftLeft, "Left shift of bits.", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", "right operand"))), + binaryOp("bit_>>>", BitOp.BitShiftRightZeroed, "Right shift of bits.", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", "right operand"))), + + PredefinedFunc("binary_|", + Lambda(Vector("left" -> SByteArray, "right" -> SByteArray), SByteArray, None), + PredefFuncInfo(undefined), + OperationInfo(Xor, "Byte-wise XOR of two collections of bytes", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", "right operand"))) + ), + + logicalOp("||", BinOr, "Logical OR of two operands", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", "right operand"))), + logicalOp("&&", BinAnd, "Logical AND of two operands", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", 
"right operand"))), + logicalOp("^", BinXor, "Logical XOR of two operands", + Seq(ArgInfo("left", "left operand"), ArgInfo("right", "right operand"))), + ).map(f => f.name -> f).toMap + + val unaryFuncs: Map[String, PredefinedFunc] = Seq( + PredefinedFunc("unary_!", + Lambda(Vector("input" -> SBoolean), SBoolean, None), + PredefFuncInfo(undefined), + OperationInfo(LogicalNot, + "Logical NOT operation. Returns \\lst{true} if input is \\lst{false} and \\lst{false} if input is \\lst{true}.", + Seq(ArgInfo("input", "input \\lst{Boolean} value"))) + ), + PredefinedFunc("unary_-", + Lambda(Seq(STypeParam(tT)), Vector("input" -> tT), tT, None), + PredefFuncInfo(undefined), + OperationInfo(Negation, + "Negates numeric value \\lst{x} by returning \\lst{-x}.", + Seq(ArgInfo("input", "value of numeric type"))) + ), + PredefinedFunc("unary_~", + Lambda(Seq(STypeParam(tT)), Vector("input" -> tT), tT, None), + PredefFuncInfo(undefined), + OperationInfo(BitInversion, + "Invert every bit of the numeric value.", + Seq(ArgInfo("input", "value of numeric type"))) + ), + ).map(f => f.name -> f).toMap + + val funcs: Map[String, PredefinedFunc] = globalFuncs ++ infixFuncs ++ unaryFuncs + + /** WARNING: This operations are not used in frontend, and should be be used. + * They are used in SpecGen only the source of metadata for the corresponding ErgoTree nodes. + */ + val specialFuncs: Map[String, PredefinedFunc] = Seq( + PredefinedFunc("selectField", + Lambda(Seq(STypeParam(tT), STypeParam(tR)), Vector("input" -> tT, "fieldIndex" -> SByte), tR, None), + PredefFuncInfo(undefined), + OperationInfo(SelectField, + "Select tuple field by its 1-based index. E.g. 
\\lst{input._1} is transformed to \\lst{SelectField(input, 1)}", + Seq(ArgInfo("input", "tuple of items"), ArgInfo("fieldIndex", "index of an item to select"))) + ), + PredefinedFunc("treeLookup", + Lambda(Vector("tree" -> SAvlTree, "key" -> SByteArray, "proof" -> SByteArray), SOption(SByteArray), None), + PredefFuncInfo(undefined), + OperationInfo(TreeLookup, + "", + Seq(ArgInfo("tree", "tree to lookup the key"), + ArgInfo("key", "a key of an item in the \\lst{tree} to lookup"), + ArgInfo("proof", "proof to perform verification of the operation"))) + ), + PredefinedFunc("if", + Lambda(Seq(STypeParam(tT)), Vector("condition" -> SBoolean, "trueBranch" -> tT, "falseBranch" -> tT), tT, None), + PredefFuncInfo(undefined), + OperationInfo(If, + "Compute condition, if true then compute trueBranch else compute falseBranch", + Seq(ArgInfo("condition", "condition expression"), + ArgInfo("trueBranch", "expression to execute when \\lst{condition == true}"), + ArgInfo("falseBranch", "expression to execute when \\lst{condition == false}"))) + ), + PredefinedFunc("upcast", + Lambda(Seq(STypeParam(tT), STypeParam(tR)), Vector("input" -> tT), tR, None), + PredefFuncInfo(undefined), + OperationInfo(Upcast, + "Cast this numeric value to a bigger type (e.g. Int to Long)", + Seq(ArgInfo("input", "value to cast"))) + ), + PredefinedFunc("downcast", + Lambda(Seq(STypeParam(tT), STypeParam(tR)), Vector("input" -> tT), tR, None), + PredefFuncInfo(undefined), + OperationInfo(Downcast, + "Cast this numeric value to a smaller type (e.g. Long to Int). Throws exception if overflow.", + Seq(ArgInfo("input", "value to cast"))) + ), + PredefinedFunc("apply", + Lambda(Seq(STypeParam(tT), STypeParam(tR)), Vector("func" -> SFunc(tT, tR), "args" -> tT), tR, None), + PredefFuncInfo(undefined), + OperationInfo(Apply, + "Apply the function to the arguments. 
", + Seq(ArgInfo("func", "function which is applied"), + ArgInfo("args", "list of arguments"))) + ), + PredefinedFunc("placeholder", + Lambda(Seq(STypeParam(tT)), Vector("id" -> SInt), tT, None), + PredefFuncInfo(undefined), + OperationInfo(ConstantPlaceholder, + "Create special ErgoTree node which can be replaced by constant with given id.", + Seq(ArgInfo("index", "index of the constant in ErgoTree header"))) + ), + ).map(f => f.name -> f).toMap - private val funcNameToIrBuilderMap: Map[String, IrBuilderFunc] = - funcs.filter(_.irBuilder != undefined) - .map(f => f.name -> f.irBuilder) - .toMap + private val funcNameToIrBuilderMap: Map[String, PredefinedFunc] = + funcs.filter { case (n, f) => f.irInfo.irBuilder != undefined } - def irBuilderForFunc(name: String): Option[IrBuilderFunc] = funcNameToIrBuilderMap.get(name) + def irBuilderForFunc(name: String): Option[IrBuilderFunc] = funcNameToIrBuilderMap.get(name).map(_.irInfo.irBuilder) } object PredefinedFuncApply { diff --git a/src/main/scala/sigmastate/lang/SigmaSpecializer.scala b/src/main/scala/sigmastate/lang/SigmaSpecializer.scala index a7315c06f7..ccc05721e5 100644 --- a/src/main/scala/sigmastate/lang/SigmaSpecializer.scala +++ b/src/main/scala/sigmastate/lang/SigmaSpecializer.scala @@ -42,10 +42,10 @@ class SigmaSpecializer(val builder: SigmaBuilder) { val res1 = eval(curEnv, res) Some(res1) - case Upcast(Constant(value, tpe), toTpe: SNumericType) => + case Upcast(Constant(value, _), toTpe: SNumericType) => Some(mkConstant(toTpe.upcast(value.asInstanceOf[AnyVal]), toTpe)) - case Downcast(Constant(value, tpe), toTpe: SNumericType) => + case Downcast(Constant(value, _), toTpe: SNumericType) => Some(mkConstant(toTpe.downcast(value.asInstanceOf[AnyVal]), toTpe)) // Rule: numeric.to* casts @@ -114,10 +114,8 @@ class SigmaSpecializer(val builder: SigmaBuilder) { case Apply(Select(col, SliceMethod.name, _), Seq(from, until)) => Some(mkSlice(col.asValue[SCollection[SType]], from.asIntValue, until.asIntValue)) - 
case Apply(Select(col, FilterMethod.name, _), Seq(Lambda(_, Seq((n, t)), _, Some(body)))) => - val tagged = mkTagged(n, t, 21) - val body1 = eval(env + (n -> tagged), body) - Some(mkFilter(col.asValue[SCollection[SType]], tagged.varId, body1.asValue[SBoolean.type])) + case Apply(Select(col, FilterMethod.name, _), Seq(l @ Lambda(_, _, _, _))) => + Some(mkFilter(col.asValue[SCollection[SType]], l)) case Apply(Select(col, ExistsMethod.name, _), Seq(l @ Lambda(_, _, _, _))) => Some(mkExists(col.asValue[SCollection[SType]], l)) diff --git a/src/main/scala/sigmastate/lang/SigmaTyper.scala b/src/main/scala/sigmastate/lang/SigmaTyper.scala index d37842231e..de62844ec3 100644 --- a/src/main/scala/sigmastate/lang/SigmaTyper.scala +++ b/src/main/scala/sigmastate/lang/SigmaTyper.scala @@ -12,7 +12,6 @@ import sigmastate.lang.exceptions._ import sigmastate.lang.SigmaPredef._ import sigmastate.serialization.OpCodes import sigmastate.utxo._ -import sigma.util.Extensions._ import scala.collection.mutable.ArrayBuffer @@ -26,11 +25,21 @@ class SigmaTyper(val builder: SigmaBuilder, predefFuncRegistry: PredefinedFuncRe private implicit val implicitPredefFuncRegistry: PredefinedFuncRegistry = predefFuncRegistry - private val tT = STypeIdent("T") // to be used in typing rules + private val tT = STypeVar("T") // to be used in typing rules private val predefinedEnv: Map[String, SType] = - predefFuncRegistry.funcs.map(f => f.name -> f.declaration.tpe).toMap - + predefFuncRegistry.funcs.mapValues(f => f.declaration.tpe) + + private def processGlobalMethod(srcCtx: Nullable[SourceContext], + method: SMethod, + args: IndexedSeq[SValue]) = { + val global = Global.withPropagatedSrcCtx(srcCtx) + val node = for { + pf <- method.irInfo.irBuilder + res <- pf.lift((builder, global, method, args, emptySubst)) + } yield res + node.getOrElse(mkMethodCall(global, method, args, emptySubst).withPropagatedSrcCtx(srcCtx)) + } /** * Rewrite tree to typed tree. Checks constituent names and types. 
Uses * the env map to resolve bound variables and their types. @@ -59,11 +68,17 @@ class SigmaTyper(val builder: SigmaBuilder, predefFuncRegistry: PredefinedFuncRe val newItems = items.map(assignType(env, _)) assignConcreteCollection(c, newItems) - case Ident(n, _) => + case i @ Ident(n, _) => env.get(n) match { case Some(t) => mkIdent(n, t) case None => - error(s"Cannot assign type for variable '$n' because it is not found in env $env", bound.sourceContext) + SGlobal.method(n) match { + case Some(method) if method.stype.tDom.length == 1 => // this is like `groupGenerator` without parentheses + val srcCtx = i.sourceContext + processGlobalMethod(srcCtx, method, IndexedSeq()) + case _ => + error(s"Cannot assign type for variable '$n' because it is not found in env $env", bound.sourceContext) + } } case sel @ Select(obj, n, None) => @@ -87,10 +102,10 @@ class SigmaTyper(val builder: SigmaBuilder, predefFuncRegistry: PredefinedFuncRe else tMethSpec.copy(tDom = tMethSpec.tDom.tail, tRange = tMethSpec.tRange) case _ => tMeth } - if (method.irBuilder.isDefined && !tRes.isFunc) { + if (method.irInfo.irBuilder.isDefined && !tRes.isFunc) { // this is MethodCall of parameter-less property, so invoke builder and/or fallback to just MethodCall val methodConcrType = method.withSType(SFunc(newObj.tpe, tRes)) - methodConcrType.irBuilder.flatMap(_.lift(builder, newObj, methodConcrType, IndexedSeq(), Map())) + methodConcrType.irInfo.irBuilder.flatMap(_.lift(builder, newObj, methodConcrType, IndexedSeq(), Map())) .getOrElse(mkMethodCall(newObj, methodConcrType, IndexedSeq(), Map())) } else { mkSelect(newObj, n, Some(tRes)) @@ -118,7 +133,7 @@ class SigmaTyper(val builder: SigmaBuilder, predefFuncRegistry: PredefinedFuncRe obj.tpe match { case p: SProduct => p.method(n) match { - case Some(method @ SMethod(_, _, genFunTpe @ SFunc(_, _, _), _, _)) => + case Some(method @ SMethod(_, _, genFunTpe @ SFunc(_, _, _), _, _, _)) => val subst = Map(genFunTpe.tpeParams.head.ident -> rangeTpe) 
val concrFunTpe = applySubst(genFunTpe, subst) val expectedArgs = concrFunTpe.asFunc.tDom.tail @@ -126,8 +141,8 @@ class SigmaTyper(val builder: SigmaBuilder, predefFuncRegistry: PredefinedFuncRe if (expectedArgs.length != newArgTypes.length || !expectedArgs.zip(newArgTypes).forall { case (ea, na) => ea == SAny || ea == na }) error(s"For method $n expected args: $expectedArgs; actual: $newArgTypes", sel.sourceContext) - if (method.irBuilder.isDefined) { - method.irBuilder.flatMap(_.lift(builder, newObj, method, newArgs, subst)) + if (method.irInfo.irBuilder.isDefined) { + method.irInfo.irBuilder.flatMap(_.lift(builder, newObj, method, newArgs, subst)) .getOrElse(mkMethodCall(newObj, method, newArgs, subst)) } else { val newSelect = mkSelect(newObj, n, Some(concrFunTpe)).withSrcCtx(sel.sourceContext) @@ -146,7 +161,7 @@ class SigmaTyper(val builder: SigmaBuilder, predefFuncRegistry: PredefinedFuncRe val newSel = assignType(env, sel) val newArgs = args.map(assignType(env, _)) newSel.tpe match { - case genFunTpe @ SFunc(argTypes, tRes, _) => + case genFunTpe @ SFunc(argTypes, _, _) => // If it's a function then the application has type of that function's return type. 
val newObj = assignType(env, obj) val newArgTypes = newArgs.map(_.tpe) @@ -154,13 +169,13 @@ class SigmaTyper(val builder: SigmaBuilder, predefFuncRegistry: PredefinedFuncRe case Some(subst) => val concrFunTpe = applySubst(genFunTpe, subst) newObj.tpe.asInstanceOf[SProduct].method(n) match { - case Some(method) if method.irBuilder.isDefined => + case Some(method) if method.irInfo.irBuilder.isDefined => val expectedArgs = concrFunTpe.asFunc.tDom if (expectedArgs.length != newArgTypes.length || !expectedArgs.zip(newArgTypes).forall { case (ea, na) => ea == SAny || ea == na }) error(s"For method $n expected args: $expectedArgs; actual: $newArgTypes", sel.sourceContext) val methodConcrType = method.withSType(concrFunTpe.asFunc.withReceiverType(newObj.tpe)) - methodConcrType.irBuilder.flatMap(_.lift(builder, newObj, methodConcrType, newArgs, Map())) + methodConcrType.irInfo.irBuilder.flatMap(_.lift(builder, newObj, methodConcrType, newArgs, Map())) .getOrElse(mkMethodCall(newObj, methodConcrType, newArgs, Map())) case _ => val newSelect = mkSelect(newObj, n, Some(concrFunTpe)).withSrcCtx(sel.sourceContext) @@ -173,10 +188,16 @@ class SigmaTyper(val builder: SigmaBuilder, predefFuncRegistry: PredefinedFuncRe mkApply(newSel, newArgs) } + case a @ Apply(ident: Ident, args) if SGlobal.hasMethod(ident.name) => // example: groupGenerator() + val method = SGlobal.method(ident.name).get + val srcCtx = a.sourceContext + val newArgs = args.map(assignType(env, _)) + processGlobalMethod(srcCtx, method, newArgs) + case app @ Apply(f, args) => val new_f = assignType(env, f) (new_f.tpe match { - case SFunc(argTypes, tRes, _) => + case SFunc(argTypes, _, _) => // If it's a pre-defined function application if (args.length != argTypes.length) error(s"Invalid argument type of application $app: invalid number of arguments", app.sourceContext) @@ -264,7 +285,7 @@ class SigmaTyper(val builder: SigmaBuilder, predefFuncRegistry: PredefinedFuncRe } case _ => emptySubst } - 
method.irBuilder.flatMap(_.lift(builder, newObj, method, newArgs, typeSubst)) + method.irInfo.irBuilder.flatMap(_.lift(builder, newObj, method, newArgs, typeSubst)) .getOrElse(mkMethodCall(newObj, method, newArgs, typeSubst)) case _ => @@ -297,14 +318,22 @@ class SigmaTyper(val builder: SigmaBuilder, predefFuncRegistry: PredefinedFuncRe } case SSigmaProp => (m, newArgs) match { - case ("||" | "&&", Seq(r)) => r.tpe match { + case ("||" | "&&" | "^", Seq(r)) => r.tpe match { case SBoolean => - val (a,b) = (Select(newObj, SSigmaProp.IsProven, Some(SBoolean)).asBoolValue, r.asBoolValue) - val res = if (m == "||") mkBinOr(a,b) else mkBinAnd(a,b) + val (a, b) = (Select(newObj, SSigmaProp.IsProven, Some(SBoolean)).asBoolValue, r.asBoolValue) + val res = m match { + case "||" => mkBinOr(a, b) + case "&&" => mkBinAnd(a, b) + case "^" => mkBinXor(a, b) + } res case SSigmaProp => - val (a,b) = (newObj.asSigmaProp, r.asSigmaProp) - val res = if (m == "||") mkSigmaOr(Seq(a,b)) else mkSigmaAnd(Seq(a,b)) + val (a, b) = (newObj.asSigmaProp, r.asSigmaProp) + val res = m match { + case "||" => mkSigmaOr(Seq(a, b)) + case "&&" => mkSigmaAnd(Seq(a, b)) + case "^" => throw new NotImplementedError(s"Xor operation is not defined between SigmaProps") + } res case _ => error(s"Invalid argument type for $m, expected $SSigmaProp but was ${r.tpe}", r.sourceContext) @@ -318,11 +347,14 @@ class SigmaTyper(val builder: SigmaBuilder, predefFuncRegistry: PredefinedFuncRe case "||" => mkBinOr(newObj.asBoolValue, r.asBoolValue) case "&&" => mkBinAnd(newObj.asBoolValue, r.asBoolValue) case "^" => mkBinXor(newObj.asBoolValue, r.asBoolValue) - } case SSigmaProp => - val (a,b) = (newObj.asBoolValue, Select(r, SSigmaProp.IsProven, Some(SBoolean)).asBoolValue) - val res = if (m == "||") mkBinOr(a,b) else mkBinAnd(a,b) + val (a, b) = (newObj.asBoolValue, Select(r, SSigmaProp.IsProven, Some(SBoolean)).asBoolValue) + val res = m match { + case "||" => mkBinOr(a, b) + case "&&" => mkBinAnd(a, b) + case "^" 
=> mkBinXor(a, b) + } res case _ => error(s"Invalid argument type for $m, expected ${newObj.tpe} but was ${r.tpe}", r.sourceContext) @@ -404,9 +436,9 @@ class SigmaTyper(val builder: SigmaBuilder, predefFuncRegistry: PredefinedFuncRe case ArithOp(l, r, OpCodes.MinCode) => bimap(env, "min", l.asNumValue, r.asNumValue)(mkMin)(tT, tT) case ArithOp(l, r, OpCodes.MaxCode) => bimap(env, "max", l.asNumValue, r.asNumValue)(mkMax)(tT, tT) - case BitOp(l, r, OpCodes.BitOrCode) => bimap(env, "|", l.asNumValue, r.asNumValue)(mkBitOr)(tT, tT) - case BitOp(l, r, OpCodes.BitAndCode) => bimap(env, "&", l.asNumValue, r.asNumValue)(mkBitAnd)(tT, tT) - case BitOp(l, r, OpCodes.BitXorCode) => bimap(env, "^", l.asNumValue, r.asNumValue)(mkBitXor)(tT, tT) + case BitOp(l, r, OpCodes.BitOrCode) => bimap(env, BitOp.BitOr.name, l.asNumValue, r.asNumValue)(mkBitOr)(tT, tT) + case BitOp(l, r, OpCodes.BitAndCode) => bimap(env, BitOp.BitAnd.name, l.asNumValue, r.asNumValue)(mkBitAnd)(tT, tT) + case BitOp(l, r, OpCodes.BitXorCode) => bimap(env, BitOp.BitXor.name, l.asNumValue, r.asNumValue)(mkBitXor)(tT, tT) case Xor(l, r) => bimap(env, "|", l, r)(mkXor)(SByteArray, SByteArray) case MultiplyGroup(l, r) => bimap(env, "*", l, r)(mkMultiplyGroup)(SGroupElement, SGroupElement) @@ -453,6 +485,7 @@ class SigmaTyper(val builder: SigmaBuilder, predefFuncRegistry: PredefinedFuncRe case SomeValue(x) => SomeValue(assignType(env, x)) case v: NoneValue[_] => v + case Global => Global case Context => Context case Height => Height case MinerPubkey => MinerPubkey @@ -577,8 +610,8 @@ class SigmaTyper(val builder: SigmaBuilder, predefFuncRegistry: PredefinedFuncRe object SigmaTyper { - type STypeSubst = Map[STypeIdent, SType] - val emptySubst = Map.empty[STypeIdent, SType] + type STypeSubst = Map[STypeVar, SType] + val emptySubst = Map.empty[STypeVar, SType] /** Performs pairwise type unification making sure each type variable is equally * substituted in all items. 
*/ @@ -604,13 +637,13 @@ object SigmaTyper { /** Finds a substitution `subst` of type variables such that unifyTypes(applySubst(t1, subst), t2) shouldBe Some(emptySubst) */ def unifyTypes(t1: SType, t2: SType): Option[STypeSubst] = (t1, t2) match { - case (id1 @ STypeIdent(n1), id2 @ STypeIdent(n2)) => + case (_ @ STypeVar(n1), _ @ STypeVar(n2)) => if (n1 == n2) unifiedWithoutSubst else None - case (id1 @ STypeIdent(n), _) => + case (id1 @ STypeVar(_), _) => Some(Map(id1 -> t2)) case (e1: SCollectionType[_], e2: SCollectionType[_]) => unifyTypes(e1.elemType, e2.elemType) - case (e1: SCollectionType[_], e2: STuple) => + case (e1: SCollectionType[_], _: STuple) => unifyTypes(e1.elemType, SAny) case (e1: SOption[_], e2: SOption[_]) => unifyTypes(e1.elemType, e2.elemType) @@ -636,7 +669,7 @@ object SigmaTyper { SFunc(args.map(applySubst(_, subst)), applySubst(res, subst), remainingVars) case _ => val substRule = rule[SType] { - case id: STypeIdent if subst.contains(id) => subst(id) + case id: STypeVar if subst.contains(id) => subst(id) } rewrite(everywherebu(substRule))(tpe) } diff --git a/src/main/scala/sigmastate/lang/Terms.scala b/src/main/scala/sigmastate/lang/Terms.scala index 57378f5d1c..49351b41c6 100644 --- a/src/main/scala/sigmastate/lang/Terms.scala +++ b/src/main/scala/sigmastate/lang/Terms.scala @@ -6,13 +6,9 @@ import sigmastate.SCollection.SByteArray import sigmastate.Values._ import sigmastate.utils.Overloading.Overload1 import sigmastate._ -import sigmastate.lang.SigmaTyper.STypeSubst import sigmastate.serialization.OpCodes import sigmastate.serialization.OpCodes.OpCode import sigmastate.lang.TransformingSigmaBuilder._ -import sigmastate.utxo.CostTable.Cost -import sigmastate.utxo.{ExtractRegisterAs, SigmaPropIsProven, Slice} -import special.sigma.{AnyValue, TestValue} import scala.language.implicitConversions @@ -22,13 +18,14 @@ object Terms { * { val x = ...; val y = ... } * This node is not part of ErgoTree and hence have Undefined opCode. 
*/ case class Block(bindings: Seq[Val], result: SValue) extends Value[SType] { - override val opCode: OpCode = OpCodes.Undefined + override def companion = Block override def tpe: SType = result.tpe /** This is not used as operation, but rather to form a program structure */ override def opType: SFunc = Value.notSupportedError(this, "opType") } - object Block { + object Block extends ValueCompanion { + override def opCode: OpCode = OpCodes.Undefined def apply(let: Val, result: SValue)(implicit o1: Overload1): Block = Block(Seq(let), result) } @@ -46,37 +43,43 @@ object Terms { * For motivation and details see https://github.com/ScorexFoundation/sigmastate-interpreter/issues/236 * */ case class ZKProofBlock(body: SigmaPropValue) extends BoolValue { - override val opCode: OpCode = OpCodes.Undefined + override def companion = ZKProofBlock override def tpe = SBoolean override def opType: SFunc = SFunc(SSigmaProp, SBoolean) } + object ZKProofBlock extends ValueCompanion { + override def opCode: OpCode = OpCodes.Undefined + } trait Val extends Value[SType] { val name: String val givenType: SType val body: SValue } + object Val { + def apply(name: String, body: SValue): Val = ValNode(name, NoType, body) + def apply(name: String, givenType: SType, body: SValue): Val = ValNode(name, givenType, body) + def unapply(v: SValue): Option[(String, SType, SValue)] = v match { + case ValNode(name, givenType, body) => Some((name, givenType, body)) + case _ => None + } + } case class ValNode(name: String, givenType: SType, body: SValue) extends Val { - override val opCode: OpCode = OpCodes.Undefined + override def companion = ValNode override def tpe: SType = givenType ?: body.tpe /** This is not used as operation, but rather to form a program structure */ override def opType: SFunc = Value.notSupportedError(this, "opType") } - object Val { - def apply(name: String, body: SValue): Val = ValNode(name, NoType, body) - def apply(name: String, givenType: SType, body: SValue): Val = 
ValNode(name, givenType, body) - def unapply(v: SValue): Option[(String, SType, SValue)] = v match { - case ValNode(name, givenType, body) => Some((name, givenType, body)) - case _ => None - } + object ValNode extends ValueCompanion { + override def opCode: OpCode = OpCodes.Undefined } /** Frontend node to select a field from an object. Should be transformed to SelectField*/ case class Select(obj: Value[SType], field: String, resType: Option[SType] = None) extends Value[SType] { - override val opCode: OpCode = OpCodes.Undefined + override def companion = Select override val tpe: SType = resType.getOrElse(obj.tpe match { case p: SProduct => val i = p.methodIndex(field) @@ -86,20 +89,24 @@ object Terms { }) override def opType: SFunc = SFunc(obj.tpe, tpe) } + object Select extends ValueCompanion { + override def opCode: OpCode = OpCodes.Undefined + } /** Frontend node to represent variable names parsed in a source code. * Should be resolved during compilation to lambda argument, Val definition or * compilation environment value. */ case class Ident(name: String, tpe: SType = NoType) extends Value[SType] { - override val opCode: OpCode = OpCodes.Undefined + override def companion = Ident override def opType: SFunc = SFunc(Vector(), tpe) } - object Ident { + object Ident extends ValueCompanion { + override def opCode: OpCode = OpCodes.Undefined def apply(name: String): Ident = Ident(name, NoType) } case class Apply(func: Value[SType], args: IndexedSeq[Value[SType]]) extends Value[SType] { - override val opCode: OpCode = OpCodes.FuncApplyCode + override def companion = Apply override lazy val tpe: SType = func.tpe match { case SFunc(_, r, _) => r case tColl: SCollectionType[_] => tColl.elemType @@ -107,10 +114,13 @@ object Terms { } override def opType: SFunc = SFunc(Vector(func.tpe +: args.map(_.tpe):_*), tpe) } + object Apply extends ValueCompanion { + override def opCode: OpCode = OpCodes.FuncApplyCode + } /** Apply types for type parameters of input value. 
*/ case class ApplyTypes(input: Value[SType], tpeArgs: Seq[SType]) extends Value[SType] { node => - override val opCode: OpCode = OpCodes.Undefined + override def companion = ApplyTypes override lazy val tpe: SType = input.tpe match { case funcType: SFunc => val subst = funcType.tpeParams.map(_.ident).zip(tpeArgs).toMap @@ -120,16 +130,31 @@ object Terms { /** This is not used as operation, but rather to form a program structure */ override def opType: SFunc = Value.notSupportedError(this, "opType") } + object ApplyTypes extends ValueCompanion { + override def opCode: OpCode = OpCodes.Undefined + } /** Frontend node to represent potential method call in a source code. * Should be resolved during compilation to MethodCall. * Cannot be serialized to ErgoTree. */ case class MethodCallLike(obj: Value[SType], name: String, args: IndexedSeq[Value[SType]], tpe: SType = NoType) extends Value[SType] { - override val opCode: OpCode = OpCodes.Undefined + override def companion = MethodCallLike override def opType: SFunc = SFunc(obj.tpe +: args.map(_.tpe), tpe) } + object MethodCallLike extends ValueCompanion { + override def opCode: OpCode = OpCodes.Undefined + } /** Represents in ErgoTree an invocation of method of the object `obj` with arguments `args`. + * The SMethod instances in STypeCompanions may have type STypeIdent in method types, + * but valid ErgoTree should have SMethod instances specialized for specific types of + * obj and args using `specializeFor`. + * This means, if we save typeId, methodId, and we save all the arguments, + * we can restore the specialized SMethod instance. + * This works by induction, if we assume all arguments are monomorphic, + * then we can make MethodCall monomorphic. + * Thus, all ErgoTree instances are monomorphic by construction. 
+ * * @param obj object on which method will be invoked * @param method method to be invoked * @param args arguments passed to the method on invocation @@ -138,21 +163,27 @@ object Terms { case class MethodCall(obj: Value[SType], method: SMethod, args: IndexedSeq[Value[SType]], - typeSubst: Map[STypeIdent, SType]) extends Value[SType] { - override val opCode: OpCode = if (args.isEmpty) OpCodes.PropertyCallCode else OpCodes.MethodCallCode + typeSubst: Map[STypeVar, SType]) extends Value[SType] { + override def companion = if (args.isEmpty) PropertyCall else MethodCall override def opType: SFunc = SFunc(obj.tpe +: args.map(_.tpe), tpe) override val tpe: SType = method.stype match { case f: SFunc => f.tRange.withSubstTypes(typeSubst) case t => t.withSubstTypes(typeSubst) } } + object MethodCall extends ValueCompanion { + override def opCode: OpCode = OpCodes.MethodCallCode + } + object PropertyCall extends ValueCompanion { + override def opCode: OpCode = OpCodes.PropertyCallCode + } - case class STypeParam(ident: STypeIdent, upperBound: Option[SType] = None, lowerBound: Option[SType] = None) { + case class STypeParam(ident: STypeVar, upperBound: Option[SType] = None, lowerBound: Option[SType] = None) { assert(upperBound.isEmpty && lowerBound.isEmpty, s"Type parameters with bounds are not supported, but found $this") override def toString = ident.toString + upperBound.fold("")(u => s" <: $u") + lowerBound.fold("")(l => s" >: $l") } object STypeParam { - implicit def typeIdentToTypeParam(id: STypeIdent): STypeParam = STypeParam(id) + implicit def typeIdentToTypeParam(id: STypeVar): STypeParam = STypeParam(id) } /** Frontend implementation of lambdas. Should be transformed to FuncValue. 
*/ @@ -163,7 +194,7 @@ object Terms { body: Option[Value[SType]]) extends Value[SFunc] { require(!(tpeParams.nonEmpty && body.nonEmpty), s"Generic function definitions are not supported, but found $this") - override val opCode: OpCode = OpCodes.Undefined + override def companion = Lambda override lazy val tpe: SFunc = { val sRange = givenResType ?: body.fold(NoType: SType)(_.tpe) SFunc(args.map(_._2), sRange, tpeParams) @@ -171,7 +202,8 @@ object Terms { /** This is not used as operation, but rather to form a program structure */ override def opType: SFunc = SFunc(Vector(), tpe) } - object Lambda { + object Lambda extends ValueCompanion { + override def opCode: OpCode = OpCodes.Undefined def apply(args: IndexedSeq[(String,SType)], resTpe: SType, body: Value[SType]): Lambda = Lambda(Nil, args, resTpe, Some(body)) def apply(args: IndexedSeq[(String,SType)], resTpe: SType, body: Option[Value[SType]]): Lambda = @@ -223,7 +255,6 @@ object Terms { } /** * Set source context to all nodes missing source context in the given tree. 
- * @param tree AST to traverse * @param srcCtx source context to set * @return AST where all nodes with missing source context are set to the given srcCtx */ diff --git a/src/main/scala/sigmastate/lang/Types.scala b/src/main/scala/sigmastate/lang/Types.scala index d13f62d4f8..ebd66a2627 100644 --- a/src/main/scala/sigmastate/lang/Types.scala +++ b/src/main/scala/sigmastate/lang/Types.scala @@ -24,6 +24,7 @@ trait Types extends Core { "Boolean" -> SBoolean, "Byte" -> SByte, "Short" -> SShort, "Int" -> SInt,"Long" -> SLong, "BigInt" -> SBigInt, "ByteArray" -> SByteArray, "AvlTree" -> SAvlTree, "Context" -> SContext, "GroupElement" -> SGroupElement, "SigmaProp" -> SSigmaProp, + "SigmaDslBuilder" -> SGlobal, "Header" -> SHeader, "PreHeader" -> SPreHeader, "String" -> SString, @@ -107,7 +108,7 @@ trait Types extends Core { case (_, STypeApply("Coll", IndexedSeq()), Seq(Seq(t))) => SCollection(t) case (_, STypeApply("Option", IndexedSeq()), Seq(Seq(t))) => SOption(t) case (_, SPrimType(t), Seq()) => t - case (_, STypeApply(tn, IndexedSeq()), args) if args.isEmpty => STypeIdent(tn) + case (_, STypeApply(tn, IndexedSeq()), args) if args.isEmpty => STypeVar(tn) case (index, t, typeArgs) => error(s"Unsupported type $t[$typeArgs]", Some(srcCtx(index))) } diff --git a/src/main/scala/sigmastate/lang/exceptions/Exceptions.scala b/src/main/scala/sigmastate/lang/exceptions/Exceptions.scala index 93145ee15d..4e7f0ebee3 100644 --- a/src/main/scala/sigmastate/lang/exceptions/Exceptions.scala +++ b/src/main/scala/sigmastate/lang/exceptions/Exceptions.scala @@ -21,8 +21,8 @@ class TyperException(message: String, source: Option[SourceContext] = None) class SpecializerException(message: String, source: Option[SourceContext] = None) extends SigmaException(message, source) -class SerializerException(message: String, source: Option[SourceContext] = None) - extends SigmaException(message, source) +class SerializerException(message: String, source: Option[SourceContext] = None, cause: 
Option[Throwable] = None) + extends SigmaException(message, source, cause) class BuilderException(message: String, source: Option[SourceContext] = None) extends SigmaException(message, source) @@ -30,5 +30,5 @@ class BuilderException(message: String, source: Option[SourceContext] = None) class CosterException(message: String, source: Option[SourceContext], cause: Option[Throwable] = None) extends SigmaException(message, source, cause) -class InterpreterException(message: String, source: Option[SourceContext] = None) - extends SigmaException(message, source) +class InterpreterException(message: String, source: Option[SourceContext] = None, cause: Option[Throwable] = None) + extends SigmaException(message, source, cause) diff --git a/src/main/scala/sigmastate/lang/exceptions/SigmaSerializerExceptions.scala b/src/main/scala/sigmastate/lang/exceptions/SigmaSerializerExceptions.scala index 04211a82a8..9d7678fa3b 100644 --- a/src/main/scala/sigmastate/lang/exceptions/SigmaSerializerExceptions.scala +++ b/src/main/scala/sigmastate/lang/exceptions/SigmaSerializerExceptions.scala @@ -2,17 +2,14 @@ package sigmastate.lang.exceptions import sigmastate.lang.SourceContext -final class InvalidTypePrefix(message: String, source: Option[SourceContext] = None) - extends SerializerException(message, source) +final class InvalidTypePrefix(message: String, source: Option[SourceContext] = None, cause: Option[Throwable] = None) + extends SerializerException(message, source, cause) -final class InputSizeLimitExceeded(message: String, source: Option[SourceContext] = None) - extends SerializerException(message, source) +final class InputSizeLimitExceeded(message: String, source: Option[SourceContext] = None, cause: Option[Throwable] = None) + extends SerializerException(message, source, cause) -final class TypeDeserializeCallDepthExceeded(message: String, source: Option[SourceContext] = None) - extends SerializerException(message, source) +final class DeserializeCallDepthExceeded(message: 
String, source: Option[SourceContext] = None, cause: Option[Throwable] = None) + extends SerializerException(message, source, cause) -final class ValueDeserializeCallDepthExceeded(message: String, source: Option[SourceContext] = None) - extends SerializerException(message, source) - -final class InvalidOpCode(message: String, source: Option[SourceContext] = None) - extends SerializerException(message, source) +final class InvalidOpCode(message: String, source: Option[SourceContext] = None, cause: Option[Throwable] = None) + extends SerializerException(message, source, cause) diff --git a/src/main/scala/sigmastate/serialization/ApplySerializer.scala b/src/main/scala/sigmastate/serialization/ApplySerializer.scala index 5042b04b4e..38b0127c50 100644 --- a/src/main/scala/sigmastate/serialization/ApplySerializer.scala +++ b/src/main/scala/sigmastate/serialization/ApplySerializer.scala @@ -3,17 +3,16 @@ package sigmastate.serialization import sigmastate.Values._ import sigmastate._ import sigmastate.lang.Terms.Apply -import sigmastate.serialization.OpCodes._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} case class ApplySerializer(cons: (Value[SType], IndexedSeq[Value[SType]]) => Value[SType]) extends ValueSerializer[Apply] { - - override val opCode: OpCode = FuncApplyCode + import sigmastate.Operations.ApplyInfo._ + override def opDesc = Apply override def serialize(obj: Apply, w: SigmaByteWriter): Unit = { - w.putValue(obj.func) - w.putValues(obj.args) + w.putValue(obj.func, funcArg) + w.putValues(obj.args, argsArg) } override def parse(r: SigmaByteReader): Value[SType] = { diff --git a/src/main/scala/sigmastate/serialization/BlockValueSerializer.scala b/src/main/scala/sigmastate/serialization/BlockValueSerializer.scala index 6b8638dab8..a8686bc140 100644 --- a/src/main/scala/sigmastate/serialization/BlockValueSerializer.scala +++ b/src/main/scala/sigmastate/serialization/BlockValueSerializer.scala @@ -2,19 +2,21 @@ package sigmastate.serialization import 
sigmastate.Values._ import sigmastate._ -import sigmastate.serialization.OpCodes._ import scorex.util.Extensions._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} +import ValueSerializer._ case class BlockValueSerializer(cons: (IndexedSeq[BlockItem], Value[SType]) => Value[SType]) extends ValueSerializer[BlockValue] { - - override val opCode: OpCode = BlockValueCode + override def opDesc = BlockValue override def serialize(obj: BlockValue, w: SigmaByteWriter): Unit = { - w.putUInt(obj.items.length) - obj.items.foreach(w.putValue(_)) - w.putValue(obj.result) + val sizeVar = "numItems" + w.putUInt(obj.items.length, ArgInfo(sizeVar, "number of block items")) + foreach(sizeVar, obj.items){ i => + w.putValue(i, ArgInfo("item_i", "block's item in i-th position")) + } + w.putValue(obj.result, ArgInfo("result", "result expression of the block")) } override def parse(r: SigmaByteReader): Value[SType] = { diff --git a/src/main/scala/sigmastate/serialization/BoolToSigmaPropSerializer.scala b/src/main/scala/sigmastate/serialization/BoolToSigmaPropSerializer.scala index 045ca3a010..4c684af3b4 100644 --- a/src/main/scala/sigmastate/serialization/BoolToSigmaPropSerializer.scala +++ b/src/main/scala/sigmastate/serialization/BoolToSigmaPropSerializer.scala @@ -2,16 +2,15 @@ package sigmastate.serialization import sigmastate.Values.{BoolValue, SigmaPropValue} import sigmastate.lang.Terms._ -import sigmastate.serialization.OpCodes._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import sigmastate.{BoolToSigmaProp, SType, Values} case class BoolToSigmaPropSerializer(cons: BoolValue => SigmaPropValue) extends ValueSerializer[BoolToSigmaProp] { - - override val opCode: Byte = BoolToSigmaPropCode + import sigmastate.Operations.BoolToSigmaPropInfo._ + override def opDesc = BoolToSigmaProp def serialize(obj: BoolToSigmaProp, w: SigmaByteWriter): Unit = { - w.putValue(obj.value) + w.putValue(obj.value, conditionArg) } def parse(r: SigmaByteReader): Values.Value[SType] 
= { diff --git a/src/main/scala/sigmastate/serialization/CaseObjectSerialization.scala b/src/main/scala/sigmastate/serialization/CaseObjectSerialization.scala index a42e18ae8c..a1ece38023 100644 --- a/src/main/scala/sigmastate/serialization/CaseObjectSerialization.scala +++ b/src/main/scala/sigmastate/serialization/CaseObjectSerialization.scala @@ -1,11 +1,10 @@ package sigmastate.serialization import sigmastate.SType -import sigmastate.Values.Value -import sigmastate.serialization.OpCodes.OpCode +import sigmastate.Values.{Value, ValueCompanion} import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} -case class CaseObjectSerialization[V <: Value[SType]](override val opCode: OpCode, obj: V) +case class CaseObjectSerialization[V <: Value[SType]](override val opDesc: ValueCompanion, obj: V) extends ValueSerializer[V] { override def serialize(obj: V, w: SigmaByteWriter): Unit = () diff --git a/src/main/scala/sigmastate/serialization/ConcreteCollectionBooleanConstantSerializer.scala b/src/main/scala/sigmastate/serialization/ConcreteCollectionBooleanConstantSerializer.scala index 02f3d45061..3b5a092094 100644 --- a/src/main/scala/sigmastate/serialization/ConcreteCollectionBooleanConstantSerializer.scala +++ b/src/main/scala/sigmastate/serialization/ConcreteCollectionBooleanConstantSerializer.scala @@ -1,22 +1,22 @@ package sigmastate.serialization -import sigmastate.{SBoolean, SCollection} +import sigmastate.{SCollection, SBoolean, ArgInfo} import sigmastate.Values._ -import sigmastate.serialization.OpCodes._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} +import SigmaByteWriter._ case class ConcreteCollectionBooleanConstantSerializer(cons: (IndexedSeq[Value[SBoolean.type]], SBoolean.type) => Value[SCollection[SBoolean.type]]) extends ValueSerializer[ConcreteCollection[SBoolean.type]] { - - override val opCode: Byte = ConcreteCollectionBooleanConstantCode + override def opDesc = ConcreteCollectionBooleanConstant override def serialize(cc: 
ConcreteCollection[SBoolean.type], w: SigmaByteWriter): Unit = { - w.putUShort(cc.items.size) + w.putUShort(cc.items.size, ArgInfo("numBits", "number of items in a collection of Boolean values")) w.putBits( cc.items.map { case v: BooleanConstant => v.value case v => error(s"Expected collection of BooleanConstant values, got: $v") - }.toArray) + }.toArray, + maxBitsInfo("bits", 0x1FFF, "Boolean values encoded as as bits (right most byte is zero-padded on the right)")) } override def parse(r: SigmaByteReader): Value[SCollection[SBoolean.type]] = { diff --git a/src/main/scala/sigmastate/serialization/ConcreteCollectionSerializer.scala b/src/main/scala/sigmastate/serialization/ConcreteCollectionSerializer.scala index 2cab981248..cc5d86adba 100644 --- a/src/main/scala/sigmastate/serialization/ConcreteCollectionSerializer.scala +++ b/src/main/scala/sigmastate/serialization/ConcreteCollectionSerializer.scala @@ -1,20 +1,18 @@ package sigmastate.serialization -import sigmastate.{SCollection, SType} +import sigmastate.{SCollection, SType, ArgInfo} import sigmastate.Values._ -import sigmastate.serialization.OpCodes._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} -import scorex.util.Extensions._ +import ValueSerializer._ case class ConcreteCollectionSerializer(cons: (IndexedSeq[Value[SType]], SType) => Value[SCollection[SType]]) extends ValueSerializer[ConcreteCollection[_ <: SType]] { - - override val opCode: Byte = ConcreteCollectionCode + override def opDesc = ConcreteCollection override def serialize(cc: ConcreteCollection[_ <: SType], w: SigmaByteWriter): Unit = { - w.putUShort(cc.items.size) - w.putType(cc.tpe.elemType) - cc.items.foreach(w.putValue(_)) + w.putUShort(cc.items.size, ArgInfo("numItems", "number of item in a collection of expressions")) + w.putType(cc.tpe.elemType, ArgInfo("elementType", "type of each expression in the collection")) + foreach("numItems", cc.items)(w.putValue(_, ArgInfo("item_i", "expression in i-th position"))) } override def 
parse(r: SigmaByteReader): Value[SCollection[SType]] = { diff --git a/src/main/scala/sigmastate/serialization/ConstantPlaceholderSerializer.scala b/src/main/scala/sigmastate/serialization/ConstantPlaceholderSerializer.scala index d9481bba20..e9c19ca9e5 100644 --- a/src/main/scala/sigmastate/serialization/ConstantPlaceholderSerializer.scala +++ b/src/main/scala/sigmastate/serialization/ConstantPlaceholderSerializer.scala @@ -2,13 +2,11 @@ package sigmastate.serialization import sigmastate.Values._ import sigmastate._ -import sigmastate.serialization.OpCodes._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} case class ConstantPlaceholderSerializer(cons: (Int, SType) => Value[SType]) extends ValueSerializer[ConstantPlaceholder[SType]] { - - override val opCode: OpCode = ConstantPlaceholderIndexCode + override def opDesc = ConstantPlaceholder override def serialize(obj: ConstantPlaceholder[SType], w: SigmaByteWriter): Unit = { w.putUInt(obj.id) diff --git a/src/main/scala/sigmastate/serialization/ConstantSerializer.scala b/src/main/scala/sigmastate/serialization/ConstantSerializer.scala index cb3867ec12..d3d82fa58f 100644 --- a/src/main/scala/sigmastate/serialization/ConstantSerializer.scala +++ b/src/main/scala/sigmastate/serialization/ConstantSerializer.scala @@ -4,15 +4,13 @@ import sigmastate.SType import sigmastate.Values._ import sigmastate.lang.SigmaBuilder import sigmastate.lang.Terms.OperationId -import sigmastate.serialization.OpCodes.OpCode -import sigmastate.utils.{SigmaByteWriter, SigmaByteReader} +import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import sigmastate.utxo.CostTable.Cost /** This works in tandem with DataSerializer, if you change one make sure to check the other.*/ case class ConstantSerializer(builder: SigmaBuilder) extends ByteBufferSerializer[Constant[SType]] with ValueSerializer[Constant[SType]] { - - val opCode: OpCode = OpCodes.ConstantCode + override def opDesc = Constant override def opCost(opId: OperationId): Int 
= Cost.ConstantNode diff --git a/src/main/scala/sigmastate/serialization/CreateAvlTreeSerializer.scala b/src/main/scala/sigmastate/serialization/CreateAvlTreeSerializer.scala index 15fa88db47..901e532c9a 100644 --- a/src/main/scala/sigmastate/serialization/CreateAvlTreeSerializer.scala +++ b/src/main/scala/sigmastate/serialization/CreateAvlTreeSerializer.scala @@ -2,7 +2,6 @@ package sigmastate.serialization import sigmastate.SCollection._ import sigmastate.SOption.SIntOption -import sigmastate.serialization.OpCodes.OpCode import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import sigmastate._ import sigmastate.Values._ @@ -13,13 +12,14 @@ case class CreateAvlTreeSerializer( ) extends ValueSerializer[CreateAvlTree] { - override val opCode: OpCode = OpCodes.AvlTreeCode + import sigmastate.Operations.CreateAvlTreeInfo._ + override def opDesc = CreateAvlTree override def serialize(obj: CreateAvlTree, w: SigmaByteWriter): Unit = { - w.putValue(obj.operationFlags) - w.putValue(obj.digest) - w.putValue(obj.keyLength) - w.putValue(obj.valueLengthOpt) + w.putValue(obj.operationFlags, operationFlagsArg) + w.putValue(obj.digest, digestArg) + w.putValue(obj.keyLength, keyLengthArg) + w.putValue(obj.valueLengthOpt, valueLengthOptArg) } override def parse(r: SigmaByteReader) = { diff --git a/src/main/scala/sigmastate/serialization/DataSerializer.scala b/src/main/scala/sigmastate/serialization/DataSerializer.scala index 6bd9789fd8..c963f39e3b 100644 --- a/src/main/scala/sigmastate/serialization/DataSerializer.scala +++ b/src/main/scala/sigmastate/serialization/DataSerializer.scala @@ -4,18 +4,36 @@ import java.math.BigInteger import java.nio.charset.StandardCharsets import org.ergoplatform.ErgoBox +import org.ergoplatform.validation.{ValidationRule, SoftForkWhenReplaced} +import scalan.RType import sigmastate.Values.SigmaBoolean import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} -import scorex.util.Extensions._ import sigmastate._ -import 
sigmastate.eval.Evaluation -import sigmastate.interpreter.CryptoConstants.EcPointType +import sigmastate.eval.{Evaluation, _} +import sigmastate.lang.exceptions.SerializerException +import special.collection._ +import special.sigma._ +import sigma.util.Extensions.ByteOps import scala.collection.mutable /** This works in tandem with ConstantSerializer, if you change one make sure to check the other.*/ object DataSerializer { + object CheckSerializableTypeCode extends ValidationRule(1010, + "Check the data values of the type (given by type code) can be serialized") + with SoftForkWhenReplaced { + def apply[T](typeCode: Byte)(block: => T): T = { + val ucode = typeCode.toUByte + def msg = s"Data value of the type with the code $ucode cannot be deserialized." + validate(ucode <= OpCodes.LastDataType.toUByte, new SerializerException(msg), Seq(typeCode), block) + } + } + + /** Use type descriptor `tpe` to deconstruct type structure and recursively serialize subcomponents. + * Primitive types are leaves of the type tree, and they serve as the basis of recursion. + * The data value `v` is expected to conform to the type described by `tpe`.
+ */ def serialize[T <: SType](v: T#WrappedType, tpe: T, w: SigmaByteWriter): Unit = tpe match { case SUnit => // don't need to save anything case SBoolean => w.putBoolean(v.asInstanceOf[Boolean]) @@ -28,28 +46,29 @@ object DataSerializer { w.putUInt(bytes.length) w.putBytes(bytes) case SBigInt => - val data = v.asInstanceOf[BigInteger].toByteArray + val data = SigmaDsl.toBigInteger(v.asInstanceOf[BigInt]).toByteArray w.putUShort(data.length) w.putBytes(data) case SGroupElement => - GroupElementSerializer.serialize(v.asInstanceOf[EcPointType], w) + GroupElementSerializer.serialize(groupElementToECPoint(v.asInstanceOf[GroupElement]), w) case SSigmaProp => - val p = v.asInstanceOf[SigmaBoolean] - SigmaBoolean.serializer.serialize(p, w) + val p = v.asInstanceOf[SigmaProp] + SigmaBoolean.serializer.serialize(sigmaPropToSigmaBoolean(p), w) case SBox => - ErgoBox.sigmaSerializer.serialize(v.asInstanceOf[ErgoBox], w) + val b = v.asInstanceOf[Box] + ErgoBox.sigmaSerializer.serialize(boxToErgoBox(b), w) case SAvlTree => - AvlTreeData.serializer.serialize(v.asInstanceOf[AvlTreeData], w) + AvlTreeData.serializer.serialize(avlTreeToAvlTreeData(v.asInstanceOf[AvlTree]), w) case tColl: SCollectionType[a] => val arr = v.asInstanceOf[tColl.WrappedType] w.putUShort(arr.length) tColl.elemType match { case SBoolean => - w.putBits(arr.asInstanceOf[Array[Boolean]]) + w.putBits(arr.asInstanceOf[Coll[Boolean]].toArray) case SByte => - w.putBytes(arr.asInstanceOf[Array[Byte]]) + w.putBytes(arr.asInstanceOf[Coll[Byte]].toArray) case _ => - arr.foreach(x => serialize(x, tColl.elemType, w)) + arr.toArray.foreach(x => serialize(x, tColl.elemType, w)) } case t: STuple => @@ -67,52 +86,74 @@ object DataSerializer { case _ => sys.error(s"Don't know how to serialize ($v, $tpe)") } - def deserialize[T <: SType](tpe: T, r: SigmaByteReader): (T#WrappedType) = (tpe match { - case SUnit => () - case SBoolean => r.getUByte() != 0 - case SByte => r.getByte() - case SShort => r.getShort() - case SInt => 
r.getInt() - case SLong => r.getLong() - case SString => - val size = r.getUInt().toInt - val bytes = r.getBytes(size) - new String(bytes, StandardCharsets.UTF_8) - case SBigInt => - val size: Short = r.getUShort().toShort - val valueBytes = r.getBytes(size) - new BigInteger(valueBytes) - case SGroupElement => - GroupElementSerializer.parse(r) - case SSigmaProp => - SigmaBoolean.serializer.parse(r) - case SBox => - ErgoBox.sigmaSerializer.parse(r) - case SAvlTree => - AvlTreeData.serializer.parse(r) - case tColl: SCollectionType[a] => - val len = r.getUShort() - if (tColl.elemType == SByte) - r.getBytes(len) - else - deserializeArray(len, tColl.elemType, r) - case tuple: STuple => - val arr = tuple.items.map { t => - deserialize(t, r) - }.toArray[Any] - arr - case _ => sys.error(s"Don't know how to deserialize $tpe") - }).asInstanceOf[T#WrappedType] + /** Reads a data value from Reader. The data value bytes is expected to confirm + * to the type descriptor `tpe`. */ + def deserialize[T <: SType](tpe: T, r: SigmaByteReader): (T#WrappedType) = { + val depth = r.level + r.level = depth + 1 + val res = (tpe match { + case SUnit => () + case SBoolean => r.getUByte() != 0 + case SByte => r.getByte() + case SShort => r.getShort() + case SInt => r.getInt() + case SLong => r.getLong() + case SString => + val size = r.getUInt().toInt + val bytes = r.getBytes(size) + new String(bytes, StandardCharsets.UTF_8) + case SBigInt => + val size: Short = r.getUShort().toShort + val valueBytes = r.getBytes(size) + SigmaDsl.BigInt(new BigInteger(valueBytes)) + case SGroupElement => + SigmaDsl.GroupElement(GroupElementSerializer.parse(r)) + case SSigmaProp => + SigmaDsl.SigmaProp(SigmaBoolean.serializer.parse(r)) + case SBox => + SigmaDsl.Box(ErgoBox.sigmaSerializer.parse(r)) + case SAvlTree => + SigmaDsl.avlTree(AvlTreeData.serializer.parse(r)) + case tColl: SCollectionType[a] => + val len = r.getUShort() + if (tColl.elemType == SByte) + Colls.fromArray(r.getBytes(len)) + else + 
deserializeColl(len, tColl.elemType, r) + case tuple: STuple => + val arr = tuple.items.map { t => + deserialize(t, r) + }.toArray[Any] + val coll = Colls.fromArray(arr)(RType.AnyType) + Evaluation.toDslTuple(coll, tuple) + case t => + CheckSerializableTypeCode(t.typeCode) { + throw new SerializerException(s"Not defined DataSerializer for type $t") + } + }).asInstanceOf[T#WrappedType] + r.level = r.level - 1 + res + } - def deserializeArray[T <: SType](len: Int, tpe: T, r: SigmaByteReader): Array[T#WrappedType] = - tpe match { + def deserializeColl[T <: SType](len: Int, tpeElem: T, r: SigmaByteReader): Coll[T#WrappedType] = + tpeElem match { case SBoolean => - r.getBits(len).asInstanceOf[Array[T#WrappedType]] + Colls.fromArray(r.getBits(len)).asInstanceOf[Coll[T#WrappedType]] + case SByte => + Colls.fromArray(r.getBytes(len)).asInstanceOf[Coll[T#WrappedType]] case _ => - val b = mutable.ArrayBuilder.make[T#WrappedType]()(tpe.classTag) - for (i <- 0 until len) { - b += deserialize(tpe, r) + implicit val tItem = (tpeElem match { + case tTup: STuple if tTup.items.length == 2 => + Evaluation.stypeToRType(tpeElem) + case tTup: STuple => + collRType(RType.AnyType) + case _ => + Evaluation.stypeToRType(tpeElem) + }).asInstanceOf[RType[T#WrappedType]] + val b = mutable.ArrayBuilder.make[T#WrappedType]()(tItem.classTag) + for (_ <- 0 until len) { + b += deserialize(tpeElem, r) } - b.result() + Colls.fromArray(b.result()) } } diff --git a/src/main/scala/sigmastate/serialization/ErgoTreeSerializer.scala b/src/main/scala/sigmastate/serialization/ErgoTreeSerializer.scala index e9ee45f3cf..1923a65d47 100644 --- a/src/main/scala/sigmastate/serialization/ErgoTreeSerializer.scala +++ b/src/main/scala/sigmastate/serialization/ErgoTreeSerializer.scala @@ -1,25 +1,125 @@ package sigmastate.serialization -import sigmastate.SCollection.SByteArray -import sigmastate.Values.{Constant, Value, ErgoTree, ConcreteCollection} +import 
org.ergoplatform.validation.ValidationRules.{CheckDeserializedScriptIsSigmaProp, CheckHeaderSizeBit} +import org.ergoplatform.validation.{ValidationException, SigmaValidationSettings} +import sigmastate.SType +import sigmastate.Values.{Value, ErgoTree, Constant, UnparsedErgoTree} import sigmastate.lang.DeserializationSigmaBuilder -import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} -import sigmastate.utxo.Append -import sigmastate.{SGroupElement, SType} import sigmastate.lang.Terms.ValueOps +import sigmastate.lang.exceptions.SerializerException +import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} +import sigma.util.Extensions._ +import sigmastate.Values.ErgoTree.EmptyConstants + import scala.collection.mutable +/** + * Rationale for soft-forkable ErgoTree serialization. + * There are 2 points: + * + * 1) we can make size bit obligatory, i.e. always save total size of script body + * (in this case we don't need size bit in the header). This will allow to + * always skip the right number of bytes in case of any exception (including + * ValidationException) thrown during deserialization and produce + * UnparsedErgoTree. The decision about soft-fork can be done later. + * But it looks like this is not necessary if we do as described below. + * + * 2) we can also strictly check during deserialization the content of the script + * against version number in the header. Thus if the header has vK, then + * script is allowed to have instructions from versions from v1 to vK. On a node vN, N > + * K, this should also be enforced, i.e. vN node will reject scripts as invalid + * if the script has vK in header and vK+1 instruction in body. + * + * Keeping this in mind, if we have a vN node and a script with vS in its header then: + * During script deserialization: + * 1) if vN >= vS then + * the node knows all the instructions and should check that only instructions + * up to vS are used in the script.
+ * It either parses successfully or throws MalformedScriptException. + * If during the process some unknown instruction is encountered (i.e. ValidationException is thrown), + * this cannot be a soft-fork, because vN >= vS guarantees that all instructions are known, + * thus the script is malformed. + * + * 2) if vN < vS then + * the vN node is expecting unknown instructions. + * If the script is parsed successfully, then + * vN subset of the language is used and script is accepted for execution + * else if ValidationException is thrown then + * UnparsedErgoTree is created, delaying decision about soft-fork until stateful validation. + * if bodySize is stored then + * script body is skipped and whole TX deserialization continues. + * otherwise + * we cannot skip the body which leads to whole TX to be rejected (CannotSkipScriptException) + * else if some other exception is thrown then + * the whole TX is rejected due to said exception. + * + * In the stateful context: + * if vN >= vS then + * we can execute script, but we do additional check + * if vS > the current version of protocol (vP) then + * the script is rejected as invalid because its version exceeds + * the current consensus version of the protocol + * else + * the script can be executed + * if vN < vS then + * if we have Right(tree) + * the script is executed + * if Left(UnparsedErgoTree()) then check soft fork and either execute or throw + * + * Proposition: + * CannotSkipScriptException can only happen on < 10% of the nodes, which is safe for consensus. + * Proof. + * It follows from the fact that vN >= vS nodes will reject the script + * until new vP is upgraded to vS, which means the majority has upgraded to at least vS + * Thus, before vP is upgraded to vS, majority reject (either because they cannot parse, or because vP is not actualized) + * after that majority accept (however old nodes still reject but they are < 10%) + * End of proof.
+ * + */ class ErgoTreeSerializer { - /** Default serialization of ErgoTree. Doesn't apply any transformations and guarantee to preserve original - * structure after deserialization. */ - def serializeErgoTree(ergoTree: ErgoTree): Array[Byte] = { + /** Serialize header and constants section only.*/ + private def serializeHeader(ergoTree: ErgoTree, w: SigmaByteWriter): Unit = { + w.put(ergoTree.header) + if (ergoTree.isConstantSegregation) { + val constantSerializer = ConstantSerializer(DeserializationSigmaBuilder) + w.putUInt(ergoTree.constants.length) + ergoTree.constants.foreach(c => constantSerializer.serialize(c, w)) + } + } + + private def serializeWithoutSize(ergoTree: ErgoTree): Array[Byte] = { val w = SigmaSerializer.startWriter() serializeHeader(ergoTree, w) - ValueSerializer.serialize(ergoTree.root, w) + assert(ergoTree.isRightParsed, s"Right parsed ErgoTree expected: $ergoTree") + ValueSerializer.serialize(ergoTree.root.right.get, w) w.toBytes } + /** Default serialization of ErgoTree. + * Doesn't apply any transformations and guarantee to preserve original + * structure after deserialization. */ + def serializeErgoTree(ergoTree: ErgoTree): Array[Byte] = { + val res = ergoTree.root match { + case Left(UnparsedErgoTree(bytes, error)) => bytes.array + case _ => + val bytes = serializeWithoutSize(ergoTree) + if (ergoTree.hasSize) { + val w = SigmaSerializer.startWriter() + val header = bytes(0) + val contentLength = bytes.length - 1 + val contentBytes = new Array[Byte](contentLength) + Array.copy(bytes, 1, contentBytes, 0, contentLength) // TODO optimize: avoid new array by implementing putSlice(arr, from, len) + w.put(header) + w.putUInt(contentLength) + w.putBytes(contentBytes) + w.toBytes + } + else bytes + } + res + } + /** Default deserialization of ErgoTree (should be inverse to `serializeErgoTree`). * Doesn't apply any transformations to the parsed tree. 
*/ def deserializeErgoTree(bytes: Array[Byte]): ErgoTree = { @@ -27,29 +127,50 @@ class ErgoTreeSerializer { deserializeErgoTree(r) } - def deserializeErgoTree(r: SigmaByteReader): ErgoTree = { - val (h, cs) = deserializeHeader(r) - val previousConstantStore = r.constantStore - r.constantStore = new ConstantStore(cs) - // reader with constant store attached is required (to get tpe for a constant placeholder) - val root = ValueSerializer.deserialize(r).asSigmaProp - r.constantStore = previousConstantStore - ErgoTree(h, cs, root) - } - - /** Serialize header and constants section only.*/ - private def serializeHeader(ergoTree: ErgoTree, w: SigmaByteWriter): Unit = { - w.put(ergoTree.header) - if (ergoTree.isConstantSegregation) { - val constantSerializer = ConstantSerializer(DeserializationSigmaBuilder) - w.putUInt(ergoTree.constants.length) - ergoTree.constants.foreach(c => constantSerializer.serialize(c, w)) + def deserializeErgoTree(r: SigmaByteReader): ErgoTree = { + val startPos = r.position + val (h, sizeOpt) = deserializeHeaderAndSize(r) + val bodyPos = r.position + val tree = try { + val cs = deserializeConstants(h, r) + val previousConstantStore = r.constantStore + r.constantStore = new ConstantStore(cs) + // reader with constant store attached is required (to get tpe for a constant placeholder) + val root = ValueSerializer.deserialize(r) + CheckDeserializedScriptIsSigmaProp(root) {} + r.constantStore = previousConstantStore + ErgoTree(h, cs, root.asSigmaProp) + } + catch { + case ve: ValidationException => + sizeOpt match { + case Some(treeSize) => + val numBytes = bodyPos - startPos + treeSize + r.position = startPos + val bytes = r.getBytes(numBytes) + ErgoTree(ErgoTree.DefaultHeader, EmptyConstants, Left(UnparsedErgoTree(bytes, ve))) + case None => + throw new SerializerException( + s"Cannot handle ValidationException, ErgoTree serialized without size bit.", None, Some(ve)) + } } + tree } - /** Deserialize header and constants section only. 
*/ - private def deserializeHeader(r: SigmaByteReader): (Byte, Array[Constant[SType]]) = { + /** Deserialize `header` and optional `size` slots only. */ + private def deserializeHeaderAndSize(r: SigmaByteReader): (Byte, Option[Int]) = { val header = r.getByte() + CheckHeaderSizeBit(header) + val sizeOpt = if (ErgoTree.hasSize(header)) { + val size = r.getUInt().toIntExact + Some(size) + } else + None + (header, sizeOpt) + } + + /** Deserialize constants section only. */ + private def deserializeConstants(header: Byte, r: SigmaByteReader): Array[Constant[SType]] = { val constants = if (ErgoTree.isConstantSegregation(header)) { val constantSerializer = ConstantSerializer(DeserializationSigmaBuilder) val nConsts = r.getUInt().toInt @@ -61,92 +182,24 @@ class ErgoTreeSerializer { } else Array.empty[Constant[SType]] - (header, constants) - } - - /** Serialize Value with ConstantSegregationHeader, constants segregated from the tree and ConstantPlaceholders - * referring to the segregated constants. - * - * This method uses single traverse of the tree to: - * 1) find and segregate all constants; - * 2) replace constants with ConstantPlaceholders in the `tree`; - * 3) write the `tree` to the Writer's buffer obtaining `treeBytes`. 
- * - * After the constants are collected the final byte array is composed by serializing constants and - * then appending `treeBytes` */ - def serializeWithSegregation(tree: Value[SType]): Array[Byte] = { - val constantStore = new ConstantStore() - val treeWriter = SigmaSerializer.startWriter(constantStore) - - // serialize tree and segregate constants into constantStore - ValueSerializer.serialize(tree, treeWriter) - val extractedConstants = constantStore.getAll - - val w = SigmaSerializer.startWriter() - serializeHeader(ErgoTree(ErgoTree.ConstantSegregationHeader, extractedConstants, null), w) - - // write tree bytes with ConstantsPlaceholders (which were injected during serialization) - w.putBytes(treeWriter.toBytes) - w.toBytes + constants } /** Deserialize header and constant sections, but output the rest of the bytes as separate array. */ - def deserializeHeaderWithTreeBytes(r: SigmaByteReader): (Byte, Array[Constant[SType]], Array[Byte]) = { - val (header, constants) = deserializeHeader(r) + def deserializeHeaderWithTreeBytes(r: SigmaByteReader): (Byte, Option[Int], Array[Constant[SType]], Array[Byte]) = { + val (header, sizeOpt) = deserializeHeaderAndSize(r) + val constants = deserializeConstants(header, r) val treeBytes = r.getBytes(r.remaining) - (header, constants, treeBytes) - } - - def deserialize(bytes: Array[Byte], resolvePlaceholdersToConstants: Boolean = true): Value[SType] = { - deserialize(SigmaSerializer.startReader(bytes), resolvePlaceholdersToConstants) - } - - /** Deserialize Value replacing placeholders with constants if the parameter is true. 
*/ - def deserialize(r: SigmaByteReader, resolvePlaceholdersToConstants: Boolean): Value[SType] = { - val (header, constants) = deserializeHeader(r) - require(!resolvePlaceholdersToConstants || ErgoTree.isConstantSegregation(header), - s"Invalid arguments of ErgoTreeSerializer.deserialize: resolvePlaceholdersToConstants=$resolvePlaceholdersToConstants, header=$header") - - val previousConstantStore = r.constantStore - r.constantStore = new ConstantStore(constants) - val previousResolvePlaceholderValue = r.resolvePlaceholdersToConstants - r.resolvePlaceholdersToConstants = resolvePlaceholdersToConstants - val tree = ValueSerializer.deserialize(r) - r.constantStore = previousConstantStore - r.resolvePlaceholdersToConstants = previousResolvePlaceholderValue - tree - } - - def deserializeWithConstantInjection(constantStore: ConstantStore, treeBytes: Array[Byte]): Value[SType] = { - val r = SigmaSerializer.startReader(treeBytes, constantStore, resolvePlaceholdersToConstants = true) - val tree = ValueSerializer.deserialize(r) - tree + (header, sizeOpt, constants, treeBytes) } - def serializedPubkeyPropValue(pubkey: Value[SByteArray]): Value[SByteArray] = - Append( - Append( - ConcreteCollection( - 0.toByte, // header - 1.toByte, // const count - SGroupElement.typeCode // const type - ), - pubkey // const value - ), - ConcreteCollection( - OpCodes.ProveDlogCode, - OpCodes.ConstantPlaceholderIndexCode, - 0.toByte // constant index in the store - ) - ) - def substituteConstants(scriptBytes: Array[Byte], positions: Array[Int], - newVals: Array[Value[SType]]): Array[Byte] = { + newVals: Array[Value[SType]])(implicit vs: SigmaValidationSettings): Array[Byte] = { require(positions.length == newVals.length, s"expected positions and newVals to have the same length, got: positions: ${positions.toSeq},\n newVals: ${newVals.toSeq}") val r = SigmaSerializer.startReader(scriptBytes) - val (header, constants, treeBytes) = deserializeHeaderWithTreeBytes(r) + val (header, _, constants, 
treeBytes) = deserializeHeaderWithTreeBytes(r) val w = SigmaSerializer.startWriter() w.put(header) w.putUInt(constants.length) @@ -158,7 +211,7 @@ class ErgoTreeSerializer { // we need to get newVal's serialized constant value (see ProveDlogSerializer for example) val constantStore = new ConstantStore() val valW = SigmaSerializer.startWriter(constantStore) - ValueSerializer.serialize(newVal, valW) + valW.putValue(newVal) val newConsts = constantStore.getAll assert(newConsts.length == 1) val newConst = newConsts.head @@ -171,6 +224,7 @@ class ErgoTreeSerializer { w.putBytes(treeBytes) w.toBytes } + } object ErgoTreeSerializer { diff --git a/src/main/scala/sigmastate/serialization/FuncValueSerializer.scala b/src/main/scala/sigmastate/serialization/FuncValueSerializer.scala index fb63f8bc68..23bb527ede 100644 --- a/src/main/scala/sigmastate/serialization/FuncValueSerializer.scala +++ b/src/main/scala/sigmastate/serialization/FuncValueSerializer.scala @@ -2,21 +2,23 @@ package sigmastate.serialization import sigmastate.Values._ import sigmastate._ -import sigmastate.serialization.OpCodes._ import scorex.util.Extensions._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} +import ValueSerializer._ import scala.collection.mutable case class FuncValueSerializer(cons: (IndexedSeq[(Int, SType)], Value[SType]) => Value[SType]) extends ValueSerializer[FuncValue] { - - override val opCode: OpCode = FuncValueCode + override def opDesc = FuncValue override def serialize(obj: FuncValue, w: SigmaByteWriter): Unit = { - w.putUInt(obj.args.length) - obj.args.foreach{ case (idx, tpe) => w.putUInt(idx).putType(tpe) } - w.putValue(obj.body) + w.putUInt(obj.args.length, ArgInfo("numArgs", "number of function arguments")) + foreach("numArgs", obj.args) { case (idx, tpe) => + w.putUInt(idx, ArgInfo("id_i", "identifier of the i-th argument")) + .putType(tpe, ArgInfo("type_i", "type of the i-th argument")) + } + w.putValue(obj.body, ArgInfo("body", "function body, which is 
parameterized by arguments")) } override def parse(r: SigmaByteReader): Value[SType] = { diff --git a/src/main/scala/sigmastate/serialization/GetVarSerializer.scala b/src/main/scala/sigmastate/serialization/GetVarSerializer.scala index a29b0fcbf8..7dee2117ef 100644 --- a/src/main/scala/sigmastate/serialization/GetVarSerializer.scala +++ b/src/main/scala/sigmastate/serialization/GetVarSerializer.scala @@ -2,19 +2,17 @@ package sigmastate.serialization import sigmastate.Values._ import sigmastate._ -import sigmastate.serialization.OpCodes._ -import scorex.util.Extensions._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import sigmastate.utxo.GetVar case class GetVarSerializer(cons: (Byte, SType) => Value[SOption[SType]]) extends ValueSerializer[GetVar[_ <: SType]] { - - override val opCode: OpCode = GetVarCode + import sigmastate.Operations.GetVarInfo._ + override def opDesc = GetVar override def serialize(obj: GetVar[_ <: SType], w: SigmaByteWriter): Unit = - w.put(obj.varId) - .putType(obj.tpe.elemType) + w.put(obj.varId, varIdArg) + .putType(obj.tpe.elemType, ArgInfo("type", "expected type of context variable")) override def parse(r: SigmaByteReader): Value[SType] = { val varId = r.getByte() diff --git a/src/main/scala/sigmastate/serialization/LogicalNotSerializer.scala b/src/main/scala/sigmastate/serialization/LogicalNotSerializer.scala index 01deb7af4c..0d3603f134 100644 --- a/src/main/scala/sigmastate/serialization/LogicalNotSerializer.scala +++ b/src/main/scala/sigmastate/serialization/LogicalNotSerializer.scala @@ -1,14 +1,18 @@ package sigmastate.serialization import sigmastate.LogicalNot +import sigmastate.Operations.LogicalNotInfo import sigmastate.Values.BoolValue import sigmastate.lang.Terms._ -import sigmastate.serialization.OpCodes.OpCode import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} case class LogicalNotSerializer(cons: BoolValue => BoolValue) extends ValueSerializer[LogicalNot] { - override val opCode: OpCode = 
OpCodes.LogicalNotCode - override def serialize(obj: LogicalNot, w: SigmaByteWriter): Unit = w.putValue(obj.input) - override def parse(r: SigmaByteReader): BoolValue = cons(r.getValue().asBoolValue) + override def opDesc = LogicalNot + + override def serialize(obj: LogicalNot, w: SigmaByteWriter): Unit = + w.putValue(obj.input, LogicalNotInfo.inputArg) + + override def parse(r: SigmaByteReader): BoolValue = + cons(r.getValue().asBoolValue) } diff --git a/src/main/scala/sigmastate/serialization/MethodCallSerializer.scala b/src/main/scala/sigmastate/serialization/MethodCallSerializer.scala index 56afa33726..ecb662dc77 100644 --- a/src/main/scala/sigmastate/serialization/MethodCallSerializer.scala +++ b/src/main/scala/sigmastate/serialization/MethodCallSerializer.scala @@ -3,50 +3,39 @@ package sigmastate.serialization import sigmastate.Values._ import sigmastate._ import sigmastate.lang.SigmaTyper.STypeSubst -import sigmastate.lang.Terms.{MethodCall, STypeParam} +import sigmastate.lang.Terms.MethodCall import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} -case class MethodCallSerializer(opCode: Byte, cons: (Value[SType], SMethod, IndexedSeq[Value[SType]], STypeSubst) => Value[SType]) +case class MethodCallSerializer(cons: (Value[SType], SMethod, IndexedSeq[Value[SType]], STypeSubst) => Value[SType]) extends ValueSerializer[MethodCall] { + override def opDesc: ValueCompanion = MethodCall override def serialize(mc: MethodCall, w: SigmaByteWriter): Unit = { - w.put(mc.method.objType.typeId) - w.put(mc.method.methodId) - w.putValue(mc.obj) - if (opCode == OpCodes.MethodCallCode) { - assert(mc.args.nonEmpty) - w.putValues(mc.args) - } -// mc.method.stype match { -// case genType: SGenericType if mc.typeSubst.nonEmpty => -// w.putUByte(mc.typeSubst.size) -// genType.substitutedTypeParams.foreach { tp => -// w.putType(mc.typeSubst(tp.ident)) -// } -// case _ => w.putUByte(0) -// } + w.put(mc.method.objType.typeId, ArgInfo("typeCode", "type of the method (see 
Table~\\ref{table:predeftypes})")) + w.put(mc.method.methodId, ArgInfo("methodCode", "a code of the method")) + w.putValue(mc.obj, ArgInfo("obj", "receiver object of this method call")) + assert(mc.args.nonEmpty) + w.putValues(mc.args, ArgInfo("args", "arguments of the method call")) } + /** The SMethod instances in STypeCompanions may have type STypeIdent in methods types, + * but a valid ErgoTree should have SMethod instances specialized for specific types + * of `obj` and `args` using `specializeFor`. + * This means, if we save typeId, methodId, and we save all the arguments, + * we can restore the specialized SMethod instance. + * This works by induction, if we assume all arguments are monomorphic, + * then we can make MethodCall monomorphic. Thus, all ErgoTree is monomorphic by construction. + * This is a limitation of MethodCall, because we cannot use it to represent for example + * def Box.getReg[T](id: Int): Option[T], which requires serialization of expected type `T` + * However it can be implemented using separate node type (new type code) and can be added via soft-fork.
+ */ override def parse(r: SigmaByteReader): Value[SType] = { val typeId = r.getByte() val methodId = r.getByte() val obj = r.getValue() - val args = if (opCode == OpCodes.MethodCallCode) r.getValues() else IndexedSeq() + val args = r.getValues() val method = SMethod.fromIds(typeId, methodId) val specMethod = method.specializeFor(obj.tpe, args.map(_.tpe)) -// val typeSubst: STypeSubst = method.stype match { -// case genType: SGenericType => -// val typeSubstSize = r.getUByte() -// val xs = new Array[(STypeIdent, SType)](typeSubstSize) -// for (i <- 0 until typeSubstSize) { -// val ti = genType.substitutedTypeParams(i).ident -// xs(i) = (ti, r.getType()) -// } -// xs.toMap -// case _ => -// r.getUByte() // read 0 -// Map() -// } cons(obj, specMethod, args, Map()) } } diff --git a/src/main/scala/sigmastate/serialization/ModQArithOpSerializer.scala b/src/main/scala/sigmastate/serialization/ModQArithOpSerializer.scala index 3a88c64a14..6726bbb134 100644 --- a/src/main/scala/sigmastate/serialization/ModQArithOpSerializer.scala +++ b/src/main/scala/sigmastate/serialization/ModQArithOpSerializer.scala @@ -3,14 +3,14 @@ package sigmastate.serialization import sigmastate.Values.{BigIntValue, Value} import sigmastate.lang.Terms._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} -import sigmastate.{ModQArithOp, SType} +import sigmastate.{SType, ModQArithOp, ModQArithOpCompanion} -case class ModQArithOpSerializer(override val opCode: Byte, cons: (BigIntValue, BigIntValue) => BigIntValue) +case class ModQArithOpSerializer(override val opDesc: ModQArithOpCompanion, cons: (BigIntValue, BigIntValue) => BigIntValue) extends ValueSerializer[ModQArithOp] { override def serialize(obj: ModQArithOp, w: SigmaByteWriter): Unit = { - w.putValue(obj.left) - .putValue(obj.right) + w.putValue(obj.left, opDesc.argInfos(0)) + .putValue(obj.right, opDesc.argInfos(1)) } override def parse(r: SigmaByteReader): Value[SType] = { diff --git 
a/src/main/scala/sigmastate/serialization/ModQSerializer.scala b/src/main/scala/sigmastate/serialization/ModQSerializer.scala index 823d75cc6b..6ccb69141c 100644 --- a/src/main/scala/sigmastate/serialization/ModQSerializer.scala +++ b/src/main/scala/sigmastate/serialization/ModQSerializer.scala @@ -1,16 +1,16 @@ package sigmastate.serialization +import sigmastate.Operations.ModQInfo import sigmastate.Values.Value import sigmastate.lang.Terms._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import sigmastate.{ModQ, SType} object ModQSerializer extends ValueSerializer[ModQ] { - - override val opCode: Byte = OpCodes.ModQCode + override def opDesc = ModQ def serialize(obj: ModQ, w: SigmaByteWriter): Unit = { - w.putValue(obj.input) + w.putValue(obj.input, ModQInfo.thisArg) } def parse(r: SigmaByteReader): Value[SType] = { diff --git a/src/main/scala/sigmastate/serialization/OneArgumentOperationSerializer.scala b/src/main/scala/sigmastate/serialization/OneArgumentOperationSerializer.scala index 6cf9393926..e85b982b71 100644 --- a/src/main/scala/sigmastate/serialization/OneArgumentOperationSerializer.scala +++ b/src/main/scala/sigmastate/serialization/OneArgumentOperationSerializer.scala @@ -1,13 +1,16 @@ package sigmastate.serialization -import sigmastate.Values.{SValue, Value} +import sigmastate.Values.{Value, SValue} import sigmastate.lang.Terms._ -import sigmastate.serialization.OpCodes.OpCode import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} -import sigmastate.{OneArgumentOperation, SNumericType, SType} +import sigmastate.{OneArgumentOperation, OneArgumentOperationCompanion, SType} -case class OneArgumentOperationSerializer[T <: SType](opCode: OpCode, cons: Value[T] => SValue) +case class OneArgumentOperationSerializer[T <: SType](opDesc: OneArgumentOperationCompanion, cons: Value[T] => SValue) extends ValueSerializer[OneArgumentOperation[T, SType]] { - override def serialize(obj: OneArgumentOperation[T, SType], w: SigmaByteWriter): Unit = 
w.putValue(obj.input) - override def parse(r: SigmaByteReader): SValue = cons(r.getValue().asValue[T]) + + override def serialize(obj: OneArgumentOperation[T, SType], w: SigmaByteWriter): Unit = + w.putValue(obj.input, opDesc.argInfos(0)) + + override def parse(r: SigmaByteReader): SValue = + cons(r.getValue().asValue[T]) } diff --git a/src/main/scala/sigmastate/serialization/OpCodes.scala b/src/main/scala/sigmastate/serialization/OpCodes.scala index b36018f5d1..c020df9865 100644 --- a/src/main/scala/sigmastate/serialization/OpCodes.scala +++ b/src/main/scala/sigmastate/serialization/OpCodes.scala @@ -41,7 +41,7 @@ object OpCodes extends ValueCodes { // variables val TaggedVariableCode: OpCode = (LastConstantCode + 1).toByte val ValUseCode: OpCode = (LastConstantCode + 2).toByte - val ConstantPlaceholderIndexCode: OpCode = (LastConstantCode + 3).toByte + val ConstantPlaceholderCode: OpCode = (LastConstantCode + 3).toByte val SubstConstantsCode: OpCode = (LastConstantCode + 4).toByte // reserved 5 - 9 (5) val LongToByteArrayCode : OpCode = (LastConstantCode + 10).toByte @@ -49,7 +49,7 @@ object OpCodes extends ValueCodes { val ByteArrayToLongCode : OpCode = (LastConstantCode + 12).toByte val DowncastCode : OpCode = (LastConstantCode + 13).toByte val UpcastCode : OpCode = (LastConstantCode + 14).toByte - + // EvaluatedValue descendants val TrueCode : OpCode = (LastConstantCode + 15).toByte val FalseCode : OpCode = (LastConstantCode + 16).toByte @@ -64,7 +64,7 @@ object OpCodes extends ValueCodes { val Select3Code : OpCode = (LastConstantCode + 25).toByte val Select4Code : OpCode = (LastConstantCode + 26).toByte val Select5Code : OpCode = (LastConstantCode + 27).toByte - val SelectFieldCode: OpCode = (LastConstantCode + 28).toByte // reserved 29 (1) + val SelectFieldCode: OpCode = (LastConstantCode + 28).toByte // reserved 29-30 (2) // Relation descendants val LtCode : OpCode = (LastConstantCode + 31).toByte @@ -95,7 +95,8 @@ object OpCodes extends ValueCodes { val 
InputsCode : OpCode = (LastConstantCode + 52).toByte val OutputsCode : OpCode = (LastConstantCode + 53).toByte val LastBlockUtxoRootHashCode: OpCode = (LastConstantCode + 54).toByte - val SelfCode : OpCode = (LastConstantCode + 55).toByte + val SelfCode : OpCode = (LastConstantCode + 55).toByte // reserved 56 - 59 (4) + val MinerPubkeyCode : OpCode = (LastConstantCode + 60).toByte // Collection and tree operations codes @@ -109,13 +110,7 @@ object OpCodes extends ValueCodes { val SliceCode : OpCode = (LastConstantCode + 68).toByte val FilterCode : OpCode = (LastConstantCode + 69).toByte val AvlTreeCode : OpCode = (LastConstantCode + 70).toByte - val AvlTreeGetCode : OpCode = (LastConstantCode + 71).toByte -// val TreeUpdatesCode : OpCode = (LastConstantCode + 71).toByte -// val TreeInsertsCode : OpCode = (LastConstantCode + 72).toByte -// val TreeRemovalsCode : OpCode = (LastConstantCode + 73).toByte -// val TreeGetManyCode : OpCode = (LastConstantCode + 74).toByte -// val TreeContainsCode : OpCode = (LastConstantCode + 75).toByte - // reserved 72 - 80 (9) + val AvlTreeGetCode : OpCode = (LastConstantCode + 71).toByte // reserved 72 - 80 (9) // Type casts codes val ExtractAmountCode : OpCode = (LastConstantCode + 81).toByte @@ -130,7 +125,7 @@ object OpCodes extends ValueCodes { val CalcBlake2b256Code : OpCode = (LastConstantCode + 91).toByte val CalcSha256Code : OpCode = (LastConstantCode + 92).toByte val ProveDlogCode : OpCode = (LastConstantCode + 93).toByte - val ProveDiffieHellmanTupleCode: OpCode = (LastConstantCode + 94).toByte + val ProveDHTupleCode : OpCode = (LastConstantCode + 94).toByte val SigmaPropIsProvenCode : OpCode = (LastConstantCode + 95).toByte val SigmaPropBytesCode : OpCode = (LastConstantCode + 96).toByte val BoolToSigmaPropCode : OpCode = (LastConstantCode + 97).toByte @@ -148,10 +143,10 @@ object OpCodes extends ValueCodes { val FuncApplyCode: OpCode = (LastConstantCode + 106).toByte val PropertyCallCode: OpCode = (LastConstantCode + 
107).toByte val MethodCallCode: OpCode = (LastConstantCode + 108).toByte - // reserved 109 (1) + val GlobalCode : OpCode = (LastConstantCode + 109).toByte val SomeValueCode: OpCode = (LastConstantCode + 110).toByte - val NoneValueCode: OpCode = (LastConstantCode + 111).toByte + val NoneValueCode: OpCode = (LastConstantCode + 111).toByte // reserved 112 - 114 (3) val GetVarCode : OpCode = (LastConstantCode + 115).toByte val OptionGetCode : OpCode = (LastConstantCode + 116).toByte @@ -191,5 +186,5 @@ object OpCodes extends ValueCodes { val CollRotateRightCode : OpCode = (LastConstantCode + 141).toByte val ContextCode : OpCode = (LastConstantCode + 142).toByte - val XorOfCode : OpCode = (LastConstantCode + 143).toByte + val XorOfCode : OpCode = (LastConstantCode + 143).toByte // equals to 255 } diff --git a/src/main/scala/sigmastate/serialization/OptionGetOrElseSerializer.scala b/src/main/scala/sigmastate/serialization/OptionGetOrElseSerializer.scala index d2f6945a51..155f39e2e4 100644 --- a/src/main/scala/sigmastate/serialization/OptionGetOrElseSerializer.scala +++ b/src/main/scala/sigmastate/serialization/OptionGetOrElseSerializer.scala @@ -3,19 +3,17 @@ package sigmastate.serialization import sigmastate.Values._ import sigmastate._ import sigmastate.lang.Terms._ -import sigmastate.serialization.OpCodes._ -import scorex.util.Extensions._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import sigmastate.utxo.OptionGetOrElse case class OptionGetOrElseSerializer(cons: (Value[SOption[SType]], Value[SType]) => Value[SType]) extends ValueSerializer[OptionGetOrElse[_ <: SType]] { - - override val opCode: OpCode = OptionGetOrElseCode + import sigmastate.Operations.OptionGetOrElseInfo._ + override def opDesc = OptionGetOrElse override def serialize(obj: OptionGetOrElse[_ <: SType], w: SigmaByteWriter): Unit = - w.putValue(obj.input) - .putValue(obj.default) + w.putValue(obj.input, thisArg) + .putValue(obj.default, defaultArg) override def parse(r: 
SigmaByteReader): Value[SType] = { diff --git a/src/main/scala/sigmastate/serialization/PropertyCallSerializer.scala b/src/main/scala/sigmastate/serialization/PropertyCallSerializer.scala new file mode 100644 index 0000000000..90f64fb5fa --- /dev/null +++ b/src/main/scala/sigmastate/serialization/PropertyCallSerializer.scala @@ -0,0 +1,28 @@ +package sigmastate.serialization + +import sigmastate.Values._ +import sigmastate._ +import sigmastate.lang.SigmaTyper.STypeSubst +import sigmastate.lang.Terms.{MethodCall, PropertyCall} +import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} + +case class PropertyCallSerializer(cons: (Value[SType], SMethod, IndexedSeq[Value[SType]], STypeSubst) => Value[SType]) + extends ValueSerializer[MethodCall] { + override def opDesc: ValueCompanion = PropertyCall + + override def serialize(mc: MethodCall, w: SigmaByteWriter): Unit = { + w.put(mc.method.objType.typeId, ArgInfo("typeCode", "type of the method (see Table~\\ref{table:predeftypes})")) + w.put(mc.method.methodId, ArgInfo("methodCode", "a code of the property")) + w.putValue(mc.obj, ArgInfo("obj", "receiver object of this property call")) + } + + override def parse(r: SigmaByteReader): Value[SType] = { + val typeId = r.getByte() + val methodId = r.getByte() + val obj = r.getValue() + val args = IndexedSeq() + val method = SMethod.fromIds(typeId, methodId) + val specMethod = method.specializeFor(obj.tpe, args) + cons(obj, specMethod, args, Map()) + } +} diff --git a/src/main/scala/sigmastate/serialization/ProveDlogSerializer.scala b/src/main/scala/sigmastate/serialization/ProveDlogSerializer.scala index 8b49e3e7ad..040461cf48 100644 --- a/src/main/scala/sigmastate/serialization/ProveDlogSerializer.scala +++ b/src/main/scala/sigmastate/serialization/ProveDlogSerializer.scala @@ -5,27 +5,27 @@ import sigmastate.{SGroupElement, CreateProveDlog} import sigmastate.Values.{Value, SigmaPropValue} import sigmastate.interpreter.CryptoConstants.EcPointType import 
sigmastate.lang.Terms._ -import sigmastate.serialization.OpCodes.OpCode import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} case class ProveDlogSerializer(cons: EcPointType => ProveDlog) extends SigmaSerializer[ProveDlog, ProveDlog] { override def serialize(obj: ProveDlog, w: SigmaByteWriter): Unit = - DataSerializer.serialize[SGroupElement.type](obj.value, SGroupElement, w) + GroupElementSerializer.serialize(obj.value, w) override def parse(r: SigmaByteReader) = { - val res = DataSerializer.deserialize(SGroupElement, r) + val res = GroupElementSerializer.parse(r) cons(res) } } case class CreateProveDlogSerializer(cons: Value[SGroupElement.type] => SigmaPropValue) extends ValueSerializer[CreateProveDlog] { - override val opCode: OpCode = OpCodes.ProveDlogCode + import sigmastate.Operations.CreateProveDlogInfo._ + override def opDesc = CreateProveDlog override def serialize(obj: CreateProveDlog, w: SigmaByteWriter): Unit = { - w.putValue(obj.value) + w.putValue(obj.value, valueArg) } override def parse(r: SigmaByteReader) = { diff --git a/src/main/scala/sigmastate/serialization/SelectFieldSerializer.scala b/src/main/scala/sigmastate/serialization/SelectFieldSerializer.scala index 6ea32e10d9..105a84bd6a 100644 --- a/src/main/scala/sigmastate/serialization/SelectFieldSerializer.scala +++ b/src/main/scala/sigmastate/serialization/SelectFieldSerializer.scala @@ -1,20 +1,18 @@ package sigmastate.serialization +import sigmastate.Operations.SelectFieldInfo import sigmastate.Values.Value import sigmastate.lang.Terms._ -import sigmastate.serialization.OpCodes._ -import scorex.util.Extensions._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import sigmastate.utxo.SelectField import sigmastate.{STuple, SType} case class SelectFieldSerializer(cons: (Value[STuple], Byte) => Value[SType]) extends ValueSerializer[SelectField] { - - override val opCode: Byte = SelectFieldCode + override def opDesc = SelectField override def serialize(obj: SelectField, w: 
SigmaByteWriter): Unit = - w.putValue(obj.input) - .put(obj.fieldIndex) + w.putValue(obj.input, SelectFieldInfo.inputArg) + .put(obj.fieldIndex, SelectFieldInfo.fieldIndexArg) override def parse(r: SigmaByteReader): Value[SType] = { val tuple = r.getValue().asValue[STuple] diff --git a/src/main/scala/sigmastate/serialization/SigmaPropBytesSerializer.scala b/src/main/scala/sigmastate/serialization/SigmaPropBytesSerializer.scala index 37acddf546..e107b6da8a 100644 --- a/src/main/scala/sigmastate/serialization/SigmaPropBytesSerializer.scala +++ b/src/main/scala/sigmastate/serialization/SigmaPropBytesSerializer.scala @@ -2,16 +2,15 @@ package sigmastate.serialization import sigmastate.{Values, SType} import sigmastate.lang.Terms._ -import scorex.util.Extensions._ -import sigmastate.serialization.OpCodes._ -import sigmastate.utils.{SigmaByteWriter, SigmaByteReader} +import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import sigmastate.utxo.SigmaPropBytes object SigmaPropBytesSerializer extends ValueSerializer[SigmaPropBytes] { - override val opCode: Byte = SigmaPropBytesCode + import sigmastate.Operations.SigmaPropBytesInfo._ + override def opDesc = SigmaPropBytes def serialize(obj: SigmaPropBytes, w: SigmaByteWriter): Unit = { - w.putValue(obj.input) + w.putValue(obj.input, thisArg) } def parse(r: SigmaByteReader): Values.Value[SType] = { diff --git a/src/main/scala/sigmastate/serialization/SigmaPropIsProvenSerializer.scala b/src/main/scala/sigmastate/serialization/SigmaPropIsProvenSerializer.scala index 5be17248da..0350f582bb 100644 --- a/src/main/scala/sigmastate/serialization/SigmaPropIsProvenSerializer.scala +++ b/src/main/scala/sigmastate/serialization/SigmaPropIsProvenSerializer.scala @@ -2,14 +2,11 @@ package sigmastate.serialization import sigmastate.{Values, SType} import sigmastate.lang.Terms._ -import sigmastate.serialization.OpCodes._ -import sigmastate.utils.{SigmaByteWriter, SigmaByteReader} +import sigmastate.utils.{SigmaByteReader, 
SigmaByteWriter} import sigmastate.utxo.SigmaPropIsProven -import scorex.util.Extensions._ object SigmaPropIsProvenSerializer extends ValueSerializer[SigmaPropIsProven] { - - override val opCode: Byte = SigmaPropIsProvenCode + override def opDesc = SigmaPropIsProven def serialize(obj: SigmaPropIsProven, w: SigmaByteWriter): Unit = { w.putValue(obj.input) diff --git a/src/main/scala/sigmastate/serialization/SigmaSerializer.scala b/src/main/scala/sigmastate/serialization/SigmaSerializer.scala index 68d579b67e..13771035f7 100644 --- a/src/main/scala/sigmastate/serialization/SigmaSerializer.scala +++ b/src/main/scala/sigmastate/serialization/SigmaSerializer.scala @@ -2,6 +2,8 @@ package sigmastate.serialization import java.nio.ByteBuffer +import org.ergoplatform.ErgoConstants +import org.ergoplatform.validation.SigmaValidationSettings import scorex.util.ByteArrayBuilder import sigmastate.lang.exceptions.SerializerException import sigmastate.utils._ @@ -11,8 +13,8 @@ object SigmaSerializer { type Position = Int type Consumed = Int - val MaxInputSize: Int = 1024 * 1024 * 1 - val MaxTreeDepth: Int = 110 + val MaxInputSize: Int = ErgoConstants.MaxInputSize.get + val MaxTreeDepth: Int = ErgoConstants.MaxTreeDepth.get /** Helper function to be use in serializers. * Starting position is marked and then used to compute number of consumed bytes. 
@@ -22,17 +24,21 @@ object SigmaSerializer { def startReader(bytes: Array[Byte], pos: Int = 0): SigmaByteReader = { val buf = ByteBuffer.wrap(bytes) buf.position(pos) - val r = new SigmaByteReader(new VLQByteBufferReader(buf), new ConstantStore(), resolvePlaceholdersToConstants = false) - .mark() + val r = new SigmaByteReader(new VLQByteBufferReader(buf), + new ConstantStore(), + resolvePlaceholdersToConstants = false, + maxTreeDepth = MaxTreeDepth).mark() r } def startReader(bytes: Array[Byte], constantStore: ConstantStore, - resolvePlaceholdersToConstants: Boolean): SigmaByteReader = { + resolvePlaceholdersToConstants: Boolean)(implicit vs: SigmaValidationSettings): SigmaByteReader = { val buf = ByteBuffer.wrap(bytes) - val r = new SigmaByteReader(new VLQByteBufferReader(buf), constantStore, resolvePlaceholdersToConstants) - .mark() + val r = new SigmaByteReader(new VLQByteBufferReader(buf), + constantStore, + resolvePlaceholdersToConstants, + maxTreeDepth = MaxTreeDepth).mark() r } @@ -62,8 +68,12 @@ trait SigmaSerializer[TFamily, T <: TFamily] extends Serializer[TFamily, T, Sigm serialize(obj, new SigmaByteWriter(w, None)) } - def parseWithGenericReader(r: Reader): TFamily = { - parse(new SigmaByteReader(r, new ConstantStore(), resolvePlaceholdersToConstants = false)) + def parseWithGenericReader(r: Reader)(implicit vs: SigmaValidationSettings): TFamily = { + parse( + new SigmaByteReader(r, + new ConstantStore(), + resolvePlaceholdersToConstants = false, + maxTreeDepth = SigmaSerializer.MaxTreeDepth)) } def error(msg: String) = throw new SerializerException(msg, None) diff --git a/src/main/scala/sigmastate/serialization/SubstConstantsSerializer.scala b/src/main/scala/sigmastate/serialization/SubstConstantsSerializer.scala index 81074174bd..3306ff888d 100644 --- a/src/main/scala/sigmastate/serialization/SubstConstantsSerializer.scala +++ b/src/main/scala/sigmastate/serialization/SubstConstantsSerializer.scala @@ -7,13 +7,13 @@ import 
sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import sigmastate.{SCollection, SType, SubstConstants} object SubstConstantsSerializer extends ValueSerializer[SubstConstants[SType]] { - - override val opCode: Byte = OpCodes.SubstConstantsCode + import sigmastate.Operations.SubstConstantsInfo._ + override def opDesc = SubstConstants def serialize(obj: SubstConstants[SType], w: SigmaByteWriter): Unit = { - w.putValue(obj.scriptBytes) - w.putValue(obj.positions) - w.putValue(obj.newValues) + w.putValue(obj.scriptBytes, scriptBytesArg) + w.putValue(obj.positions, positionsArg) + w.putValue(obj.newValues, newValuesArg) } def parse(r: SigmaByteReader): Value[SType] = { diff --git a/src/main/scala/sigmastate/serialization/TaggedVariableSerializer.scala b/src/main/scala/sigmastate/serialization/TaggedVariableSerializer.scala index 6f2d6ba533..f66f58a053 100644 --- a/src/main/scala/sigmastate/serialization/TaggedVariableSerializer.scala +++ b/src/main/scala/sigmastate/serialization/TaggedVariableSerializer.scala @@ -3,13 +3,12 @@ package sigmastate.serialization import sigmastate.Values._ import sigmastate._ import sigmastate.serialization.OpCodes._ -import scorex.util.Extensions._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} case class TaggedVariableSerializer(cons: (Byte, SType) => Value[SType]) extends ValueSerializer[TaggedVariable[_ <: SType]] { - - override val opCode: OpCode = TaggedVariableCode + override def opDesc = TaggedVariable + override def opCode: OpCode = TaggedVariableCode override def serialize(obj: TaggedVariable[_ <: SType], w: SigmaByteWriter): Unit = w.put(obj.varId) diff --git a/src/main/scala/sigmastate/serialization/TupleSerializer.scala b/src/main/scala/sigmastate/serialization/TupleSerializer.scala index 4f2e7517cf..9ea07a2edf 100644 --- a/src/main/scala/sigmastate/serialization/TupleSerializer.scala +++ b/src/main/scala/sigmastate/serialization/TupleSerializer.scala @@ -1,20 +1,20 @@ package sigmastate.serialization -import 
sigmastate.SType +import sigmastate.{SType, ArgInfo} import sigmastate.Values._ -import sigmastate.serialization.OpCodes._ -import scorex.util.Extensions._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} +import ValueSerializer._ case class TupleSerializer(cons: Seq[Value[SType]] => Value[SType]) extends ValueSerializer[Tuple] { - - override val opCode: Byte = TupleCode + override def opDesc = Tuple override def serialize(obj: Tuple, w: SigmaByteWriter): Unit = { val length = obj.length - w.putUByte(length) - obj.items.foreach(w.putValue(_)) + w.putUByte(length, ArgInfo("numItems", "number of items in the tuple")) + foreach("numItems", obj.items) { i => + w.putValue(i, ArgInfo("item_i", "tuple's item in i-th position")) + } } override def parse(r: SigmaByteReader): Value[SType] = { diff --git a/src/main/scala/sigmastate/serialization/TwoArgumentsSerializer.scala b/src/main/scala/sigmastate/serialization/TwoArgumentsSerializer.scala index a2a36283b1..5e3be8fbdc 100644 --- a/src/main/scala/sigmastate/serialization/TwoArgumentsSerializer.scala +++ b/src/main/scala/sigmastate/serialization/TwoArgumentsSerializer.scala @@ -2,20 +2,17 @@ package sigmastate.serialization import sigmastate.Values.Value import sigmastate.lang.Terms._ -import sigmastate.utils.{SigmaByteWriter, SigmaByteReader} -import sigmastate.{TwoArgumentsOperation, SType, SBigInt} -import scorex.util.Extensions._ -import OpCodes._ -import sigmastate.utxo.CostTable._ +import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} +import sigmastate.{TwoArgumentsOperation, SType, TwoArgumentOperationCompanion} case class TwoArgumentsSerializer[LIV <: SType, RIV <: SType, OV <: Value[SType]] -(override val opCode: Byte, constructor: (Value[LIV], Value[RIV]) => Value[SType]) +(override val opDesc: TwoArgumentOperationCompanion, constructor: (Value[LIV], Value[RIV]) => Value[SType]) extends ValueSerializer[OV] { override def serialize(obj: OV, w: SigmaByteWriter): Unit = { val typedOp = 
obj.asInstanceOf[TwoArgumentsOperation[LIV, RIV, LIV]] - w.putValue(typedOp.left) - .putValue(typedOp.right) + w.putValue(typedOp.left, opDesc.argInfos(0)) + .putValue(typedOp.right, opDesc.argInfos(1)) } override def parse(r: SigmaByteReader): Value[SType] = { diff --git a/src/main/scala/sigmastate/serialization/TypeSerializer.scala b/src/main/scala/sigmastate/serialization/TypeSerializer.scala index 2c7521bc91..1a70296258 100644 --- a/src/main/scala/sigmastate/serialization/TypeSerializer.scala +++ b/src/main/scala/sigmastate/serialization/TypeSerializer.scala @@ -2,9 +2,11 @@ package sigmastate.serialization import java.nio.charset.StandardCharsets +import org.ergoplatform.validation.{ValidationRule, SoftForkWhenCodeAdded} import sigmastate._ -import sigmastate.lang.exceptions.{InvalidTypePrefix, TypeDeserializeCallDepthExceeded} +import sigmastate.lang.exceptions.{InvalidTypePrefix, SerializerException, InvalidOpCode} import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} +import sigma.util.Extensions.ByteOps /** Serialization of types according to specification in TypeSerialization.md. */ object TypeSerializer extends ByteBufferSerializer[SType] { @@ -15,11 +17,30 @@ object TypeSerializer extends ByteBufferSerializer[SType] { * For each embeddable type `T`, and type constructor `C`, the type `C[T]` can be represented by single byte. 
*/ val embeddableIdToType = Array[SType](null, SBoolean, SByte, SShort, SInt, SLong, SBigInt, SGroupElement, SSigmaProp) + object CheckPrimitiveTypeCode extends ValidationRule(1008, + "Check the primitive type code is supported or is added via soft-fork") + with SoftForkWhenCodeAdded { + def apply[T](code: Byte)(block: => T): T = { + val ucode = code.toUByte + def msg = s"Cannot deserialize primitive type with code $ucode" + validate(ucode > 0 && ucode < embeddableIdToType.length, new SerializerException(msg), Seq(code), block) + } + } + + object CheckTypeCode extends ValidationRule(1009, + "Check the non-primitive type code is supported or is added via soft-fork") + with SoftForkWhenCodeAdded { + def apply[T](typeCode: Byte)(block: => T): T = { + val ucode = typeCode.toUByte + def msg = s"Cannot deserialize the non-primitive type with code $ucode" + validate(ucode <= SGlobal.typeCode.toUByte, new SerializerException(msg), Seq(typeCode), block) + } + } + def getEmbeddableType(code: Int): SType = - if (code <= 0 || code >= embeddableIdToType.length) - sys.error(s"Cannot deserialize primitive type with code $code") - else + CheckPrimitiveTypeCode(code.toByte) { embeddableIdToType(code) + } override def serialize(tpe: SType, w: SigmaByteWriter) = tpe match { case p: SEmbeddable => w.put(p.typeCode) @@ -29,6 +50,7 @@ object TypeSerializer extends ByteBufferSerializer[SType] { case SBox => w.put(SBox.typeCode) case SAvlTree => w.put(SAvlTree.typeCode) case SContext => w.put(SContext.typeCode) + case SGlobal => w.put(SGlobal.typeCode) case SHeader => w.put(SHeader.typeCode) case SPreHeader => w.put(SPreHeader.typeCode) case c: SCollectionType[a] => c.elemType match { @@ -39,7 +61,7 @@ object TypeSerializer extends ByteBufferSerializer[SType] { case p: SEmbeddable => val code = p.embedIn(NestedCollectionTypeCode) w.put(code) - case t => + case _ => w.put(CollectionTypeCode) serialize(cn, w) } @@ -55,7 +77,7 @@ object TypeSerializer extends ByteBufferSerializer[SType] { 
case p: SEmbeddable => val code = p.embedIn(SOption.OptionCollectionTypeCode) w.put(code) - case t => + case _ => w.put(SOption.OptionTypeCode) serialize(c, w) } @@ -63,7 +85,7 @@ object TypeSerializer extends ByteBufferSerializer[SType] { w.put(SOption.OptionTypeCode) serialize(t, w) } - case tup @ STuple(Seq(t1, t2)) => (t1, t2) match { + case _ @ STuple(Seq(t1, t2)) => (t1, t2) match { case (p: SEmbeddable, _) => if (p == t2) { // Symmetric pair of primitive types (`(Int, Int)`, `(Byte,Byte)`, etc.) @@ -103,7 +125,7 @@ object TypeSerializer extends ByteBufferSerializer[SType] { // `Tuple` type with more than 4 items `(Int, Byte, Box, Boolean, Int)` serializeTuple(tup, w) } - case typeIdent: STypeIdent => { + case typeIdent: STypeVar => { w.put(typeIdent.typeCode) val bytes = typeIdent.name.getBytes(StandardCharsets.UTF_8) w.putUByte(bytes.length) @@ -114,8 +136,6 @@ object TypeSerializer extends ByteBufferSerializer[SType] { override def deserialize(r: SigmaByteReader): SType = deserialize(r, 0) private def deserialize(r: SigmaByteReader, depth: Int): SType = { - if (depth > SigmaSerializer.MaxTreeDepth) - throw new TypeDeserializeCallDepthExceeded(s"deserialize call depth exceeds ${SigmaSerializer.MaxTreeDepth}") val c = r.getUByte() if (c <= 0) throw new InvalidTypePrefix(s"Cannot deserialize type prefix $c. 
Unexpected buffer $r with bytes ${r.getBytes(r.remaining)}") @@ -176,21 +196,22 @@ object TypeSerializer extends ByteBufferSerializer[SType] { val items = (0 until len).map(_ => deserialize(r, depth + 1)) STuple(items) } - case SString.typeCode => SString case SAny.typeCode => SAny case SUnit.typeCode => SUnit case SBox.typeCode => SBox case SAvlTree.typeCode => SAvlTree case SContext.typeCode => SContext - case SHeader.typeCode => SHeader - case SPreHeader.typeCode => SPreHeader - case STypeIdent.TypeCode => { + case SString.typeCode => SString + case STypeVar.TypeCode => { val nameLength = r.getUByte() val name = new String(r.getBytes(nameLength), StandardCharsets.UTF_8) - STypeIdent(name) + STypeVar(name) } + case SHeader.typeCode => SHeader + case SPreHeader.typeCode => SPreHeader + case SGlobal.typeCode => SGlobal case _ => - sys.error(s"Cannot deserialize type starting from code $c") + CheckTypeCode(c.toByte) { NoType } } } tpe diff --git a/src/main/scala/sigmastate/serialization/ValDefSerializer.scala b/src/main/scala/sigmastate/serialization/ValDefSerializer.scala index 72cb0b1a8f..0ed749cb28 100644 --- a/src/main/scala/sigmastate/serialization/ValDefSerializer.scala +++ b/src/main/scala/sigmastate/serialization/ValDefSerializer.scala @@ -5,30 +5,32 @@ import sigmastate._ import sigmastate.serialization.OpCodes._ import scorex.util.Extensions._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} - +import ValueSerializer._ import scala.collection.mutable -case class ValDefSerializer(override val opCode: OpCode) extends ValueSerializer[ValDef] { +case class ValDefSerializer(override val opDesc: ValueCompanion) extends ValueSerializer[ValDef] { override def serialize(obj: ValDef, w: SigmaByteWriter): Unit = { w.putUInt(obj.id) - if (opCode == FunDefCode) { - require(!obj.isValDef, s"expected FunDef, got $obj") - require(obj.tpeArgs.nonEmpty, s"expected FunDef with type args, got $obj") - w.put(obj.tpeArgs.length.toByteExact) - 
obj.tpeArgs.foreach(w.putType(_)) + optional("type arguments") { + if (opCode == FunDefCode) { + require(!obj.isValDef, s"expected FunDef, got $obj") + require(obj.tpeArgs.nonEmpty, s"expected FunDef with type args, got $obj") + w.put(obj.tpeArgs.length.toByteExact) + obj.tpeArgs.foreach(w.putType(_)) + } } w.putValue(obj.rhs) } override def parse(r: SigmaByteReader): Value[SType] = { val id = r.getUInt().toInt - val tpeArgs: Seq[STypeIdent] = opCode match { + val tpeArgs: Seq[STypeVar] = opCode match { case FunDefCode => val tpeArgsCount = r.getByte() - val inputsBuilder = mutable.ArrayBuilder.make[STypeIdent]() + val inputsBuilder = mutable.ArrayBuilder.make[STypeVar]() for (_ <- 0 until tpeArgsCount) { - inputsBuilder += r.getType().asInstanceOf[STypeIdent] + inputsBuilder += r.getType().asInstanceOf[STypeVar] } inputsBuilder.result() case ValDefCode => diff --git a/src/main/scala/sigmastate/serialization/ValUseSerializer.scala b/src/main/scala/sigmastate/serialization/ValUseSerializer.scala index 7d0ac0f8a9..ff54a82a6d 100644 --- a/src/main/scala/sigmastate/serialization/ValUseSerializer.scala +++ b/src/main/scala/sigmastate/serialization/ValUseSerializer.scala @@ -2,12 +2,10 @@ package sigmastate.serialization import sigmastate.Values._ import sigmastate._ -import sigmastate.serialization.OpCodes._ -import sigmastate.utils.{SigmaByteWriter, SigmaByteReader} +import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} case class ValUseSerializer(cons: (Int, SType) => Value[SType]) extends ValueSerializer[ValUse[SType]] { - - override val opCode: OpCode = ValUseCode + override def opDesc = ValUse override def serialize(obj: ValUse[SType], w: SigmaByteWriter): Unit = { w.putUInt(obj.valId) diff --git a/src/main/scala/sigmastate/serialization/ValueSerializer.scala b/src/main/scala/sigmastate/serialization/ValueSerializer.scala index 2545eb350f..81c421fbdf 100644 --- a/src/main/scala/sigmastate/serialization/ValueSerializer.scala +++ 
b/src/main/scala/sigmastate/serialization/ValueSerializer.scala @@ -1,26 +1,32 @@ package sigmastate.serialization +import org.ergoplatform.validation.ValidationRules.CheckValidOpCode import org.ergoplatform._ import sigmastate.SCollection.SByteArray import sigmastate.Values._ import sigmastate._ import sigmastate.lang.DeserializationSigmaBuilder import sigmastate.lang.Terms.OperationId -import sigmastate.lang.exceptions.{InputSizeLimitExceeded, InvalidOpCode, ValueDeserializeCallDepthExceeded} +import sigmastate.lang.exceptions.InputSizeLimitExceeded import sigmastate.serialization.OpCodes._ import sigmastate.serialization.transformers._ import sigmastate.serialization.trees.{QuadrupleSerializer, Relation2Serializer} import sigma.util.Extensions._ +import sigmastate.utils.SigmaByteWriter.DataInfo import sigmastate.utils._ import sigmastate.utxo.CostTable._ +import sigmastate.utxo._ -trait ValueSerializer[V <: Value[SType]] extends SigmaSerializer[Value[SType], V] { +import scala.collection.mutable +trait ValueSerializer[V <: Value[SType]] extends SigmaSerializer[Value[SType], V] { + import scala.language.implicitConversions val companion = ValueSerializer + def opDesc: ValueCompanion /** Code of the corresponding tree node (Value.opCode) which is used to lookup this serializer * during deserialization. It is emitted immediately before the body of this node in the serialized byte array. 
*/ - val opCode: OpCode + def opCode: OpCode = opDesc.opCode def opCost(opId: OperationId): ExpressionCost = sys.error(s"Operation opCost is not defined for AST node ${this.getClass}") @@ -35,80 +41,78 @@ object ValueSerializer extends SigmaSerializerCompanion[Value[SType]] { private val constantSerializer = ConstantSerializer(builder) private val constantPlaceholderSerializer = ConstantPlaceholderSerializer(mkConstantPlaceholder) - private val serializers = SparseArrayContainer.buildForSerializers(Seq[ValueSerializer[_ <: Value[SType]]]( + val serializers = SparseArrayContainer.buildForSerializers(Seq[ValueSerializer[_ <: Value[SType]]]( constantSerializer, constantPlaceholderSerializer, TupleSerializer(mkTuple), SelectFieldSerializer(mkSelectField), - Relation2Serializer(GtCode, mkGT[SType]), - Relation2Serializer(GeCode, mkGE[SType]), - Relation2Serializer(LtCode, mkLT[SType]), - Relation2Serializer(LeCode, mkLE[SType]), - Relation2Serializer(EqCode, mkEQ[SType]), - Relation2Serializer(NeqCode, mkNEQ[SType]), + Relation2Serializer(GT, mkGT[SType]), + Relation2Serializer(GE, mkGE[SType]), + Relation2Serializer(LT, mkLT[SType]), + Relation2Serializer(LE, mkLE[SType]), + Relation2Serializer(EQ, mkEQ[SType]), + Relation2Serializer(NEQ, mkNEQ[SType]), CreateAvlTreeSerializer(mkCreateAvlTree), - QuadrupleSerializer(AvlTreeGetCode, mkTreeLookup), -// QuadrupleSerializer(TreeUpdatesCode, mkTreeUpdates), -// QuadrupleSerializer(TreeInsertsCode, mkTreeInserts), -// QuadrupleSerializer(TreeRemovalsCode, mkTreeRemovals), - Relation2Serializer(BinOrCode, mkBinOr), - Relation2Serializer(BinAndCode, mkBinAnd), - Relation2Serializer(BinXorCode, mkBinXor), - QuadrupleSerializer[SBoolean.type, SLong.type, SLong.type, SLong.type](IfCode, mkIf), - TwoArgumentsSerializer(XorCode, mkXor), - TwoArgumentsSerializer(ExponentiateCode, mkExponentiate), - TwoArgumentsSerializer(MultiplyGroupCode, mkMultiplyGroup), - TwoArgumentsSerializer(MinusCode, mkMinus[SNumericType]), - 
TwoArgumentsSerializer(MultiplyCode, mkMultiply[SNumericType]), - TwoArgumentsSerializer(DivisionCode, mkDivide[SNumericType]), - TwoArgumentsSerializer(ModuloCode, mkModulo[SNumericType]), - TwoArgumentsSerializer(PlusCode, mkPlus[SNumericType]), - TwoArgumentsSerializer(MinCode, mkMin[SNumericType]), - TwoArgumentsSerializer(MaxCode, mkMax[SNumericType]), - TwoArgumentsSerializer(BitOrCode, mkBitOr[SNumericType]), - TwoArgumentsSerializer(BitAndCode, mkBitAnd[SNumericType]), - TwoArgumentsSerializer(BitXorCode, mkBitXor[SNumericType]), - TwoArgumentsSerializer(BitShiftLeftCode, mkBitShiftLeft[SNumericType]), - TwoArgumentsSerializer(BitShiftRightCode, mkBitShiftRight[SNumericType]), - TwoArgumentsSerializer(BitShiftRightZeroedCode, mkBitShiftRightZeroed[SNumericType]), - CaseObjectSerialization(TrueCode, TrueLeaf), - CaseObjectSerialization(FalseCode, FalseLeaf), + QuadrupleSerializer(TreeLookup, mkTreeLookup), + Relation2Serializer(BinOr, mkBinOr), + Relation2Serializer(BinAnd, mkBinAnd), + Relation2Serializer(BinXor, mkBinXor), + QuadrupleSerializer[SBoolean.type, SLong.type, SLong.type, SLong.type](If, mkIf), + TwoArgumentsSerializer(Xor, mkXor), + TwoArgumentsSerializer(Exponentiate, mkExponentiate), + TwoArgumentsSerializer(MultiplyGroup, mkMultiplyGroup), + TwoArgumentsSerializer(ArithOp.Minus, mkMinus[SNumericType]), + TwoArgumentsSerializer(ArithOp.Multiply, mkMultiply[SNumericType]), + TwoArgumentsSerializer(ArithOp.Division, mkDivide[SNumericType]), + TwoArgumentsSerializer(ArithOp.Modulo, mkModulo[SNumericType]), + TwoArgumentsSerializer(ArithOp.Plus, mkPlus[SNumericType]), + TwoArgumentsSerializer(ArithOp.Min, mkMin[SNumericType]), + TwoArgumentsSerializer(ArithOp.Max, mkMax[SNumericType]), + TwoArgumentsSerializer(BitOp.BitOr, mkBitOr[SNumericType]), + TwoArgumentsSerializer(BitOp.BitAnd, mkBitAnd[SNumericType]), + TwoArgumentsSerializer(BitOp.BitXor, mkBitXor[SNumericType]), + TwoArgumentsSerializer(BitOp.BitShiftLeft, mkBitShiftLeft[SNumericType]), 
+ TwoArgumentsSerializer(BitOp.BitShiftRight, mkBitShiftRight[SNumericType]), + TwoArgumentsSerializer(BitOp.BitShiftRightZeroed, mkBitShiftRightZeroed[SNumericType]), SigmaPropIsProvenSerializer, SigmaPropBytesSerializer, ConcreteCollectionBooleanConstantSerializer(mkConcreteCollection), - CaseObjectSerialization(ContextCode, Context), - CaseObjectSerialization(HeightCode, Height), - CaseObjectSerialization(MinerPubkeyCode, MinerPubkey), - CaseObjectSerialization(InputsCode, Inputs), - CaseObjectSerialization(OutputsCode, Outputs), - CaseObjectSerialization(LastBlockUtxoRootHashCode, LastBlockUtxoRootHash), - CaseObjectSerialization(SelfCode, Self), - CaseObjectSerialization(GroupGeneratorCode, GroupGenerator), + CaseObjectSerialization(TrueLeaf, TrueLeaf), + CaseObjectSerialization(FalseLeaf, FalseLeaf), + CaseObjectSerialization(Context, Context), + CaseObjectSerialization(Global, Global), + CaseObjectSerialization(Height, Height), + CaseObjectSerialization(MinerPubkey, MinerPubkey), + CaseObjectSerialization(Inputs, Inputs), + CaseObjectSerialization(Outputs, Outputs), + CaseObjectSerialization(LastBlockUtxoRootHash, LastBlockUtxoRootHash), + CaseObjectSerialization(Self, Self), + CaseObjectSerialization(GroupGenerator, GroupGenerator), ConcreteCollectionSerializer(mkConcreteCollection), - LogicalTransformerSerializer(AndCode, mkAND), - LogicalTransformerSerializer(OrCode, mkOR), - LogicalTransformerSerializer(XorOfCode, mkXorOf), + LogicalTransformerSerializer(AND, mkAND), + LogicalTransformerSerializer(OR, mkOR), + LogicalTransformerSerializer(XorOf, mkXorOf), TaggedVariableSerializer(mkTaggedVariable), GetVarSerializer(mkGetVar), MapCollectionSerializer(mkMapCollection), - BooleanTransformerSerializer[SType](ExistsCode, mkExists), - BooleanTransformerSerializer[SType](ForAllCode, mkForAll), + BooleanTransformerSerializer[SType](Exists, mkExists), + BooleanTransformerSerializer[SType](ForAll, mkForAll), FoldSerializer(mkFold), - 
SimpleTransformerSerializer[SCollection[SType], SInt.type](SizeOfCode, mkSizeOf), - SimpleTransformerSerializer[SBox.type, SLong.type](ExtractAmountCode, mkExtractAmount), - SimpleTransformerSerializer[SBox.type, SByteArray](ExtractScriptBytesCode, mkExtractScriptBytes), - SimpleTransformerSerializer[SBox.type, SByteArray](ExtractBytesCode, mkExtractBytes), - SimpleTransformerSerializer[SBox.type, SByteArray](ExtractBytesWithNoRefCode, mkExtractBytesWithNoRef), - SimpleTransformerSerializer[SBox.type, SByteArray](ExtractIdCode, mkExtractId), - SimpleTransformerSerializer[SBox.type, STuple](ExtractCreationInfoCode, mkExtractCreationInfo), - SimpleTransformerSerializer[SLong.type, SByteArray](LongToByteArrayCode, mkLongToByteArray), - SimpleTransformerSerializer[SByteArray, SLong.type](ByteArrayToLongCode, mkByteArrayToLong), - SimpleTransformerSerializer[SByteArray, SBigInt.type](ByteArrayToBigIntCode, mkByteArrayToBigInt), - SimpleTransformerSerializer[SByteArray, SByteArray](CalcBlake2b256Code, mkCalcBlake2b256), - SimpleTransformerSerializer[SByteArray, SByteArray](CalcSha256Code, mkCalcSha256), - SimpleTransformerSerializer[SByteArray, SGroupElement.type](DecodePointCode, mkDecodePoint), - SimpleTransformerSerializer[SOption[SType], SType](OptionGetCode, mkOptionGet), - SimpleTransformerSerializer[SOption[SType], SBoolean.type](OptionIsDefinedCode, mkOptionIsDefined), + SimpleTransformerSerializer[SCollection[SType], SInt.type](SizeOf, mkSizeOf), + SimpleTransformerSerializer[SBox.type, SLong.type](ExtractAmount, mkExtractAmount), + SimpleTransformerSerializer[SBox.type, SByteArray](ExtractScriptBytes, mkExtractScriptBytes), + SimpleTransformerSerializer[SBox.type, SByteArray](ExtractBytes, mkExtractBytes), + SimpleTransformerSerializer[SBox.type, SByteArray](ExtractBytesWithNoRef, mkExtractBytesWithNoRef), + SimpleTransformerSerializer[SBox.type, SByteArray](ExtractId, mkExtractId), + SimpleTransformerSerializer[SBox.type, STuple](ExtractCreationInfo, 
mkExtractCreationInfo), + SimpleTransformerSerializer[SLong.type, SByteArray](LongToByteArray, mkLongToByteArray), + SimpleTransformerSerializer[SByteArray, SLong.type](ByteArrayToLong, mkByteArrayToLong), + SimpleTransformerSerializer[SByteArray, SBigInt.type](ByteArrayToBigInt, mkByteArrayToBigInt), + SimpleTransformerSerializer[SByteArray, SByteArray](CalcBlake2b256, mkCalcBlake2b256), + SimpleTransformerSerializer[SByteArray, SByteArray](CalcSha256, mkCalcSha256), + SimpleTransformerSerializer[SByteArray, SGroupElement.type](DecodePoint, mkDecodePoint), + SimpleTransformerSerializer[SOption[SType], SType](OptionGet, mkOptionGet), + SimpleTransformerSerializer[SOption[SType], SBoolean.type](OptionIsDefined, mkOptionIsDefined), OptionGetOrElseSerializer(mkOptionGetOrElse), DeserializeContextSerializer(mkDeserializeContext), DeserializeRegisterSerializer(mkDeserializeRegister), @@ -118,28 +122,28 @@ object ValueSerializer extends SigmaSerializerCompanion[Value[SType]] { AtLeastSerializer(mkAtLeast), ByIndexSerializer(mkByIndex), AppendSerializer(mkAppend), - NumericCastSerializer(UpcastCode, mkUpcast), - NumericCastSerializer(DowncastCode, mkDowncast), - ValDefSerializer(ValDefCode), - ValDefSerializer(FunDefCode), + NumericCastSerializer(Upcast, mkUpcast), + NumericCastSerializer(Downcast, mkDowncast), + ValDefSerializer(ValDef), + ValDefSerializer(FunDef), BlockValueSerializer(mkBlockValue), ValUseSerializer(mkValUse), FuncValueSerializer(mkFuncValue), ApplySerializer(mkApply), - MethodCallSerializer(PropertyCallCode, mkMethodCall), - MethodCallSerializer(MethodCallCode, mkMethodCall), - SigmaTransformerSerializer(SigmaAndCode, mkSigmaAnd), - SigmaTransformerSerializer(SigmaOrCode, mkSigmaOr), + PropertyCallSerializer(mkMethodCall), + MethodCallSerializer(mkMethodCall), + SigmaTransformerSerializer(SigmaAnd, mkSigmaAnd), + SigmaTransformerSerializer(SigmaOr, mkSigmaOr), BoolToSigmaPropSerializer(mkBoolToSigmaProp), ModQSerializer, - 
ModQArithOpSerializer(PlusModQCode, mkPlusModQ), - ModQArithOpSerializer(MinusModQCode, mkMinusModQ), + ModQArithOpSerializer(ModQArithOp.PlusModQ, mkPlusModQ), + ModQArithOpSerializer(ModQArithOp.MinusModQ, mkMinusModQ), SubstConstantsSerializer, CreateProveDlogSerializer(mkCreateProveDlog), CreateProveDHTupleSerializer(mkCreateProveDHTuple), LogicalNotSerializer(mkLogicalNot), - OneArgumentOperationSerializer(NegationCode, mkNegation[SNumericType]), - OneArgumentOperationSerializer(BitInversionCode, mkBitInversion[SNumericType]), + OneArgumentOperationSerializer(Negation, mkNegation[SNumericType]), + OneArgumentOperationSerializer(BitInversion, mkBitInversion[SNumericType]), )) private def serializable(v: Value[SType]): Value[SType] = v match { @@ -149,10 +153,193 @@ object ValueSerializer extends SigmaSerializerCompanion[Value[SType]] { } override def getSerializer(opCode: Tag): ValueSerializer[_ <: Value[SType]] = { - val serializer = serializers.get(opCode) - if (serializer == null) - throw new InvalidOpCode(s"Cannot find serializer for Value with opCode = LastConstantCode + ${opCode.toUByte - LastConstantCode}") - serializer + val serializer = serializers(opCode) + CheckValidOpCode(serializer, opCode) { serializer } + } + def addSerializer(opCode: OpCode, ser: ValueSerializer[_ <: Value[SType]]) = { + serializers.add(opCode, ser) + } + def removeSerializer(opCode: OpCode) = { + serializers.remove(opCode) + } + + type ChildrenMap = mutable.ArrayBuffer[(String, Scope)] + trait Scope { + def name: String + def parent: Scope + def children: ChildrenMap + def get(name: String): Option[Scope] = children.find(_._1 == name).map(_._2) + def add(name: String, s: Scope) = { + assert(get(name).isEmpty, s"Error while adding scope $s: name $name already exists in $this") + children += (name -> s) + } + def showInScope(v: String): String + + def provideScope(n: String, createNewScope: => Scope) = { + val scope = get(n) match { + case Some(saved) => saved + case None => + 
val newScope = createNewScope + add(n, newScope) + newScope + } + scope + } + } + + case class SerScope(opCode: OpCode, children: ChildrenMap) extends Scope { + def serializer = getSerializer(opCode) + def name = s"Serializer of ${serializer.opDesc}" + override def parent: Scope = null + override def showInScope(v: String): String = name + "/" + v + override def toString: Idn = s"SerScope(${serializer.opDesc}, $children)" + } + + case class DataScope(parent: Scope, data: DataInfo[_]) extends Scope { + def name = data.info.name + override def children = mutable.ArrayBuffer.empty + override def showInScope(v: String): String = parent.showInScope(s"DataInfo($data)") + override def toString = s"DataScope($data)" + } + + case class OptionalScope(parent: Scope, name: String, children: ChildrenMap) extends Scope { + override def showInScope(v: String): String = parent.showInScope(s"/opt[$name]/$v") + override def toString = s"OptionalScope($name, $children)" + } + + case class CasesScope(parent: Scope, matchExpr: String, children: ChildrenMap) extends Scope { + override def name: String = matchExpr + def cases: Seq[WhenScope] = children.map { + case (_, when: WhenScope) => when + case s => sys.error(s"Invalid child scope $s in $this") + }.sortBy(_.pos) + override def showInScope(v: String): String = parent.showInScope(s"/cases[$name]/$v") + override def toString = s"CasesScope($name, $children)" + } + + case class WhenScope(parent: Scope, pos: Int, condition: String, children: ChildrenMap) extends Scope { + override def name: String = condition + override def showInScope(v: String): String = parent.showInScope(s"/when[$pos: $condition]/$v") + override def toString = s"WhenScope($pos, $condition, $children)" + def isOtherwise: Boolean = condition == otherwiseCondition + } + + case class ForScope(parent: Scope, name: String, limitVar: String, children: ChildrenMap) extends Scope { + override def showInScope(v: String): String = parent.showInScope(s"/for[$name]/$v") + 
override def toString = s"ForScope($name, $children)" + } + + case class OptionScope(parent: Scope, name: String, children: ChildrenMap) extends Scope { + override def showInScope(v: String): String = parent.showInScope(s"/option[$name]/$v") + override def toString = s"OptionScope($name, $children)" + } + + val collectSerInfo: Boolean = false + val serializerInfo: mutable.Map[OpCode, SerScope] = mutable.HashMap.empty + private var scopeStack: List[Scope] = Nil + + def printSerInfo(): String = { + serializerInfo.map { case (_, s) => + val ser = getSerializer(s.opCode) + s.toString + }.mkString("\n") + } + + def optional(name: String)(block: => Unit): Unit = { + if (scopeStack.nonEmpty) { + val parent = scopeStack.head + val scope = parent.provideScope(name, OptionalScope(parent, name, mutable.ArrayBuffer.empty)) + + scopeStack ::= scope + block + scopeStack = scopeStack.tail + } else { + block + } + } + + def cases(matchExpr: String)(block: => Unit): Unit = { + if (scopeStack.nonEmpty) { + val parent = scopeStack.head + val scope = parent.provideScope(matchExpr, CasesScope(parent, matchExpr, mutable.ArrayBuffer.empty)) + + scopeStack ::= scope + block + scopeStack = scopeStack.tail + } else { + block + } + } + + def when(pos: Int, condition: String)(block: => Unit): Unit = { + if (scopeStack.nonEmpty) { + val parent = scopeStack.head + val scope = parent.provideScope(condition, WhenScope(parent, pos, condition, mutable.ArrayBuffer.empty)) + + scopeStack ::= scope + block + scopeStack = scopeStack.tail + } else { + block + } + } + + val otherwiseCondition = "otherwise" + + def otherwise(block: => Unit): Unit = { + if (scopeStack.nonEmpty) { + val parent = scopeStack.head + val scope = parent.provideScope(otherwiseCondition, WhenScope(parent, Int.MaxValue, otherwiseCondition, mutable.ArrayBuffer.empty)) + + scopeStack ::= scope + block + scopeStack = scopeStack.tail + } else { + block + } + } + + def foreach[T](sizeVar: String, seq: Seq[T])(f: T => Unit): Unit = { + 
if (scopeStack.nonEmpty) { + val parent = scopeStack.head + val forName = sizeVar + "*" + val scope = parent.provideScope(forName, ForScope(parent, forName, sizeVar, mutable.ArrayBuffer.empty)) + + scopeStack ::= scope + seq.foreach(f) + scopeStack = scopeStack.tail + } else { + seq.foreach(f) + } + } + + def opt[T](w: SigmaByteWriter, name: String, o: Option[T])(f: (SigmaByteWriter, T) => Unit): Unit = { + if (scopeStack.nonEmpty) { + val parent = scopeStack.head + val scope = parent.provideScope(name, OptionScope(parent, name, mutable.ArrayBuffer.empty)) + + scopeStack ::= scope + w.putOption(o)(f) + scopeStack = scopeStack.tail + } else { + w.putOption(o)(f) + } + } + + def addArgInfo[T](prop: DataInfo[T]): Unit = { + if (scopeStack.isEmpty) return + val scope = scopeStack.head + scope.get(prop.info.name) match { + case None => + scope.add(prop.info.name, DataScope(scope, prop)) + println(s"Added $prop to ${scope}") + case Some(saved) => saved match { + case DataScope(_, data) => + assert(data == prop, s"Saved property $data is different from being added $prop: scope $scope") + case _ => + sys.error(s"Expected DataScope, but found $saved: while adding $prop to scope $scope") + } + } } override def serialize(v: Value[SType], w: SigmaByteWriter): Unit = serializable(v) match { @@ -167,9 +354,26 @@ object ValueSerializer extends SigmaSerializerCompanion[Value[SType]] { } case _ => val opCode = v.opCode - w.put(opCode) // help compiler recognize the type - getSerializer(opCode).asInstanceOf[ValueSerializer[v.type]].serialize(v, w) + val ser = getSerializer(opCode).asInstanceOf[ValueSerializer[v.type]] + if (collectSerInfo) { + val scope = serializerInfo.get(opCode) match { + case None => + val newScope = SerScope(opCode, mutable.ArrayBuffer.empty) + serializerInfo += (opCode -> newScope) + println(s"Added: ${ser.opDesc}") + newScope + case Some(scope) => scope + } + w.put(opCode) + + scopeStack ::= scope + ser.serialize(v, w) + scopeStack = scopeStack.tail + } else 
{ + w.put(opCode) + ser.serialize(v, w) + } } override def deserialize(r: SigmaByteReader): Value[SType] = { @@ -177,8 +381,6 @@ object ValueSerializer extends SigmaSerializerCompanion[Value[SType]] { if (bytesRemaining > SigmaSerializer.MaxInputSize) throw new InputSizeLimitExceeded(s"input size $bytesRemaining exceeds ${ SigmaSerializer.MaxInputSize}") val depth = r.level - if (depth > SigmaSerializer.MaxTreeDepth) - throw new ValueDeserializeCallDepthExceeded(s"nested value deserialization call depth($depth) exceeds allowed maximum ${SigmaSerializer.MaxTreeDepth}") r.level = depth + 1 val firstByte = r.peekByte().toUByte val v = if (firstByte <= LastConstantCode) { @@ -189,7 +391,7 @@ object ValueSerializer extends SigmaSerializerCompanion[Value[SType]] { val opCode = r.getByte() getSerializer(opCode).parse(r) } - r.level = depth - 1 + r.level = r.level - 1 v } diff --git a/src/main/scala/sigmastate/serialization/transformers/AppendSerializer.scala b/src/main/scala/sigmastate/serialization/transformers/AppendSerializer.scala index 82504539ac..970c1b66d0 100644 --- a/src/main/scala/sigmastate/serialization/transformers/AppendSerializer.scala +++ b/src/main/scala/sigmastate/serialization/transformers/AppendSerializer.scala @@ -1,22 +1,20 @@ package sigmastate.serialization.transformers +import sigmastate.Operations.AppendInfo import sigmastate.Values.Value import sigmastate.lang.Terms._ -import sigmastate.serialization.OpCodes.OpCode -import sigmastate.serialization.{OpCodes, ValueSerializer} -import scorex.util.Extensions._ +import sigmastate.serialization.ValueSerializer import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import sigmastate.utxo.Append import sigmastate.{SCollection, SType} case class AppendSerializer(cons: (Value[SCollection[SType]], Value[SCollection[SType]]) => Value[SCollection[SType]]) extends ValueSerializer[Append[SType]] { - - override val opCode: OpCode = OpCodes.AppendCode + override def opDesc = Append override def 
serialize(obj: Append[SType], w: SigmaByteWriter): Unit = - w.putValue(obj.input) - .putValue(obj.col2) + w.putValue(obj.input, AppendInfo.thisArg) + .putValue(obj.col2, AppendInfo.otherArg) override def parse(r: SigmaByteReader): Value[SCollection[SType]] = { val input = r.getValue().asCollection[SType] diff --git a/src/main/scala/sigmastate/serialization/transformers/AtLeastSerializer.scala b/src/main/scala/sigmastate/serialization/transformers/AtLeastSerializer.scala index dc72586218..a401921fed 100644 --- a/src/main/scala/sigmastate/serialization/transformers/AtLeastSerializer.scala +++ b/src/main/scala/sigmastate/serialization/transformers/AtLeastSerializer.scala @@ -1,25 +1,19 @@ package sigmastate.serialization.transformers -import sigmastate.Values.{SigmaPropValue, Value} -import sigmastate._ +import sigmastate.Operations.AtLeastInfo +import sigmastate.Values.{Value, SigmaPropValue} import sigmastate.lang.Terms._ -import sigmastate.serialization.OpCodes.OpCode -import sigmastate.serialization.{ValueSerializer, OpCodes} -import scorex.util.Extensions._ -import sigmastate.utils.{SigmaByteWriter, SigmaByteReader} import sigmastate._ -import sigmastate.serialization.{OpCodes, ValueSerializer} +import sigmastate.serialization.ValueSerializer import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} -import scorex.util.Extensions._ case class AtLeastSerializer(cons: (Value[SInt.type], Value[SCollection[SSigmaProp.type]]) => SigmaPropValue) extends ValueSerializer[AtLeast] { - - override val opCode: OpCode = OpCodes.AtLeastCode + override def opDesc = AtLeast override def serialize(obj: AtLeast, w: SigmaByteWriter): Unit = - w.putValue(obj.bound) - .putValue(obj.input) + w.putValue(obj.bound, AtLeastInfo.boundArg) + .putValue(obj.input, AtLeastInfo.childrenArg) override def parse(r: SigmaByteReader): SigmaPropValue = { val bound = r.getValue().asIntValue diff --git a/src/main/scala/sigmastate/serialization/transformers/BooleanTransformerSerializer.scala 
b/src/main/scala/sigmastate/serialization/transformers/BooleanTransformerSerializer.scala index 701887aeea..55f90d1f84 100644 --- a/src/main/scala/sigmastate/serialization/transformers/BooleanTransformerSerializer.scala +++ b/src/main/scala/sigmastate/serialization/transformers/BooleanTransformerSerializer.scala @@ -1,23 +1,19 @@ package sigmastate.serialization.transformers -import sigmastate.Values.Value +import sigmastate.Values.{Value, ValueCompanion} import sigmastate.lang.Terms._ -import sigmastate.serialization.OpCodes.OpCode import sigmastate.serialization.ValueSerializer import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} -import sigmastate.utxo.BooleanTransformer -import sigmastate.{SBoolean, SCollection, SFunc, SType} -import scorex.util.Extensions._ +import sigmastate.utxo.{BooleanTransformer, BooleanTransformerCompanion} +import sigmastate.{SCollection, SBoolean, SType, SFunc} case class BooleanTransformerSerializer[T <: SType] -(code: OpCode, +(opDesc: BooleanTransformerCompanion, f: (Value[SCollection[T]], Value[SFunc]) => Value[SBoolean.type]) extends ValueSerializer[BooleanTransformer[T]] { - override val opCode: OpCode = code - override def serialize(obj: BooleanTransformer[T], w: SigmaByteWriter): Unit = - w.putValue(obj.input) - .putValue(obj.condition) + w.putValue(obj.input, opDesc.argInfos(0)) + .putValue(obj.condition, opDesc.argInfos(1)) override def parse(r: SigmaByteReader): Value[SBoolean.type] = { val input = r.getValue().asCollection[T] diff --git a/src/main/scala/sigmastate/serialization/transformers/ByIndexSerializer.scala b/src/main/scala/sigmastate/serialization/transformers/ByIndexSerializer.scala index 506d6a26c5..fe553bdf16 100644 --- a/src/main/scala/sigmastate/serialization/transformers/ByIndexSerializer.scala +++ b/src/main/scala/sigmastate/serialization/transformers/ByIndexSerializer.scala @@ -2,22 +2,22 @@ package sigmastate.serialization.transformers import sigmastate.Values.Value import sigmastate.lang.Terms._ 
-import sigmastate.serialization.OpCodes.OpCode -import sigmastate.serialization.{OpCodes, ValueSerializer} -import scorex.util.Extensions._ +import sigmastate.serialization.ValueSerializer +import ValueSerializer._ +import sigmastate.Operations.ByIndexInfo._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import sigmastate.utxo.ByIndex -import sigmastate.{SCollection, SInt, SType} +import sigmastate.{SInt, SCollection, SType} case class ByIndexSerializer(cons: (Value[SCollection[SType]], Value[SInt.type], Option[Value[SType]]) => Value[SType]) extends ValueSerializer[ByIndex[SType]] { + override def opDesc = ByIndex - override val opCode: OpCode = OpCodes.ByIndexCode - - override def serialize(obj: ByIndex[SType], w: SigmaByteWriter): Unit = - w.putValue(obj.input) - .putValue(obj.index) - .putOption(obj.default)(_.putValue(_)) + override def serialize(obj: ByIndex[SType], w: SigmaByteWriter): Unit = { + w.putValue(obj.input, thisArg) + .putValue(obj.index, indexArg) + opt(w, "default", obj.default)(_.putValue(_, defaultArg)) + } override def parse(r: SigmaByteReader): Value[SType] = { val input = r.getValue().asCollection[SType] diff --git a/src/main/scala/sigmastate/serialization/transformers/DeserializeContextSerializer.scala b/src/main/scala/sigmastate/serialization/transformers/DeserializeContextSerializer.scala index a66600a613..5f83555405 100644 --- a/src/main/scala/sigmastate/serialization/transformers/DeserializeContextSerializer.scala +++ b/src/main/scala/sigmastate/serialization/transformers/DeserializeContextSerializer.scala @@ -1,21 +1,20 @@ package sigmastate.serialization.transformers -import sigmastate.SType -import sigmastate.Values.Value -import sigmastate.serialization.OpCodes.OpCode -import sigmastate.serialization.{OpCodes, ValueSerializer} -import scorex.util.Extensions._ +import sigmastate.{SType, ArgInfo} +import sigmastate.Values._ +import sigmastate.serialization.ValueSerializer import sigmastate.utils.{SigmaByteReader, 
SigmaByteWriter} import sigmastate.utxo.DeserializeContext +import SigmaByteWriter._ +import sigmastate.Operations.DeserializeContextInfo case class DeserializeContextSerializer(cons: (Byte, SType) => Value[SType]) extends ValueSerializer[DeserializeContext[SType]] { - - override val opCode: OpCode = OpCodes.DeserializeContextCode + override def opDesc = DeserializeContext override def serialize(obj: DeserializeContext[SType], w: SigmaByteWriter): Unit = - w.putType(obj.tpe) - .put(obj.id) + w.putType(obj.tpe, ArgInfo("type", "expected type of the deserialized script")) + .put(obj.id, DeserializeContextInfo.idArg) override def parse(r: SigmaByteReader): Value[SType] = { val tpe = r.getType() diff --git a/src/main/scala/sigmastate/serialization/transformers/DeserializeRegisterSerializer.scala b/src/main/scala/sigmastate/serialization/transformers/DeserializeRegisterSerializer.scala index 8adc2d6e49..0de85c256d 100644 --- a/src/main/scala/sigmastate/serialization/transformers/DeserializeRegisterSerializer.scala +++ b/src/main/scala/sigmastate/serialization/transformers/DeserializeRegisterSerializer.scala @@ -2,24 +2,24 @@ package sigmastate.serialization.transformers import org.ergoplatform.ErgoBox import org.ergoplatform.ErgoBox.RegisterId -import sigmastate.SType +import sigmastate.Operations.DeserializeRegisterInfo +import sigmastate.{ArgInfo, SType} import sigmastate.Values.Value -import sigmastate.serialization.OpCodes.OpCode -import sigmastate.serialization.{OpCodes, ValueSerializer} -import scorex.util.Extensions._ +import sigmastate.serialization.ValueSerializer +import ValueSerializer._ +import sigmastate.Operations.DeserializeRegisterInfo._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import sigmastate.utxo.DeserializeRegister case class DeserializeRegisterSerializer(cons: (RegisterId, SType, Option[Value[SType]]) => Value[SType]) extends ValueSerializer[DeserializeRegister[SType]] { + override def opDesc = DeserializeRegister - override val 
opCode: OpCode = OpCodes.DeserializeRegisterCode - - override def serialize(obj: DeserializeRegister[SType], w: SigmaByteWriter): Unit = - w.put(obj.reg.number) - .putType(obj.tpe) - .putOption(obj.default)(_.putValue(_)) - + override def serialize(obj: DeserializeRegister[SType], w: SigmaByteWriter): Unit = { + w.put(obj.reg.number, idArg) + w.putType(obj.tpe, ArgInfo("type", "expected type of the deserialized script")) + opt(w, "default", obj.default)(_.putValue(_, defaultArg)) + } override def parse(r: SigmaByteReader): Value[SType] = { val registerId = ErgoBox.findRegisterByIndex(r.getByte()).get val tpe = r.getType() diff --git a/src/main/scala/sigmastate/serialization/transformers/ExtractRegisterAsSerializer.scala b/src/main/scala/sigmastate/serialization/transformers/ExtractRegisterAsSerializer.scala index 077b63a75a..cef8c2b694 100644 --- a/src/main/scala/sigmastate/serialization/transformers/ExtractRegisterAsSerializer.scala +++ b/src/main/scala/sigmastate/serialization/transformers/ExtractRegisterAsSerializer.scala @@ -3,21 +3,20 @@ package sigmastate.serialization.transformers import org.ergoplatform.ErgoBox import org.ergoplatform.ErgoBox.RegisterId import sigmastate.Values.Value -import sigmastate.serialization.OpCodes.OpCode -import sigmastate.serialization.{OpCodes, ValueSerializer} -import scorex.util.Extensions._ +import sigmastate.serialization.ValueSerializer import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import sigmastate.utxo.ExtractRegisterAs -import sigmastate.{SBox, SOption, SType} +import sigmastate.{SBox, SOption, SType, ArgInfo} case class ExtractRegisterAsSerializer(cons: (Value[SBox.type], RegisterId, SOption[SType]) => Value[SType]) extends ValueSerializer[ExtractRegisterAs[SType]] { - override val opCode: OpCode = OpCodes.ExtractRegisterAs + import sigmastate.Operations.ExtractRegisterAsInfo._ + override def opDesc = ExtractRegisterAs override def serialize(obj: ExtractRegisterAs[SType], w: SigmaByteWriter): Unit = - 
w.putValue(obj.input) - .put(obj.registerId.number) - .putType(obj.tpe.elemType) + w.putValue(obj.input, thisArg) + .put(obj.registerId.number, regIdArg) + .putType(obj.tpe.elemType, ArgInfo("type", "expected type of the value in register")) override def parse(r: SigmaByteReader): Value[SType] = { val input = r.getValue() diff --git a/src/main/scala/sigmastate/serialization/transformers/FilterSerializer.scala b/src/main/scala/sigmastate/serialization/transformers/FilterSerializer.scala index 7d3ea6e22d..1c16a9d932 100644 --- a/src/main/scala/sigmastate/serialization/transformers/FilterSerializer.scala +++ b/src/main/scala/sigmastate/serialization/transformers/FilterSerializer.scala @@ -2,26 +2,21 @@ package sigmastate.serialization.transformers import sigmastate.Values.Value import sigmastate.lang.Terms._ -import sigmastate.serialization.OpCodes.OpCode -import sigmastate.serialization.{OpCodes, ValueSerializer} -import scorex.util.Extensions._ +import sigmastate.serialization.ValueSerializer import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import sigmastate.utxo.Filter -import sigmastate.{SBoolean, SCollection, SType} +import sigmastate.{SCollection, SType, SFunc} -case class FilterSerializer(cons: (Value[SCollection[SType]], Byte, Value[SBoolean.type]) => Value[SCollection[SType]]) extends ValueSerializer[Filter[SType]] { - - override val opCode: OpCode = OpCodes.FilterCode +case class FilterSerializer(cons: (Value[SCollection[SType]], Value[SFunc]) => Value[SCollection[SType]]) extends ValueSerializer[Filter[SType]] { + override def opDesc = Filter override def serialize(obj: Filter[SType], w: SigmaByteWriter): Unit = - w.put(obj.id) - .putValue(obj.input) + w.putValue(obj.input) .putValue(obj.condition) override def parse(r: SigmaByteReader): Value[SCollection[SType]] = { - val id = r.getByte() val input = r.getValue().asCollection[SType] - val condition = r.getValue().asValue[SBoolean.type] - cons(input, id, condition) + val condition = 
r.getValue().asFunc + cons(input, condition) } } diff --git a/src/main/scala/sigmastate/serialization/transformers/FoldSerializer.scala b/src/main/scala/sigmastate/serialization/transformers/FoldSerializer.scala index 7bf7ff673c..42e72acf43 100644 --- a/src/main/scala/sigmastate/serialization/transformers/FoldSerializer.scala +++ b/src/main/scala/sigmastate/serialization/transformers/FoldSerializer.scala @@ -2,21 +2,21 @@ package sigmastate.serialization.transformers import sigmastate.Values.Value import sigmastate.lang.Terms._ -import sigmastate.serialization.OpCodes.OpCode -import sigmastate.serialization.{OpCodes, ValueSerializer} -import scorex.util.Extensions._ +import sigmastate.serialization.ValueSerializer import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import sigmastate.utxo.Fold -import sigmastate.{SCollection, SFunc, SType} +import sigmastate.{SCollection, SType, SFunc} case class FoldSerializer(cons: (Value[SCollection[SType]], Value[SType], Value[SFunc]) => Value[SType]) extends ValueSerializer[Fold[SType, SType]] { - override val opCode: OpCode = OpCodes.FoldCode + override def opDesc = Fold - override def serialize(obj: Fold[SType, SType], w: SigmaByteWriter): Unit = - w.putValue(obj.input) - .putValue(obj.zero) - .putValue(obj.foldOp) + override def serialize(obj: Fold[SType, SType], w: SigmaByteWriter): Unit = { + import sigmastate.Operations.FoldInfo._ + w.putValue(obj.input, thisArg) + .putValue(obj.zero, zeroArg) + .putValue(obj.foldOp, opArg) + } override def parse(r: SigmaByteReader): Value[SType] = { val input = r.getValue().asCollection[SType] diff --git a/src/main/scala/sigmastate/serialization/transformers/LogicalTransformerSerializer.scala b/src/main/scala/sigmastate/serialization/transformers/LogicalTransformerSerializer.scala index 39f0456735..800b5a1508 100644 --- a/src/main/scala/sigmastate/serialization/transformers/LogicalTransformerSerializer.scala +++ 
b/src/main/scala/sigmastate/serialization/transformers/LogicalTransformerSerializer.scala @@ -1,23 +1,19 @@ package sigmastate.serialization.transformers -import sigmastate.Values.Value +import sigmastate.Values.{Value, ValueCompanion} import sigmastate.lang.Terms._ -import sigmastate.serialization.OpCodes.OpCode import sigmastate.serialization.ValueSerializer -import scorex.util.Extensions._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import sigmastate.utxo.Transformer -import sigmastate.{SBoolean, SCollection} +import sigmastate.{SCollection, SBoolean, LogicalTransformerCompanion} case class LogicalTransformerSerializer[I <: SCollection[SBoolean.type], O <: SBoolean.type] -(code: OpCode, +(opDesc: LogicalTransformerCompanion, cons: Value[SCollection[SBoolean.type]] => Value[SBoolean.type]) extends ValueSerializer[Transformer[I, O]] { - override val opCode: OpCode = code - override def serialize(obj: Transformer[I, O], w: SigmaByteWriter): Unit = - w.putValue(obj.input) + w.putValue(obj.input, opDesc.argInfos(0)) override def parse(r: SigmaByteReader): Value[SBoolean.type] = cons(r.getValue().asCollection[SBoolean.type]) diff --git a/src/main/scala/sigmastate/serialization/transformers/MapCollectionSerializer.scala b/src/main/scala/sigmastate/serialization/transformers/MapCollectionSerializer.scala index af6e098d6b..6d149c3749 100644 --- a/src/main/scala/sigmastate/serialization/transformers/MapCollectionSerializer.scala +++ b/src/main/scala/sigmastate/serialization/transformers/MapCollectionSerializer.scala @@ -2,20 +2,19 @@ package sigmastate.serialization.transformers import sigmastate.Values.Value import sigmastate.lang.Terms._ -import sigmastate.serialization.OpCodes.OpCode -import sigmastate.serialization.{OpCodes, ValueSerializer} +import sigmastate.serialization.ValueSerializer import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import sigmastate.utxo.MapCollection -import sigmastate.{SCollection, SFunc, SType} +import 
sigmastate.{SCollection, SType, SFunc} case class MapCollectionSerializer(cons: (Value[SCollection[SType]], Value[SFunc]) => Value[SType]) extends ValueSerializer[MapCollection[SType, SType]] { - - override val opCode: OpCode = OpCodes.MapCollectionCode + import sigmastate.Operations.MapCollectionInfo._ + override def opDesc = MapCollection override def serialize(obj: MapCollection[SType, SType], w: SigmaByteWriter): Unit = - w.putValue(obj.input) - .putValue(obj.mapper) + w.putValue(obj.input, thisArg) + .putValue(obj.mapper, fArg) override def parse(r: SigmaByteReader): Value[SType] = { val input = r.getValue().asValue[SCollection[SType]] diff --git a/src/main/scala/sigmastate/serialization/transformers/NumericCastSerializer.scala b/src/main/scala/sigmastate/serialization/transformers/NumericCastSerializer.scala index 4a8a735858..d939718cd2 100644 --- a/src/main/scala/sigmastate/serialization/transformers/NumericCastSerializer.scala +++ b/src/main/scala/sigmastate/serialization/transformers/NumericCastSerializer.scala @@ -1,23 +1,19 @@ package sigmastate.serialization.transformers -import sigmastate.Values.Value +import sigmastate.Values.{Value, ValueCompanion} import sigmastate._ import sigmastate.lang.Terms._ -import sigmastate.serialization.OpCodes.OpCode import sigmastate.serialization.ValueSerializer -import scorex.util.Extensions._ import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import sigmastate.utxo.Transformer -case class NumericCastSerializer(code: OpCode, +case class NumericCastSerializer(opDesc: NumericCastCompanion, cons: (Value[SNumericType], SNumericType) => Value[SNumericType]) extends ValueSerializer[Transformer[SNumericType, SNumericType]] { - override val opCode: OpCode = code - override def serialize(obj: Transformer[SNumericType, SNumericType], w: SigmaByteWriter): Unit = - w.putValue(obj.input) - .putType(obj.tpe) + w.putValue(obj.input, opDesc.argInfos(0)) + .putType(obj.tpe, ArgInfo("type", "resulting type of the cast 
operation")) override def parse(r: SigmaByteReader): Value[SNumericType] = { val input = r.getValue().asNumValue diff --git a/src/main/scala/sigmastate/serialization/transformers/ProveDHTupleSerializer.scala b/src/main/scala/sigmastate/serialization/transformers/ProveDHTupleSerializer.scala index 82a4edd621..823055d101 100644 --- a/src/main/scala/sigmastate/serialization/transformers/ProveDHTupleSerializer.scala +++ b/src/main/scala/sigmastate/serialization/transformers/ProveDHTupleSerializer.scala @@ -5,26 +5,25 @@ import sigmastate.Values.{Value, SigmaPropValue} import sigmastate.basics.ProveDHTuple import sigmastate.interpreter.CryptoConstants.EcPointType import sigmastate.lang.Terms._ -import sigmastate.serialization.OpCodes.OpCode import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} -import sigmastate.serialization.{ValueSerializer, DataSerializer, OpCodes, SigmaSerializer} +import sigmastate.serialization._ case class ProveDHTupleSerializer( cons: (EcPointType, EcPointType, EcPointType, EcPointType) => ProveDHTuple ) extends SigmaSerializer[ProveDHTuple, ProveDHTuple] { override def serialize(obj: ProveDHTuple, w: SigmaByteWriter): Unit = { - DataSerializer.serialize[SGroupElement.type](obj.gv, SGroupElement, w) - DataSerializer.serialize[SGroupElement.type](obj.hv, SGroupElement, w) - DataSerializer.serialize[SGroupElement.type](obj.uv, SGroupElement, w) - DataSerializer.serialize[SGroupElement.type](obj.vv, SGroupElement, w) + GroupElementSerializer.serialize(obj.gv, w) + GroupElementSerializer.serialize(obj.hv, w) + GroupElementSerializer.serialize(obj.uv, w) + GroupElementSerializer.serialize(obj.vv, w) } override def parse(r: SigmaByteReader) = { - val gv = DataSerializer.deserialize(SGroupElement, r) - val hv = DataSerializer.deserialize(SGroupElement, r) - val uv = DataSerializer.deserialize(SGroupElement, r) - val vv = DataSerializer.deserialize(SGroupElement, r) + val gv = GroupElementSerializer.parse(r) + val hv = 
GroupElementSerializer.parse(r) + val uv = GroupElementSerializer.parse(r) + val vv = GroupElementSerializer.parse(r) cons(gv, hv, uv, vv) } } @@ -34,14 +33,14 @@ case class CreateProveDHTupleSerializer(cons: (Value[SGroupElement.type], Value[SGroupElement.type], Value[SGroupElement.type]) => SigmaPropValue) extends ValueSerializer[CreateProveDHTuple] { - - override val opCode: OpCode = OpCodes.ProveDiffieHellmanTupleCode + import sigmastate.Operations.CreateProveDHTupleInfo._ + override def opDesc = CreateProveDHTuple override def serialize(obj: CreateProveDHTuple, w: SigmaByteWriter): Unit = { - w.putValue(obj.gv) - w.putValue(obj.hv) - w.putValue(obj.uv) - w.putValue(obj.vv) + w.putValue(obj.gv, gArg) + w.putValue(obj.hv, hArg) + w.putValue(obj.uv, uArg) + w.putValue(obj.vv, vArg) } override def parse(r: SigmaByteReader) = { diff --git a/src/main/scala/sigmastate/serialization/transformers/SigmaTransformerSerializer.scala b/src/main/scala/sigmastate/serialization/transformers/SigmaTransformerSerializer.scala index c5e4e6861d..d736826d29 100644 --- a/src/main/scala/sigmastate/serialization/transformers/SigmaTransformerSerializer.scala +++ b/src/main/scala/sigmastate/serialization/transformers/SigmaTransformerSerializer.scala @@ -1,21 +1,18 @@ package sigmastate.serialization.transformers -import sigmastate.SigmaTransformer -import sigmastate.Values.SigmaPropValue -import sigmastate.serialization.OpCodes.OpCode +import sigmastate.{SigmaTransformer, SigmaTransformerCompanion} +import sigmastate.Values.{ValueCompanion, SigmaPropValue} import sigmastate.serialization.ValueSerializer import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import scala.collection.mutable case class SigmaTransformerSerializer[I <: SigmaPropValue, O <: SigmaPropValue] -(code: OpCode, cons: Seq[SigmaPropValue] => SigmaPropValue) +(opDesc: SigmaTransformerCompanion, cons: Seq[SigmaPropValue] => SigmaPropValue) extends ValueSerializer[SigmaTransformer[I, O]] { - override val opCode: 
OpCode = code - override def serialize(obj: SigmaTransformer[I, O], w: SigmaByteWriter): Unit = - w.putValues(obj.items) + w.putValues(obj.items, opDesc.argInfos(0)) override def parse(r: SigmaByteReader): SigmaPropValue = { val itemsSize = r.getUInt().toInt diff --git a/src/main/scala/sigmastate/serialization/transformers/SimpleTransformerSerializer.scala b/src/main/scala/sigmastate/serialization/transformers/SimpleTransformerSerializer.scala index 2bf23b3cd2..68b8c9a602 100644 --- a/src/main/scala/sigmastate/serialization/transformers/SimpleTransformerSerializer.scala +++ b/src/main/scala/sigmastate/serialization/transformers/SimpleTransformerSerializer.scala @@ -1,22 +1,18 @@ package sigmastate.serialization.transformers import sigmastate.SType -import sigmastate.Values.Value +import sigmastate.Values.{Value, ValueCompanion} import sigmastate.lang.Terms._ -import sigmastate.serialization.OpCodes.OpCode import sigmastate.serialization.ValueSerializer import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} -import sigmastate.utxo.Transformer -import scorex.util.Extensions._ +import sigmastate.utxo.{Transformer, SimpleTransformerCompanion} case class SimpleTransformerSerializer[I <: SType, O <: SType] -(code: OpCode, +(opDesc: SimpleTransformerCompanion, cons: Value[I] => Value[O]) extends ValueSerializer[Transformer[I, O]] { - override val opCode: OpCode = code - override def serialize(obj: Transformer[I, O], w: SigmaByteWriter): Unit = - w.putValue(obj.input) + w.putValue(obj.input, opDesc.argInfos(0)) override def parse(r: SigmaByteReader): Value[O] = cons(r.getValue().asValue[I]) diff --git a/src/main/scala/sigmastate/serialization/transformers/SliceSerializer.scala b/src/main/scala/sigmastate/serialization/transformers/SliceSerializer.scala index 6f86bff5fc..4a9841fb83 100644 --- a/src/main/scala/sigmastate/serialization/transformers/SliceSerializer.scala +++ b/src/main/scala/sigmastate/serialization/transformers/SliceSerializer.scala @@ -4,20 +4,20 @@ 
import sigmastate.Values.Value import sigmastate.lang.Terms._ import sigmastate.serialization.OpCodes.OpCode import sigmastate.serialization.{OpCodes, ValueSerializer} -import scorex.util.Extensions._ +import sigmastate.serialization.ValueSerializer import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import sigmastate.utxo.Slice -import sigmastate.{SCollection, SInt, SType} +import sigmastate.{SInt, SCollection, SType} case class SliceSerializer(cons: (Value[SCollection[SType]], Value[SInt.type], Value[SInt.type]) => Value[SCollection[SType]]) extends ValueSerializer[Slice[SType]] { - - override val opCode: OpCode = OpCodes.SliceCode + import sigmastate.Operations.SliceInfo._ + override def opDesc = Slice override def serialize(obj: Slice[SType], w: SigmaByteWriter): Unit = - w.putValue(obj.input) - .putValue(obj.from) - .putValue(obj.until) + w.putValue(obj.input, thisArg) + .putValue(obj.from, fromArg) + .putValue(obj.until, untilArg) override def parse(r: SigmaByteReader): Value[SCollection[SType]] = { val input = r.getValue().asCollection[SType] diff --git a/src/main/scala/sigmastate/serialization/trees/QuadrupleSerializer.scala b/src/main/scala/sigmastate/serialization/trees/QuadrupleSerializer.scala index d9fc6bcb6d..859118282b 100644 --- a/src/main/scala/sigmastate/serialization/trees/QuadrupleSerializer.scala +++ b/src/main/scala/sigmastate/serialization/trees/QuadrupleSerializer.scala @@ -5,17 +5,16 @@ import sigmastate.lang.Terms._ import sigmastate.serialization.ValueSerializer import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} import sigmastate.{Quadruple, _} -import scorex.util.Extensions._ case class QuadrupleSerializer[S1 <: SType, S2 <: SType, S3 <: SType, S4 <: SType] -(override val opCode: Byte, +(override val opDesc: QuadrupleCompanion, cons: (Value[S1], Value[S2], Value[S3]) => Value[S4]) extends ValueSerializer[Quadruple[S1, S2, S3, S4]] { override def serialize(obj: Quadruple[S1, S2, S3, S4], w: SigmaByteWriter): Unit = { - 
w.putValue(obj.first) - w.putValue(obj.second) - w.putValue(obj.third) + w.putValue(obj.first, opDesc.argInfos(0)) + w.putValue(obj.second, opDesc.argInfos(1)) + w.putValue(obj.third, opDesc.argInfos(2)) } override def parse(r: SigmaByteReader): Value[S4] = { diff --git a/src/main/scala/sigmastate/serialization/trees/Relation2Serializer.scala b/src/main/scala/sigmastate/serialization/trees/Relation2Serializer.scala index 0b68cd5e22..175ed8de70 100644 --- a/src/main/scala/sigmastate/serialization/trees/Relation2Serializer.scala +++ b/src/main/scala/sigmastate/serialization/trees/Relation2Serializer.scala @@ -6,22 +6,31 @@ import sigmastate.lang.Terms._ import sigmastate.serialization.OpCodes._ import sigmastate.serialization.ValueSerializer import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} -import scorex.util.Extensions._ - +import sigmastate.serialization.ValueSerializer._ +import sigma.util.Extensions._ case class Relation2Serializer[S1 <: SType, S2 <: SType, R <: Value[SBoolean.type]] -(override val opCode: Byte, +(override val opDesc: RelationCompanion, constructor: (Value[S1], Value[S2]) => Value[SBoolean.type]) extends ValueSerializer[R] { + import SigmaByteWriter._ override def serialize(obj: R, w: SigmaByteWriter): Unit = { val typedRel = obj.asInstanceOf[Relation[S1, S2]] - (typedRel.left, typedRel.right) match { - case (Constant(left, ltpe), Constant(right, rtpe)) if ltpe == SBoolean && rtpe == SBoolean => - w.put(ConcreteCollectionBooleanConstantCode) - w.putBits(Array[Boolean](left.asInstanceOf[Boolean], right.asInstanceOf[Boolean])) - case _ => - w.putValue(typedRel.left) - w.putValue(typedRel.right) + cases("(left, right)") { + (typedRel.left, typedRel.right) match { + case (Constant(left, lTpe), Constant(right, rTpe)) if lTpe == SBoolean && rTpe == SBoolean => + when(1, "(Constant(l, Boolean), Constant(r, Boolean))") { + w.put(ConcreteCollectionBooleanConstantCode, ArgInfo("opCode", s"always contains OpCode 
${ConcreteCollectionBooleanConstantCode.toUByte}")) + w.putBits( + Array[Boolean](left.asInstanceOf[Boolean], right.asInstanceOf[Boolean]), + maxBitsInfo("(l,r)", 2, "two higher bits in a byte")) + } + case _ => + otherwise { + w.putValue(typedRel.left, opDesc.argInfos(0)) + w.putValue(typedRel.right, opDesc.argInfos(1)) + } + } } } diff --git a/src/main/scala/sigmastate/serialization/trees/Relation3Serializer.scala b/src/main/scala/sigmastate/serialization/trees/Relation3Serializer.scala deleted file mode 100644 index 0a9bf4bdf8..0000000000 --- a/src/main/scala/sigmastate/serialization/trees/Relation3Serializer.scala +++ /dev/null @@ -1,27 +0,0 @@ -package sigmastate.serialization.trees - -import sigmastate.Values._ -import sigmastate._ -import sigmastate.lang.Terms._ -import sigmastate.serialization.ValueSerializer -import sigmastate.utils.{SigmaByteReader, SigmaByteWriter} -import scorex.util.Extensions._ - -case class Relation3Serializer[S1 <: SType, S2 <: SType, S3 <: SType, R <: Value[SBoolean.type]] -(override val opCode: Byte, - cons: (Value[S1], Value[S2], Value[S3]) => R) extends ValueSerializer[R] { - - override def serialize(obj: R, w: SigmaByteWriter): Unit = { - val rel = obj.asInstanceOf[Relation3[S1, S2, S3]] - w.putValue(rel.first) - w.putValue(rel.second) - w.putValue(rel.third) - } - - override def parse(r: SigmaByteReader): R = { - val arg1 = r.getValue().asValue[S1] - val arg2 = r.getValue().asValue[S2] - val arg3 = r.getValue().asValue[S3] - cons(arg1, arg2, arg3) - } -} diff --git a/src/main/scala/sigmastate/trees.scala b/src/main/scala/sigmastate/trees.scala index a7a9502db3..baf952f85f 100644 --- a/src/main/scala/sigmastate/trees.scala +++ b/src/main/scala/sigmastate/trees.scala @@ -1,13 +1,16 @@ package sigmastate +import org.ergoplatform.ErgoConstants +import org.ergoplatform.validation.SigmaValidationSettings import scorex.crypto.hash.{Sha256, Blake2b256, CryptographicHash32} +import sigmastate.Operations._ import 
sigmastate.SCollection.{SIntArray, SByteArray} import sigmastate.SOption.SIntOption import sigmastate.Values._ -import sigmastate.basics.{SigmaProtocol, SigmaProtocolPrivateInput, SigmaProtocolCommonInput} +import sigmastate.basics.{SigmaProtocol, SigmaProtocolCommonInput, SigmaProtocolPrivateInput} import sigmastate.serialization.OpCodes._ import sigmastate.serialization._ -import sigmastate.utxo.Transformer +import sigmastate.utxo.{Transformer, SimpleTransformerCompanion} import scala.collection.mutable import scala.collection.mutable.ArrayBuffer @@ -102,33 +105,39 @@ object TrivialProp { /** Embedding of Boolean values to SigmaProp values. As an example, this operation allows boolean experesions * to be used as arguments of `atLeast(..., sigmaProp(boolExpr), ...)` operation. - * During execution results to either `TrueProp` or `FalseProp` values of SigmaBoolean type. + * During execution results to either `TrueProp` or `FalseProp` values of SigmaProp type. */ case class BoolToSigmaProp(value: BoolValue) extends SigmaPropValue { - override val opCode: OpCode = OpCodes.BoolToSigmaPropCode + override def companion = BoolToSigmaProp def tpe = SSigmaProp val opType = SFunc(SBoolean, SSigmaProp) } +object BoolToSigmaProp extends ValueCompanion { + override def opCode: OpCode = OpCodes.BoolToSigmaPropCode +} /** ErgoTree operation to create a new SigmaProp value representing public key * of discrete logarithm signature protocol. */ case class CreateProveDlog(value: Value[SGroupElement.type]) extends SigmaPropValue { - override val opCode: OpCode = OpCodes.ProveDlogCode + override def companion = CreateProveDlog override def tpe = SSigmaProp override def opType = SFunc(SGroupElement, SSigmaProp) } +object CreateProveDlog extends ValueCompanion { + override def opCode: OpCode = OpCodes.ProveDlogCode +} -/** ErgoTree operation to create a new SigmaProp value representing public key - * of discrete logarithm signature protocol. 
*/ +/** Construct a new authenticated dictionary with given parameters and tree root digest.*/ case class CreateAvlTree(operationFlags: ByteValue, digest: Value[SByteArray], keyLength: IntValue, valueLengthOpt: Value[SIntOption]) extends AvlTreeValue { - override val opCode: OpCode = OpCodes.AvlTreeCode + override def companion = CreateAvlTree override def tpe = SAvlTree override def opType = CreateAvlTree.opType } -object CreateAvlTree { +object CreateAvlTree extends ValueCompanion { + override def opCode: OpCode = OpCodes.AvlTreeCode val opType = SFunc(IndexedSeq(SByte, SByteArray, SInt, SIntOption), SAvlTree) } @@ -139,25 +148,31 @@ case class CreateProveDHTuple(gv: Value[SGroupElement.type], hv: Value[SGroupElement.type], uv: Value[SGroupElement.type], vv: Value[SGroupElement.type]) extends SigmaPropValue { - override val opCode: OpCode = OpCodes.ProveDiffieHellmanTupleCode + override def companion = CreateProveDHTuple override def tpe = SSigmaProp override def opType = SFunc(IndexedSeq(SGroupElement, SGroupElement, SGroupElement, SGroupElement), SSigmaProp) } +object CreateProveDHTuple extends ValueCompanion { + override def opCode: OpCode = OpCodes.ProveDHTupleCode +} trait SigmaTransformer[IV <: SigmaPropValue, OV <: SigmaPropValue] extends SigmaPropValue { val items: Seq[IV] } - +trait SigmaTransformerCompanion extends ValueCompanion { + def argInfos: Seq[ArgInfo] +} /** * AND conjunction for sigma propositions */ case class SigmaAnd(items: Seq[SigmaPropValue]) extends SigmaTransformer[SigmaPropValue, SigmaPropValue] { - override val opCode: OpCode = OpCodes.SigmaAndCode + override def companion = SigmaAnd def tpe = SSigmaProp val opType = SFunc(SCollection.SSigmaPropArray, SSigmaProp) } - -object SigmaAnd { +object SigmaAnd extends SigmaTransformerCompanion { + override def opCode: OpCode = OpCodes.SigmaAndCode + override def argInfos: Seq[ArgInfo] = SigmaAndInfo.argInfos def apply(head: SigmaPropValue, tail: SigmaPropValue*): SigmaAnd = SigmaAnd(head +: 
tail) } @@ -165,12 +180,14 @@ object SigmaAnd { * OR disjunction for sigma propositions */ case class SigmaOr(items: Seq[SigmaPropValue]) extends SigmaTransformer[SigmaPropValue, SigmaPropValue] { - override val opCode: OpCode = OpCodes.SigmaOrCode + override def companion = SigmaOr def tpe = SSigmaProp val opType = SFunc(SCollection.SSigmaPropArray, SSigmaProp) } -object SigmaOr { +object SigmaOr extends SigmaTransformerCompanion { + override def opCode: OpCode = OpCodes.SigmaOrCode + override def argInfos: Seq[ArgInfo] = SigmaOrInfo.argInfos def apply(head: SigmaPropValue, tail: SigmaPropValue*): SigmaOr = SigmaOr(head +: tail) } @@ -180,11 +197,14 @@ object SigmaOr { */ case class OR(input: Value[SCollection[SBoolean.type]]) extends Transformer[SCollection[SBoolean.type], SBoolean.type] with NotReadyValueBoolean { - override val opCode: OpCode = OrCode + override def companion = OR override val opType = SFunc(SCollection.SBooleanArray, SBoolean) } -object OR { +object OR extends LogicalTransformerCompanion { + override def opCode: OpCode = OrCode + override def argInfos: Seq[ArgInfo] = Operations.ORInfo.argInfos + def apply(children: Seq[Value[SBoolean.type]]): OR = OR(ConcreteCollection(children.toIndexedSeq)) @@ -195,11 +215,14 @@ object OR { */ case class XorOf(input: Value[SCollection[SBoolean.type]]) extends Transformer[SCollection[SBoolean.type], SBoolean.type] with NotReadyValueBoolean { - override val opCode: OpCode = XorOfCode + override def companion = XorOf override val opType = SFunc(SCollection.SBooleanArray, SBoolean) } -object XorOf { +object XorOf extends LogicalTransformerCompanion { + override def opCode: OpCode = XorOfCode + override def argInfos: Seq[ArgInfo] = Operations.XorOfInfo.argInfos + def apply(children: Seq[Value[SBoolean.type]]): XorOf = XorOf(ConcreteCollection(children.toIndexedSeq)) @@ -212,11 +235,18 @@ object XorOf { case class AND(input: Value[SCollection[SBoolean.type]]) extends Transformer[SCollection[SBoolean.type], 
SBoolean.type] with NotReadyValueBoolean { - override val opCode: OpCode = AndCode + override def companion = AND override val opType = SFunc(SCollection.SBooleanArray, SBoolean) } -object AND { +trait LogicalTransformerCompanion extends ValueCompanion { + def argInfos: Seq[ArgInfo] +} + +object AND extends LogicalTransformerCompanion { + override def opCode: OpCode = AndCode + override def argInfos: Seq[ArgInfo] = Operations.ANDInfo.argInfos + def apply(children: Seq[Value[SBoolean.type]]): AND = AND(ConcreteCollection(children.toIndexedSeq)) @@ -225,20 +255,21 @@ object AND { /** * Logical threshold. - * AtLeast has two inputs: integer bound and children same as in AND/OR. The result is true if at least bound children are true. + * AtLeast has two inputs: integer bound and children same as in AND/OR. + * The result is true if at least bound children are true. */ case class AtLeast(bound: Value[SInt.type], input: Value[SCollection[SSigmaProp.type]]) extends Transformer[SCollection[SSigmaProp.type], SSigmaProp.type] with NotReadyValue[SSigmaProp.type] { + override def companion = AtLeast override def tpe: SSigmaProp.type = SSigmaProp - override val opCode: OpCode = AtLeastCode override def opType: SFunc = SFunc(IndexedSeq(SInt, SCollection.SBooleanArray), SBoolean) } -object AtLeast { - - val MaxChildrenCount = 255 +object AtLeast extends ValueCompanion { + override def opCode: OpCode = AtLeastCode + val MaxChildrenCount: Int = ErgoConstants.MaxChildrenCountForAtLeastOp.get def apply(bound: Value[SInt.type], children: Seq[SigmaPropValue]): AtLeast = AtLeast(bound, ConcreteCollection(children.toIndexedSeq)) @@ -295,13 +326,17 @@ case class Upcast[T <: SNumericType, R <: SNumericType](input: Value[T], tpe: R) extends Transformer[T, R] { import Upcast._ require(input.tpe.isInstanceOf[SNumericType], s"Cannot create Upcast node for non-numeric type ${input.tpe}") - override val opCode: OpCode = OpCodes.UpcastCode + override def companion = Upcast override val opType = 
SFunc(Vector(tT), tR) } - -object Upcast { - val tT = STypeIdent("T") - val tR = STypeIdent("R") +trait NumericCastCompanion extends ValueCompanion { + def argInfos: Seq[ArgInfo] +} +object Upcast extends NumericCastCompanion { + override def opCode: OpCode = OpCodes.UpcastCode + override def argInfos: Seq[ArgInfo] = UpcastInfo.argInfos + val tT = STypeVar("T") + val tR = STypeVar("R") } /** @@ -311,13 +346,15 @@ case class Downcast[T <: SNumericType, R <: SNumericType](input: Value[T], tpe: extends Transformer[T, R] { import Downcast._ require(input.tpe.isInstanceOf[SNumericType], s"Cannot create Downcast node for non-numeric type ${input.tpe}") - override val opCode: OpCode = OpCodes.DowncastCode + override def companion = Downcast override val opType = SFunc(Vector(tT), tR) } -object Downcast { - val tT = STypeIdent("T") - val tR = STypeIdent("R") +object Downcast extends NumericCastCompanion { + override def opCode: OpCode = OpCodes.DowncastCode + override def argInfos: Seq[ArgInfo] = DowncastInfo.argInfos + val tT = STypeVar("T") + val tR = STypeVar("R") } /** @@ -325,36 +362,52 @@ object Downcast { */ case class LongToByteArray(input: Value[SLong.type]) extends Transformer[SLong.type, SByteArray] with NotReadyValueByteArray { - override val opCode: OpCode = OpCodes.LongToByteArrayCode + override def companion = LongToByteArray override val opType = SFunc(SLong, SByteArray) } +object LongToByteArray extends SimpleTransformerCompanion { + override def opCode: OpCode = OpCodes.LongToByteArrayCode + override def argInfos: Seq[ArgInfo] = LongToByteArrayInfo.argInfos +} /** * Convert SByteArray to SLong */ case class ByteArrayToLong(input: Value[SByteArray]) extends Transformer[SByteArray, SLong.type] with NotReadyValueLong { - override val opCode: OpCode = OpCodes.ByteArrayToLongCode + override def companion = ByteArrayToLong override val opType = SFunc(SByteArray, SLong) } +object ByteArrayToLong extends SimpleTransformerCompanion { + override def opCode: OpCode 
= OpCodes.ByteArrayToLongCode + override def argInfos: Seq[ArgInfo] = ByteArrayToLongInfo.argInfos +} /** * Convert SByteArray to SBigInt */ case class ByteArrayToBigInt(input: Value[SByteArray]) extends Transformer[SByteArray, SBigInt.type] with NotReadyValueBigInt { - override val opCode: OpCode = OpCodes.ByteArrayToBigIntCode + override def companion = ByteArrayToBigInt override val opType = SFunc(SByteArray, SBigInt) } +object ByteArrayToBigInt extends SimpleTransformerCompanion { + override def opCode: OpCode = OpCodes.ByteArrayToBigIntCode + override def argInfos: Seq[ArgInfo] = ByteArrayToBigIntInfo.argInfos +} /** * Convert SByteArray to SGroupElement using CryptoConstants.dlogGroup.curve.decodePoint(bytes) */ case class DecodePoint(input: Value[SByteArray]) extends Transformer[SByteArray, SGroupElement.type] with NotReadyValueGroupElement { - override val opCode: OpCode = OpCodes.DecodePointCode + override def companion = DecodePoint override val opType = SFunc(SByteArray, SGroupElement) } +object DecodePoint extends SimpleTransformerCompanion { + override def opCode: OpCode = OpCodes.DecodePointCode + override def argInfos: Seq[ArgInfo] = DecodePointInfo.argInfos +} trait CalcHash extends Transformer[SByteArray, SByteArray] with NotReadyValueByteArray { val input: Value[SByteArray] @@ -366,17 +419,25 @@ trait CalcHash extends Transformer[SByteArray, SByteArray] with NotReadyValueByt * Calculate Blake2b hash from `input` */ case class CalcBlake2b256(override val input: Value[SByteArray]) extends CalcHash { - override val opCode: OpCode = OpCodes.CalcBlake2b256Code + override def companion = CalcBlake2b256 override val hashFn: CryptographicHash32 = Blake2b256 } +object CalcBlake2b256 extends SimpleTransformerCompanion { + override def opCode: OpCode = OpCodes.CalcBlake2b256Code + override def argInfos: Seq[ArgInfo] = CalcBlake2b256Info.argInfos +} /** * Calculate Sha256 hash from `input` */ case class CalcSha256(override val input: Value[SByteArray]) 
extends CalcHash { - override val opCode: OpCode = OpCodes.CalcSha256Code + override def companion = CalcSha256 override val hashFn: CryptographicHash32 = Sha256 } +object CalcSha256 extends SimpleTransformerCompanion { + override def opCode: OpCode = OpCodes.CalcSha256Code + override def argInfos: Seq[ArgInfo] = CalcSha256Info.argInfos +} /** * Transforms serialized bytes of ErgoTree with segregated constants by replacing constants @@ -396,16 +457,17 @@ case class CalcSha256(override val input: Value[SByteArray]) extends CalcHash { case class SubstConstants[T <: SType](scriptBytes: Value[SByteArray], positions: Value[SIntArray], newValues: Value[SCollection[T]]) extends NotReadyValueByteArray { import SubstConstants._ - override val opCode: OpCode = OpCodes.SubstConstantsCode + override def companion = SubstConstants override val opType = SFunc(Vector(SByteArray, SIntArray, SCollection(tT)), SByteArray) } -object SubstConstants { - val tT = STypeIdent("T") +object SubstConstants extends ValueCompanion { + override def opCode: OpCode = OpCodes.SubstConstantsCode + val tT = STypeVar("T") def eval(scriptBytes: Array[Byte], positions: Array[Int], - newVals: Array[Value[SType]]): Array[Byte] = + newVals: Array[Value[SType]])(implicit vs: SigmaValidationSettings): Array[Byte] = ErgoTreeSerializer.DefaultSerializer.substituteConstants(scriptBytes, positions, newVals) } @@ -422,16 +484,25 @@ sealed trait OneArgumentOperation[IV <: SType, OV <: SType] extends NotReadyValu val input: Value[IV] override def opType = SFunc(input.tpe, tpe) } +trait OneArgumentOperationCompanion extends ValueCompanion { + def argInfos: Seq[ArgInfo] +} // TwoArgumentsOperation sealed trait TwoArgumentsOperation[LIV <: SType, RIV <: SType, OV <: SType] extends Triple[LIV, RIV, OV] -case class ArithOp[T <: SType](left: Value[T], right: Value[T], opCode: OpCode) +trait TwoArgumentOperationCompanion extends ValueCompanion { + def argInfos: Seq[ArgInfo] +} + +case class ArithOp[T <: SType](left: 
Value[T], right: Value[T], override val opCode: OpCode) extends TwoArgumentsOperation[T, T, T] with NotReadyValue[T] { + override def companion: ValueCompanion = ArithOp.operations(opCode) override def tpe: T = left.tpe override def opName: String = ArithOp.opcodeToArithOpName(opCode) + // TODO refactor: avoid such enumeration, use ArithOp.operations map instead override def toString: String = opCode match { case OpCodes.PlusCode => s"Plus($left, $right)" case OpCodes.MinusCode => s"Minus($left, $right)" @@ -442,48 +513,116 @@ case class ArithOp[T <: SType](left: Value[T], right: Value[T], opCode: OpCode) case OpCodes.MultiplyCode => s"Multiply($left, $right)" case OpCodes.DivisionCode => s"Division($left, $right)" case OpCodes.ModuloCode => s"Modulo($left, $right)" case OpCodes.MinCode => s"Min($left, $right)" case OpCodes.MaxCode => s"Max($left, $right)" } } - +/** NOTE: by-name argument is required for correct initialization order. */ +class ArithOpCompanion(val opCode: Byte, val name: String, _argInfos: => Seq[ArgInfo]) extends TwoArgumentOperationCompanion { + override def argInfos: Seq[ArgInfo] = _argInfos +} object ArithOp { - def opcodeToArithOpName(opCode: Byte): String = opCode match { - case OpCodes.PlusCode => "+" - case OpCodes.MinusCode => "-" - case OpCodes.MultiplyCode => "*" - case OpCodes.DivisionCode => "/" - case OpCodes.ModuloCode => "%" - case OpCodes.MinCode => "min" - case OpCodes.MaxCode => "max" + import OpCodes._ + object Plus extends ArithOpCompanion(PlusCode, "+", PlusInfo.argInfos) + object Minus extends ArithOpCompanion(MinusCode, "-", MinusInfo.argInfos) + object Multiply extends ArithOpCompanion(MultiplyCode, "*", MultiplyInfo.argInfos) + object Division extends ArithOpCompanion(DivisionCode, "/", DivisionInfo.argInfos) + object Modulo extends ArithOpCompanion(ModuloCode, "%", ModuloInfo.argInfos) + object Min extends ArithOpCompanion(MinCode, "min", MinInfo.argInfos) + object Max extends ArithOpCompanion(MaxCode, "max", MaxInfo.argInfos) + + val operations: Map[Byte, ArithOpCompanion] = + Seq(Plus, Minus, Multiply, Division, Modulo, Min, Max).map(o => (o.opCode, o)).toMap + + def opcodeToArithOpName(opCode: Byte): String = 
operations.get(opCode) match { + case Some(c) => c.name case _ => sys.error(s"Cannot find ArithOpName for opcode $opCode") } } -case class Negation[T <: SType](input: Value[T]) extends OneArgumentOperation[T, T] { - override val opCode: OpCode = OpCodes.NegationCode +/** Negation operation on numeric type T. */ +case class Negation[T <: SNumericType](input: Value[T]) extends OneArgumentOperation[T, T] { + override def companion = Negation override def tpe: T = input.tpe } +object Negation extends OneArgumentOperationCompanion { + override def opCode: OpCode = OpCodes.NegationCode + override def argInfos: Seq[ArgInfo] = NegationInfo.argInfos +} case class BitInversion[T <: SNumericType](input: Value[T]) extends OneArgumentOperation[T, T] { - override val opCode: OpCode = OpCodes.BitInversionCode + override def companion = BitInversion override def tpe: T = input.tpe } +object BitInversion extends OneArgumentOperationCompanion { + override def opCode: OpCode = OpCodes.BitInversionCode + override def argInfos: Seq[ArgInfo] = BitInversionInfo.argInfos +} -case class BitOp[T <: SNumericType](left: Value[T], right: Value[T], opCode: OpCode) +case class BitOp[T <: SNumericType](left: Value[T], right: Value[T], override val opCode: OpCode) extends TwoArgumentsOperation[T, T, T] with NotReadyValue[T] { + override def companion = BitOp.operations(opCode) override def tpe: T = left.tpe } +/** NOTE: by-name argument is required for correct initialization order. 
*/ +class BitOpCompanion(val opCode: Byte, val name: String, _argInfos: => Seq[ArgInfo]) extends TwoArgumentOperationCompanion { + override def argInfos: Seq[ArgInfo] = _argInfos +} + +object BitOp { + import OpCodes._ + object BitOr extends BitOpCompanion(BitOrCode, "|", BitOrInfo.argInfos) + object BitAnd extends BitOpCompanion(BitAndCode, "&", BitAndInfo.argInfos) + object BitXor extends BitOpCompanion(BitXorCode, "^", BitXorInfo.argInfos) + object BitShiftRight extends BitOpCompanion(BitShiftRightCode, ">>", BitShiftRightInfo.argInfos) + object BitShiftLeft extends BitOpCompanion(BitShiftLeftCode, "<<", BitShiftLeftInfo.argInfos) + object BitShiftRightZeroed extends BitOpCompanion(BitShiftRightZeroedCode, ">>>", BitShiftRightZeroedInfo.argInfos) + + val operations: Map[Byte, BitOpCompanion] = + Seq(BitOr, BitAnd, BitXor, BitShiftRight, BitShiftLeft, BitShiftRightZeroed).map(o => (o.opCode, o)).toMap + + def opcodeToName(opCode: Byte): String = operations.get(opCode) match { + case Some(c) => c.name + case _ => sys.error(s"Cannot find BitOpName for opcode $opCode") + } +} case class ModQ(input: Value[SBigInt.type]) extends NotReadyValue[SBigInt.type] { - override val opCode: OpCode = OpCodes.ModQCode + override def companion = ModQ override def tpe: SBigInt.type = SBigInt override def opType: SFunc = SFunc(input.tpe, tpe) } +object ModQ extends ValueCompanion { + override def opCode: OpCode = OpCodes.ModQCode +} -case class ModQArithOp(left: Value[SBigInt.type], right: Value[SBigInt.type], opCode: OpCode) +case class ModQArithOp(left: Value[SBigInt.type], right: Value[SBigInt.type], override val opCode: OpCode) extends NotReadyValue[SBigInt.type] { + override def companion = ModQArithOp.operations(opCode) override def tpe: SBigInt.type = SBigInt override def opType: SFunc = SFunc(Vector(left.tpe, right.tpe), tpe) } +abstract class ModQArithOpCompanion(val opCode: Byte, val name: String) extends ValueCompanion { + def argInfos: Seq[ArgInfo] +} + +trait OpGroup[C 
<: ValueCompanion] { + def operations: Map[Byte, C] +} +object ModQArithOp extends OpGroup[ModQArithOpCompanion] { + import OpCodes._ + object PlusModQ extends ModQArithOpCompanion(PlusModQCode, "PlusModQ") { + override def argInfos: Seq[ArgInfo] = PlusModQInfo.argInfos + } + object MinusModQ extends ModQArithOpCompanion(MinusModQCode, "MinusModQ") { + override def argInfos: Seq[ArgInfo] = PlusModQInfo.argInfos + } + + val operations: Map[Byte, ModQArithOpCompanion] = Seq(PlusModQ, MinusModQ).map(o => (o.opCode, o)).toMap + + def opcodeToName(opCode: Byte): String = operations.get(opCode) match { + case Some(c) => c.name + case _ => sys.error(s"Cannot find ModQArithOp operation name for opcode $opCode") + } +} /** * XOR for two SByteArray */ @@ -491,62 +630,87 @@ case class Xor(override val left: Value[SByteArray], override val right: Value[SByteArray]) extends TwoArgumentsOperation[SByteArray, SByteArray, SByteArray] with NotReadyValueByteArray { - - override val opCode: OpCode = XorCode + override def companion = Xor +} +object Xor extends TwoArgumentOperationCompanion { + override def opCode: OpCode = XorCode + override def argInfos: Seq[ArgInfo] = XorInfo.argInfos } case class Exponentiate(override val left: Value[SGroupElement.type], override val right: Value[SBigInt.type]) extends TwoArgumentsOperation[SGroupElement.type, SBigInt.type, SGroupElement.type] with NotReadyValueGroupElement { - - override val opCode: OpCode = ExponentiateCode + override def companion = Exponentiate +} +object Exponentiate extends TwoArgumentOperationCompanion { + override def opCode: OpCode = ExponentiateCode + override def argInfos: Seq[ArgInfo] = ExponentiateInfo.argInfos } case class MultiplyGroup(override val left: Value[SGroupElement.type], override val right: Value[SGroupElement.type]) extends TwoArgumentsOperation[SGroupElement.type, SGroupElement.type, SGroupElement.type] with NotReadyValueGroupElement { - - override val opCode: OpCode = MultiplyGroupCode + override def 
companion = MultiplyGroup +} +object MultiplyGroup extends TwoArgumentOperationCompanion { + override def opCode: OpCode = MultiplyGroupCode + override def argInfos: Seq[ArgInfo] = MultiplyGroupInfo.argInfos } - // Relation sealed trait Relation[LIV <: SType, RIV <: SType] extends Triple[LIV, RIV, SBoolean.type] with NotReadyValueBoolean trait SimpleRelation[T <: SType] extends Relation[T, T] { - val tT = STypeIdent("T") + val tT = STypeVar("T") override val opType = SFunc(Vector(tT, tT), SBoolean) } +trait RelationCompanion extends ValueCompanion { + def argInfos: Seq[ArgInfo] +} + /** * Less operation for SInt */ case class LT[T <: SType](override val left: Value[T], override val right: Value[T]) extends SimpleRelation[T] { - override val opCode: OpCode = LtCode + override def companion = LT +} +object LT extends RelationCompanion { + override def opCode: OpCode = LtCode + override def argInfos: Seq[ArgInfo] = LTInfo.argInfos } - /** * Less or equals operation for SInt */ case class LE[T <: SType](override val left: Value[T], override val right: Value[T]) extends SimpleRelation[T] { - override val opCode: OpCode = LeCode + override def companion = LE +} +object LE extends RelationCompanion { + override def opCode: OpCode = LeCode + override def argInfos: Seq[ArgInfo] = LEInfo.argInfos } - /** * Greater operation for SInt */ case class GT[T <: SType](override val left: Value[T], override val right: Value[T]) extends SimpleRelation[T] { - override val opCode: OpCode = GtCode + override def companion = GT +} +object GT extends RelationCompanion { + override def opCode: OpCode = GtCode + override def argInfos: Seq[ArgInfo] = GTInfo.argInfos } - /** * Greater or equals operation for SInt */ case class GE[T <: SType](override val left: Value[T], override val right: Value[T]) extends SimpleRelation[T] { - override val opCode: OpCode = GeCode + override def companion = GE +} +object GE extends RelationCompanion { + override def opCode: OpCode = GeCode + override def 
argInfos: Seq[ArgInfo] = GEInfo.argInfos } /** @@ -555,7 +719,11 @@ case class GE[T <: SType](override val left: Value[T], override val right: Value */ case class EQ[S <: SType](override val left: Value[S], override val right: Value[S]) extends SimpleRelation[S] { - override val opCode: OpCode = EqCode + override def companion = EQ +} +object EQ extends RelationCompanion { + override def opCode: OpCode = EqCode + override def argInfos: Seq[ArgInfo] = EQInfo.argInfos } /** @@ -563,7 +731,11 @@ case class EQ[S <: SType](override val left: Value[S], override val right: Value */ case class NEQ[S <: SType](override val left: Value[S], override val right: Value[S]) extends SimpleRelation[S] { - override val opCode: OpCode = NeqCode + override def companion = NEQ +} +object NEQ extends RelationCompanion { + override def opCode: OpCode = NeqCode + override def argInfos: Seq[ArgInfo] = NEQInfo.argInfos } /** @@ -572,7 +744,11 @@ case class NEQ[S <: SType](override val left: Value[S], override val right: Valu */ case class BinOr(override val left: BoolValue, override val right: BoolValue) extends Relation[SBoolean.type, SBoolean.type] { - override val opCode: OpCode = BinOrCode + override def companion = BinOr +} +object BinOr extends RelationCompanion { + override def opCode: OpCode = BinOrCode + override def argInfos: Seq[ArgInfo] = BinOrInfo.argInfos } /** @@ -581,22 +757,20 @@ case class BinOr(override val left: BoolValue, override val right: BoolValue) */ case class BinAnd(override val left: BoolValue, override val right: BoolValue) extends Relation[SBoolean.type, SBoolean.type] { - override val opCode: OpCode = BinAndCode + override def companion = BinAnd +} +object BinAnd extends RelationCompanion { + override def opCode: OpCode = BinAndCode + override def argInfos: Seq[ArgInfo] = BinAndInfo.argInfos } case class BinXor(override val left: BoolValue, override val right: BoolValue) extends Relation[SBoolean.type, SBoolean.type] { - override val opCode: OpCode = 
BinXorCode + override def companion = BinXor } - -/** Returns this collection shifted left/right by the specified number of elements, - * filling in the new right/left elements from left/right elements. The size of collection is preserved. */ -case class Rotate[T <: SType](coll: Value[SCollection[T]], - shift: Value[SInt.type], - opCode: OpCode) - extends NotReadyValue[SCollection[T]] { - override def tpe: SCollection[T] = coll.tpe - override def opType = SFunc(Vector(coll.tpe, shift.tpe), tpe) +object BinXor extends RelationCompanion { + override def opCode: OpCode = BinXorCode + override def argInfos: Seq[ArgInfo] = BinXorInfo.argInfos } /** @@ -611,9 +785,6 @@ sealed trait Quadruple[IV1 <: SType, IV2 <: SType, IV3 <: SType, OV <: SType] ex val opType = SFunc(Vector(first.tpe, second.tpe, third.tpe), tpe) } -sealed trait Relation3[IV1 <: SType, IV2 <: SType, IV3 <: SType] - extends Quadruple[IV1, IV2, IV3, SBoolean.type] with NotReadyValueBoolean - /** * Perform a lookup of key `key` in a tree with root `tree` using proof `proof`. * Throws exception if proof is incorrect @@ -623,15 +794,19 @@ sealed trait Relation3[IV1 <: SType, IV2 <: SType, IV3 <: SType] case class TreeLookup(tree: Value[SAvlTree.type], key: Value[SByteArray], proof: Value[SByteArray]) extends Quadruple[SAvlTree.type, SByteArray, SByteArray, SOption[SByteArray]] { - + override def companion = TreeLookup override def tpe = SOption[SByteArray] - - override val opCode: OpCode = OpCodes.AvlTreeGetCode - override lazy val first = tree override lazy val second = key override lazy val third = proof } +trait QuadrupleCompanion extends ValueCompanion { + def argInfos: Seq[ArgInfo] +} +object TreeLookup extends QuadrupleCompanion { + override def opCode: OpCode = OpCodes.AvlTreeGetCode + override def argInfos: Seq[ArgInfo] = TreeLookupInfo.argInfos +} /** * If conditional function. 
@@ -643,19 +818,24 @@ case class TreeLookup(tree: Value[SAvlTree.type], */ case class If[T <: SType](condition: Value[SBoolean.type], trueBranch: Value[T], falseBranch: Value[T]) extends Quadruple[SBoolean.type, T, T, T] { - override val opCode: OpCode = OpCodes.IfCode - + override def companion = If override def tpe = trueBranch.tpe - override lazy val first = condition override lazy val second = trueBranch override lazy val third = falseBranch } -object If { - val tT = STypeIdent("T") +object If extends QuadrupleCompanion { + override def opCode: OpCode = OpCodes.IfCode + override def argInfos: Seq[ArgInfo] = IfInfo.argInfos + val tT = STypeVar("T") } case class LogicalNot(input: Value[SBoolean.type]) extends NotReadyValueBoolean { - override val opCode: OpCode = OpCodes.LogicalNotCode + override def companion = LogicalNot override val opType = SFunc(Vector(SBoolean), SBoolean) } +object LogicalNot extends ValueCompanion { + override def opCode: OpCode = OpCodes.LogicalNotCode +} + + diff --git a/src/main/scala/sigmastate/types.scala b/src/main/scala/sigmastate/types.scala index 8f93d54b02..44eb51df31 100644 --- a/src/main/scala/sigmastate/types.scala +++ b/src/main/scala/sigmastate/types.scala @@ -1,8 +1,10 @@ package sigmastate import java.math.BigInteger +import java.util -import org.ergoplatform.{ErgoLikeContext, ErgoBox} +import org.ergoplatform._ +import org.ergoplatform.validation._ import scalan.RType import sigmastate.SType.{TypeCode, AnyOps} import sigmastate.interpreter.CryptoConstants @@ -15,25 +17,20 @@ import sigmastate.SCollection._ import sigmastate.interpreter.CryptoConstants.{EcPointType, hashLength} import sigmastate.serialization.OpCodes import special.collection.Coll +import special.sigma._ import sigmastate.eval.RuntimeCosting import scala.language.implicitConversions -import scala.reflect.ClassTag import scala.reflect.{ClassTag, classTag} -import scalan.meta.ScalanAst.STypeArgAnnotation -import sigmastate.SBoolean.typeCode -import 
sigmastate.SByte.typeCode import sigmastate.SMethod.MethodCallIrBuilder -import sigmastate.basics.DLogProtocol.ProveDlog -import sigmastate.basics.ProveDHTuple -import sigmastate.utxo.{ExtractCreationInfo, ByIndex} -import special.sigma.{Header, Box, wrapperType, SigmaProp, AvlTree, PreHeader} -import sigmastate.lang.SigmaTyper.STypeSubst import sigmastate.utxo.ByIndex -import special.sigma.{AvlTree, Box, SigmaProp, wrapperType} -//import sigmastate.SNumericType._ -import sigmastate.SSigmaProp.{IsProven, PropBytes} -import sigmastate.eval.SigmaDsl +import sigmastate.utxo.ExtractCreationInfo +import sigmastate.utxo._ +import special.sigma.{Header, Box, SigmaProp, AvlTree, SigmaDslBuilder, PreHeader} +import sigmastate.lang.SigmaTyper.STypeSubst +import sigmastate.eval.Evaluation.stypeToRType +import sigmastate.eval._ +import sigmastate.lang.exceptions.SerializerException /** Base type for all AST nodes of sigma lang. */ trait SigmaNode extends Product @@ -57,7 +54,10 @@ trait SigmaNodeCompanion * */ sealed trait SType extends SigmaNode { type WrappedType - val typeCode: SType.TypeCode //TODO remove, because in general type is encoded by more than one byte + /** Type code used in serialization of SType values. + * @see TypeSerializer + */ + val typeCode: SType.TypeCode /** Approximate size of the given value in bytes. It is actual size only for primitive types.*/ def dataSize(v: SType#WrappedType): Long = sys.error(s"Don't know how to compute dataCost($v) with T = $this") @@ -75,11 +75,19 @@ sealed trait SType extends SigmaNode { def mkConstant(v: WrappedType): Value[this.type] = sys.error(s"Don't know how mkConstant for data value $v with T = $this") - def withSubstTypes(subst: Map[STypeIdent, SType]): SType = + def withSubstTypes(subst: Map[STypeVar, SType]): SType = if (subst.isEmpty) this else SigmaTyper.applySubst(this, subst) + /** Returns parsable type term string of the type described by this type descriptor. 
+ * For every type it should be inverse to SigmaTyper.parseType. + * This is default fallback implementation, should be overriden if it + * is not correct for a particular type. */ + def toTermString: String = { + val t = Evaluation.stypeToRType(this) + t.name + } } object SType { @@ -106,19 +114,20 @@ object SType { implicit val SigmaBooleanRType: RType[SigmaBoolean] = RType.fromClassTag(classTag[SigmaBoolean]) implicit val ErgoBoxRType: RType[ErgoBox] = RType.fromClassTag(classTag[ErgoBox]) + implicit val ErgoBoxCandidateRType: RType[ErgoBoxCandidate] = RType.fromClassTag(classTag[ErgoBoxCandidate]) implicit val AvlTreeDataRType: RType[AvlTreeData] = RType.fromClassTag(classTag[AvlTreeData]) implicit val ErgoLikeContextRType: RType[ErgoLikeContext] = RType.fromClassTag(classTag[ErgoLikeContext]) /** All pre-defined types should be listed here. Note, NoType is not listed. * Should be in sync with sigmastate.lang.Types.predefTypes. */ - val allPredefTypes = Seq(SBoolean, SByte, SShort, SInt, SLong, SBigInt, SContext, SHeader, SPreHeader, SAvlTree, SGroupElement, SSigmaProp, SString, SBox, SUnit, SAny) + val allPredefTypes = Seq(SBoolean, SByte, SShort, SInt, SLong, SBigInt, SContext, SGlobal, SHeader, SPreHeader, SAvlTree, SGroupElement, SSigmaProp, SString, SBox, SUnit, SAny) val typeCodeToType = allPredefTypes.map(t => t.typeCode -> t).toMap /** A mapping of object types supporting MethodCall operations. For each serialized typeId this map contains * a companion object which can be used to access the list of corresponding methods. 
* NOTE: in the current implementation only monomorphic methods are supported (without type parameters)*/ val types: Map[Byte, STypeCompanion] = Seq( - SNumericType, SString, STuple, SGroupElement, SSigmaProp, SContext, SHeader, SPreHeader, + SBoolean, SNumericType, SString, STuple, SGroupElement, SSigmaProp, SContext, SGlobal, SHeader, SPreHeader, SAvlTree, SBox, SOption, SCollection, SBigInt ).map { t => (t.typeId, t) }.toMap @@ -127,6 +136,7 @@ object SType { def isCollection: Boolean = tpe.isInstanceOf[SCollectionType[_]] def isOption: Boolean = tpe.isInstanceOf[SOption[_]] def isBox: Boolean = tpe.isInstanceOf[SBox.type] + def isGroupElement: Boolean = tpe.isInstanceOf[SGroupElement.type] def isSigmaProp: Boolean = tpe.isInstanceOf[SSigmaProp.type] def isAvlTree: Boolean = tpe.isInstanceOf[SAvlTree.type] def isFunc : Boolean = tpe.isInstanceOf[SFunc] @@ -144,6 +154,7 @@ object SType { def asNumType: SNumericType = tpe.asInstanceOf[SNumericType] def asFunc: SFunc = tpe.asInstanceOf[SFunc] def asProduct: SProduct = tpe.asInstanceOf[SProduct] + def asTuple: STuple = tpe.asInstanceOf[STuple] def asOption[T <: SType]: SOption[T] = tpe.asInstanceOf[SOption[T]] def whenFunc[T](action: SFunc => Unit) = if(tpe.isInstanceOf[SFunc]) action(tpe.asFunc) def asCollection[T <: SType] = tpe.asInstanceOf[SCollection[T]] @@ -154,13 +165,13 @@ object SType { case SInt => reflect.classTag[Int] case SLong => reflect.classTag[Long] case SBigInt => reflect.classTag[BigInteger] - case SAvlTree => reflect.classTag[AvlTreeData] + case SAvlTree => reflect.classTag[AvlTree] case SGroupElement => reflect.classTag[EcPointType] case SSigmaProp => reflect.classTag[SigmaBoolean] case SUnit => reflect.classTag[Unit] case SBox => reflect.classTag[ErgoBox] case SAny => reflect.classTag[Any] - case t: STuple => reflect.classTag[Array[Any]] + case _: STuple => reflect.classTag[Array[Any]] case tColl: SCollection[a] => val elemType = tColl.elemType implicit val ca = 
elemType.classTag[elemType.WrappedType] @@ -198,17 +209,48 @@ object SType { }) } -/** Basic interface for all type companions */ +/** Basic interface for all type companions. + * This is necessary to make distinction between concrete type descriptor of a type like Coll[Int] + * and generic descriptor of Coll[T] type constructor. + * Some simple types like Int, GroupElement inherit from both SType and STypeCompanion. + * @see SInt, SGroupElement, SType + */ trait STypeCompanion { /** Type identifier to use in method serialization */ def typeId: Byte + def typeName: String = { + this match { + case t: SType => + val rtype = stypeToRType(t) + rtype.name + case _ => this.getClass.getSimpleName.replace("$", "") + } + } + /** List of methods defined for instances of this type. */ def methods: Seq[SMethod] - def getMethodById(methodId: Byte): SMethod = - methods.filter(_.objType == this).apply(methodId - 1) + lazy val _methodsMap: Map[Byte, Map[Byte, SMethod]] = methods + .groupBy(_.objType.typeId) + .map { case (typeId, ms) => (typeId -> ms.map(m => m.methodId -> m).toMap) } + + def hasMethodWithId(methodId: Byte): Boolean = + getMethodById(methodId).isDefined + + /** Lookup method by its id in this type. */ + @inline def getMethodById(methodId: Byte): Option[SMethod] = + _methodsMap.get(typeId) + .flatMap(ms => ms.get(methodId)) + + /** Lookup method in this type by method's id or throw ValidationException. + * This method can be used in trySoftForkable section to either obtain valid method + * or catch ValidatioinException which can be checked for soft-fork condition. + * It delegate to getMethodById to lookup method. + * @see getMethodById + */ + def methodById(methodId: Byte): SMethod = CheckAndGetMethod(this, methodId) { m => m } def getMethodByName(name: String): SMethod = methods.find(_.name == name).get @@ -254,29 +296,55 @@ trait SProduct extends SType { } /** Base trait implemented by all generic types (those which has type parameters, - * e.g. 
Array[T], Option[T], etc.)*/ + * e.g. Coll[T], Option[T], etc.)*/ trait SGenericType { def typeParams: Seq[STypeParam] - def tparamSubst: Map[STypeIdent, SType] + def tparamSubst: Map[STypeVar, SType] lazy val substitutedTypeParams: Seq[STypeParam] = typeParams.map { tp => tparamSubst.getOrElse(tp.ident, tp.ident) match { - case v: STypeIdent => STypeParam(v) + case v: STypeVar => STypeParam(v) case _ => tp } } } +/** Special interface to access CostingHandler. + * Each `STypeCompanion.coster` property optionally defines an instance of this interface to provide + * access to Coster for its methods. If not specified (which is default) then generic costing mechanism + * is not used for methods of the corresponding type. (e.g. SInt, SLong)*/ trait CosterFactory { def apply[Ctx <: RuntimeCosting](IR: Ctx): IR.CostingHandler[_] } +/** An instance of this class is created in each `STypeCompaion.coster` property implementation. + * @see SBox, SContext + */ case class Coster(selector: RuntimeCosting => RuntimeCosting#CostingHandler[_]) extends CosterFactory { def apply[Ctx <: RuntimeCosting](IR: Ctx): IR.CostingHandler[_] = selector(IR).asInstanceOf[IR.CostingHandler[_]] } +/** Meta information which can be attached to each argument of SMethod. + * @param name name of the argument + * @param description argument description. */ +case class ArgInfo(name: String, description: String) + +/** Meta information which can be attached to SMethod. + * @param description human readable description of the method + * @param args one item for each argument */ +case class OperationInfo(opDesc: ValueCompanion, description: String, args: Seq[ArgInfo]) { + def isFrontendOnly: Boolean = opDesc == null +} + +/** Meta information connecting SMethod with ErgoTree. + * @param irBuilder optional recognizer and ErgoTree node builder. 
*/ +case class MethodIRInfo( + irBuilder: Option[PartialFunction[(SigmaBuilder, SValue, SMethod, Seq[SValue], STypeSubst), SValue]] +) + + /** Method info including name, arg type and result type. * Here stype.tDom - arg type and stype.tRange - result type. * `methodId` should be unique among methods of the same objType. */ @@ -285,11 +353,12 @@ case class SMethod( name: String, stype: SFunc, methodId: Byte, - irBuilder: Option[PartialFunction[(SigmaBuilder, SValue, SMethod, Seq[SValue], STypeSubst), SValue]]) { + irInfo: MethodIRInfo, + docInfo: Option[OperationInfo]) { def withSType(newSType: SFunc): SMethod = copy(stype = newSType) - def withConcreteTypes(subst: Map[STypeIdent, SType]): SMethod = + def withConcreteTypes(subst: Map[STypeVar, SType]): SMethod = withSType(stype.withSubstTypes(subst).asFunc) def opId: OperationId = { @@ -304,21 +373,61 @@ case class SMethod( case _ => this } } + def withInfo(opDesc: ValueCompanion, desc: String, args: ArgInfo*) = { + this.copy(docInfo = Some(OperationInfo(opDesc, desc, ArgInfo("this", "this instance") +: args.toSeq))) + } + def withIRInfo( + irBuilder: PartialFunction[(SigmaBuilder, SValue, SMethod, Seq[SValue], STypeSubst), SValue]) = { + this.copy(irInfo = MethodIRInfo(Some(irBuilder))) + } + def argInfo(argName: String): ArgInfo = + docInfo.get.args.find(_.name == argName).get +} + +object CheckTypeWithMethods extends ValidationRule(1011, + "Check the type (given by type code) supports methods") + with SoftForkWhenCodeAdded { + def apply[T](typeCode: Byte, cond: => Boolean)(block: => T): T = { + val ucode = typeCode.toUByte + def msg = s"Type with code $ucode doesn't support methods." 
+ validate(cond, new SerializerException(msg), Seq(typeCode), block) + } +} + +object CheckAndGetMethod extends ValidationRule(1012, + "Check the type has the declared method.") { + def apply[T](objType: STypeCompanion, methodId: Byte)(block: SMethod => T): T = { + def msg = s"The method with code $methodId doesn't declared in the type $objType." + lazy val methodOpt = objType.getMethodById(methodId) + validate(methodOpt.isDefined, new SerializerException(msg), Seq(objType, methodId), block(methodOpt.get)) + } + override def isSoftFork(vs: SigmaValidationSettings, + ruleId: Short, + status: RuleStatus, + args: Seq[Any]): Boolean = (status, args) match { + case (ChangedRule(newValue), Seq(objType: STypeCompanion, methodId: Byte)) => + val key = Array(objType.typeId, methodId) + newValue.grouped(2).exists(util.Arrays.equals(_, key)) + case _ => false + } } object SMethod { type RCosted[A] = RuntimeCosting#RCosted[A] - val MethodCallIrBuilder: Option[PartialFunction[(SigmaBuilder, SValue, SMethod, Seq[SValue], STypeSubst), SValue]] = Some { + val MethodCallIrBuilder: PartialFunction[(SigmaBuilder, SValue, SMethod, Seq[SValue], STypeSubst), SValue] = { case (builder, obj, method, args, tparamSubst) => builder.mkMethodCall(obj, method, args.toIndexedSeq, tparamSubst) } - def apply(objType: STypeCompanion, name: String, stype: SFunc, methodId: Byte): SMethod = - SMethod(objType, name, stype, methodId, None) + def apply(objType: STypeCompanion, name: String, stype: SFunc, methodId: Byte): SMethod = { + SMethod(objType, name, stype, methodId, MethodIRInfo(None), None) + } def fromIds(typeId: Byte, methodId: Byte): SMethod = { - val typeCompanion = SType.types.getOrElse(typeId, sys.error(s"Cannot find STypeCompanion instance for typeId=$typeId")) - val method = typeCompanion.getMethodById(methodId) + val typeCompanion = CheckTypeWithMethods(typeId, SType.types.contains(typeId)) { + SType.types(typeId) + } + val method = typeCompanion.methodById(methodId) method } } @@ 
-388,6 +497,12 @@ trait SNumericType extends SProduct { /** Number of bytes to store values of this type. */ @inline private def typeIndex: Int = allNumericTypes.indexOf(this) + + def castOpDesc(toType: SNumericType): ValueCompanion = { + if ((this max toType) == this) Downcast else Upcast + } + + override def toString: Idn = this.getClass.getSimpleName } object SNumericType extends STypeCompanion { final val allNumericTypes = Array(SByte, SShort, SInt, SLong, SBigInt) @@ -398,15 +513,31 @@ object SNumericType extends STypeCompanion { val ToLong = "toLong" val ToBigInt = "toBigInt" - val tNum = STypeIdent("TNum") + val tNum = STypeVar("TNum") val methods = Vector( - SMethod(this, ToByte, SFunc(tNum, SByte), 1), // see Downcast - SMethod(this, ToShort, SFunc(tNum, SShort), 2), // see Downcast - SMethod(this, ToInt, SFunc(tNum, SInt), 3), // see Downcast - SMethod(this, ToLong, SFunc(tNum, SLong), 4), // see Downcast - SMethod(this, ToBigInt, SFunc(tNum, SBigInt), 5), // see Downcast - SMethod(this, "toBytes", SFunc(tNum, SByteArray), 6, MethodCallIrBuilder), - SMethod(this, "toBits", SFunc(tNum, SBooleanArray), 7, MethodCallIrBuilder), + SMethod(this, ToByte, SFunc(tNum, SByte), 1) + .withInfo(PropertyCall, "Converts this numeric value to \\lst{Byte}, throwing exception if overflow."), // see Downcast + SMethod(this, ToShort, SFunc(tNum, SShort), 2) + .withInfo(PropertyCall, "Converts this numeric value to \\lst{Short}, throwing exception if overflow."), // see Downcast + SMethod(this, ToInt, SFunc(tNum, SInt), 3) + .withInfo(PropertyCall, "Converts this numeric value to \\lst{Int}, throwing exception if overflow."), // see Downcast + SMethod(this, ToLong, SFunc(tNum, SLong), 4) + .withInfo(PropertyCall, "Converts this numeric value to \\lst{Long}, throwing exception if overflow."), // see Downcast + SMethod(this, ToBigInt, SFunc(tNum, SBigInt), 5) + .withInfo(PropertyCall, "Converts this numeric value to \\lst{BigInt}"), // see Downcast + SMethod(this, "toBytes", 
SFunc(tNum, SByteArray), 6) + .withIRInfo(MethodCallIrBuilder) + .withInfo(PropertyCall, + """ Returns a big-endian representation of this numeric value in a collection of bytes. + | For example, the \lst{Int} value \lst{0x12131415} would yield the + | collection of bytes \lst{[0x12, 0x13, 0x14, 0x15]}. + """.stripMargin), + SMethod(this, "toBits", SFunc(tNum, SBooleanArray), 7) + .withIRInfo(MethodCallIrBuilder) + .withInfo(PropertyCall, + """ Returns a big-endian representation of this numeric in a collection of Booleans. + | Each boolean corresponds to one bit. + """.stripMargin) ) val castMethods: Array[String] = Array(ToByte, ToShort, ToInt, ToLong, ToBigInt) } @@ -418,8 +549,9 @@ trait SLogical extends SType { * @see `SGenericType` */ trait SMonoType extends SType with STypeCompanion { - protected def property(name: String, tpe: SType, id: Byte) = - SMethod(this, name, SFunc(this, tpe), id, MethodCallIrBuilder) + protected def property(name: String, tpeRes: SType, id: Byte) = + SMethod(this, name, SFunc(this, tpeRes), id) + .withIRInfo(MethodCallIrBuilder) } case object SBoolean extends SPrimType with SEmbeddable with SLogical with SProduct with SMonoType { @@ -428,9 +560,13 @@ case object SBoolean extends SPrimType with SEmbeddable with SLogical with SProd override def typeId = typeCode override def ancestors: Seq[SType] = Nil val ToByte = "toByte" - protected override def getMethods() = super.getMethods() ++ Seq( - SMethod(this, ToByte, SFunc(this, SByte), 1), + protected override def getMethods() = super.getMethods() + /* TODO soft-fork: https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 + ++ Seq( + SMethod(this, ToByte, SFunc(this, SByte), 1) + .withInfo(PropertyCall, "Convert true to 1 and false to 0"), ) + */ override def mkConstant(v: Boolean): Value[SBoolean.type] = BooleanConstant(v) override def dataSize(v: SType#WrappedType): Long = 1 override def isConstantSize = true @@ -438,7 +574,7 @@ case object SBoolean extends SPrimType 
with SEmbeddable with SLogical with SProd case object SByte extends SPrimType with SEmbeddable with SNumericType with SMonoType { override type WrappedType = Byte - override val typeCode: TypeCode = 2: Byte //TODO change to 4 after SByteArray is removed + override val typeCode: TypeCode = 2: Byte override def typeId = typeCode override def mkConstant(v: Byte): Value[SByte.type] = ByteConstant(v) override def dataSize(v: SType#WrappedType): Long = 1 @@ -456,7 +592,6 @@ case object SByte extends SPrimType with SEmbeddable with SNumericType with SMon } } -//todo: make PreservingNonNegativeInt type for registers which value should be preserved? case object SShort extends SPrimType with SEmbeddable with SNumericType with SMonoType { override type WrappedType = Short override val typeCode: TypeCode = 3: Byte @@ -477,7 +612,6 @@ case object SShort extends SPrimType with SEmbeddable with SNumericType with SMo } } -//todo: make PreservingNonNegativeInt type for registers which value should be preserved? case object SInt extends SPrimType with SEmbeddable with SNumericType with SMonoType { override type WrappedType = Int override val typeCode: TypeCode = 4: Byte @@ -500,7 +634,6 @@ case object SInt extends SPrimType with SEmbeddable with SNumericType with SMono } } -//todo: make PreservingNonNegativeInt type for registers which value should be preserved? case object SLong extends SPrimType with SEmbeddable with SNumericType with SMonoType { override type WrappedType = Long override val typeCode: TypeCode = 5: Byte @@ -526,16 +659,16 @@ case object SLong extends SPrimType with SEmbeddable with SNumericType with SMon /** Type of 256 bit integet values. Implemented using [[java.math.BigInteger]]. 
*/ case object SBigInt extends SPrimType with SEmbeddable with SNumericType with SMonoType { - override type WrappedType = BigInteger + override type WrappedType = BigInt override val typeCode: TypeCode = 6: Byte override def typeId = typeCode - override def mkConstant(v: BigInteger): Value[SBigInt.type] = BigIntConstant(v) + override def mkConstant(v: BigInt): Value[SBigInt.type] = BigIntConstant(v) /** Type of Relation binary op like GE, LE, etc. */ val RelationOpType = SFunc(Vector(SBigInt, SBigInt), SBoolean) /** The maximum size of BigInteger value in byte array representation. */ - val MaxSizeInBytes: Long = 32L + val MaxSizeInBytes: Long = ErgoConstants.MaxBigIntSizeInBytes.get override def dataSize(v: SType#WrappedType): Long = MaxSizeInBytes @@ -543,32 +676,44 @@ case object SBigInt extends SPrimType with SEmbeddable with SNumericType with SM * In sigma we limit the size by the fixed constant and thus BigInt is a constant size type. */ override def isConstantSize = true - val Max: BigInteger = CryptoConstants.dlogGroup.order //todo: we use mod q, maybe mod p instead? 
+ val Max: BigInt = SigmaDsl.BigInt(CryptoConstants.dlogGroup.order) - override def upcast(v: AnyVal): BigInteger = v match { - case x: Byte => BigInteger.valueOf(x.toLong) - case x: Short => BigInteger.valueOf(x.toLong) - case x: Int => BigInteger.valueOf(x.toLong) - case x: Long => BigInteger.valueOf(x) - case _ => sys.error(s"Cannot upcast value $v to the type $this") + override def upcast(v: AnyVal): BigInt = { + val bi = v match { + case x: Byte => BigInteger.valueOf(x.toLong) + case x: Short => BigInteger.valueOf(x.toLong) + case x: Int => BigInteger.valueOf(x.toLong) + case x: Long => BigInteger.valueOf(x) + case _ => sys.error(s"Cannot upcast value $v to the type $this") + } + SigmaDsl.BigInt(bi) } - override def downcast(v: AnyVal): BigInteger = v match { - case x: Byte => BigInteger.valueOf(x.toLong) - case x: Short => BigInteger.valueOf(x.toLong) - case x: Int => BigInteger.valueOf(x.toLong) - case x: Long => BigInteger.valueOf(x) - case _ => sys.error(s"Cannot downcast value $v to the type $this") + override def downcast(v: AnyVal): BigInt = { + val bi = v match { + case x: Byte => BigInteger.valueOf(x.toLong) + case x: Short => BigInteger.valueOf(x.toLong) + case x: Int => BigInteger.valueOf(x.toLong) + case x: Long => BigInteger.valueOf(x) + case _ => sys.error(s"Cannot downcast value $v to the type $this") + } + SigmaDsl.BigInt(bi) } val ModQMethod = SMethod(this, "modQ", SFunc(this, SBigInt), 1) + .withInfo(ModQ, "Returns this \\lst{mod} Q, i.e. 
remainder of division by Q, where Q is an order of the cryprographic group.") val PlusModQMethod = SMethod(this, "plusModQ", SFunc(IndexedSeq(this, SBigInt), SBigInt), 2) + .withInfo(ModQArithOp.PlusModQ, "Adds this number with \\lst{other} by module Q.", ArgInfo("other", "Number to add to this.")) val MinusModQMethod = SMethod(this, "minusModQ", SFunc(IndexedSeq(this, SBigInt), SBigInt), 3) - val MultModQMethod = SMethod(this, "multModQ", SFunc(IndexedSeq(this, SBigInt), SBigInt), 4, MethodCallIrBuilder) + .withInfo(ModQArithOp.MinusModQ, "Subtracts \\lst{other} number from this by module Q.", ArgInfo("other", "Number to subtract from this.")) + val MultModQMethod = SMethod(this, "multModQ", SFunc(IndexedSeq(this, SBigInt), SBigInt), 4) + .withIRInfo(MethodCallIrBuilder) + .withInfo(MethodCall, "Multiply this number with \\lst{other} by module Q.", ArgInfo("other", "Number to multiply with this.")) protected override def getMethods() = super.getMethods() ++ Seq( ModQMethod, PlusModQMethod, MinusModQMethod, - MultModQMethod, + // TODO soft-fork: https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 + // MultModQMethod, ) } @@ -585,25 +730,33 @@ case object SString extends SProduct with SMonoType { /** NOTE: this descriptor both type and type companion */ case object SGroupElement extends SProduct with SPrimType with SEmbeddable with SMonoType { - override type WrappedType = EcPointType + override type WrappedType = GroupElement override val typeCode: TypeCode = 7: Byte override def typeId = typeCode override def coster: Option[CosterFactory] = Some(Coster(_.GroupElementCoster)) protected override def getMethods(): Seq[SMethod] = super.getMethods() ++ Seq( - SMethod(this, "isIdentity", SFunc(this, SBoolean), 1), - SMethod(this, "nonce", SFunc(this, SByteArray), 2), - SMethod(this, "getEncoded", SFunc(IndexedSeq(this), SByteArray), 3, MethodCallIrBuilder), - SMethod(this, "exp", SFunc(IndexedSeq(this, SBigInt), this), 4, Some { - case (builder, obj, 
method, Seq(arg), tparamSubst) => - builder.mkExponentiate(obj.asGroupElement, arg.asBigInt) - }), - SMethod(this, "multiply", SFunc(IndexedSeq(this, SGroupElement), this), 5, Some { - case (builder, obj, method, Seq(arg), tparamSubst) => - builder.mkMultiplyGroup(obj.asGroupElement, arg.asGroupElement) - }), - SMethod(this, "negate", SFunc(this, this), 6, MethodCallIrBuilder) + /* TODO soft-fork: https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 + SMethod(this, "isIdentity", SFunc(this, SBoolean), 1) + .withInfo(PropertyCall, "Checks if this value is identity element of the eliptic curve group."), + */ + SMethod(this, "getEncoded", SFunc(IndexedSeq(this), SByteArray), 2) + .withIRInfo(MethodCallIrBuilder) + .withInfo(PropertyCall, "Get an encoding of the point value."), + SMethod(this, "exp", SFunc(IndexedSeq(this, SBigInt), this), 3) + .withIRInfo({ case (builder, obj, _, Seq(arg), _) => + builder.mkExponentiate(obj.asGroupElement, arg.asBigInt) }) + .withInfo(Exponentiate, + "Exponentiate this \\lst{GroupElement} to the given number. 
Returns this to the power of k", + ArgInfo("k", "The power")), + SMethod(this, "multiply", SFunc(IndexedSeq(this, SGroupElement), this), 4) + .withIRInfo({ case (builder, obj, _, Seq(arg), _) => + builder.mkMultiplyGroup(obj.asGroupElement, arg.asGroupElement) }) + .withInfo(MultiplyGroup, "Group operation.", ArgInfo("other", "other element of the group")), + SMethod(this, "negate", SFunc(this, this), 5) + .withIRInfo(MethodCallIrBuilder) + .withInfo(PropertyCall, "Inverse element of the group.") ) - override def mkConstant(v: EcPointType): Value[SGroupElement.type] = GroupElementConstant(v) + override def mkConstant(v: GroupElement): Value[SGroupElement.type] = GroupElementConstant(v) override def dataSize(v: SType#WrappedType): Long = CryptoConstants.EncodedGroupElementLength.toLong override def isConstantSize = true def ancestors = Nil @@ -611,28 +764,23 @@ case object SGroupElement extends SProduct with SPrimType with SEmbeddable with case object SSigmaProp extends SProduct with SPrimType with SEmbeddable with SLogical with SMonoType { import SType._ - override type WrappedType = SigmaBoolean + override type WrappedType = SigmaProp override val typeCode: TypeCode = 8: Byte override def typeId = typeCode - override def mkConstant(v: SigmaBoolean): Value[SSigmaProp.type] = SigmaPropConstant(v) - override def dataSize(v: SType#WrappedType): Long = v match { - case ProveDlog(g) => - SGroupElement.dataSize(g.asWrappedType) + 1 - case ProveDHTuple(gv, hv, uv, vv) => - SGroupElement.dataSize(gv.asWrappedType) * 4 + 1 - case CAND(inputs) => inputs.map(i => dataSize(i.asWrappedType)).sum + 1 - case COR(inputs) => inputs.map(i => dataSize(i.asWrappedType)).sum + 1 - case CTHRESHOLD(k, inputs) => 4 + inputs.map(i => dataSize(i.asWrappedType)).sum + 1 - case t: TrivialProp => 1 - case _ => sys.error(s"Cannot get SigmaProp.dataSize($v)") + override def mkConstant(v: SigmaProp): Value[SSigmaProp.type] = SigmaPropConstant(v) + override def dataSize(v: SType#WrappedType): Long 
= { + Sized.sizeOf(v.asInstanceOf[SigmaProp]).dataSize } override def isConstantSize = false def ancestors = Nil val PropBytes = "propBytes" val IsProven = "isProven" protected override def getMethods() = super.getMethods() ++ Seq( - SMethod(this, PropBytes, SFunc(this, SByteArray), 1), + SMethod(this, PropBytes, SFunc(this, SByteArray), 1) + .withInfo(SigmaPropBytes, "Serialized bytes of this sigma proposition taken as ErgoTree."), SMethod(this, IsProven, SFunc(this, SBoolean), 2) + .withInfo(null, // available only at frontend of ErgoScript + "Verify that sigma proposition is proven.") ) } @@ -653,7 +801,7 @@ case object SUnit extends SPrimType { /** Type description of optional values. Instances of `Option` * are either constructed by `Some` or by `None` constructors. */ -case class SOption[ElemType <: SType](elemType: ElemType) extends SProduct with SGenericType { // TODO make SOption inherit SGenericType +case class SOption[ElemType <: SType](elemType: ElemType) extends SProduct with SGenericType { import SOption._ override type WrappedType = Option[ElemType#WrappedType] override val typeCode: TypeCode = SOption.OptionTypeCode @@ -671,8 +819,10 @@ case class SOption[ElemType <: SType](elemType: ElemType) extends SProduct with // } // } override def toString = s"Option[$elemType]" + override def toTermString: String = s"Option[${elemType.toTermString}]" + val typeParams: Seq[STypeParam] = Seq(STypeParam(tT)) - def tparamSubst: Map[STypeIdent, SType] = Map(tT -> elemType) + def tparamSubst: Map[STypeVar, SType] = Map(tT -> elemType) } object SOption extends STypeCompanion { @@ -682,6 +832,8 @@ object SOption extends STypeCompanion { val OptionCollectionTypeCode: TypeCode = ((SPrimType.MaxPrimTypeCode + 1) * OptionCollectionTypeConstrId).toByte override def typeId = OptionTypeCode + override def coster: Option[CosterFactory] = Some(Coster(_.OptionCoster)) + type SBooleanOption = SOption[SBoolean.type] type SByteOption = SOption[SByte.type] type SShortOption = 
SOption[SShort.type] @@ -706,41 +858,65 @@ object SOption extends STypeCompanion { implicit def optionTypeCollection[V <: SType](implicit tV: V): SOption[SCollection[V]] = SOption(SCollection[V]) - val IsEmpty = "isEmpty" val IsDefined = "isDefined" val Get = "get" val GetOrElse = "getOrElse" val Fold = "fold" - val tT = STypeIdent("T") - val tR = STypeIdent("R") + val tT = STypeVar("T") + val tR = STypeVar("R") val ThisType = SOption(tT) - val IsEmptyMethod = SMethod(this, IsEmpty, SFunc(ThisType, SBoolean), 1) val IsDefinedMethod = SMethod(this, IsDefined, SFunc(ThisType, SBoolean), 2) + .withInfo(OptionIsDefined, + "Returns \\lst{true} if the option is an instance of \\lst{Some}, \\lst{false} otherwise.") + val GetMethod = SMethod(this, Get, SFunc(ThisType, tT), 3) + .withInfo(OptionGet, + """Returns the option's value. The option must be nonempty. Throws exception if the option is empty.""") + val GetOrElseMethod = SMethod(this, GetOrElse, SFunc(IndexedSeq(ThisType, tT), tT, Seq(tT)), 4) + .withInfo(OptionGetOrElse, + """Returns the option's value if the option is nonempty, otherwise + |return the result of evaluating \lst{default}. + """.stripMargin, ArgInfo("default", "the default value")) + val FoldMethod = SMethod(this, Fold, SFunc(IndexedSeq(ThisType, tR, SFunc(tT, tR)), tR, Seq(tT, tR)), 5) - val ToCollMethod = SMethod(this, "toColl", SFunc(IndexedSeq(ThisType), SCollection(tT), Seq(tT)), 6, MethodCallIrBuilder) + .withInfo(MethodCall, + """Returns the result of applying \lst{f} to this option's + | value if the option is nonempty. Otherwise, evaluates + | expression \lst{ifEmpty}. + | This is equivalent to \lst{option map f getOrElse ifEmpty}. 
+ """.stripMargin, + ArgInfo("ifEmpty", "the expression to evaluate if empty"), + ArgInfo("f", "the function to apply if nonempty")) + val MapMethod = SMethod(this, "map", - SFunc(IndexedSeq(ThisType, SFunc(tT, tR)), SOption(tR), Seq(STypeParam(tT), STypeParam(tR))), - 7, MethodCallIrBuilder) + SFunc(IndexedSeq(ThisType, SFunc(tT, tR)), SOption(tR), Seq(STypeParam(tT), STypeParam(tR))), 7) + .withIRInfo(MethodCallIrBuilder) + .withInfo(MethodCall, + """Returns a \lst{Some} containing the result of applying \lst{f} to this option's + | value if this option is nonempty. + | Otherwise return \lst{None}. + """.stripMargin, ArgInfo("f", "the function to apply")) + val FilterMethod = SMethod(this, "filter", - SFunc(IndexedSeq(ThisType, SFunc(tT, SBoolean)), ThisType, Seq(STypeParam(tT))), - 8, MethodCallIrBuilder) - val FlatMapMethod = SMethod(this, "flatMap", - SFunc(IndexedSeq(ThisType, SFunc(tT, SOption(tR))), SOption(tR), Seq(STypeParam(tT), STypeParam(tR))), - 9, MethodCallIrBuilder) + SFunc(IndexedSeq(ThisType, SFunc(tT, SBoolean)), ThisType, Seq(STypeParam(tT))), 8) + .withIRInfo(MethodCallIrBuilder) + .withInfo(MethodCall, + """Returns this option if it is nonempty and applying the predicate \lst{p} to + | this option's value returns true. Otherwise, return \lst{None}. 
+ """.stripMargin, ArgInfo("p", "the predicate used for testing")) + val methods: Seq[SMethod] = Seq( - IsEmptyMethod, IsDefinedMethod, GetMethod, GetOrElseMethod, + /* TODO soft-fork: https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 FoldMethod, - ToCollMethod, + */ MapMethod, FilterMethod, - FlatMapMethod, ) def apply[T <: SType](implicit elemType: T, ov: Overload1): SOption[T] = SOption(elemType) def unapply[T <: SType](tOpt: SOption[T]): Option[T] = Some(tOpt.elemType) @@ -748,7 +924,7 @@ object SOption extends STypeCompanion { trait SCollection[T <: SType] extends SProduct with SGenericType { def elemType: T - override type WrappedType = Array[T#WrappedType] + override type WrappedType = Coll[T#WrappedType] def ancestors = Nil override def isConstantSize = false } @@ -756,25 +932,19 @@ trait SCollection[T <: SType] extends SProduct with SGenericType { case class SCollectionType[T <: SType](elemType: T) extends SCollection[T] { override val typeCode: TypeCode = SCollectionType.CollectionTypeCode - override def mkConstant(v: Array[T#WrappedType]): Value[this.type] = + override def mkConstant(v: Coll[T#WrappedType]): Value[this.type] = CollectionConstant(v, elemType).asValue[this.type] override def dataSize(v: SType#WrappedType): Long = { - val arr = (v match { case col: Coll[_] => col.toArray case _ => v}).asInstanceOf[Array[T#WrappedType]] - val header = 2 - val res = - if (arr.isEmpty) - header - else if (elemType.isConstantSize) - header + elemType.dataSize(arr(0)) * arr.length - else - arr.map(x => elemType.dataSize(x)).sum - res + val coll = v.asInstanceOf[Coll[T#WrappedType]] + implicit val sT = Sized.typeToSized(Evaluation.stypeToRType(elemType)) + Sized.sizeOf(coll).dataSize } def typeParams: Seq[STypeParam] = SCollectionType.typeParams - def tparamSubst: Map[STypeIdent, SType] = Map(tIV -> elemType) + def tparamSubst: Map[STypeVar, SType] = Map(tIV -> elemType) protected override def getMethods() = super.getMethods() ++ 
SCollection.methods override def toString = s"Coll[$elemType]" + override def toTermString = s"Coll[${elemType.toTermString}]" } object SCollectionType { @@ -789,105 +959,184 @@ object SCollection extends STypeCompanion with MethodByNameUnapply { override def typeId = SCollectionType.CollectionTypeCode override def coster: Option[CosterFactory] = Some(Coster(_.CollCoster)) - val tIV = STypeIdent("IV") + val tIV = STypeVar("IV") val paramIV = STypeParam(tIV) - val tOV = STypeIdent("OV") + val tOV = STypeVar("OV") val paramOV = STypeParam(tOV) - val tK = STypeIdent("K") - val tV = STypeIdent("V") + val tK = STypeVar("K") + val tV = STypeVar("V") val ThisType = SCollection(tIV) val tOVColl = SCollection(tOV) val tPredicate = SFunc(tIV, SBoolean) val SizeMethod = SMethod(this, "size", SFunc(ThisType, SInt), 1) - val GetOrElseMethod = SMethod(this, "getOrElse", SFunc(IndexedSeq(ThisType, SInt, tIV), tIV, Seq(paramIV)), 2, Some { - case (builder, obj, method, Seq(index, defaultValue), _) => - val index1 = index.asValue[SInt.type] - val defaultValue1 = defaultValue.asValue[SType] - builder.mkByIndex(obj.asValue[SCollection[SType]], index1, Some(defaultValue1)) - } - ) + .withInfo(SizeOf, "The size of the collection in elements.") + + val GetOrElseMethod = SMethod(this, "getOrElse", SFunc(IndexedSeq(ThisType, SInt, tIV), tIV, Seq(paramIV)), 2) + .withIRInfo({ case (builder, obj, _, Seq(index, defaultValue), _) => + val index1 = index.asValue[SInt.type] + val defaultValue1 = defaultValue.asValue[SType] + builder.mkByIndex(obj.asValue[SCollection[SType]], index1, Some(defaultValue1)) + }) + .withInfo(ByIndex, "Return the element of collection if \\lst{index} is in range \\lst{0 .. 
size-1}", + ArgInfo("index", "index of the element of this collection"), + ArgInfo("default", "value to return when \\lst{index} is out of range")) + val MapMethod = SMethod(this, "map", SFunc(IndexedSeq(ThisType, SFunc(tIV, tOV)), tOVColl, Seq(paramIV, paramOV)), 3) + .withInfo(MapCollection, + """ Builds a new collection by applying a function to all elements of this collection. + | Returns a new collection of type \lst{Coll[B]} resulting from applying the given function + | \lst{f} to each element of this collection and collecting the results. + """.stripMargin, + ArgInfo("f", "the function to apply to each element")) + val ExistsMethod = SMethod(this, "exists", SFunc(IndexedSeq(ThisType, tPredicate), SBoolean, Seq(paramIV)), 4) + .withInfo(Exists, + """Tests whether a predicate holds for at least one element of this collection. + |Returns \lst{true} if the given predicate \lst{p} is satisfied by at least one element of this collection, otherwise \lst{false} + """.stripMargin, + ArgInfo("p", "the predicate used to test elements")) + val FoldMethod = SMethod(this, "fold", SFunc(IndexedSeq(ThisType, tOV, SFunc(IndexedSeq(tOV, tIV), tOV)), tOV, Seq(paramIV, paramOV)), 5) + .withInfo(Fold, "Applies a binary operator to a start value and all elements of this collection, going left to right.", + ArgInfo("zero", "a starting value"), + ArgInfo("op", "the binary operator")) + val ForallMethod = SMethod(this, "forall", SFunc(IndexedSeq(ThisType, tPredicate), SBoolean, Seq(paramIV)), 6) + .withInfo(ForAll, + """Tests whether a predicate holds for all elements of this collection. + |Returns \lst{true} if this collection is empty or the given predicate \lst{p} + |holds for all elements of this collection, otherwise \lst{false}. + """.stripMargin, + ArgInfo("p", "the predicate used to test elements")) + val SliceMethod = SMethod(this, "slice", SFunc(IndexedSeq(ThisType, SInt, SInt), ThisType, Seq(paramIV)), 7) + .withInfo(Slice, + """Selects an interval of elements. 
The returned collection is made up + | of all elements \lst{x} which satisfy the invariant: + | \lst{ + | from <= indexOf(x) < until + | } + """.stripMargin, + ArgInfo("from", "the lowest index to include from this collection"), + ArgInfo("until", "the lowest index to EXCLUDE from this collection")) + val FilterMethod = SMethod(this, "filter", SFunc(IndexedSeq(ThisType, tPredicate), ThisType, Seq(paramIV)), 8) + .withIRInfo({ + case (builder, obj, _, Seq(l), _) => builder.mkFilter(obj.asValue[SCollection[SType]], l.asFunc) + }) + .withInfo(Filter, + """Selects all elements of this collection which satisfy a predicate. + | Returns a new collection consisting of all elements of this collection that satisfy the given + | predicate \lst{p}. The order of the elements is preserved. + """.stripMargin, + ArgInfo("p", "the predicate used to test elements.")) + val AppendMethod = SMethod(this, "append", SFunc(IndexedSeq(ThisType, ThisType), ThisType, Seq(paramIV)), 9) + .withIRInfo({ + case (builder, obj, _, Seq(xs), _) => + builder.mkAppend(obj.asCollection[SType], xs.asCollection[SType]) + }) + .withInfo(Append, "Puts the elements of other collection after the elements of this collection (concatenation of 2 collections)", + ArgInfo("other", "the collection to append at the end of this")) val ApplyMethod = SMethod(this, "apply", SFunc(IndexedSeq(ThisType, SInt), tIV, Seq(tIV)), 10) + .withInfo(ByIndex, + """The element at given index. + | Indices start at \lst{0}; \lst{xs.apply(0)} is the first element of collection \lst{xs}. + | Note the indexing syntax \lst{xs(i)} is a shorthand for \lst{xs.apply(i)}. + | Returns the element at the given index. 
+ | Throws an exception if \lst{i < 0} or \lst{length <= i} + """.stripMargin, ArgInfo("i", "the index")) + val BitShiftLeftMethod = SMethod(this, "<<", SFunc(IndexedSeq(ThisType, SInt), ThisType, Seq(paramIV)), 11) val BitShiftRightMethod = SMethod(this, ">>", SFunc(IndexedSeq(ThisType, SInt), ThisType, Seq(paramIV)), 12) val BitShiftRightZeroedMethod = SMethod(this, ">>>", SFunc(IndexedSeq(SCollection(SBoolean), SInt), SCollection(SBoolean)), 13) - val IndicesMethod = SMethod(this, "indices", SFunc(ThisType, SCollection(SInt)), 14, MethodCallIrBuilder) + + val IndicesMethod = SMethod(this, "indices", SFunc(ThisType, SCollection(SInt)), 14) + .withIRInfo(MethodCallIrBuilder) + .withInfo(PropertyCall, + """Produces the range of all indices of this collection as a new collection + | containing [0 .. length-1] values. + """.stripMargin) + val FlatMapMethod = SMethod(this, "flatMap", - SFunc( - IndexedSeq(ThisType, SFunc(tIV, tOVColl)), - tOVColl, - Seq(paramIV, paramOV)), - 15, MethodCallIrBuilder) - val SegmentLengthMethod = SMethod(this, "segmentLength", - SFunc(IndexedSeq(ThisType, tPredicate, SInt), SInt, Seq(paramIV)), - 16, MethodCallIrBuilder) - val IndexWhereMethod = SMethod(this, "indexWhere", - SFunc(IndexedSeq(ThisType, tPredicate, SInt), SInt, Seq(paramIV)), - 17, MethodCallIrBuilder) - val LastIndexWhereMethod = SMethod(this, "lastIndexWhere", - SFunc(IndexedSeq(ThisType, tPredicate, SInt), SInt, Seq(paramIV)), - 18, MethodCallIrBuilder) + SFunc(IndexedSeq(ThisType, SFunc(tIV, tOVColl)), tOVColl, Seq(paramIV, paramOV)), 15) + .withIRInfo(MethodCallIrBuilder) + .withInfo(MethodCall, + """ Builds a new collection by applying a function to all elements of this collection + | and using the elements of the resulting collections. + | Function \lst{f} is constrained to be of the form \lst{x => x.someProperty}, otherwise + | it is illegal. 
+ | Returns a new collection of type \lst{Coll[B]} resulting from applying the given collection-valued function + | \lst{f} to each element of this collection and concatenating the results. + """.stripMargin, ArgInfo("f", "the function to apply to each element.")) + + val PatchMethod = SMethod(this, "patch", - SFunc(IndexedSeq(ThisType, SInt, ThisType, SInt), ThisType, Seq(paramIV)), - 19, MethodCallIrBuilder) + SFunc(IndexedSeq(ThisType, SInt, ThisType, SInt), ThisType, Seq(paramIV)), 19) + .withIRInfo(MethodCallIrBuilder) + .withInfo(MethodCall, "") + val UpdatedMethod = SMethod(this, "updated", - SFunc(IndexedSeq(ThisType, SInt, tIV), ThisType, Seq(paramIV)), - 20, MethodCallIrBuilder) + SFunc(IndexedSeq(ThisType, SInt, tIV), ThisType, Seq(paramIV)), 20) + .withIRInfo(MethodCallIrBuilder).withInfo(MethodCall, "") + val UpdateManyMethod = SMethod(this, "updateMany", - SFunc(IndexedSeq(ThisType, SCollection(SInt), ThisType), ThisType, Seq(paramIV)), - 21, MethodCallIrBuilder) + SFunc(IndexedSeq(ThisType, SCollection(SInt), ThisType), ThisType, Seq(paramIV)), 21) + .withIRInfo(MethodCallIrBuilder).withInfo(MethodCall, "") + val UnionSetsMethod = SMethod(this, "unionSets", - SFunc(IndexedSeq(ThisType, ThisType), ThisType, Seq(paramIV)), - 22, MethodCallIrBuilder) + SFunc(IndexedSeq(ThisType, ThisType), ThisType, Seq(paramIV)), 22) + .withIRInfo(MethodCallIrBuilder).withInfo(MethodCall, "") + val DiffMethod = SMethod(this, "diff", - SFunc(IndexedSeq(ThisType, ThisType), ThisType, Seq(paramIV)), - 23, MethodCallIrBuilder) + SFunc(IndexedSeq(ThisType, ThisType), ThisType, Seq(paramIV)), 23) + .withIRInfo(MethodCallIrBuilder).withInfo(MethodCall, "") val IntersectMethod = SMethod(this, "intersect", - SFunc(IndexedSeq(ThisType, ThisType), ThisType, Seq(paramIV)), - 24, MethodCallIrBuilder) + SFunc(IndexedSeq(ThisType, ThisType), ThisType, Seq(paramIV)), 24) + .withIRInfo(MethodCallIrBuilder).withInfo(MethodCall, "") + val PrefixLengthMethod = SMethod(this, 
"prefixLength", - SFunc(IndexedSeq(ThisType, tPredicate), SInt, Seq(paramIV)), - 25, MethodCallIrBuilder) + SFunc(IndexedSeq(ThisType, tPredicate), SInt, Seq(paramIV)), 25) + .withIRInfo(MethodCallIrBuilder).withInfo(MethodCall, "") + val IndexOfMethod = SMethod(this, "indexOf", - SFunc(IndexedSeq(ThisType, tIV, SInt), SInt, Seq(paramIV)), - 26, MethodCallIrBuilder) + SFunc(IndexedSeq(ThisType, tIV, SInt), SInt, Seq(paramIV)), 26) + .withIRInfo(MethodCallIrBuilder).withInfo(MethodCall, "") + val LastIndexOfMethod = SMethod(this, "lastIndexOf", - SFunc(IndexedSeq(ThisType, tIV, SInt), SInt, Seq(paramIV)), - 27, MethodCallIrBuilder) + SFunc(IndexedSeq(ThisType, tIV, SInt), SInt, Seq(paramIV)), 27) + .withIRInfo(MethodCallIrBuilder).withInfo(MethodCall, "") + lazy val FindMethod = SMethod(this, "find", - SFunc(IndexedSeq(ThisType, tPredicate), SOption(tIV), Seq(paramIV)), - 28, MethodCallIrBuilder) + SFunc(IndexedSeq(ThisType, tPredicate), SOption(tIV), Seq(paramIV)), 28) + .withIRInfo(MethodCallIrBuilder).withInfo(MethodCall, "") + val ZipMethod = SMethod(this, "zip", - SFunc(IndexedSeq(ThisType, tOVColl), - SCollection(STuple(tIV, tOV)), Seq(tIV, tOV)), - 29, MethodCallIrBuilder) + SFunc(IndexedSeq(ThisType, tOVColl), SCollection(STuple(tIV, tOV)), Seq(tIV, tOV)), 29) + .withIRInfo(MethodCallIrBuilder).withInfo(MethodCall, "") + val DistinctMethod = SMethod(this, "distinct", - SFunc(IndexedSeq(ThisType), ThisType, Seq(tIV)), 30, MethodCallIrBuilder) + SFunc(IndexedSeq(ThisType), ThisType, Seq(tIV)), 30) + .withIRInfo(MethodCallIrBuilder).withInfo(PropertyCall, "") + val StartsWithMethod = SMethod(this, "startsWith", - SFunc(IndexedSeq(ThisType, ThisType, SInt), SBoolean, Seq(paramIV)), - 31, MethodCallIrBuilder) + SFunc(IndexedSeq(ThisType, ThisType, SInt), SBoolean, Seq(paramIV)), 31) + .withIRInfo(MethodCallIrBuilder).withInfo(MethodCall, "") + val EndsWithMethod = SMethod(this, "endsWith", - SFunc(IndexedSeq(ThisType, ThisType), SBoolean, Seq(paramIV)), - 32, 
MethodCallIrBuilder) - val PartitionMethod = SMethod(this, "partition", - SFunc(IndexedSeq(ThisType, tPredicate), STuple(ThisType, ThisType), Seq(paramIV)), - 33, MethodCallIrBuilder) + SFunc(IndexedSeq(ThisType, ThisType), SBoolean, Seq(paramIV)), 32) + .withIRInfo(MethodCallIrBuilder).withInfo(MethodCall, "") + val MapReduceMethod = SMethod(this, "mapReduce", SFunc( IndexedSeq(ThisType, SFunc(tIV, STuple(tK, tV)), SFunc(STuple(tV, tV), tV)), SCollection(STuple(tK, tV)), - Seq(paramIV, STypeParam(tK), STypeParam(tV))), - 34, MethodCallIrBuilder) + Seq(paramIV, STypeParam(tK), STypeParam(tV))), 34) + .withIRInfo(MethodCallIrBuilder).withInfo(MethodCall, "") lazy val methods: Seq[SMethod] = Seq( SizeMethod, @@ -900,30 +1149,34 @@ object SCollection extends STypeCompanion with MethodByNameUnapply { FilterMethod, AppendMethod, ApplyMethod, + /* TODO soft-fork: https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 BitShiftLeftMethod, BitShiftRightMethod, BitShiftRightZeroedMethod, + */ IndicesMethod, FlatMapMethod, - SegmentLengthMethod, - IndexWhereMethod, - LastIndexWhereMethod, PatchMethod, UpdatedMethod, UpdateManyMethod, + /*TODO soft-fork: https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 UnionSetsMethod, DiffMethod, IntersectMethod, PrefixLengthMethod, + */ IndexOfMethod, + /* TODO soft-fork: https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 LastIndexOfMethod, FindMethod, + */ ZipMethod, + /* TODO soft-fork: https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 DistinctMethod, StartsWithMethod, EndsWithMethod, - PartitionMethod, MapReduceMethod, + */ ) def apply[T <: SType](elemType: T): SCollection[T] = SCollectionType(elemType) def apply[T <: SType](implicit elemType: T, ov: Overload1): SCollection[T] = SCollectionType(elemType) @@ -993,12 +1246,13 @@ case class STuple(items: IndexedSeq[SType]) extends SCollection[SAny.type] { } /** Construct tree node Constant for a given data object. 
*/ - override def mkConstant(v: Array[Any]): Value[this.type] = + override def mkConstant(v: Coll[Any]): Value[this.type] = Constant[STuple](v, this).asValue[this.type] val typeParams = Seq() val tparamSubst = Map() + override def toTermString = s"(${items.map(_.toTermString).mkString(",")})" override def toString = s"(${items.mkString(",")})" } @@ -1020,7 +1274,9 @@ object STuple extends STypeCompanion { lazy val colMethods = { val subst = Map(SCollection.tIV -> SAny) - SCollection.methods.map { m => + // TODO: implement other + val activeMethods = Set(1.toByte, 10.toByte) + SCollection.methods.filter(m => activeMethods.contains(m.methodId)).map { m => m.copy(stype = SigmaTyper.applySubst(m.stype, subst).asFunc) } } @@ -1028,7 +1284,7 @@ object STuple extends STypeCompanion { def methods: Seq[SMethod] = sys.error(s"Shouldn't be called.") def apply(items: SType*): STuple = STuple(items.toIndexedSeq) - val MaxTupleLength = 255 + val MaxTupleLength: Int = ErgoConstants.MaxTupleLength.get private val componentNames = Array.tabulate(MaxTupleLength){ i => s"_${i + 1}" } def componentNameByIndex(i: Int): String = try componentNames(i) @@ -1049,14 +1305,18 @@ case class SFunc(tDom: IndexedSeq[SType], tRange: SType, tpeParams: Seq[STypePa val args = if (tpeParams.isEmpty) "" else tpeParams.mkString("[", ",", "]") s"$args(${tDom.mkString(",")}) => $tRange" } + override def toTermString = { + val args = if (tpeParams.isEmpty) "" else tpeParams.mkString("[", ",", "]") + s"$args(${tDom.map(_.toTermString).mkString(",")}) => ${tRange.toTermString}" + } override def dataSize(v: SType#WrappedType) = 8L import SFunc._ val typeParams: Seq[STypeParam] = tpeParams - val tparamSubst: Map[STypeIdent, SType] = Map() // defined in MethodCall.typeSubst + val tparamSubst: Map[STypeVar, SType] = Map() // defined in MethodCall.typeSubst def getGenericType: SFunc = { val typeParams: Seq[STypeParam] = tDom.zipWithIndex - .map { case (t, i) => STypeParam(tD.name + (i + 1)) } :+ 
STypeParam(tR.name) + .map { case (_, i) => STypeParam(tD.name + (i + 1)) } :+ STypeParam(tR.name) val ts = typeParams.map(_.ident) SFunc(ts.init.toIndexedSeq, ts.last, Nil) } @@ -1064,10 +1324,11 @@ case class SFunc(tDom: IndexedSeq[SType], tRange: SType, tpeParams: Seq[STypePa } object SFunc { - val tD = STypeIdent("D") - val tR = STypeIdent("R") + val tD = STypeVar("D") + val tR = STypeVar("R") final val FuncTypeCode: TypeCode = OpCodes.FirstFuncType def apply(tDom: SType, tRange: SType): SFunc = SFunc(IndexedSeq(tDom), tRange) + val identity = { x: Any => x } } @@ -1080,39 +1341,44 @@ object STypeApply { val TypeCode = 94: Byte } -case class STypeIdent(name: String) extends SType { +/** Type variable which is used in generic method/func signatures. */ +case class STypeVar(name: String) extends SType { require(name.length <= 255, "name is too long") override type WrappedType = Any - override val typeCode = STypeIdent.TypeCode + override val typeCode = STypeVar.TypeCode override def isConstantSize = false override def toString = name + override def toTermString: String = name } -object STypeIdent { +object STypeVar { val TypeCode: TypeCode = 103: Byte - implicit def liftString(n: String): STypeIdent = STypeIdent(n) + implicit def liftString(n: String): STypeVar = STypeVar(n) } case object SBox extends SProduct with SPredefType with SMonoType { - override type WrappedType = ErgoBox + import ErgoBox._ + override type WrappedType = Box override val typeCode: TypeCode = 99: Byte override def typeId = typeCode - override def mkConstant(v: ErgoBox): Value[SBox.type] = BoxConstant(v) + override def mkConstant(v: Box): Value[SBox.type] = BoxConstant(v) override def dataSize(v: SType#WrappedType): Long = { val box = v.asInstanceOf[this.WrappedType] - 4 + // box.value - box.propositionBytes.length + - box.additionalTokens.length * (32 + 4) + - box.additionalRegisters.values.map(x => x.tpe.dataSize(x.value)).sum + - box.transactionId.length + - 2 // box.index + 
Sized.sizeOf(box).dataSize } override def isConstantSize = false def ancestors = Nil - val tT = STypeIdent("T") + val tT = STypeVar("T") def registers(idOfs: Int): Seq[SMethod] = { - (1 to 10).map { i => - SMethod(this, s"R$i", SFunc(IndexedSeq(SBox), SOption(tT), Seq(STypeParam(tT))), (idOfs + i).toByte) + allRegisters.map { i => + i match { + case r: MandatoryRegisterId => + SMethod(this, s"R${i.asIndex}", SFunc(IndexedSeq(SBox), SOption(tT), Seq(STypeParam(tT))), (idOfs + i.asIndex + 1).toByte) + .withInfo(ExtractRegisterAs, r.purpose) + case _ => + SMethod(this, s"R${i.asIndex}", SFunc(IndexedSeq(SBox), SOption(tT), Seq(STypeParam(tT))), (idOfs + i.asIndex + 1).toByte) + .withInfo(ExtractRegisterAs, "Non-mandatory register") + } } } val PropositionBytes = "propositionBytes" @@ -1122,36 +1388,53 @@ case object SBox extends SProduct with SPredefType with SMonoType { val BytesWithoutRef = "bytesWithoutRef" val CreationInfo = "creationInfo" val GetReg = "getReg" + // should be lazy, otherwise lead to initialization error - lazy val creationInfoMethod = SMethod(this, CreationInfo, ExtractCreationInfo.OpType, 6) // see ExtractCreationInfo - lazy val getRegMethod = SMethod(this, "getReg", SFunc(IndexedSeq(SBox, SByte), SOption(tT), Seq(STypeParam(tT))), 7) - lazy val tokensMethod = SMethod(this, "tokens", SFunc(SBox, ErgoBox.STokensRegType), 8, MethodCallIrBuilder) + lazy val creationInfoMethod = SMethod(this, CreationInfo, ExtractCreationInfo.OpType, 6) + .withInfo(ExtractCreationInfo, + """ If \lst{tx} is a transaction which generated this box, then \lst{creationInfo._1} + | is a height of the tx's block. The \lst{creationInfo._2} is a serialized transaction + | identifier followed by box index in the transaction outputs. + """.stripMargin ) // see ExtractCreationInfo + + lazy val getRegMethod = SMethod(this, "getReg", SFunc(IndexedSeq(SBox, SInt), SOption(tT), Seq(STypeParam(tT))), 7) + .withInfo(ExtractRegisterAs, + """ Extracts register by id and type. 
+ | Type param \lst{T} expected type of the register. + | Returns \lst{Some(value)} if the register is defined and has given type and \lst{None} otherwise + """.stripMargin, + ArgInfo("regId", "zero-based identifier of the register.")) + + lazy val tokensMethod = SMethod(this, "tokens", SFunc(SBox, ErgoBox.STokensRegType), 8) + .withIRInfo(MethodCallIrBuilder) + .withInfo(PropertyCall, "Secondary tokens") + // should be lazy to solve recursive initialization protected override def getMethods() = super.getMethods() ++ Vector( - SMethod(this, Value, SFunc(SBox, SLong), 1), // see ExtractAmount - SMethod(this, PropositionBytes, SFunc(SBox, SByteArray), 2), // see ExtractScriptBytes - SMethod(this, Bytes, SFunc(SBox, SByteArray), 3), // see ExtractBytes - SMethod(this, BytesWithoutRef, SFunc(SBox, SByteArray), 4), // see ExtractBytesWithNoRef - SMethod(this, Id, SFunc(SBox, SByteArray), 5), // see ExtractId + SMethod(this, Value, SFunc(SBox, SLong), 1) + .withInfo(ExtractAmount, "Mandatory: Monetary value, in Ergo tokens (NanoErg unit of measure)"), // see ExtractAmount + SMethod(this, PropositionBytes, SFunc(SBox, SByteArray), 2) + .withInfo(ExtractScriptBytes, "Serialized bytes of guarding script, which should be evaluated to true in order to\n" + + " open this box. 
(aka spend it in a transaction)"), // see ExtractScriptBytes + SMethod(this, Bytes, SFunc(SBox, SByteArray), 3) + .withInfo(ExtractBytes, "Serialized bytes of this box's content, including proposition bytes."), // see ExtractBytes + SMethod(this, BytesWithoutRef, SFunc(SBox, SByteArray), 4) + .withInfo(ExtractBytesWithNoRef, "Serialized bytes of this box's content, excluding transactionId and index of output."), // see ExtractBytesWithNoRef + SMethod(this, Id, SFunc(SBox, SByteArray), 5) + .withInfo(ExtractId, "Blake2b256 hash of this box's content, basically equals to \\lst{blake2b256(bytes)}"), // see ExtractId creationInfoMethod, getRegMethod, - tokensMethod, + tokensMethod ) ++ registers(8) override val coster = Some(Coster(_.BoxCoster)) } case object SAvlTree extends SProduct with SPredefType with SMonoType { - override type WrappedType = AvlTreeData + override type WrappedType = AvlTree override val typeCode: TypeCode = 100: Byte override def typeId = typeCode - override def mkConstant(v: AvlTreeData): Value[SAvlTree.type] = AvlTreeConstant(v) - override def dataSize(v: SType#WrappedType): Long = { - val tree = v.asInstanceOf[AvlTreeData] - AvlTreeData.DigestSize + // digest - 1 + // flags - 4 + // keyLength - tree.valueLengthOpt.fold(0)(_ => 4) - } + override def mkConstant(v: AvlTree): Value[SAvlTree.type] = AvlTreeConstant(v) + override def dataSize(v: SType#WrappedType): Long = AvlTreeData.TreeDataSize override def isConstantSize = false def ancestors = Nil @@ -1159,34 +1442,115 @@ case object SAvlTree extends SProduct with SPredefType with SMonoType { val TCollOptionCollByte = SCollection(SByteArrayOption) val CollKeyValue = SCollection(STuple(SByteArray, SByteArray)) - val digestMethod = SMethod(this, "digest", SFunc(this, SByteArray), 1, MethodCallIrBuilder) - val enabledOperationsMethod = SMethod(this, "enabledOperations", SFunc(this, SByte), 2, MethodCallIrBuilder) - val keyLengthMethod = SMethod(this, "keyLength", SFunc(this, SInt), 3, 
MethodCallIrBuilder) - val valueLengthOptMethod = SMethod(this, "valueLengthOpt", SFunc(this, SIntOption), 4, MethodCallIrBuilder) - val isInsertAllowedMethod = SMethod(this, "isInsertAllowed", SFunc(this, SBoolean), 5, MethodCallIrBuilder) - val isUpdateAllowedMethod = SMethod(this, "isUpdateAllowed", SFunc(this, SBoolean), 6, MethodCallIrBuilder) - val isRemoveAllowedMethod = SMethod(this, "isRemoveAllowed", SFunc(this, SBoolean), 7, MethodCallIrBuilder) + val digestMethod = SMethod(this, "digest", SFunc(this, SByteArray), 1) + .withIRInfo(MethodCallIrBuilder) + .withInfo(PropertyCall, + """Returns digest of the state represented by this tree. + | Authenticated tree \lst{digest} = \lst{root hash bytes} ++ \lst{tree height} + """.stripMargin) + + val enabledOperationsMethod = SMethod(this, "enabledOperations", SFunc(this, SByte), 2) + .withIRInfo(MethodCallIrBuilder) + .withInfo(PropertyCall, + """ Flags of enabled operations packed in single byte. + | \lst{isInsertAllowed == (enabledOperations & 0x01) != 0}\newline + | \lst{isUpdateAllowed == (enabledOperations & 0x02) != 0}\newline + | \lst{isRemoveAllowed == (enabledOperations & 0x04) != 0} + """.stripMargin) + val keyLengthMethod = SMethod(this, "keyLength", SFunc(this, SInt), 3) + .withIRInfo(MethodCallIrBuilder) + .withInfo(PropertyCall, + """ + | + """.stripMargin) + val valueLengthOptMethod = SMethod(this, "valueLengthOpt", SFunc(this, SIntOption), 4) + .withIRInfo(MethodCallIrBuilder) + .withInfo(PropertyCall, + """ + | + """.stripMargin) + val isInsertAllowedMethod = SMethod(this, "isInsertAllowed", SFunc(this, SBoolean), 5) + .withIRInfo(MethodCallIrBuilder) + .withInfo(PropertyCall, + """ + | + """.stripMargin) + val isUpdateAllowedMethod = SMethod(this, "isUpdateAllowed", SFunc(this, SBoolean), 6) + .withIRInfo(MethodCallIrBuilder) + .withInfo(PropertyCall, + """ + | + """.stripMargin) + val isRemoveAllowedMethod = SMethod(this, "isRemoveAllowed", SFunc(this, SBoolean), 7) + 
.withIRInfo(MethodCallIrBuilder) + .withInfo(PropertyCall, + """ + | + """.stripMargin) val updateOperationsMethod = SMethod(this, "updateOperations", - SFunc(IndexedSeq(SAvlTree, SByte), SAvlTreeOption), 8, MethodCallIrBuilder) + SFunc(IndexedSeq(SAvlTree, SByte), SAvlTree), 8) + .withIRInfo(MethodCallIrBuilder) + .withInfo(MethodCall, + """ + | + """.stripMargin) val containsMethod = SMethod(this, "contains", - SFunc(IndexedSeq(SAvlTree, SByteArray, SByteArray), SBoolean), 9, MethodCallIrBuilder) + SFunc(IndexedSeq(SAvlTree, SByteArray, SByteArray), SBoolean), 9) + .withIRInfo(MethodCallIrBuilder) + .withInfo(MethodCall, + """ + | + """.stripMargin) val getMethod = SMethod(this, "get", - SFunc(IndexedSeq(SAvlTree, SByteArray, SByteArray), SByteArrayOption), 10, MethodCallIrBuilder) + SFunc(IndexedSeq(SAvlTree, SByteArray, SByteArray), SByteArrayOption), 10) + .withIRInfo(MethodCallIrBuilder) + .withInfo(MethodCall, + """ + | + """.stripMargin) val getManyMethod = SMethod(this, "getMany", - SFunc(IndexedSeq(SAvlTree, SByteArray2, SByteArray), TCollOptionCollByte), 11, MethodCallIrBuilder) + SFunc(IndexedSeq(SAvlTree, SByteArray2, SByteArray), TCollOptionCollByte), 11) + .withIRInfo(MethodCallIrBuilder) + .withInfo(MethodCall, + """ + | + """.stripMargin) val insertMethod = SMethod(this, "insert", - SFunc(IndexedSeq(SAvlTree, CollKeyValue, SByteArray), SAvlTreeOption), 12, MethodCallIrBuilder) + SFunc(IndexedSeq(SAvlTree, CollKeyValue, SByteArray), SAvlTreeOption), 12) + .withIRInfo(MethodCallIrBuilder) + .withInfo(MethodCall, + """ + | + """.stripMargin) val updateMethod = SMethod(this, "update", - SFunc(IndexedSeq(SAvlTree, CollKeyValue, SByteArray), SAvlTreeOption), 13, MethodCallIrBuilder) + SFunc(IndexedSeq(SAvlTree, CollKeyValue, SByteArray), SAvlTreeOption), 13) + .withIRInfo(MethodCallIrBuilder) + .withInfo(MethodCall, + """ + | + """.stripMargin) val removeMethod = SMethod(this, "remove", - SFunc(IndexedSeq(SAvlTree, SByteArray2, SByteArray), 
SAvlTreeOption), 14, MethodCallIrBuilder) + SFunc(IndexedSeq(SAvlTree, SByteArray2, SByteArray), SAvlTreeOption), 14) + .withIRInfo(MethodCallIrBuilder) + .withInfo(MethodCall, + """ + | + """.stripMargin) + + val updateDigestMethod = SMethod(this, "updateDigest", + SFunc(IndexedSeq(SAvlTree, SByteArray), SAvlTree), 15) + .withIRInfo(MethodCallIrBuilder) + .withInfo(MethodCall, + """ + | + """.stripMargin) protected override def getMethods(): Seq[SMethod] = super.getMethods() ++ Seq( digestMethod, @@ -1202,7 +1566,8 @@ case object SAvlTree extends SProduct with SPredefType with SMonoType { getManyMethod, insertMethod, updateMethod, - removeMethod + removeMethod, + updateDigestMethod ) override val coster = Some(Coster(_.AvlTreeCoster)) } @@ -1220,7 +1585,7 @@ case object SContext extends SProduct with SPredefType with SMonoType { override def isConstantSize = false def ancestors = Nil - val tT = STypeIdent("T") + val tT = STypeVar("T") val dataInputsMethod = property("dataInputs", SBoxArray, 1) val headersMethod = property("headers", SHeaderArray, 2) val preHeaderMethod = property("preHeader", SPreHeader, 3) @@ -1323,3 +1688,47 @@ case object SPreHeader extends SProduct with SPredefType with SMonoType { ) override val coster = Some(Coster(_.PreHeaderCoster)) } + +/** This type is introduced to unify handling of global and non-global (i.e. methods) operations. + * It unifies implementation of global operation with implementation of methods and avoids code + * duplication (following DRY principle https://en.wikipedia.org/wiki/Don%27t_repeat_yourself). + * The WrappedType is `special.sigma.SigmaDslBuilder`, which is an interface implemented by + * the singleton sigmastate.eval.CostingSigmaDslBuilder + * + * The Constant(...) 
tree node of this type are not allowed, as well as using it in register and + * context variables (aka ContextExtension) + * + * When new methods are added to this type via a soft-fork, they will be serialized as part + * of ErgoTree using MethodCallSerializer, where SGlobal.typeCode will be used. + * + * @see sigmastate.lang.SigmaPredef + * */ +case object SGlobal extends SProduct with SPredefType with SMonoType { + override type WrappedType = SigmaDslBuilder + override val typeCode: TypeCode = 106: Byte + override def typeId = typeCode + override def mkConstant(v: SigmaDslBuilder): Value[SGlobal.type] = { + sys.error(s"Constants of SGlobal type cannot be created.") + } + /** Approximate data size of the given context without ContextExtension. */ + override def dataSize(v: SType#WrappedType): Long = { + sys.error(s"Should not be used, use SizeContext and Sized typeclass instead") + } + override def isConstantSize = true // only fixed amount of global information is allowed + def ancestors = Nil + + val tT = STypeVar("T") + val groupGeneratorMethod = SMethod(this, "groupGenerator", SFunc(this, SGroupElement), 1) + .withIRInfo({ case (builder, obj, method, args, tparamSubst) => GroupGenerator }) + .withInfo(GroupGenerator, "") + val xorMethod = SMethod(this, "xor", SFunc(IndexedSeq(this, SByteArray, SByteArray), SByteArray), 2) + .withIRInfo({ + case (_, _, _, Seq(l, r), _) => Xor(l.asByteArray, r.asByteArray) + }) + protected override def getMethods() = super.getMethods() ++ Seq( + groupGeneratorMethod, + xorMethod + ) + override val coster = Some(Coster(_.SigmaDslBuilderCoster)) +} + diff --git a/src/main/scala/sigmastate/utils/Helpers.scala b/src/main/scala/sigmastate/utils/Helpers.scala index ae7c42a2fa..4feff629b7 100644 --- a/src/main/scala/sigmastate/utils/Helpers.scala +++ b/src/main/scala/sigmastate/utils/Helpers.scala @@ -46,6 +46,15 @@ object Helpers { result } + def concatArrays[T:ClassTag](arr1: Array[T], arr2: Array[T]): Array[T] = { + val length: Int 
= arr1.length + arr2.length + val result: Array[T] = new Array[T](length) + var pos: Int = 0 + System.arraycopy(arr1, 0, result, 0, arr1.length) + System.arraycopy(arr2, 0, result, arr1.length, arr2.length) + result + } + def concatArrays[T:ClassTag](seq: Traversable[Array[T]]): Array[T] = { val length: Int = seq.foldLeft(0)((acc, arr) => acc + arr.length) val result: Array[T] = new Array[T](length) diff --git a/src/main/scala/sigmastate/utils/SigmaByteReader.scala b/src/main/scala/sigmastate/utils/SigmaByteReader.scala index 07b826505f..84e3b47fb7 100644 --- a/src/main/scala/sigmastate/utils/SigmaByteReader.scala +++ b/src/main/scala/sigmastate/utils/SigmaByteReader.scala @@ -1,16 +1,16 @@ package sigmastate.utils -import java.nio.ByteBuffer - -import scorex.util.serialization.{Reader, VLQByteBufferReader} +import scorex.util.serialization.Reader import sigmastate.SType import sigmastate.Values.SValue -import sigmastate.serialization.{ConstantStore, TypeSerializer, ValDefTypeStore, ValueSerializer} +import sigmastate.lang.exceptions.DeserializeCallDepthExceeded +import sigmastate.serialization._ import scorex.util.Extensions._ class SigmaByteReader(val r: Reader, var constantStore: ConstantStore, - var resolvePlaceholdersToConstants: Boolean) + var resolvePlaceholdersToConstants: Boolean, + val maxTreeDepth: Int = SigmaSerializer.MaxTreeDepth) extends Reader { val valDefTypeStore: ValDefTypeStore = new ValDefTypeStore() @@ -67,7 +67,12 @@ class SigmaByteReader(val r: Reader, private var lvl: Int = 0 @inline def level: Int = lvl - @inline def level_=(v: Int): Unit = lvl = v + @inline def level_=(v: Int): Unit = { + if (v > maxTreeDepth) + throw new DeserializeCallDepthExceeded(s"nested value deserialization call depth($v) exceeds allowed maximum $maxTreeDepth") + lvl = v + } + @inline def getValues(): IndexedSeq[SValue] = { val size = getUInt().toIntExact val xs = new Array[SValue](size) diff --git a/src/main/scala/sigmastate/utils/SigmaByteWriter.scala 
b/src/main/scala/sigmastate/utils/SigmaByteWriter.scala index 96377e8176..6884575b5a 100644 --- a/src/main/scala/sigmastate/utils/SigmaByteWriter.scala +++ b/src/main/scala/sigmastate/utils/SigmaByteWriter.scala @@ -2,13 +2,14 @@ package sigmastate.utils import scorex.util.serialization.{VLQByteStringWriter, VLQByteBufferWriter, Writer} import scorex.util.serialization.Writer.Aux -import sigmastate.SType -import sigmastate.Values.Value +import sigmastate.{ArgInfo, SType} +import sigmastate.Values.{Value, SValue} import sigmastate.serialization.{TypeSerializer, ValueSerializer, ConstantStore} class SigmaByteWriter(val w: Writer, val constantExtractionStore: Option[ConstantStore]) extends Writer { - + import SigmaByteWriter._ + import ValueSerializer._ type CH = w.CH @inline override def length(): Int = w.length() @@ -20,24 +21,73 @@ class SigmaByteWriter(val w: Writer, @inline override def result(): CH = w.result() @inline def put(x: Byte): this.type = { w.put(x); this } + @inline def put(x: Byte, info: DataInfo[Byte]): this.type = { + ValueSerializer.addArgInfo(info) + w.put(x); this + } + + override def putUByte(x: Int): this.type = { + super.putUByte(x) + } + def putUByte(x: Int, info: DataInfo[U[Byte]]): this.type = { + ValueSerializer.addArgInfo(info) + super.putUByte(x) + } @inline def putBoolean(x: Boolean): this.type = { w.putBoolean(x); this } + @inline def putBoolean(x: Boolean, info: DataInfo[Boolean]): this.type = { + ValueSerializer.addArgInfo(info) + w.putBoolean(x); this + } @inline def putShort(x: Short): this.type = { w.putShort(x); this } + @inline def putShort(x: Short, info: DataInfo[Short]): this.type = { + ValueSerializer.addArgInfo(info) + w.putShort(x); this + } @inline def putUShort(x: Int): this.type = { w.putUShort(x); this } + @inline def putUShort(x: Int, info: DataInfo[Vlq[U[Short]]]): this.type = { + ValueSerializer.addArgInfo(info) + w.putUShort(x); this + } @inline def putInt(x: Int): this.type = { w.putInt(x); this } + @inline def 
putInt(x: Int, info: DataInfo[Int]): this.type = { + ValueSerializer.addArgInfo(info) + w.putInt(x); this + } @inline def putUInt(x: Long): this.type = { w.putUInt(x); this } + @inline def putUInt(x: Long, info: DataInfo[Vlq[U[Int]]]): this.type = { + ValueSerializer.addArgInfo(info) + w.putUInt(x); this + } @inline def putLong(x: Long): this.type = { w.putLong(x); this } + @inline def putLong(x: Long, info: DataInfo[Vlq[ZigZag[Long]]]): this.type = { + ValueSerializer.addArgInfo(info) + w.putLong(x); this + } @inline def putULong(x: Long): this.type = { w.putULong(x); this } + @inline def putULong(x: Long, info: DataInfo[Vlq[U[Long]]]): this.type = { + ValueSerializer.addArgInfo(info) + w.putULong(x); this + } @inline def putBytes(xs: Array[Byte]): this.type = { w.putBytes(xs); this } + @inline def putBytes(xs: Array[Byte], info: DataInfo[Array[Byte]]): this.type = { + ValueSerializer.addArgInfo(info) + w.putBytes(xs); this + } @inline def putBits(xs: Array[Boolean]): this.type = { w.putBits(xs); this } + @inline def putBits(xs: Array[Boolean], info: DataInfo[Bits]): this.type = { + ValueSerializer.addArgInfo(info) + w.putBits(xs); + this + } @inline def putOption[T](x: Option[T])(putValueC: (this.type, T) => Unit): this.type = { w.putOption(x) { (_, v) => @@ -48,18 +98,158 @@ class SigmaByteWriter(val w: Writer, @inline def putShortString(s: String): this.type = { w.putShortString(s); this } - // todo move to Writer + // TODO refactor: move to Writer @inline def toBytes: Array[Byte] = w match { case wr: VLQByteStringWriter => wr.result().asByteBuffer.array() case wr: VLQByteBufferWriter => wr.toBytes } @inline def putType[T <: SType](x: T): this.type = { TypeSerializer.serialize(x, this); this } + @inline def putType[T <: SType](x: T, info: DataInfo[SType]): this.type = { + ValueSerializer.addArgInfo(info) + TypeSerializer.serialize(x, this); this + } + @inline def putValue[T <: SType](x: Value[T]): this.type = { ValueSerializer.serialize(x, this); this } + 
@inline def putValue[T <: SType](x: Value[T], info: DataInfo[SValue]): this.type = { + ValueSerializer.addArgInfo(info) + ValueSerializer.serialize(x, this); this + } @inline def putValues[T <: SType](xs: Seq[Value[T]]): this.type = { putUInt(xs.length) xs.foreach(putValue(_)) this } + @inline def putValues[T <: SType](xs: Seq[Value[T]], info: DataInfo[Seq[SValue]]): this.type = { + putUInt(xs.length, ArgInfo("\\#items", "number of items in the collection")) + foreach("\\#items", xs) { x => + val itemFmt = info.format.asInstanceOf[SeqFmt[SValue]].fmt + putValue(x, DataInfo(ArgInfo(info.info.name+"_i", s"i-th item in the ${info.info.description}"), itemFmt)) + } + this + } +} + +object SigmaByteWriter { + import scala.language.implicitConversions + + /** Format descriptor type family. */ + trait FormatDescriptor[T] { + /** Size formula associated with this format */ + def size: String + } + + /** Marker type to automatically resolve correct implicit format descriptor + * in Writer methods. + * This is type-level type, since no instances of it are ever created. */ + trait Vlq[T] + + /** Marker type to automatically resolve correct implicit format descriptor + * in Writer methods. + * This is type-level type, since no instances of it are ever created. */ + trait ZigZag[T] + + /** Marker type for Unsigned types to automatically resolve correct implicit format descriptor + * in Writer methods. + * This is type-level type, since no instances of it are ever created. */ + trait U[T] + + /** Marker type for bits representation of Coll[Boolean]. + * Should be used only as argument for FormatDescriptor. + * This is type-level type, since no instances of it are ever created. 
+ */ + trait Bits + + implicit case object ValueFmt extends FormatDescriptor[SValue] { + override def size: String = "[1, *]" + override def toString: String = "Expr" + } + implicit case object TypeFmt extends FormatDescriptor[SType] { + override def size: String = "[1, *]" + override def toString: String = "Type" + } + + case object BitsFmt extends FormatDescriptor[Bits] { + override def size: String = "[1, *]" + override def toString: String = "Bits" + } + + case class MaxBitsFmt(maxBits: Int) extends FormatDescriptor[Bits] { + override def size: String = { + val maxBytes = (maxBits - 1) / 8 + 1 + if (maxBytes == 1) "1" + else s"[1, $maxBytes]" + } + override def toString: String = "Bits" + } + + implicit object BooleanFmt extends FormatDescriptor[Boolean] { + override def size: String = "1" + override def toString: String = "Boolean" + } + implicit object ByteFmt extends FormatDescriptor[Byte] { + override def size: String = "1" + override def toString: String = "Byte" + } + implicit object ShortFmt extends FormatDescriptor[Short] { + override def size: String = "2" + override def toString: String = "Short" + } + implicit object IntFmt extends FormatDescriptor[Int] { + override def size: String = "4" + override def toString: String = "Int" + } + implicit object LongFmt extends FormatDescriptor[Long] { + override def size: String = "8" + override def toString: String = "Long" + } + + implicit object UByteFmt extends FormatDescriptor[U[Byte]] { + override def size: String = "1" + override def toString: String = "UByte" + } + implicit object UShortFmt extends FormatDescriptor[U[Short]] { + override def size: String = "2" + override def toString: String = "UShort" + } + implicit object UIntFmt extends FormatDescriptor[U[Int]] { + override def size: String = "4" + override def toString: String = "UInt" + } + implicit object ULongFmt extends FormatDescriptor[U[Long]] { + override def size: String = "8" + override def toString: String = "ULong" + } + + case class 
ZigZagFmt[T](fmt: FormatDescriptor[T]) extends FormatDescriptor[ZigZag[T]] { + override def size: String = s"[1, *]" + override def toString: String = s"ZigZag($fmt)" + } + case class UVlqFmt[T](fmt: FormatDescriptor[U[T]]) extends FormatDescriptor[Vlq[U[T]]] { + override def size: String = s"[1, *]" + override def toString: String = s"VLQ($fmt)" + } + case class ZigZagVlqFmt[T](fmt: FormatDescriptor[ZigZag[T]]) extends FormatDescriptor[Vlq[ZigZag[T]]] { + override def size: String = s"[1, *]" + override def toString: String = s"VLQ($fmt)" + } + case class SeqFmt[T](fmt: FormatDescriptor[T]) extends FormatDescriptor[Seq[T]] { + override def size: String = s"n * ${fmt.size}" + override def toString: String = s"Seq($fmt)" + } + + implicit def toZigZagFmt[T](implicit fmt: FormatDescriptor[T]): FormatDescriptor[ZigZag[T]] = ZigZagFmt(fmt) + implicit def toUVlqFmt[T](implicit fmt: FormatDescriptor[U[T]]): FormatDescriptor[Vlq[U[T]]] = UVlqFmt(fmt) + implicit def toZigZagVlqFmt[T](implicit fmt: FormatDescriptor[ZigZag[T]]): FormatDescriptor[Vlq[ZigZag[T]]] = ZigZagVlqFmt(fmt) + implicit def toSeqFmt[T](implicit fmt: FormatDescriptor[T]): FormatDescriptor[Seq[T]] = SeqFmt(fmt) + case class DataInfo[T](info: ArgInfo, format: FormatDescriptor[T]) + + def bitsInfo(name: String, desc: String = ""): DataInfo[Bits] = DataInfo(ArgInfo(name, desc), BitsFmt) + def maxBitsInfo(name: String, maxBits: Int, desc: String = ""): DataInfo[Bits] = DataInfo(ArgInfo(name, desc), MaxBitsFmt(maxBits)) + + implicit def argInfoToDataInfo[T](arg: ArgInfo)(implicit fmt: FormatDescriptor[T]): DataInfo[T] = DataInfo(arg, fmt) + // TODO remove this conversion and make it explicit + /**Helper conversion */ + implicit def nameToDataInfo[T](name: String)(implicit fmt: FormatDescriptor[T]): DataInfo[T] = ArgInfo(name, "") } diff --git a/src/main/scala/sigmastate/utils/SparseArrayContainer.scala b/src/main/scala/sigmastate/utils/SparseArrayContainer.scala index 103b8b20f2..dad6437f5d 100644 --- 
a/src/main/scala/sigmastate/utils/SparseArrayContainer.scala +++ b/src/main/scala/sigmastate/utils/SparseArrayContainer.scala @@ -16,9 +16,11 @@ class SparseArrayContainer[T: ClassTag](values: Seq[(Byte, T)]) { private def build(sers: Seq[(Byte, T)]): Array[T] = { val set = sers.map(_._1).toSet - require(sers.size == set.size, - s"expected distinct codes, got duplicated: ${sers.groupBy { case (b, _) => b }.filter { case (k, g) => g.size > 1 }.toList }") - val array = Array.fill[T](256)(null.asInstanceOf[T]) + require(sers.size == set.size, { + val dupGroups = sers.groupBy { case (b, _) => b }.filter { case (_, g) => g.size > 1 }.toList + s"expected distinct codes, got duplicated: $dupGroups" + }) + val array = Array.fill[T](256)(null.asInstanceOf[T]) // one item for each OpCode sers.foreach { case (code, value) => array(codeToIndex(code)) = value } @@ -34,8 +36,29 @@ class SparseArrayContainer[T: ClassTag](values: Seq[(Byte, T)]) { * @return value or null if no value for a given code */ @inline - def get(code: Byte): T = sparseArray(codeToIndex(code)) + def apply(code: Byte): T = sparseArray(codeToIndex(code)) + /** + * Returns Some(value) for the given code if it is not `null`. + * @param code of a value + * @return Some(value) or None if no value for a given code + */ + @inline + def get(code: Byte): Option[T] = Option(this(code)) + + /** Add new values to this container. 
*/ + def add(code: Byte, value: T) = { + val index = codeToIndex(code) + require(sparseArray(index) == null, s"Value with index $index already defined.") + sparseArray(index) = value + } + + def remove(code: Byte) = { + val index = codeToIndex(code) + require(sparseArray(index) != null, + s"Value with index $index not-defined, probably incorrect attempt to remove it.") + sparseArray(index) = null.asInstanceOf[T] + } } object SparseArrayContainer { diff --git a/src/main/scala/sigmastate/utxo/CostTable.scala b/src/main/scala/sigmastate/utxo/CostTable.scala index d06f4d5a23..500b971243 100644 --- a/src/main/scala/sigmastate/utxo/CostTable.scala +++ b/src/main/scala/sigmastate/utxo/CostTable.scala @@ -17,7 +17,9 @@ case class CostTable(operCosts: Map[OperationId, Int]) extends (OperationId => I override def apply(operId: OperationId): Int = { val costOpt = this.get(operId) costOpt match { - case Some(cost) => cost + case Some(cost) => + // println(s"$operId -> $cost") + cost case None => //costToInt(MinimalCost) sys.error(s"Cannot find cost in CostTable for $operId") } @@ -43,31 +45,47 @@ object CostTable { val newCollValueCost = 1 val newPairValueCost = 1 val newOptionValueCost = 1 + val newAvlTreeCost = 10 val plusMinus = 2 val multiply = 10 val plusMinusBigInt = 10 + val comparisonBigInt = 10 val multiplyBigInt = 50 val hashPerKb = 100 + val avlTreeOp = hashPerKb * 2 + val collAccess = 5 + val collLength = 5 // TODO costing: should be >= selectField + val collByIndex = 5 // TODO costing: should be >= selectField val collToColl = 20 val comparisonCost = 3 + val comparisonPerKbCost = 10 val logicCost = 2 val sigmaAndCost = 10 val sigmaOrCost = 40 - val castOp = 5 + val proveDlogEvalCost = groupElementConst + constCost + 2 * expCost + multiplyGroup + val proveDHTupleEvalCost = proveDlogEvalCost * 4 // we approximate it as multiple of proveDlogEvalCost + + val castOp = 5 // TODO costing: should be >= selectField val treeOp = 1000 - val extractCost = 10 + val extractCost 
= 10 + val selectField = 10 + val accessContextVar = 10 + val accessBox = 10 + val accessRegister = 10 + + val OptionOp = 10 val DefaultCosts = CostTable.fromSeq(Seq( ("Const", "() => Unit", constCost), @@ -91,12 +109,12 @@ object CostTable { ("Self$", "Context => Box", constCost), ("AccessAvlTree", "Context => AvlTree", constCost), - ("SelectField", "() => Unit", extractCost), + ("SelectField", "() => Unit", selectField), ("AccessKiloByteOfData", "() => Unit", extractCost), - ("AccessBox", "Context => Box", extractCost), + ("AccessBox", "Context => Box", accessBox), ("GetVar", "(Context, Byte) => Option[T]", extractCost), - ("GetRegister", "(Box, Byte) => Option[T]", extractCost), - ("AccessRegister", "Box => Option[T]", extractCost), + ("GetRegister", "(Box, Byte) => Option[T]", accessRegister), + ("AccessRegister", "Box => Option[T]", accessRegister), ("ExtractAmount", "(Box) => Long", extractCost), ("ExtractId", "(Box) => Coll[Byte]", extractCost), ("ExtractBytes", "(Box) => Coll[Byte]", extractCost), @@ -113,17 +131,16 @@ object CostTable { ("Slice", "(Coll[IV],Int,Int) => Coll[IV]", collToColl), ("Append", "(Coll[IV],Coll[IV]) => Coll[IV]", collToColl), - ("SizeOf", "(Coll[IV]) => Int", collAccess), - ("ByIndex", "(Coll[IV],Int) => IV", collAccess), + ("SizeOf", "(Coll[IV]) => Int", collLength), + ("ByIndex", "(Coll[IV],Int) => IV", collByIndex), + ("SCollection$.map", "(Coll[IV],(IV) => OV) => Coll[OV]", collToColl), + ("SCollection$.flatMap", "(Coll[IV],(IV) => Coll[OV]) => Coll[OV]", collToColl), ("SCollection$.indexOf_per_kb", "(Coll[IV],IV,Int) => Int", collToColl), - ("SCollection$.segmentLength", "(Coll[IV],(IV) => Boolean,Int) => Int", collToColl), - ("SCollection$.indexWhere", "(Coll[IV],(IV) => Boolean,Int) => Int", collToColl), - ("SCollection$.lastIndexWhere", "(Coll[IV],(IV) => Boolean,Int) => Int", collToColl), ("SCollection$.zip", "(Coll[IV],Coll[OV]) => Coll[(IV,OV)]", collToColl), - ("SCollection$.partition", "(Coll[IV],(IV) => Boolean) => 
(Coll[IV],Coll[IV])", collToColl), ("SCollection$.patch", "(Coll[IV],Int,Coll[IV],Int) => Coll[IV]", collToColl), ("SCollection$.updated", "(Coll[IV],Int,IV) => Coll[IV]", collToColl), ("SCollection$.updateMany_per_kb", "(Coll[IV],Coll[Int],Coll[IV]) => Coll[IV]", collToColl), + ("SCollection$.filter", "(Coll[IV],(IV) => Boolean) => Coll[IV]", collToColl), ("If", "(Boolean, T, T) => T", logicCost), @@ -132,6 +149,7 @@ object CostTable { ("SigmaPropBytes", "SigmaProp => Coll[Byte]", logicCost), ("BinAnd", "(Boolean, Boolean) => Boolean", logicCost), ("BinOr", "(Boolean, Boolean) => Boolean", logicCost), + ("BinXor", "(Boolean, Boolean) => Boolean", logicCost), ("AND", "(Coll[Boolean]) => Boolean", logicCost), ("OR_per_item", "(Coll[Boolean]) => Boolean", logicCost), ("AND_per_item", "(Coll[Boolean]) => Boolean", logicCost), @@ -149,19 +167,19 @@ object CostTable { ("EQ", "(T,T) => Boolean", comparisonCost), ("NEQ", "(T,T) => Boolean", comparisonCost), - ("GT_per_kb", "(T,T) => Boolean", comparisonCost), - ("GE_per_kb", "(T,T) => Boolean", comparisonCost), - ("LE_per_kb", "(T,T) => Boolean", comparisonCost), - ("LT_per_kb", "(T,T) => Boolean", comparisonCost), - ("EQ_per_kb", "(T,T) => Boolean", comparisonCost), - ("NEQ_per_kb", "(T,T) => Boolean", comparisonCost), - - ("GT", "(BigInt,BigInt) => Boolean", plusMinusBigInt), - ("GE", "(BigInt,BigInt) => Boolean", plusMinusBigInt), - ("LE", "(BigInt,BigInt) => Boolean", plusMinusBigInt), - ("LT", "(BigInt,BigInt) => Boolean", plusMinusBigInt), - ("EQ", "(BigInt,BigInt) => Boolean", plusMinusBigInt), - ("NEQ", "(BigInt,BigInt) => Boolean", plusMinusBigInt), + ("GT_per_kb", "(T,T) => Boolean", comparisonPerKbCost), + ("GE_per_kb", "(T,T) => Boolean", comparisonPerKbCost), + ("LE_per_kb", "(T,T) => Boolean", comparisonPerKbCost), + ("LT_per_kb", "(T,T) => Boolean", comparisonPerKbCost), + ("EQ_per_kb", "(T,T) => Boolean", comparisonPerKbCost), + ("NEQ_per_kb", "(T,T) => Boolean", comparisonPerKbCost), + + ("GT", 
"(BigInt,BigInt) => Boolean", comparisonBigInt), + ("GE", "(BigInt,BigInt) => Boolean", comparisonBigInt), + ("LE", "(BigInt,BigInt) => Boolean", comparisonBigInt), + ("LT", "(BigInt,BigInt) => Boolean", comparisonBigInt), + ("EQ", "(BigInt,BigInt) => Boolean", comparisonBigInt), + ("NEQ", "(BigInt,BigInt) => Boolean", comparisonBigInt), // (">_per_item", "(BigInt, BigInt) => BigInt", MinimalCost), ("+", "(Byte, Byte) => Byte", plusMinus), @@ -211,6 +229,7 @@ object CostTable { ("%_per_item", "(BigInt, BigInt) => BigInt", MinimalCost), ("ModQ", "(BigInt) => BigInt", MinimalCost), + ("ModQArithOp", "(BigInt, BigInt) => BigInt", MinimalCost), ("Downcast", s"(${Downcast.tT}) => ${Downcast.tR}", castOp), ("Upcast", s"(${Upcast.tT}) => ${Upcast.tR}", castOp), @@ -229,20 +248,20 @@ object CostTable { ("max", "(BigInt, BigInt) => BigInt", comparisonCost), ("max_per_item", "(BigInt, BigInt) => BigInt", comparisonCost), - ("SAvlTree$.insert_per_kb", "(AvlTree, Coll[(Coll[Byte], Coll[Byte])], Coll[Byte]) => Option[AvlTree]", hashPerKb * 2), - ("SAvlTree$.update_per_kb", "(AvlTree, Coll[(Coll[Byte], Coll[Byte])], Coll[Byte]) => Option[AvlTree]", hashPerKb * 2), - ("SAvlTree$.remove_per_kb", "(AvlTree, Coll[Coll[Byte]], Coll[Byte]) => Option[AvlTree]", hashPerKb * 2), - ("SAvlTree$.contains_per_kb", "(AvlTree,Coll[Byte],Coll[Byte]) => Boolean", hashPerKb * 2), - ("SAvlTree$.get_per_kb", "(AvlTree,Coll[Byte],Coll[Byte]) => Option[Coll[Byte]]", hashPerKb * 2), - ("SAvlTree$.getMany_per_kb", "(AvlTree,Coll[Coll[Byte]],Coll[Byte]) => Coll[Option[Coll[Byte]]]", hashPerKb * 2), + ("SAvlTree$.insert_per_kb", "(AvlTree, Coll[(Coll[Byte], Coll[Byte])], Coll[Byte]) => Option[AvlTree]", avlTreeOp), + ("SAvlTree$.update_per_kb", "(AvlTree, Coll[(Coll[Byte], Coll[Byte])], Coll[Byte]) => Option[AvlTree]", avlTreeOp), + ("SAvlTree$.remove_per_kb", "(AvlTree, Coll[Coll[Byte]], Coll[Byte]) => Option[AvlTree]", avlTreeOp), + ("SAvlTree$.contains_per_kb", "(AvlTree,Coll[Byte],Coll[Byte]) => 
Boolean", avlTreeOp), + ("SAvlTree$.get_per_kb", "(AvlTree,Coll[Byte],Coll[Byte]) => Option[Coll[Byte]]", avlTreeOp), + ("SAvlTree$.getMany_per_kb", "(AvlTree,Coll[Coll[Byte]],Coll[Byte]) => Coll[Option[Coll[Byte]]]", avlTreeOp), + ("SAvlTree$.updateDigest", "(AvlTree,Coll[Byte]) => AvlTree", newAvlTreeCost), + ("SAvlTree$.updateOperations", "(AvlTree,Byte) => AvlTree", newAvlTreeCost), ("LongToByteArray", "(Long) => Coll[Byte]", castOp), ("ByteArrayToLong", "(Coll[Byte]) => Long", castOp), - ("ProveDlogEval", "(Unit) => SigmaProp", groupElementConst + constCost + 2 * expCost + multiplyGroup), - - //cost if of twice prove dlog - ("ProveDHTuple", "(Unit) => SigmaProp", 2 * (groupElementConst + constCost + 2 * expCost + multiplyGroup)), + ("ProveDlogEval", "(Unit) => SigmaProp", proveDlogEvalCost), + ("ProveDHTuple", "(Unit) => SigmaProp", proveDHTupleEvalCost), ("SigmaAnd_per_item", "(Coll[SigmaProp]) => SigmaProp", sigmaAndCost), ("SigmaOr_per_item", "(Coll[SigmaProp]) => SigmaProp", sigmaOrCost), @@ -250,6 +269,9 @@ object CostTable { ("SubstConstants_per_kb", "(Coll[Byte], Coll[Int], Coll[T]) => Coll[Byte]", MinimalCost), ("DecodePoint", "(Coll[Byte]) => GroupElement", MinimalCost), + + ("SOption$.map", "(Option[T],(T) => R) => Option[R]", OptionOp), + ("SOption$.filter", "(Option[T],(T) => Boolean) => Option[T]", OptionOp), )) def fromSeq(items: Seq[(String, String, Int)]): CostTable = { @@ -311,6 +333,7 @@ object CostTable { val BinOrDeclaration = 1 val BinAndDeclaration = 1 + val BinXorDeclaration = 1 val IfDeclaration = 1 /**PropLeaf declaration cost, wrapped script cost to be added as well.*/ diff --git a/src/main/scala/sigmastate/utxo/transformers.scala b/src/main/scala/sigmastate/utxo/transformers.scala index 9b8e38c6ed..731a5a3941 100644 --- a/src/main/scala/sigmastate/utxo/transformers.scala +++ b/src/main/scala/sigmastate/utxo/transformers.scala @@ -1,16 +1,14 @@ package sigmastate.utxo -import com.google.common.primitives.Shorts -import 
org.ergoplatform._ -import sigmastate.SCollection.{SBooleanArray, SByteArray} +import sigmastate.SCollection.SByteArray import sigmastate.Values._ import sigmastate.lang.Terms._ import sigmastate._ -import sigmastate.interpreter.{Context, Interpreter} import sigmastate.serialization.OpCodes.OpCode import sigmastate.serialization.OpCodes import sigmastate.utxo.CostTable.Cost import org.ergoplatform.ErgoBox.{R3, RegisterId} +import sigmastate.Operations._ import sigmastate.lang.exceptions.{OptionUnwrapNone, InterpreterException} import special.sigma.InvalidType @@ -23,36 +21,47 @@ case class MapCollection[IV <: SType, OV <: SType]( input: Value[SCollection[IV]], mapper: Value[SFunc]) extends Transformer[SCollection[IV], SCollection[OV]] { - override val opCode: OpCode = OpCodes.MapCollectionCode + override def companion = MapCollection implicit def tOV = mapper.asValue[OV].tpe override val tpe = SCollection[OV](mapper.tpe.tRange.asInstanceOf[OV]) override val opType = SCollection.MapMethod.stype.asFunc } +object MapCollection extends ValueCompanion { + override def opCode: OpCode = OpCodes.MapCollectionCode +} case class Append[IV <: SType](input: Value[SCollection[IV]], col2: Value[SCollection[IV]]) extends Transformer[SCollection[IV], SCollection[IV]] { - override val opCode: OpCode = OpCodes.AppendCode + override def companion = Append override val tpe = input.tpe - override val opType = SCollection.AppendMethod.stype.asFunc + override val opType = SCollection.AppendMethod.stype +} +object Append extends ValueCompanion { + override def opCode: OpCode = OpCodes.AppendCode } case class Slice[IV <: SType](input: Value[SCollection[IV]], from: Value[SInt.type], until: Value[SInt.type]) extends Transformer[SCollection[IV], SCollection[IV]] { - override val opCode: OpCode = OpCodes.SliceCode + override def companion = Slice override val tpe = input.tpe override def opType = { val tpeColl = SCollection(input.tpe.typeParams.head.ident) SFunc(Vector(tpeColl, SInt, SInt), 
tpeColl) } } +object Slice extends ValueCompanion { + override def opCode: OpCode = OpCodes.SliceCode +} case class Filter[IV <: SType](input: Value[SCollection[IV]], - id: Byte, - condition: Value[SBoolean.type]) + condition: Value[SFunc]) extends Transformer[SCollection[IV], SCollection[IV]] { - override val opCode: OpCode = OpCodes.FilterCode + override def companion = Filter override def tpe: SCollection[IV] = input.tpe - override val opType = SCollection.FilterMethod.stype.asFunc + override val opType = SCollection.FilterMethod.stype +} +object Filter extends ValueCompanion { + override def opCode: OpCode = OpCodes.FilterCode } trait BooleanTransformer[IV <: SType] extends Transformer[SCollection[IV], SBoolean.type] { @@ -60,32 +69,43 @@ trait BooleanTransformer[IV <: SType] extends Transformer[SCollection[IV], SBool val condition: Value[SFunc] override def tpe = SBoolean } +trait BooleanTransformerCompanion extends ValueCompanion { + def argInfos: Seq[ArgInfo] +} case class Exists[IV <: SType](override val input: Value[SCollection[IV]], override val condition: Value[SFunc]) extends BooleanTransformer[IV] { - override val opCode: OpCode = OpCodes.ExistsCode - override val opType = SCollection.ExistsMethod.stype.asFunc + override def companion = Exists + override val opType = SCollection.ExistsMethod.stype +} +object Exists extends BooleanTransformerCompanion { + override def opCode: OpCode = OpCodes.ExistsCode + override def argInfos: Seq[ArgInfo] = ExistsInfo.argInfos } case class ForAll[IV <: SType](override val input: Value[SCollection[IV]], override val condition: Value[SFunc]) extends BooleanTransformer[IV] { - override val opCode: OpCode = OpCodes.ForAllCode - override val opType = SCollection.ForallMethod.stype.asFunc + override def companion = ForAll + override val opType = SCollection.ForallMethod.stype +} +object ForAll extends BooleanTransformerCompanion { + override def opCode: OpCode = OpCodes.ForAllCode + override def argInfos: Seq[ArgInfo] = 
ForAllInfo.argInfos } - case class Fold[IV <: SType, OV <: SType](input: Value[SCollection[IV]], zero: Value[OV], foldOp: Value[SFunc]) extends Transformer[SCollection[IV], OV] { - override val opCode: OpCode = OpCodes.FoldCode + override def companion = Fold implicit override def tpe: OV = zero.tpe - val opType: SFunc = SCollection.FoldMethod.stype.asFunc + val opType: SFunc = SCollection.FoldMethod.stype } -object Fold { +object Fold extends ValueCompanion { + override def opCode: OpCode = OpCodes.FoldCode def sum[T <: SNumericType](input: Value[SCollection[T]])(implicit tT: T) = Fold(input, Constant(tT.upcast(0.toByte), tT), @@ -108,77 +128,115 @@ case class ByIndex[V <: SType](input: Value[SCollection[V]], index: Value[SInt.type], default: Option[Value[V]] = None) extends Transformer[SCollection[V], V] with NotReadyValue[V] { - override val opCode: OpCode = OpCodes.ByIndexCode + override def companion = ByIndex override val tpe = input.tpe.elemType override val opType = SCollection.ApplyMethod.stype.asFunc } +object ByIndex extends ValueCompanion { + override def opCode: OpCode = OpCodes.ByIndexCode +} /** Select tuple field by its 1-based index. E.g. input._1 is transformed to SelectField(input, 1) */ case class SelectField(input: Value[STuple], fieldIndex: Byte) extends Transformer[STuple, SType] with NotReadyValue[SType] { - override val opCode: OpCode = OpCodes.SelectFieldCode + override def companion = SelectField override val tpe = input.tpe.items(fieldIndex - 1) override val opType = SFunc(input.tpe, tpe) } +object SelectField extends ValueCompanion { + override def opCode: OpCode = OpCodes.SelectFieldCode +} /** Represents execution of Sigma protocol that validates the given input SigmaProp. 
*/ case class SigmaPropIsProven(input: Value[SSigmaProp.type]) extends Transformer[SSigmaProp.type, SBoolean.type] with NotReadyValueBoolean { - override val opCode: OpCode = OpCodes.SigmaPropIsProvenCode + override def companion = SigmaPropIsProven override def opType = SFunc(input.tpe, SBoolean) } +object SigmaPropIsProven extends ValueCompanion { + override def opCode: OpCode = OpCodes.SigmaPropIsProvenCode +} /** Extract serialized bytes of a SigmaProp value */ case class SigmaPropBytes(input: Value[SSigmaProp.type]) extends Transformer[SSigmaProp.type, SByteArray] with NotReadyValue[SByteArray] { - override val opCode: OpCode = OpCodes.SigmaPropBytesCode + override def companion = SigmaPropBytes override def tpe = SByteArray override val opType = SFunc(input.tpe, tpe) } - +object SigmaPropBytes extends ValueCompanion { + override def opCode: OpCode = OpCodes.SigmaPropBytesCode +} +trait SimpleTransformerCompanion extends ValueCompanion { + def argInfos: Seq[ArgInfo] +} case class SizeOf[V <: SType](input: Value[SCollection[V]]) extends Transformer[SCollection[V], SInt.type] with NotReadyValueInt { - override val opCode: OpCode = OpCodes.SizeOfCode + override def companion = SizeOf override val opType = SFunc(SCollection(SCollection.tIV), SInt) } +object SizeOf extends SimpleTransformerCompanion { + override def opCode: OpCode = OpCodes.SizeOfCode + override def argInfos: Seq[ArgInfo] = SizeOfInfo.argInfos +} sealed trait Extract[V <: SType] extends Transformer[SBox.type, V] { } case class ExtractAmount(input: Value[SBox.type]) extends Extract[SLong.type] with NotReadyValueLong { - override val opCode: OpCode = OpCodes.ExtractAmountCode + override def companion = ExtractAmount override val opType = SFunc(SBox, SLong) } +object ExtractAmount extends SimpleTransformerCompanion { + override def opCode: OpCode = OpCodes.ExtractAmountCode + override def argInfos: Seq[ArgInfo] = ExtractAmountInfo.argInfos +} case class ExtractScriptBytes(input: Value[SBox.type]) 
extends Extract[SByteArray] with NotReadyValueByteArray { - override val opCode: OpCode = OpCodes.ExtractScriptBytesCode + override def companion = ExtractScriptBytes override val opType = SFunc(SBox, SByteArray) } +object ExtractScriptBytes extends SimpleTransformerCompanion { + override def opCode: OpCode = OpCodes.ExtractScriptBytesCode + override def argInfos: Seq[ArgInfo] = ExtractScriptBytesInfo.argInfos +} case class ExtractBytes(input: Value[SBox.type]) extends Extract[SByteArray] with NotReadyValueByteArray { - override val opCode: OpCode = OpCodes.ExtractBytesCode + override def companion = ExtractBytes override val opType = SFunc(SBox, SByteArray) } +object ExtractBytes extends SimpleTransformerCompanion { + override def opCode: OpCode = OpCodes.ExtractBytesCode + override def argInfos: Seq[ArgInfo] = ExtractBytesInfo.argInfos +} case class ExtractBytesWithNoRef(input: Value[SBox.type]) extends Extract[SByteArray] with NotReadyValueByteArray { - override val opCode: OpCode = OpCodes.ExtractBytesWithNoRefCode + override def companion = ExtractBytesWithNoRef override val opType = SFunc(SBox, SByteArray) } +object ExtractBytesWithNoRef extends SimpleTransformerCompanion { + override def opCode: OpCode = OpCodes.ExtractBytesWithNoRefCode + override def argInfos: Seq[ArgInfo] = ExtractBytesWithNoRefInfo.argInfos +} case class ExtractId(input: Value[SBox.type]) extends Extract[SByteArray] with NotReadyValueByteArray { - override val opCode: OpCode = OpCodes.ExtractIdCode + override def companion = ExtractId override val opType = SFunc(SBox, SByteArray) } +object ExtractId extends SimpleTransformerCompanion { + override def opCode: OpCode = OpCodes.ExtractIdCode + override def argInfos: Seq[ArgInfo] = ExtractIdInfo.argInfos +} case class ExtractRegisterAs[V <: SType]( input: Value[SBox.type], registerId: RegisterId, override val tpe: SOption[V]) extends Extract[SOption[V]] with NotReadyValue[SOption[V]] { - override val opCode: OpCode = 
OpCodes.ExtractRegisterAs + override def companion = ExtractRegisterAs override def opType = SFunc(Vector(SBox, SByte), tpe) } - -object ExtractRegisterAs { +object ExtractRegisterAs extends ValueCompanion { + override def opCode: OpCode = OpCodes.ExtractRegisterAs def apply[V <: SType](input: Value[SBox.type], registerId: RegisterId)(implicit tpe: V): ExtractRegisterAs[V] = ExtractRegisterAs(input, registerId, SOption(tpe)) @@ -190,11 +248,13 @@ object ExtractRegisterAs { */ case class ExtractCreationInfo(input: Value[SBox.type]) extends Extract[STuple] with NotReadyValue[STuple] { import ExtractCreationInfo._ + override def companion = ExtractCreationInfo override def tpe: STuple = ResultType - override val opCode: OpCode = OpCodes.ExtractCreationInfoCode override def opType = OpType } -object ExtractCreationInfo { +object ExtractCreationInfo extends SimpleTransformerCompanion { + override def opCode: OpCode = OpCodes.ExtractCreationInfoCode + override def argInfos: Seq[ArgInfo] = ExtractCreationInfoInfo.argInfos val ResultType = STuple(SInt, SByteArray) val OpType = SFunc(SBox, ResultType) } @@ -210,42 +270,61 @@ trait Deserialize[V <: SType] extends NotReadyValue[V] * @since 2.0 */ case class DeserializeContext[V <: SType](id: Byte, tpe: V) extends Deserialize[V] { - override val opCode: OpCode = OpCodes.DeserializeContextCode + override def companion = DeserializeContext override val opType = SFunc(Vector(SContext, SByte), tpe) } +object DeserializeContext extends ValueCompanion { + override def opCode: OpCode = OpCodes.DeserializeContextCode +} -//todo: make it method of SBox and write test for this class +/** Extract register of SELF box as Coll[Byte], deserialize it into Value and inline into executing script. 
+ * NOTE: it only applicable to SELF box + */ case class DeserializeRegister[V <: SType](reg: RegisterId, tpe: V, default: Option[Value[V]] = None) extends Deserialize[V] { - override val opCode: OpCode = OpCodes.DeserializeRegisterCode + override def companion = DeserializeRegister override val opType = SFunc(Vector(SBox, SByte, SOption(tpe)), tpe) } +object DeserializeRegister extends ValueCompanion { + override def opCode: OpCode = OpCodes.DeserializeRegisterCode +} case class GetVar[V <: SType](varId: Byte, override val tpe: SOption[V]) extends NotReadyValue[SOption[V]] { - override val opCode: OpCode = OpCodes.GetVarCode + override def companion = GetVar override val opType = SFunc(Vector(SContext, SByte), tpe) } - -object GetVar { +object GetVar extends ValueCompanion { + override def opCode: OpCode = OpCodes.GetVarCode def apply[V <: SType](varId: Byte, innerTpe: V): GetVar[V] = GetVar[V](varId, SOption(innerTpe)) } case class OptionGet[V <: SType](input: Value[SOption[V]]) extends Transformer[SOption[V], V] { - override val opCode: OpCode = OpCodes.OptionGetCode + override def companion = OptionGet override val opType = SFunc(input.tpe, tpe) override def tpe: V = input.tpe.elemType override def toString: String = s"$input.get" } +object OptionGet extends SimpleTransformerCompanion { + override def opCode: OpCode = OpCodes.OptionGetCode + override def argInfos: Seq[ArgInfo] = OptionGetInfo.argInfos +} case class OptionGetOrElse[V <: SType](input: Value[SOption[V]], default: Value[V]) extends Transformer[SOption[V], V] { - override val opCode: OpCode = OpCodes.OptionGetOrElseCode + override def companion = OptionGetOrElse override val opType = SFunc(IndexedSeq(input.tpe, tpe), tpe) override def tpe: V = input.tpe.elemType } +object OptionGetOrElse extends ValueCompanion { + override def opCode: OpCode = OpCodes.OptionGetOrElseCode +} case class OptionIsDefined[V <: SType](input: Value[SOption[V]]) extends Transformer[SOption[V], SBoolean.type] { - override 
val opCode: OpCode = OpCodes.OptionIsDefinedCode + override def companion = OptionIsDefined override val opType = SFunc(input.tpe, SBoolean) override def tpe= SBoolean } +object OptionIsDefined extends SimpleTransformerCompanion { + override def opCode: OpCode = OpCodes.OptionIsDefinedCode + override def argInfos: Seq[ArgInfo] = OptionIsDefinedInfo.argInfos +} diff --git a/src/test/scala/org/ergoplatform/ErgoAddressSpecification.scala b/src/test/scala/org/ergoplatform/ErgoAddressSpecification.scala index 7674324db1..d5a22066b9 100644 --- a/src/test/scala/org/ergoplatform/ErgoAddressSpecification.scala +++ b/src/test/scala/org/ergoplatform/ErgoAddressSpecification.scala @@ -11,14 +11,15 @@ import sigmastate.serialization.ErgoTreeSerializer.DefaultSerializer import sigmastate.serialization.ValueSerializer import sigmastate.serialization.generators.ValueGenerators import org.ergoplatform.ErgoScriptPredef._ +import org.ergoplatform.validation.ValidationSpecification import sigmastate.Values.ErgoTree class ErgoAddressSpecification extends PropSpec with ValueGenerators with PropertyChecks with Matchers - with TryValues { - + with TryValues + with ValidationSpecification { private implicit val ergoAddressEncoder: ErgoAddressEncoder = new ErgoAddressEncoder(TestnetNetworkPrefix) diff --git a/src/test/scala/org/ergoplatform/ErgoLikeTransactionSpec.scala b/src/test/scala/org/ergoplatform/ErgoLikeTransactionSpec.scala index 06433d0a8c..677ec10c13 100644 --- a/src/test/scala/org/ergoplatform/ErgoLikeTransactionSpec.scala +++ b/src/test/scala/org/ergoplatform/ErgoLikeTransactionSpec.scala @@ -1,5 +1,6 @@ package org.ergoplatform +import org.ergoplatform.ErgoBox.TokenId import org.scalatest.prop.GeneratorDrivenPropertyChecks import org.scalatest.{Matchers, PropSpec} import scorex.util.Random @@ -8,6 +9,9 @@ import sigmastate.helpers.SigmaTestingCommons import sigmastate.interpreter.{ContextExtension, ProverResult} import sigmastate.serialization.SigmaSerializer import 
sigmastate.serialization.generators.ValueGenerators +import sigmastate.eval._ +import sigmastate.eval.Extensions._ +import sigmastate.SType._ class ErgoLikeTransactionSpec extends PropSpec with GeneratorDrivenPropertyChecks @@ -24,8 +28,10 @@ class ErgoLikeTransactionSpec extends PropSpec forAll { txIn: ErgoLikeTransaction => whenever(txIn.outputCandidates.head.additionalTokens.nonEmpty) { val out = txIn.outputCandidates.head + // clone tokenIds so that same id have different references + val tokens = out.additionalTokens.map(v => (v._1.clone().asInstanceOf[TokenId], v._2)) val outputs = (0 until 10).map { i => - new ErgoBoxCandidate(out.value, out.ergoTree, i, out.additionalTokens, out.additionalRegisters) + new ErgoBoxCandidate(out.value, out.ergoTree, i, tokens, out.additionalRegisters) } val tx = new ErgoLikeTransaction(txIn.inputs, txIn.dataInputs, txIn.outputCandidates ++ outputs) roundTripTestWithPos(tx)(ErgoLikeTransaction.serializer) @@ -35,9 +41,9 @@ class ErgoLikeTransactionSpec extends PropSpec ErgoLikeTransaction.serializer.serialize(tx, w) val bytes = w.toBytes - tx.outputCandidates.flatMap(_.additionalTokens).foreach { token => - bytes.indexOfSlice(token._1) should not be -1 - bytes.indexOfSlice(token._1) shouldBe bytes.lastIndexOfSlice(token._1) + tx.outputCandidates.toColl.flatMap(_.additionalTokens).foreach { (tokenId, _) => + bytes.indexOfSlice(tokenId) should not be -1 + bytes.indexOfSlice(tokenId) shouldBe bytes.lastIndexOfSlice(tokenId) } } } diff --git a/src/test/scala/org/ergoplatform/ErgoScriptPredefSpec.scala b/src/test/scala/org/ergoplatform/ErgoScriptPredefSpec.scala index 3db29e8816..3d7d445ea4 100644 --- a/src/test/scala/org/ergoplatform/ErgoScriptPredefSpec.scala +++ b/src/test/scala/org/ergoplatform/ErgoScriptPredefSpec.scala @@ -7,7 +7,7 @@ import org.ergoplatform.settings.MonetarySettings import org.scalacheck.Gen import scorex.crypto.hash.{Digest32, Blake2b256} import scorex.util.Random -import 
sigmastate.Values.{SigmaPropConstant, CollectionConstant, Value, ByteArrayConstant, SigmaPropValue, IntConstant} +import sigmastate.Values.{SigmaPropConstant, CollectionConstant, ByteArrayConstant, SigmaPropValue, IntConstant, ErgoTree} import sigmastate._ import sigmastate.basics.DLogProtocol.{ProveDlog, DLogProverInput} import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, SigmaTestingCommons, ErgoLikeTestInterpreter} @@ -15,7 +15,7 @@ import sigmastate.interpreter.Interpreter.{ScriptNameProp, emptyEnv} import sigmastate.interpreter.{ProverResult, ContextExtension} import sigmastate.lang.Terms.ValueOps import sigmastate.serialization.ValueSerializer -import sigmastate.utxo.{ExtractCreationInfo, ByIndex, SelectField, CostTable} +import sigmastate.utxo.{CostTable, ExtractCreationInfo, ByIndex, SelectField} import scalan.util.BenchmarkUtil._ import ErgoScriptPredef._ @@ -44,7 +44,7 @@ class ErgoScriptPredefSpec extends SigmaTestingCommons { val inputBox = ErgoBox(1, prop, nextHeight, Seq(), Map()) val inputBoxes = IndexedSeq(inputBox) val inputs = inputBoxes.map(b => Input(b.id, emptyProverResult)) - val minerBox = new ErgoBoxCandidate(1, SigmaPropConstant(minerProp), nextHeight, Seq(), Map()) + val minerBox = new ErgoBoxCandidate(1, SigmaPropConstant(minerProp), nextHeight) val spendingTransaction = ErgoLikeTransaction(inputs, IndexedSeq(minerBox)) @@ -98,7 +98,7 @@ class ErgoScriptPredefSpec extends SigmaTestingCommons { def checkSpending(remainingAmount: Long, height: Int, - newProp: SigmaPropValue, + newProp: ErgoTree, inputR4Val: CollectionConstant[SByte.type]): Try[Unit] = Try { val outputR4Val: CollectionConstant[SByte.type] = ByteArrayConstant(Random.randomBytes()) val inputBoxes = IndexedSeq(ErgoBox(emission.foundersCoinsTotal, prop, 0, Seq(), Map(R4 -> inputR4Val))) @@ -188,7 +188,7 @@ class ErgoScriptPredefSpec extends SigmaTestingCommons { createRewardTx(currentRate, height, minerPk) shouldBe 'failure } - def createRewardTx(emissionAmount: 
Long, nextHeight: Int, minerProp: SigmaPropValue): Try[ErgoLikeTransaction] = { + def createRewardTx(emissionAmount: Long, nextHeight: Int, minerProp: ErgoTree): Try[ErgoLikeTransaction] = { checkRewardTx(minerPk, minerProp, emissionBox, @@ -234,7 +234,7 @@ class ErgoScriptPredefSpec extends SigmaTestingCommons { val inputs0 = IndexedSeq( ErgoBox(20, prop, 0, Seq((wrongId, tokenAmount), (tokenId, tokenAmount), (wrongId2, tokenAmount)), Map()) ) - check(inputs0).get shouldBe(()) + check(inputs0).get shouldBe (()) // transaction with the only input with insufficient token should fail val inputs1 = IndexedSeq( @@ -278,7 +278,7 @@ class ErgoScriptPredefSpec extends SigmaTestingCommons { } def checkRewardTx(minerPk: ProveDlog, - minerProp: SigmaPropValue, + minerProp: ErgoTree, emissionBox: ErgoBox, emissionAmount: Long, nextHeight: Int)(prover: ContextEnrichingTestProvingInterpreter): Try[ErgoLikeTransaction] = Try { @@ -288,8 +288,8 @@ class ErgoScriptPredefSpec extends SigmaTestingCommons { val inputs = inputBoxes.map(b => Input(b.id, emptyProverResult)) val pkBytes = minerPk.pkBytes - val newEmissionBox = new ErgoBoxCandidate(emissionBox.value - emissionAmount, prop, nextHeight, Seq(), Map()) - val minerBox = new ErgoBoxCandidate(emissionAmount, minerProp, nextHeight, Seq(), Map()) + val newEmissionBox = new ErgoBoxCandidate(emissionBox.value - emissionAmount, prop, nextHeight) + val minerBox = new ErgoBoxCandidate(emissionAmount, minerProp, nextHeight) val spendingTransaction = ErgoLikeTransaction(inputs, IndexedSeq(newEmissionBox, minerBox)) diff --git a/src/test/scala/org/ergoplatform/dsl/TestContractSpec.scala b/src/test/scala/org/ergoplatform/dsl/TestContractSpec.scala index 1cf41a5f81..ad98a30d44 100644 --- a/src/test/scala/org/ergoplatform/dsl/TestContractSpec.scala +++ b/src/test/scala/org/ergoplatform/dsl/TestContractSpec.scala @@ -45,10 +45,7 @@ case class TestContractSpec(testSuite: SigmaTestingCommons)(implicit val IR: IRC val boxToSpend = inBox.utxoBox 
val propSpec: PropositionSpec = boxToSpend.propSpec val bindings = extensions.mapValues { case v: TestValue[a] => - implicit val tA = v.tA - val treeType = Evaluation.toErgoTreeType(tA) - val treeData = Evaluation.fromDslData(v.value, tRes = treeType) - IR.builder.mkConstant(treeData.asWrappedType, Evaluation.rtypeToSType(v.tA)) + IR.builder.mkConstant(v.value.asWrappedType, Evaluation.rtypeToSType(v.tA)) } val ctx = inBox.toErgoContext // val newExtension = ContextExtension(ctx.extension.values ++ bindings) diff --git a/src/test/scala/org/ergoplatform/validation/RuleStatusSerializerSpec.scala b/src/test/scala/org/ergoplatform/validation/RuleStatusSerializerSpec.scala new file mode 100644 index 0000000000..37dce4830a --- /dev/null +++ b/src/test/scala/org/ergoplatform/validation/RuleStatusSerializerSpec.scala @@ -0,0 +1,33 @@ +package org.ergoplatform.validation + +import org.scalatest.Assertion +import sigmastate.helpers.SigmaTestingCommons +import sigmastate.serialization.{SigmaSerializer, SerializationSpecification} + +class RuleStatusSerializerSpec extends SerializationSpecification with SigmaTestingCommons { + + private def roundtrip(status: RuleStatus): Assertion = { + implicit val ser = RuleStatusSerializer + roundTripTest(status) + roundTripTestWithPos(status) + } + + property("RuleStatusSerializer round trip") { + forAll(statusGen, MinSuccessful(100)) { status => + roundtrip(status) + } + } + + property("RuleStatusSerializer parse unrecognized status") { + val unknownCode = 100.toByte + val someByte = 10.toByte + val nextByte = 20.toByte + val bytes = Array[Byte](1, unknownCode, someByte, nextByte) + val r = SigmaSerializer.startReader(bytes) + val s = RuleStatusSerializer.parse(r) + s shouldBe ReplacedRule(0) + val b = r.getByte() + b shouldBe nextByte + } + +} diff --git a/src/test/scala/org/ergoplatform/validation/SigmaValidationSettingsSerializerSpec.scala b/src/test/scala/org/ergoplatform/validation/SigmaValidationSettingsSerializerSpec.scala new file 
mode 100644 index 0000000000..2024274ae3 --- /dev/null +++ b/src/test/scala/org/ergoplatform/validation/SigmaValidationSettingsSerializerSpec.scala @@ -0,0 +1,26 @@ +package org.ergoplatform.validation + +import org.scalatest.Assertion +import sigmastate.helpers.SigmaTestingCommons +import sigmastate.serialization.SerializationSpecification + +class SigmaValidationSettingsSerializerSpec extends SerializationSpecification with SigmaTestingCommons { + + private def roundtrip(settings: SigmaValidationSettings): Assertion = { + implicit val set = SigmaValidationSettingsSerializer + roundTripTest(settings) + roundTripTestWithPos(settings) + } + + property("ValidationRules.currentSettings round trip") { + roundtrip(ValidationRules.currentSettings) + } + + property("SigmaValidationSettings round trip") { + forAll(ruleIdGen, statusGen, MinSuccessful(100)) { (ruleId, status) => + val vs = ValidationRules.currentSettings.updated(ruleId, status) + roundtrip(vs) + } + } + +} \ No newline at end of file diff --git a/src/test/scala/org/ergoplatform/validation/ValidationSpecification.scala b/src/test/scala/org/ergoplatform/validation/ValidationSpecification.scala new file mode 100644 index 0000000000..1e30653d9b --- /dev/null +++ b/src/test/scala/org/ergoplatform/validation/ValidationSpecification.scala @@ -0,0 +1,5 @@ +package org.ergoplatform.validation + +trait ValidationSpecification { + implicit val vs: SigmaValidationSettings = ValidationRules.currentSettings +} diff --git a/src/test/scala/sigmastate/CostingSpecification.scala b/src/test/scala/sigmastate/CostingSpecification.scala new file mode 100644 index 0000000000..d6e2c72001 --- /dev/null +++ b/src/test/scala/sigmastate/CostingSpecification.scala @@ -0,0 +1,237 @@ +package sigmastate + +import org.ergoplatform.ErgoLikeContext.{dummyPubkey, dummyPreHeader, noHeaders, noBoxes} +import org.ergoplatform.{ErgoLikeContext, ErgoBox} +import org.ergoplatform.ErgoScriptPredef.TrueProp +import scorex.crypto.authds.{ADDigest, 
ADKey} +import scorex.crypto.authds.avltree.batch.Lookup +import scorex.crypto.hash.Blake2b256 +import sigmastate.Values.{ByteArrayConstant, AvlTreeConstant, BooleanConstant, IntConstant} +import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, SigmaTestingCommons} +import sigmastate.interpreter.ContextExtension +import sigmastate.interpreter.Interpreter.{ScriptNameProp, ScriptEnv} +import sigmastate.utxo.CostTable +import sigmastate.utxo.CostTable._ +import sigmastate.eval._ +import sigmastate.eval.Extensions._ +import special.sigma.{SigmaTestingData, AvlTree} +import Sized._ + +class CostingSpecification extends SigmaTestingData { + implicit lazy val IR = new TestingIRContext { + override val okPrintEvaluatedEntries = false + substFromCostTable = false + } + lazy val interpreter = new ContextEnrichingTestProvingInterpreter + lazy val pkA = interpreter.dlogSecrets(0).publicImage + lazy val pkB = interpreter.dlogSecrets(1).publicImage + + val printCosts = true + + val (key1, _, avlProver) = sampleAvlProver + val keys = Colls.fromItems(key1) + avlProver.performOneOperation(Lookup(ADKey @@ key1.toArray)) + val digest = avlProver.digest.toColl + val lookupProof = avlProver.generateProof().toColl + val avlTreeData = AvlTreeData(ADDigest @@ digest.toArray, AvlTreeFlags.AllOperationsAllowed, 32, None) + val avlTree: AvlTree = CAvlTree(avlTreeData) + + val env: ScriptEnv = Map( + ScriptNameProp -> s"filename_verify", + "key1" -> key1, + "keys" -> keys, + "lookupProof" -> lookupProof + ) + + val extension: ContextExtension = ContextExtension(Map( + 1.toByte -> IntConstant(1), + 2.toByte -> BooleanConstant(true) + )) + val tokenId = Blake2b256("tokenA") + val selfBox = createBox(0, TrueProp, Seq(tokenId -> 10L), + Map(ErgoBox.R4 -> ByteArrayConstant(Array[Byte](1, 2, 3)), + ErgoBox.R5 -> IntConstant(3), + ErgoBox.R6 -> AvlTreeConstant(avlTree))) + lazy val outBoxA = ErgoBox(10, pkA, 0) + lazy val outBoxB = ErgoBox(20, pkB, 0) + lazy val tx = 
createTransaction(IndexedSeq(outBoxA, outBoxB)) + lazy val context = + new ErgoLikeContext( + currentHeight = preHeader.height, + lastBlockUtxoRoot = header2.stateRoot.asInstanceOf[CAvlTree].treeData, + minerPubkey = preHeader.minerPk.getEncoded.toArray, + headers = headers, preHeader = preHeader, + dataBoxes = IndexedSeq(dataBox), + boxesToSpend = IndexedSeq(selfBox), + spendingTransaction = tx, self = selfBox, extension) + + def cost(script: String): Long = { + val ergoTree = compiler.compile(env, script) + val res = interpreter.reduceToCrypto(context, env, ergoTree).get._2 + if (printCosts) + println(script + s" --> cost $res") + res + } + + val ContextVarAccess = accessContextVar + selectField // `getVar(id)` + `.get` + val RegisterAccess = accessRegister + selectField // `getReg(id)` + `.get` + val GTConstCost = comparisonCost + constCost + val LengthGTConstCost = collLength + GTConstCost + val LengthGTCost = collLength + comparisonCost // can be used when constCost is already accumulated + + property("basic (smoke) tests") { + + cost("{ getVar[Boolean](2).get }") shouldBe ContextVarAccess + + cost("{ getVar[Int](1).get > 1 }") shouldBe (ContextVarAccess + GTConstCost) + + // accessing two context variables + cost("{ getVar[Int](1).get > 1 && getVar[Boolean](2).get }") shouldBe + (ContextVarAccess * 2 + GTConstCost + logicCost) + + // the same var is used twice doesn't lead to double cost + cost("{ getVar[Int](1).get + 1 > getVar[Int](1).get }") shouldBe + (ContextVarAccess + plusMinus + constCost + comparisonCost) + + // cost is accumulated along the expression tree + cost("{ getVar[Int](1).get + 1 > getVar[Int](1).get && getVar[Boolean](2).get }") shouldBe + (ContextVarAccess * 2 + plusMinus + constCost + comparisonCost + logicCost) + } + + property("logical op costs") { + cost("{ val cond = getVar[Boolean](2).get; cond && cond }") shouldBe (ContextVarAccess + logicCost) + cost("{ val cond = getVar[Boolean](2).get; cond || cond }") shouldBe (ContextVarAccess 
+ logicCost) + cost("{ val cond = getVar[Boolean](2).get; cond || cond && true }") shouldBe (ContextVarAccess + logicCost * 2 + constCost) + cost("{ val cond = getVar[Boolean](2).get; cond || cond && true || cond }") shouldBe (ContextVarAccess + logicCost * 3 + constCost) + cost("{ val cond = getVar[Boolean](2).get; cond ^ cond && true ^ cond }") shouldBe (ContextVarAccess + logicCost * 3 + constCost) + cost("{ val cond = getVar[Boolean](2).get; allOf(Coll(cond, true, cond)) }") shouldBe (ContextVarAccess + logicCost * 2 + constCost) + } + + property("SELF box operations cost") { + cost("{ SELF.value > 0 }") shouldBe (accessBox + extractCost + GTConstCost) + cost("{ SELF.id.size > 0 }") shouldBe (accessBox + extractCost + LengthGTConstCost) + cost("{ SELF.tokens.size > 0 }") shouldBe (accessBox + extractCost + LengthGTConstCost) + cost("{ SELF.creationInfo._1 > 0 }") shouldBe (accessBox + accessRegister + selectField + GTConstCost) + cost("{ SELF.R5[Int].get > 0 }") shouldBe (accessBox + RegisterAccess + GTConstCost) + + // TODO coverage: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/416 + // cost("{ SELF.getReg[Long](0.toByte).get > 0 }") shouldBe (accessBox + RegisterAccess + GTConstCost) + } + + lazy val OutputsCost = selectField + accessBox * tx.outputs.length + lazy val InputsCost = selectField + accessBox * context.boxesToSpend.length + lazy val DataInputsCost = selectField + accessBox * context.dataBoxes.length + lazy val HeadersCost = selectField + lazy val PreHeaderCost = selectField + lazy val AccessHeaderCost = selectField + collByIndex + constCost + + property("Global operations cost") { + // TODO costing: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 + // cost("{ groupGenerator.isIdentity > 0 }") shouldBe (selectField + selectField + GTConstCost) + + val sizeOfArgs = Seq(sizeOf(key1), sizeOf(key1)).foldLeft(0L)(_ + _.dataSize) + val xorCost = constCost + perKbCostOf(sizeOfArgs, hashPerKb 
/ 2) + cost("{ xor(key1, key1).size > 0 }") shouldBe (xorCost + LengthGTConstCost) + } + + property("Context operations cost") { + cost("{ HEIGHT > 0 }") shouldBe (selectField + GTConstCost) + cost("{ OUTPUTS.size > 0 }") shouldBe (OutputsCost + LengthGTConstCost) + cost("{ INPUTS.size > 0 }") shouldBe (InputsCost + LengthGTConstCost) + cost("{ CONTEXT.dataInputs.size > 0 }") shouldBe (DataInputsCost + LengthGTConstCost) + cost("{ LastBlockUtxoRootHash.isUpdateAllowed }") shouldBe (selectField + selectField) + cost("{ MinerPubkey.size > 0 }") shouldBe (selectField + LengthGTConstCost) + cost("{ CONTEXT.headers.size > 0 }") shouldBe (HeadersCost + LengthGTConstCost) + cost("{ CONTEXT.preHeader.height > 0 }") shouldBe (PreHeaderCost + selectField + GTConstCost) + } + + property("PreHeader operations cost") { + cost("{ CONTEXT.preHeader.version > 0 }") shouldBe (PreHeaderCost + selectField + castOp + GTConstCost) + cost("{ CONTEXT.preHeader.parentId.size > 0 }") shouldBe (PreHeaderCost + selectField + LengthGTConstCost) + cost("{ CONTEXT.preHeader.timestamp > 0L }") shouldBe (PreHeaderCost + selectField + GTConstCost) + cost("{ CONTEXT.preHeader.nBits > 0L }") shouldBe (PreHeaderCost + selectField + GTConstCost) + cost("{ CONTEXT.preHeader.height > 0 }") shouldBe (PreHeaderCost + selectField + GTConstCost) + + cost("{ CONTEXT.preHeader.minerPk == groupGenerator }") shouldBe + (PreHeaderCost + selectField + comparisonCost + selectField) + + cost("{ CONTEXT.preHeader.votes.size > 0 }") shouldBe + (PreHeaderCost + selectField + LengthGTConstCost) + } + + property("Header operations cost") { + val header = "CONTEXT.headers(0)" + cost(s"{ $header.id.size > 0 }") shouldBe (AccessHeaderCost + selectField + LengthGTCost) + cost(s"{ $header.version > 0 }") shouldBe (AccessHeaderCost + selectField + castOp + comparisonCost) + cost(s"{ $header.parentId.size > 0 }") shouldBe (AccessHeaderCost + selectField + LengthGTCost) + cost(s"{ $header.ADProofsRoot.size > 0 }") shouldBe 
(AccessHeaderCost + selectField + LengthGTCost) + cost(s"{ $header.stateRoot.isUpdateAllowed }") shouldBe (AccessHeaderCost + selectField + selectField) + cost(s"{ $header.transactionsRoot.size > 0 }") shouldBe (AccessHeaderCost + selectField + LengthGTCost) + cost(s"{ $header.timestamp > 0L }") shouldBe (AccessHeaderCost + selectField + GTConstCost) + cost(s"{ $header.nBits > 0L }") shouldBe (AccessHeaderCost + selectField + GTConstCost) + cost(s"{ $header.height > 0 }") shouldBe (AccessHeaderCost + selectField + comparisonCost) + cost(s"{ $header.extensionRoot.size > 0 }") shouldBe (AccessHeaderCost + selectField + LengthGTCost) + + cost(s"{ $header.minerPk == groupGenerator }") shouldBe + (AccessHeaderCost + selectField + comparisonCost + selectField) + + cost(s"{ $header.powOnetimePk == groupGenerator }") shouldBe + (AccessHeaderCost + selectField + comparisonCost + selectField) + + cost(s"{ $header.powNonce.size > 0 }") shouldBe (AccessHeaderCost + selectField + LengthGTCost) + + cost(s"{ $header.powDistance > 0 }") shouldBe (AccessHeaderCost + selectField + comparisonBigInt + constCost) + cost(s"{ $header.votes.size > 0 }") shouldBe (AccessHeaderCost + selectField + LengthGTCost) + } + + val AccessRootHash = selectField + def perKbCostOf(dataSize: Long, opCost: Int) = { + ((dataSize / 1024L).toInt + 1) * opCost + } + + property("AvlTree operations cost") { + val rootTree = "LastBlockUtxoRootHash" + cost(s"{ $rootTree.digest.size > 0 }") shouldBe (AccessRootHash + selectField + LengthGTConstCost) + cost(s"{ $rootTree.enabledOperations > 0 }") shouldBe (AccessRootHash + selectField + castOp + GTConstCost) + cost(s"{ $rootTree.keyLength > 0 }") shouldBe (AccessRootHash + selectField + GTConstCost) + cost(s"{ $rootTree.isInsertAllowed }") shouldBe (AccessRootHash + selectField) + cost(s"{ $rootTree.isUpdateAllowed }") shouldBe (AccessRootHash + selectField) + cost(s"{ $rootTree.isRemoveAllowed }") shouldBe (AccessRootHash + selectField) + cost(s"{ 
$rootTree.updateDigest($rootTree.digest) == $rootTree }") shouldBe (AccessRootHash + selectField + newAvlTreeCost + comparisonPerKbCost) + cost(s"{ $rootTree.updateOperations(1.toByte) == $rootTree }") shouldBe (AccessRootHash + newAvlTreeCost + comparisonPerKbCost + constCost) + + val AccessTree = accessBox + RegisterAccess + val selfTree = "SELF.R6[AvlTree].get" + val sizeOfArgs = Seq(sizeOf(avlTree), sizeOf(key1), sizeOf(lookupProof)).foldLeft(0L)(_ + _.dataSize) + val containsCost = perKbCostOf(sizeOfArgs, avlTreeOp) + + cost(s"{ $selfTree.contains(key1, lookupProof) }") shouldBe (AccessTree + containsCost + constCost) + cost(s"{ $selfTree.get(key1, lookupProof).isDefined }") shouldBe (AccessTree + containsCost + constCost + selectField) + cost(s"{ $selfTree.getMany(keys, lookupProof).size > 0 }") shouldBe (AccessTree + containsCost + constCost + LengthGTConstCost) + } + + property("Coll operations cost") { + val coll = "OUTPUTS" + cost(s"{ $coll.filter({ (b: Box) => b.value > 1L }).size > 0 }") shouldBe + (lambdaCost + accessBox + extractCost + GTConstCost + selectField + + (accessBox + comparisonCost) * tx.outputs.length + collToColl + LengthGTConstCost) + cost(s"{ $coll.flatMap({ (b: Box) => b.propositionBytes }).size > 0 }") shouldBe + (lambdaCost + accessBox + extractCost + selectField + + accessBox * tx.outputs.length + collToColl + LengthGTConstCost) + cost(s"{ $coll.zip(OUTPUTS).size > 0 }") shouldBe + (selectField + accessBox * tx.outputs.length + + accessBox * tx.outputs.length * 2 + collToColl + LengthGTConstCost) + } + + property("Option operations cost") { + val opt = "SELF.R5[Int]" + val accessOpt = accessBox + accessRegister + cost(s"{ $opt.get > 0 }") shouldBe (accessOpt + selectField + GTConstCost) + cost(s"{ $opt.isDefined }") shouldBe (accessOpt + selectField) + cost(s"{ $opt.getOrElse(1) > 0 }") shouldBe (accessOpt + selectField + GTConstCost) + cost(s"{ $opt.filter({ (x: Int) => x > 0}).isDefined }") shouldBe + (accessOpt + OptionOp + 
lambdaCost + GTConstCost + selectField) + cost(s"{ $opt.map({ (x: Int) => x + 1}).isDefined }") shouldBe + (accessOpt + OptionOp + lambdaCost + plusMinus + constCost + selectField) + } +} \ No newline at end of file diff --git a/src/test/scala/sigmastate/SoftForkabilitySpecification.scala b/src/test/scala/sigmastate/SoftForkabilitySpecification.scala new file mode 100644 index 0000000000..5f99072fd4 --- /dev/null +++ b/src/test/scala/sigmastate/SoftForkabilitySpecification.scala @@ -0,0 +1,316 @@ +package sigmastate + +import org.ergoplatform.validation.ValidationRules._ +import org.ergoplatform._ +import org.ergoplatform.validation._ +import sigmastate.SPrimType.MaxPrimTypeCode +import sigmastate.Values.ErgoTree.EmptyConstants +import sigmastate.Values.{UnparsedErgoTree, NotReadyValueInt, ByteArrayConstant, Tuple, IntConstant, ErgoTree} +import sigmastate.Values.{UnparsedErgoTree, NotReadyValueInt, ByteArrayConstant, Tuple, IntConstant, ErgoTree, ValueCompanion} +import sigmastate.eval.Colls +import sigmastate.helpers.{ErgoLikeTestProvingInterpreter, ErgoLikeTestInterpreter} +import sigmastate.interpreter.{ProverResult, ContextExtension} +import sigmastate.interpreter.Interpreter.{ScriptNameProp, emptyEnv} +import sigmastate.serialization._ +import sigmastate.lang.Terms._ +import sigmastate.lang.exceptions.{SerializerException, CosterException} +import sigmastate.serialization.DataSerializer.CheckSerializableTypeCode +import sigmastate.serialization.OpCodes.{LastConstantCode, OpCode} +import sigmastate.serialization.TypeSerializer.{CheckPrimitiveTypeCode, CheckTypeCode} +import sigmastate.utxo.{DeserializeContext, SelectField} +import special.sigma.SigmaTestingData + +class SoftForkabilitySpecification extends SigmaTestingData { + + implicit lazy val IR = new TestingIRContext + lazy val prover = new ErgoLikeTestProvingInterpreter() + lazy val verifier = new ErgoLikeTestInterpreter + val deadline = 100 + val boxAmt = 100L + lazy val invalidPropV1 = compile(emptyEnv 
+ ("deadline" -> deadline), + """{ + | HEIGHT > deadline && OUTPUTS.size == 1 + |}""".stripMargin).asBoolValue + lazy val invalidTxV1 = createTransaction(createBox(boxAmt, invalidPropV1.asSigmaProp, 1)) + lazy val invalidTxV1bytes = invalidTxV1.messageToSign + + lazy val propV1 = invalidPropV1.toSigmaProp + lazy val txV1 = createTransaction(createBox(boxAmt, propV1, 1)) + lazy val txV1bytes = txV1.messageToSign + + val blockHeight = 110 + + def createContext(h: Int, tx: ErgoLikeTransaction, vs: SigmaValidationSettings) = + ErgoLikeContext(h, + AvlTreeData.dummy, ErgoLikeContext.dummyPubkey, IndexedSeq(fakeSelf), + tx, fakeSelf, vs = vs) + + def proveTx(name: String, tx: ErgoLikeTransaction, vs: SigmaValidationSettings): ProverResult = { + val env = Map(ScriptNameProp -> (name + "_prove")) + val ctx = createContext(blockHeight, tx, vs) + val prop = tx.outputs(0).ergoTree + val proof1 = prover.prove(env, prop, ctx, fakeMessage).get + proof1 + } + + def verifyTx(name: String, tx: ErgoLikeTransaction, proof: ProverResult, vs: SigmaValidationSettings) = { + val env = Map(ScriptNameProp -> (name + "_verify")) + val ctx = createContext(blockHeight, tx, vs) + val prop = tx.outputs(0).ergoTree + verifier.verify(env, prop, ctx, proof, fakeMessage).map(_._1).fold(t => throw t, identity) shouldBe true + } + + def proveAndVerifyTx(name: String, tx: ErgoLikeTransaction, vs: SigmaValidationSettings) = { + val proof = proveTx(name, tx, vs) + verifyTx(name, tx, proof, vs) + } + + def checkTxProp[T <: ErgoLikeTransaction, R](tx1: T, tx2: T)(p: T => R) = { + p(tx1) shouldBe p(tx2) + } + + property("node v1, received tx with script v1, incorrect script") { + assertExceptionThrown({ + // CheckDeserializedScriptIsSigmaProp rule violated + ErgoLikeTransaction.serializer.parse(SigmaSerializer.startReader(invalidTxV1bytes)) + }, { + case se: SerializerException if se.cause.isDefined => + val ve = se.cause.get.asInstanceOf[ValidationException] + ve.rule == CheckDeserializedScriptIsSigmaProp 
+ case _ => false + }) + } + + property("node v1, received tx with script v1, correct script") { + // able to parse + val tx = ErgoLikeTransaction.serializer.parse(SigmaSerializer.startReader(txV1bytes)) + + // validating script + proveAndVerifyTx("propV1", tx, vs) + } + + val Height2Code = (LastConstantCode + 56).toByte + /** Same as Height, but new opcode to test soft-fork */ + case object Height2 extends NotReadyValueInt with ValueCompanion { + override def companion = this + override val opCode: OpCode = Height2Code // use reserved code + def opType = SFunc(SContext, SInt) + } + val Height2Ser = CaseObjectSerialization(Height2, Height2) + + // prepare soft-fork settings for v2 + val v2vs = vs.updated(CheckValidOpCode.id, ChangedRule(Array(Height2Code))) + + /** Runs the block as if on the v2 node. */ + def runOnV2Node[T](block: => T): T = { + ValueSerializer.addSerializer(Height2Code, Height2Ser) + val res = block + ValueSerializer.removeSerializer(Height2Code) + res + } + + lazy val prop = GT(Height2, IntConstant(deadline)) + lazy val invalidTxV2 = createTransaction(createBox(boxAmt, prop.asSigmaProp, 1)) + + + lazy val propV2 = prop.toSigmaProp + // prepare bytes using special serialization WITH `size flag` in the header + lazy val propV2tree = ErgoTree.withSegregation(ErgoTree.SizeFlag, propV2) + lazy val propV2treeBytes = runOnV2Node { + propV2tree.bytes + } + + lazy val txV2 = createTransaction(createBox(boxAmt, propV2tree, 1)) + lazy val txV2messageToSign = runOnV2Node { + txV2.messageToSign + } + + val newTypeCode = (SGlobal.typeCode + 1).toByte + + property("node v1, soft-fork up to v2, script v2 without size bit") { + // try prepare v2 script without `size bit` in the header + assertExceptionThrown({ + ErgoTree(1.toByte, EmptyConstants, propV2) + }, { + case e: IllegalArgumentException => true + case _ => false + } ) + + // prepare bytes using default serialization and then replacing version in the header + val v2tree_withoutSize_bytes = runOnV2Node { 
+ val tree = ErgoTree.fromProposition(propV2) + val bytes = tree.bytes + bytes(0) = 1.toByte // set version to v2, we cannot do this using ErgoTree constructor + bytes + } + + // v1 node should fail + assertExceptionThrown( + { + val r = SigmaSerializer.startReader(v2tree_withoutSize_bytes) + ErgoTreeSerializer.DefaultSerializer.deserializeErgoTree(r) + }, + { + case ve: ValidationException if ve.rule == CheckHeaderSizeBit => true + case _ => false + } + ) + + // v2 node should fail + runOnV2Node { + assertExceptionThrown( + { + val r = SigmaSerializer.startReader(v2tree_withoutSize_bytes) + ErgoTreeSerializer.DefaultSerializer.deserializeErgoTree(r) + }, + { + case ve: ValidationException if ve.rule == CheckHeaderSizeBit => true + case _ => false + } ) + } + } + + property("node v1, soft-fork up to v2, script v2 with `size bit`") { + val treeBytes = propV2treeBytes + val txV2bytes = txV2messageToSign + + // parse and validate tx with v2 settings + val tx = ErgoLikeTransaction.serializer.parse(SigmaSerializer.startReader(txV2bytes)) + proveAndVerifyTx("propV2", tx, v2vs) + + // also check that transaction prop was trivialized due to soft-fork + tx.outputs(0).ergoTree.root.left.get.bytes.array shouldBe treeBytes + tx.outputs(0).ergoTree.root.left.get.isInstanceOf[UnparsedErgoTree] shouldBe true + + // check deserialized tx is otherwise remains the same + checkTxProp(txV2, tx)(_.inputs) + checkTxProp(txV2, tx)(_.dataInputs) + checkTxProp(txV2, tx)(_.outputs.length) + checkTxProp(txV2, tx)(_.outputs(0).creationHeight) + checkTxProp(txV2, tx)(_.outputs(0).value) + checkTxProp(txV2, tx)(_.outputs(0).additionalTokens) + } + + property("node v1, no soft-fork, received script v2, raise error") { + assertExceptionThrown({ + val invalidTxV2bytes = runOnV2Node { invalidTxV2.messageToSign } + ErgoLikeTransaction.serializer.parse(SigmaSerializer.startReader(invalidTxV2bytes)) + },{ + case se: SerializerException if se.cause.isDefined => + val ve = 
se.cause.get.asInstanceOf[ValidationException] + ve.rule == CheckValidOpCode + case _ => false + }) + } + + property("our node v2, was soft-fork up to v2, received script v2") { + val txV2bytes = txV2.messageToSign + + // run as on node v2 + runOnV2Node { + + // parse and validate tx with v2 script (since serializers were extended to v2) + val tx = ErgoLikeTransaction.serializer.parse(SigmaSerializer.startReader(txV2bytes)) + tx shouldBe txV2 + + // fails evaluation of v2 script (due to the rest of the implementation is still v1) + assertExceptionThrown({ + proveAndVerifyTx("propV2", tx, v2vs) + },{ + case _: CosterException => true + case _ => false + }) + } + } + + property("our node v1, was soft-fork up to v2, received v1 script, DeserializeContext of v2 script") { + // script bytes for context variable containing v2 operation + val propBytes = runOnV2Node { + ValueSerializer.serialize(prop) + } + + // v1 main script which deserializes from context v2 script + val mainProp = BinAnd(GT(Height, IntConstant(deadline)), DeserializeContext(1, SBoolean)).toSigmaProp + + val tx = createTransaction(createBox(boxAmt, ErgoTree.fromProposition(mainProp), 1)) + val bindings = Map(1.toByte -> ByteArrayConstant(Colls.fromArray(propBytes))) + val proof = new ProverResult(Array.emptyByteArray, ContextExtension(bindings)) + + // verify transaction on v1 node using v2 validation settings + verifyTx("deserialize", tx, proof, v2vs) + } + + def checkRule(rule: ValidationRule, v2vs: SigmaValidationSettings, action: => Unit) = { + // try SoftForkable block using current vs (v1 version) + assertExceptionThrown({ + trySoftForkable(false) { + action + true + } + }, { + case ve: ValidationException if ve.rule == rule => true + case _ => false + }) + + val res = trySoftForkable(false)({ + action + true + })(v2vs) + res shouldBe false + } + + property("CheckTupleType rule") { + val exp = SelectField(Tuple(IntConstant(1), IntConstant(2), IntConstant(3)), 3) + val v2vs = 
vs.updated(CheckTupleType.id, ReplacedRule(0)) + checkRule(CheckTupleType, v2vs, { + IR.doCosting(emptyEnv, exp) + }) + } + + property("CheckPrimitiveTypeCode rule") { + val typeBytes = Array[Byte](MaxPrimTypeCode) + val v2vs = vs.updated(CheckPrimitiveTypeCode.id, ChangedRule(Array[Byte](MaxPrimTypeCode))) + checkRule(CheckPrimitiveTypeCode, v2vs, { + val r = SigmaSerializer.startReader(typeBytes) + TypeSerializer.deserialize(r) + }) + } + + property("CheckTypeCode rule") { + val typeBytes = Array[Byte](newTypeCode) + val v2vs = vs.updated(CheckTypeCode.id, ChangedRule(Array[Byte](newTypeCode))) + checkRule(CheckTypeCode, v2vs, { + val r = SigmaSerializer.startReader(typeBytes) + TypeSerializer.deserialize(r) + }) + } + + property("CheckSerializableTypeCode rule") { + val newType = SFunc(SInt, SInt) + val dataBytes = Array[Byte](1, 2, 3) // any random bytes will work + val v2vs = vs.updated(CheckSerializableTypeCode.id, ReplacedRule(0)) + checkRule(CheckSerializableTypeCode, v2vs, { + val r = SigmaSerializer.startReader(dataBytes) + DataSerializer.deserialize(newType, r) + }) + } + + property("CheckTypeWithMethods rule") { + val freeMethodId = 1.toByte + val mcBytes = Array[Byte](OpCodes.PropertyCallCode, newTypeCode, freeMethodId, Outputs.opCode) + val v2vs = vs.updated(CheckTypeWithMethods.id, ChangedRule(Array(newTypeCode))) + checkRule(CheckTypeWithMethods, v2vs, { + ValueSerializer.deserialize(mcBytes) + }) + } + + property("CheckMethod rule") { + val freeMethodId = 16.toByte + val mcBytes = Array[Byte](OpCodes.PropertyCallCode, SCollection.typeId, freeMethodId, Outputs.opCode) + val v2vs = vs.updated(CheckAndGetMethod.id, ChangedRule(Array(SCollection.typeId, freeMethodId))) + checkRule(CheckAndGetMethod, v2vs, { + ValueSerializer.deserialize(mcBytes) + }) + } + +} diff --git a/src/test/scala/sigmastate/TestingInterpreterSpecification.scala b/src/test/scala/sigmastate/TestingInterpreterSpecification.scala index 10c682ac14..874c8cc304 100644 --- 
a/src/test/scala/sigmastate/TestingInterpreterSpecification.scala +++ b/src/test/scala/sigmastate/TestingInterpreterSpecification.scala @@ -34,9 +34,9 @@ class TestingInterpreterSpecification extends SigmaTestingCommons { val ctx = testingContext(h) prover.reduceToCrypto(ctx, AND(GE(Height, IntConstant(h - 1)), dk1)).get._1 should( - matchPattern { case sb: SigmaBoolean => }) + matchPattern { case _: SigmaBoolean => }) prover.reduceToCrypto(ctx, AND(GE(Height, IntConstant(h)), dk1)).get._1 should ( - matchPattern { case sb: SigmaBoolean => }) + matchPattern { case _: SigmaBoolean => }) { val res = prover.reduceToCrypto(ctx, AND(GE(Height, IntConstant(h + 1)), dk1)).get._1 @@ -54,7 +54,7 @@ class TestingInterpreterSpecification extends SigmaTestingCommons { } { val res = prover.reduceToCrypto(ctx, OR(GE(Height, IntConstant(h + 1)), dk1)).get._1 - res should matchPattern { case sb: SigmaBoolean => } + res should matchPattern { case _: SigmaBoolean => } } } } @@ -164,7 +164,6 @@ class TestingInterpreterSpecification extends SigmaTestingCommons { | val arr = box1.R5[Coll[Boolean]].get | allOf(arr) == false |}""".stripMargin) - testEval( """{ | val arr = Coll(1, 2, 3) @@ -185,13 +184,14 @@ class TestingInterpreterSpecification extends SigmaTestingCommons { | val arr = Coll(1, 2, 3) | arr.map {(i: Int) => i + 1} == Coll(2, 3, 4) |}""".stripMargin) - // // TODO uncomment when Costing for where is implemented - // testEval("""{ - // | val arr = Array(1, 2, 3) - // | arr.filter {(i: Int) => i < 3} == Array(1, 2) - // |}""".stripMargin) + testEval( + """{ + | val arr = Coll(1, 2, 3) + | arr.filter {(i: Int) => i < 3} == Coll(1, 2) + |}""".stripMargin) } +// TODO coverage: implement it as negative test // property("Evaluate sigma in lambdas") { // testeval("""{ // | val arr = Array(dk1, dk2) @@ -304,7 +304,7 @@ class TestingInterpreterSpecification extends SigmaTestingCommons { val prop3 = AND(FalseLeaf, TrueLeaf).toSigmaProp verifier.verify(prop3, env, proof, 
challenge).map(_._1).fold(t => throw t, identity) shouldBe false - val prop4 = GT(Height, LongConstant(100)).toSigmaProp + val prop4 = GT(Height, IntConstant(100)).toSigmaProp verifier.verify(prop4, env, proof, challenge).map(_._1).fold(t => throw t, identity) shouldBe false } diff --git a/src/test/scala/sigmastate/crypto/SigningSpecification.scala b/src/test/scala/sigmastate/crypto/SigningSpecification.scala index 5cd7381b95..f9fa88465d 100644 --- a/src/test/scala/sigmastate/crypto/SigningSpecification.scala +++ b/src/test/scala/sigmastate/crypto/SigningSpecification.scala @@ -1,6 +1,5 @@ package sigmastate.crypto -import org.ergoplatform.ErgoLikeContext import scorex.util.encode.Base16 import sigmastate.AtLeast import sigmastate.basics.DLogProtocol.DLogProverInput @@ -10,8 +9,6 @@ import sigmastate.interpreter.{ContextExtension, ProverResult} class SigningSpecification extends SigmaTestingCommons { implicit lazy val IR = new TestingIRContext - val fakeContext: ErgoLikeContext = ErgoLikeContext.dummy(fakeSelf) - property("simple signature test vector") { val msg = Base16.decode("1dc01772ee0171f5f614c673e3c7fa1107a8cf727bdf5a6dadb379e93c0d1d00").get @@ -53,11 +50,14 @@ class SigningSpecification extends SigmaTestingCommons { val sk = proverA.dlogSecrets.head val prop = sk.publicImage - val prove = proverA.prove(prop, fakeContext, msg).get + val tree = prop.toSigmaProp.treeWithSegregation + val prove = proverA.prove(tree, fakeContext, msg).get println(s"Message: ${Base16.encode(msg)}") println(s"sk: ${sk.w}") + println(s"sk(Base16): ${Base16.encode(sk.w.toByteArray)}") println(s"pkBytes: ${Base16.encode(prop.pkBytes)}") + println(s"treeBytes: ${Base16.encode(tree.bytes)}") println(s"Signature: ${Base16.encode(prove.proof)}") } diff --git a/src/test/scala/sigmastate/eval/BasicOpsTests.scala b/src/test/scala/sigmastate/eval/BasicOpsTests.scala index 7de0d29af8..b16574372a 100644 --- a/src/test/scala/sigmastate/eval/BasicOpsTests.scala +++ 
b/src/test/scala/sigmastate/eval/BasicOpsTests.scala @@ -4,8 +4,7 @@ import java.math.BigInteger import org.bouncycastle.crypto.ec.CustomNamedCurves import org.scalatest.{FunSuite, Matchers} -import special.sigma.Extensions._ -import special.sigma.{Box, Context, ContractsTestkit, MockSigma, SigmaContract, SigmaDslBuilder, SigmaProp, TestBox, TestSigmaDslBuilder} +import special.sigma.{Box, Context, ContractsTestkit, MockSigma, SigmaContract, SigmaDslBuilder, SigmaProp, TestSigmaDslBuilder} import scala.language.implicitConversions @@ -60,18 +59,12 @@ class BasicOpsTests extends FunSuite with ContractsTestkit with Matchers { val c2 = collection[Byte](3, 4) c1.append(c2).toArray shouldBe Array[Byte](1, 2, 3, 4) } - test("examples from wpaper") { - val selfId = collection[Byte](0, 1) - val self = new TestBox(selfId, 10, noBytes, noBytes, noBytes, noRegisters) - val ctx = testContext(noInputs, noOutputs, height = 200, self, emptyAvlTree, dummyPubkey, Array()) - } test("box.creationInfo._1 is Int") { - val box = newAliceBox(1, 100, Map(3 -> toAnyValue((20 -> SigmaDsl.Colls.fromArray(Array.emptyByteArray))))) + val box = newAliceBox(1, 100) box.creationInfo._1 shouldBe a [Integer] } - case class Contract1(base64_pk1: String) extends SigmaContract { override def builder: SigmaDslBuilder = new TestSigmaDslBuilder override def canOpen(ctx: Context): Boolean = { diff --git a/src/test/scala/sigmastate/eval/CompilerItTest.scala b/src/test/scala/sigmastate/eval/CompilerItTest.scala index 8cb34d58f3..0b08bcdedf 100644 --- a/src/test/scala/sigmastate/eval/CompilerItTest.scala +++ b/src/test/scala/sigmastate/eval/CompilerItTest.scala @@ -125,12 +125,12 @@ class CompilerItTest extends BaseCtxTests def bigIntArray_Map_Case = { import SCollection._ - val res = Colls.fromArray(bigIntegerArr1).map(n => n.add(n1)).toArray + val res = bigIntegerArr1.map(n => n.add(n1)).toArray Case(env, "bigIntArray_Map", "bigIntArr1.map { (i: BigInt) => i + n1 }", ergoCtx, calc = { ctx => - val vals = 
liftConst(Colls.fromArray(bigIntegerArr1.map(dslValue.BigInt(_)))) - vals.map(fun(n => n.add(liftConst(dslValue.BigInt(n1))))) + val vals = liftConst(bigIntegerArr1) + vals.map(fun(n => n.add(liftConst(n1)))) }, cost = null, // {_ => diff --git a/src/test/scala/sigmastate/eval/CostingTest.scala b/src/test/scala/sigmastate/eval/CostingTest.scala index e2b6e83c50..f351c206da 100644 --- a/src/test/scala/sigmastate/eval/CostingTest.scala +++ b/src/test/scala/sigmastate/eval/CostingTest.scala @@ -23,7 +23,6 @@ import sigmastate.lang.Terms.ValueOps class CostingTest extends BaseCtxTests with LangTests with ExampleContracts with ErgoScriptTestkit { cake => implicit override lazy val IR: TestContext with IRContext = new TestContext with IRContext with CompiletimeCosting { - this.useAlphaEquality = true } import IR._ import WArray._ @@ -80,7 +79,7 @@ class CostingTest extends BaseCtxTests with LangTests with ExampleContracts with costOf(c) + costOf(utxo.SizeOf(c)) }) - val n1Sym = liftConst(dslValue.BigInt(n1)) + val n1Sym = liftConst(n1) checkInEnv(env, "bigint", "n1", {_ => n1Sym }, { _ => constCost[BigInt] }) val g1Sym = liftConst(g1) @@ -96,7 +95,7 @@ class CostingTest extends BaseCtxTests with LangTests with ExampleContracts with import builder._ check("one+one", "1 + 1", _ => toRep(1) + 1, {_ => val c1 = IntConstant(1); costOf(c1) + costOf(c1) + costOf(Plus(c1, c1)) }) - checkInEnv(env, "one+one2", "big - n1", {_ => liftConst(dslValue.BigInt(big)).subtract(liftConst(dslValue.BigInt(n1)))}) + checkInEnv(env, "one+one2", "big - n1", {_ => liftConst(dslValue.BigInt(big)).subtract(liftConst(n1))}) check("one_gt_one", "1 > 1", {_ => toRep(1) > 1}, { _ => val c1 = IntConstant(1); diff --git a/src/test/scala/sigmastate/eval/ErgoScriptTestkit.scala b/src/test/scala/sigmastate/eval/ErgoScriptTestkit.scala index b28d386d20..2c7f7d10d5 100644 --- a/src/test/scala/sigmastate/eval/ErgoScriptTestkit.scala +++ b/src/test/scala/sigmastate/eval/ErgoScriptTestkit.scala @@ -1,11 +1,12 
@@ package sigmastate.eval import org.ergoplatform.ErgoAddressEncoder.TestnetNetworkPrefix +import org.ergoplatform.validation.ValidationSpecification import scala.util.Success import sigmastate.{SInt, AvlTreeData, SLong, SType} -import sigmastate.Values.{LongConstant, Constant, EvaluatedValue, SValue, TrueLeaf, SigmaPropConstant, Value, IntConstant, BigIntArrayConstant} -import org.ergoplatform.{ErgoLikeContext, ErgoLikeTransaction, ErgoBox, ErgoScriptPredef} +import sigmastate.Values.{LongConstant, Constant, EvaluatedValue, SValue, TrueLeaf, SigmaPropConstant, Value, IntConstant, ErgoTree, BigIntArrayConstant} +import org.ergoplatform.{Context => _, _} import sigmastate.utxo.CostTable import scalan.BaseCtxTests import sigmastate.lang.{LangTests, SigmaCompiler} @@ -14,11 +15,13 @@ import sigmastate.interpreter.ContextExtension import sigmastate.interpreter.Interpreter.ScriptEnv import sigmastate.serialization.ErgoTreeSerializer import special.sigma.{ContractsTestkit, Context => DContext, _} -import special.sigma.Extensions._ +import sigmastate.eval.Extensions._ +import sigmastate.serialization.ErgoTreeSerializer.DefaultSerializer import scala.language.implicitConversions -trait ErgoScriptTestkit extends ContractsTestkit with LangTests { self: BaseCtxTests => +trait ErgoScriptTestkit extends ContractsTestkit with LangTests + with ValidationSpecification { self: BaseCtxTests => implicit lazy val IR: TestContext with IRContext = new TestContext with IRContext with CompiletimeCosting @@ -46,15 +49,15 @@ trait ErgoScriptTestkit extends ContractsTestkit with LangTests { self: BaseCtxT } - val boxA1 = newAliceBox(1, 100) - val boxA2 = newAliceBox(2, 200) + lazy val boxA1 = newAliceBox(1, 100) + lazy val boxA2 = newAliceBox(2, 200) def contract(canOpen: DContext => Boolean) = new NoEnvContract(canOpen) lazy val dsl = sigmaDslBuilder lazy val dslValue = sigmaDslBuilderValue lazy val bigSym = liftConst(dslValue.BigInt(big)) - lazy val n1Sym = liftConst(dslValue.BigInt(n1)) 
+ lazy val n1Sym = liftConst(n1) val timeout = 100 val minToRaise = 1000L @@ -127,7 +130,13 @@ trait ErgoScriptTestkit extends ContractsTestkit with LangTests { self: BaseCtxT val x = block x shouldBe expected.get } -// String.format(messageFmt, x.asInstanceOf[AnyRef], expected.get.asInstanceOf[AnyRef])) + } + + def checkExpectedFunc[A,B](block: => Rep[A => B], expected: Option[Rep[A => B]], messageFmt: String) = { + if (expected.isDefined) { + val x = block + assert(alphaEqual(x, expected.get), messageFmt) + } } def pairify(xs: Seq[Sym]): Sym = xs match { @@ -156,9 +165,9 @@ trait ErgoScriptTestkit extends ContractsTestkit with LangTests { self: BaseCtxT val graphs = Seq(str, strExp) emit(name, graphs:_*) } - checkExpected(calcF, expectedCalcF, "Calc function actual: %s, expected: %s") - checkExpected(costF, expectedCostF, "Cost function actual: %s, expected: %s") - checkExpected(sizeF, expectedSizeF, "Size function actual: %s, expected: %s") + checkExpectedFunc(calcF, expectedCalcF, "Calc function actual: %s, expected: %s") + checkExpectedFunc(costF, expectedCostF, "Cost function actual: %s, expected: %s") + checkExpectedFunc(sizeF, expectedSizeF, "Size function actual: %s, expected: %s") res } @@ -168,11 +177,12 @@ trait ErgoScriptTestkit extends ContractsTestkit with LangTests { self: BaseCtxT verifyIsProven(calcF) shouldBe Success(()) if (expectedTree.isDefined) { - val compiledTree = IR.buildTree(calcF.asRep[Context => SType#WrappedType]) - checkExpected(compiledTree, expectedTree, "Compiled Tree actual: %s, expected: %s") + val compiledProp = IR.buildTree(calcF.asRep[Context => SType#WrappedType]) + checkExpected(compiledProp, expectedTree, "Compiled Tree actual: %s, expected: %s") - val compiledTreeBytes = ErgoTreeSerializer.DefaultSerializer.serializeWithSegregation(compiledTree) - checkExpected(ErgoTreeSerializer.DefaultSerializer.deserialize(compiledTreeBytes), Some(compiledTree), + val ergoTree = compiledProp.treeWithSegregation + val compiledTreeBytes 
= DefaultSerializer.serializeErgoTree(ergoTree) + checkExpected(DefaultSerializer.deserializeErgoTree(compiledTreeBytes), Some(ergoTree), "(de)serialization round trip actual: %s, expected: %s") } diff --git a/src/test/scala/sigmastate/eval/ErgoTreeBuildingTest.scala b/src/test/scala/sigmastate/eval/ErgoTreeBuildingTest.scala index 5fdb5d5d21..421c010c41 100644 --- a/src/test/scala/sigmastate/eval/ErgoTreeBuildingTest.scala +++ b/src/test/scala/sigmastate/eval/ErgoTreeBuildingTest.scala @@ -56,6 +56,11 @@ class ErgoTreeBuildingTest extends BaseCtxTests Vector(ValDef(1, List(), SizeOf(Outputs))), BinOr(GT(ValUse(1, SInt), IntConstant(1)), LT(ValUse(1, SInt), IntConstant(1)))) ) + build(emptyEnv, "logical4", "OUTPUTS.size > 1 ^ OUTPUTS.size < 1", + BlockValue( + Vector(ValDef(1, List(), SizeOf(Outputs))), + BinXor(GT(ValUse(1, SInt), IntConstant(1)), LT(ValUse(1, SInt), IntConstant(1)))) + ) } test("context data") { diff --git a/src/test/scala/sigmastate/eval/EvaluationTest.scala b/src/test/scala/sigmastate/eval/EvaluationTest.scala index e92d900c81..e3a5e79fa6 100644 --- a/src/test/scala/sigmastate/eval/EvaluationTest.scala +++ b/src/test/scala/sigmastate/eval/EvaluationTest.scala @@ -1,7 +1,7 @@ package sigmastate.eval import org.ergoplatform.ErgoBox -import sigmastate.Values.{ConcreteCollection, IntArrayConstant, IntConstant, SigmaPropConstant, SigmaPropValue, Value} +import sigmastate.Values.{ConcreteCollection, IntArrayConstant, IntConstant, SigmaPropConstant, SigmaPropValue} import sigmastate.helpers.ContextEnrichingTestProvingInterpreter import sigmastate.interpreter.Interpreter._ import scalan.BaseCtxTests @@ -9,7 +9,7 @@ import sigmastate.lang.LangTests import scalan.util.BenchmarkUtil._ import sigmastate._ import sigmastate.basics.DLogProtocol.{DLogProverInput, ProveDlog} -import sigmastate.serialization.ErgoTreeSerializer +import sigmastate.serialization.ErgoTreeSerializer.DefaultSerializer class EvaluationTest extends BaseCtxTests with LangTests with 
ExampleContracts with ErgoScriptTestkit { @@ -57,8 +57,6 @@ class EvaluationTest extends BaseCtxTests reduce(emptyEnv, "value", "SELF.value + 1L", ctx, 11L) } - // TODO Caused by: java.lang.AssertionError: assertion failed: - // Invalid cast Cast(SizeCollElem, s1572): interface special.collection.SizeColl is not assignable from class special.collection.CSizePrim test("lambdas") { val ctx = newErgoContext(height = 1, boxToSpend) reduce(emptyEnv, "lam3", "{ val f = { (out: Box) => out.value >= 0L }; f(SELF) }", ctx, true) @@ -127,18 +125,19 @@ class EvaluationTest extends BaseCtxTests // } test("SubstConst") { - def script(pk: ProveDlog): Value[SType] = AND(EQ(IntConstant(1), IntConstant(1)), SigmaPropConstant(pk).isProven) + def script(pk: ProveDlog): SigmaPropValue = + AND(EQ(IntConstant(1), IntConstant(1)), SigmaPropConstant(pk).isProven).toSigmaProp val pk1 = DLogProverInput.random().publicImage val pk2 = DLogProverInput.random().publicImage val script1 = script(pk1) val script2 = script(pk2) - val inputBytes = ErgoTreeSerializer.DefaultSerializer.serializeWithSegregation(script1) + val inputBytes = DefaultSerializer.serializeErgoTree(script1.treeWithSegregation) val positions = IntArrayConstant(Array[Int](2)) // in ergo we have only byte array of a serialized group element val newVals = ConcreteCollection(Vector[SigmaPropValue](CreateProveDlog(DecodePoint(pk2.pkBytes))), SSigmaProp) - val expectedBytes = ErgoTreeSerializer.DefaultSerializer.serializeWithSegregation(script2) + val expectedBytes = DefaultSerializer.serializeErgoTree(script2.treeWithSegregation) val ctx = newErgoContext(height = 1, boxToSpend) reduce(emptyEnv, "SubstConst", EQ(SubstConstants(inputBytes, positions, newVals), expectedBytes), diff --git a/src/test/scala/sigmastate/helpers/ErgoTransactionValidator.scala b/src/test/scala/sigmastate/helpers/ErgoTransactionValidator.scala index 3b5c965928..bc391e858f 100644 --- a/src/test/scala/sigmastate/helpers/ErgoTransactionValidator.scala +++ 
b/src/test/scala/sigmastate/helpers/ErgoTransactionValidator.scala @@ -14,7 +14,6 @@ class ErgoLikeTestInterpreter(override val maxCost: Long = CostTable.ScriptLimit class ErgoTransactionValidator(implicit IR: IRContext) { val verifier = new ErgoLikeTestInterpreter() - //todo: check that outputs are well-formed? def validate(tx: ErgoLikeTransaction, blockchainState: BlockchainState, minerPubkey: Array[Byte], diff --git a/src/test/scala/sigmastate/helpers/SigmaTestingCommons.scala b/src/test/scala/sigmastate/helpers/SigmaTestingCommons.scala index 93a1491c41..c848dbb4f3 100644 --- a/src/test/scala/sigmastate/helpers/SigmaTestingCommons.scala +++ b/src/test/scala/sigmastate/helpers/SigmaTestingCommons.scala @@ -5,24 +5,27 @@ import java.math.BigInteger import org.ergoplatform.ErgoAddressEncoder.TestnetNetworkPrefix import org.ergoplatform.ErgoBox.{NonMandatoryRegisterId, TokenId} import org.ergoplatform.ErgoScriptPredef.TrueProp -import org.ergoplatform.{ErgoBox, ErgoBoxCandidate, ErgoLikeContext, ErgoLikeTransaction} +import org.ergoplatform._ +import org.ergoplatform.validation.ValidationSpecification import org.scalacheck.Arbitrary.arbByte import org.scalacheck.Gen -import org.scalatest.prop.{GeneratorDrivenPropertyChecks, PropertyChecks} -import org.scalatest.{Assertion, Matchers, PropSpec} -import scalan.{Nullable, RType, TestContexts, TestUtils} -import scorex.crypto.hash.Blake2b256 +import org.scalatest.prop.{PropertyChecks, GeneratorDrivenPropertyChecks} +import org.scalatest.{PropSpec, Assertion, Matchers} +import scalan.{TestUtils, TestContexts, Nullable, RType} +import scorex.crypto.hash.{Digest32, Blake2b256} import scorex.util.serialization.{VLQByteStringReader, VLQByteStringWriter} -import sigma.types.{IsPrimView, PrimViewType, View} -import sigmastate.Values.{Constant, ErgoTree, EvaluatedValue, GroupElementConstant, SValue, Value} -import sigmastate.eval.{CompiletimeCosting, Evaluation, IRContext} -import sigmastate.interpreter.Interpreter.{ScriptEnv, 
ScriptNameProp} +import sigma.types.{PrimViewType, IsPrimView, View} +import sigmastate.Values.{Constant, EvaluatedValue, SValue, Value, ErgoTree, GroupElementConstant} +import sigmastate.eval.{CompiletimeCosting, IRContext, Evaluation} +import sigmastate.interpreter.Interpreter.{ScriptNameProp, ScriptEnv} import sigmastate.interpreter.{CryptoConstants, Interpreter} -import sigmastate.lang.{SigmaCompiler, TransformingSigmaBuilder} -import sigmastate.serialization.{ErgoTreeSerializer, SigmaSerializer} +import sigmastate.lang.{TransformingSigmaBuilder, SigmaCompiler} +import sigmastate.serialization.{ValueSerializer, ErgoTreeSerializer, SigmaSerializer} import sigmastate.{SGroupElement, SType} import special.sigma._ import spire.util.Opt +import sigmastate.eval._ +import sigmastate.interpreter.CryptoConstants.EcPointType import scala.annotation.tailrec import scala.language.implicitConversions @@ -30,24 +33,26 @@ import scala.language.implicitConversions trait SigmaTestingCommons extends PropSpec with PropertyChecks with GeneratorDrivenPropertyChecks - with Matchers with TestUtils with TestContexts { - + with Matchers with TestUtils with TestContexts with ValidationSpecification { val fakeSelf: ErgoBox = createBox(0, TrueProp) + val fakeContext: ErgoLikeContext = ErgoLikeContext.dummy(fakeSelf) + //fake message, in a real-life a message is to be derived from a spending transaction val fakeMessage = Blake2b256("Hello World") - implicit def grElemConvert(leafConstant: GroupElementConstant): CryptoConstants.EcPointType = leafConstant.value + implicit def grElemConvert(leafConstant: GroupElementConstant): EcPointType = + SigmaDsl.toECPoint(leafConstant.value).asInstanceOf[EcPointType] implicit def grLeafConvert(elem: CryptoConstants.EcPointType): Value[SGroupElement.type] = GroupElementConstant(elem) val compiler = SigmaCompiler(TestnetNetworkPrefix, TransformingSigmaBuilder) def checkSerializationRoundTrip(v: SValue): Unit = { - val compiledTreeBytes = 
ErgoTreeSerializer.DefaultSerializer.serializeWithSegregation(v) + val compiledTreeBytes = ValueSerializer.serialize(v) withClue(s"(De)Serialization roundtrip failed for the tree:") { - ErgoTreeSerializer.DefaultSerializer.deserialize(compiledTreeBytes) shouldEqual v + ValueSerializer.deserialize(compiledTreeBytes) shouldEqual v } } @@ -59,13 +64,13 @@ trait SigmaTestingCommons extends PropSpec tree } - def createBox(value: Int, + def createBox(value: Long, proposition: ErgoTree, - additionalTokens: Seq[(TokenId, Long)] = Seq(), + additionalTokens: Seq[(Digest32, Long)] = Seq(), additionalRegisters: Map[NonMandatoryRegisterId, _ <: EvaluatedValue[_ <: SType]] = Map()) = ErgoBox(value, proposition, 0, additionalTokens, additionalRegisters) - def createBox(value: Int, + def createBox(value: Long, proposition: ErgoTree, creationHeight: Int) = ErgoBox(value, proposition, creationHeight, Seq(), Map(), ErgoBox.allZerosModifierId) @@ -85,7 +90,7 @@ trait SigmaTestingCommons extends PropSpec class TestingIRContext extends TestContext with IRContext with CompiletimeCosting { override def onCostingResult[T](env: ScriptEnv, tree: SValue, res: RCostingResultEx[T]): Unit = { env.get(ScriptNameProp) match { - case Some(name: String) => + case Some(name: String) if saveGraphsInFile => emit(name, res) case _ => } @@ -99,7 +104,7 @@ trait SigmaTestingCommons extends PropSpec } } - def func[A: RType, B: RType](func: String)(implicit IR: IRContext): A => B = { + def func[A: RType, B: RType](func: String, bindings: (Byte, EvaluatedValue[_ <: SType])*)(implicit IR: IRContext): A => B = { import IR._ import IR.Context._; val tA = RType[A] @@ -125,71 +130,15 @@ trait SigmaTestingCommons extends PropSpec (in: A) => { implicit val cA = tA.classTag val x = fromPrimView(in) - val context = ErgoLikeContext.dummy(createBox(0, TrueProp)) - .withBindings(1.toByte -> Constant[SType](x.asInstanceOf[SType#WrappedType], tpeA)) + val context = + ErgoLikeContext.dummy(createBox(0, TrueProp)) + 
.withBindings(1.toByte -> Constant[SType](x.asInstanceOf[SType#WrappedType], tpeA)).withBindings(bindings: _*) val calcCtx = context.toSigmaContext(IR, isCost = false) val (res, _) = valueFun(calcCtx) res.asInstanceOf[B] -// (TransformingSigmaBuilder.unliftAny(res) match { -// case Nullable(x) => // x is a value extracted from Constant -// tB match { -// case _: PrimViewType[_, _] => // need to wrap value into PrimValue -// View.mkPrimView(x) match { -// case Opt(pv) => pv -// case _ => x // cannot wrap, so just return as is -// } -// case _ => Evaluation.toDslData(x, tpeB, isCost = false) // don't need to wrap -// } -// case _ => Evaluation.toDslData(res, tpeB, isCost = false) -// }).asInstanceOf[B] } } -// def func2[A: RType, B: RType, R: RType](func: String)(implicit IR: IRContext): (A, B) => R = { -// val tA = RType[A] -// val tB = RType[B] -// val tR = RType[R] -// val tpeA = Evaluation.rtypeToSType(tA) -// val tpeB = Evaluation.rtypeToSType(tB) -// val tpeR = Evaluation.rtypeToSType(tR) -// val code = -// s"""{ -// | val func = $func -// | val res = func(getVar[${tA.name}](1).get, getVar[${tB.name}](2).get) -// | res -// |} -// """.stripMargin -// val env = Interpreter.emptyEnv -// val interProp = compiler.typecheck(env, code) -// val IR.Pair(calcF, _) = IR.doCosting(env, interProp) -// val valueFun = IR.compile[tpeR.type](IR.getDataEnv, IR.asRep[IR.Context => tpeR.WrappedType](calcF)) -// -// (in1: A, in2: B) => { -// implicit val cA = tA.classTag -// implicit val cB = tB.classTag -// val x = fromPrimView(in1) -// val y = fromPrimView(in2) -// val context = ErgoLikeContext.dummy(createBox(0, TrueProp)) -// .withBindings( -// 1.toByte -> Constant[SType](x.asInstanceOf[SType#WrappedType], tpeA), -// 2.toByte -> Constant[SType](y.asInstanceOf[SType#WrappedType], tpeB)) -// val calcCtx = context.toSigmaContext(IR, isCost = false) -// val res = valueFun(calcCtx) -// (TransformingSigmaBuilder.unliftAny(res) match { -// case Nullable(x) => // x is a value extracted 
from Constant -// tB match { -// case _: PrimViewType[_, _] => // need to wrap value into PrimValue -// View.mkPrimView(x) match { -// case Opt(pv) => pv -// case _ => x // cannot wrap, so just return as is -// } -// case _ => x // don't need to wrap -// } -// case _ => res -// }).asInstanceOf[R] -// } -// } - def assertExceptionThrown(fun: => Any, assertion: Throwable => Boolean): Unit = { try { fun diff --git a/src/test/scala/sigmastate/lang/LangTests.scala b/src/test/scala/sigmastate/lang/LangTests.scala index 5714d309da..8e4729bfa4 100644 --- a/src/test/scala/sigmastate/lang/LangTests.scala +++ b/src/test/scala/sigmastate/lang/LangTests.scala @@ -10,9 +10,11 @@ import org.scalatest.Matchers import sigmastate.basics.DLogProtocol.ProveDlog import sigmastate.SCollection.SByteArray import sigmastate.basics.ProveDHTuple -import sigmastate.eval.CostingSigmaDslBuilder import sigmastate.interpreter.CryptoConstants import sigmastate.interpreter.Interpreter.ScriptEnv +import special.sigma._ +import sigmastate.eval._ +import special.collection.Coll trait LangTests extends Matchers { @@ -41,9 +43,9 @@ trait LangTests extends Matchers { val g3 = CostingSigmaDslBuilder.GroupElement(ecp3.asInstanceOf[ECPoint]) val g4 = CostingSigmaDslBuilder.GroupElement(ecp4.asInstanceOf[ECPoint]) - protected val n1: BigInteger = BigInt(10).underlying() - protected val n2: BigInteger = BigInt(20).underlying() - protected val bigIntegerArr1: Array[BigInteger] = Array(n1, n2) + protected val n1: BigInt = BigInt(10).underlying() + protected val n2: BigInt = BigInt(20).underlying() + protected val bigIntegerArr1: Coll[BigInt] = Colls.fromItems(n1, n2) protected val big: BigInteger = BigInt(Long.MaxValue).underlying().pow(2) protected val p1: SigmaBoolean = ProveDlog(ecp1) protected val p2: SigmaBoolean = ProveDlog(ecp2) diff --git a/src/test/scala/sigmastate/lang/SigmaBinderTest.scala b/src/test/scala/sigmastate/lang/SigmaBinderTest.scala index 9e1d256a26..768210632d 100644 --- 
a/src/test/scala/sigmastate/lang/SigmaBinderTest.scala +++ b/src/test/scala/sigmastate/lang/SigmaBinderTest.scala @@ -4,15 +4,13 @@ import org.ergoplatform.{Height, Inputs, Outputs, Self} import org.ergoplatform.ErgoAddressEncoder._ import org.scalatest.prop.PropertyChecks import org.scalatest.{Matchers, PropSpec} -import scorex.util.encode.Base58 import sigmastate.Values._ import sigmastate._ import sigmastate.interpreter.Interpreter.ScriptEnv import sigmastate.lang.SigmaPredef.PredefinedFuncRegistry import sigmastate.lang.Terms._ -import sigmastate.lang.exceptions.{BinderException, InvalidArguments, InvalidTypeArguments} -import sigmastate.serialization.ValueSerializer -import sigmastate.utxo._ +import sigmastate.lang.exceptions.BinderException +import sigmastate.eval._ class SigmaBinderTest extends PropSpec with PropertyChecks with Matchers with LangTests { import StdSigmaBuilder._ @@ -46,21 +44,20 @@ class SigmaBinderTest extends PropSpec with PropertyChecks with Matchers with La bind(env, "x-y") shouldBe Minus(10, 11) bind(env, "x+y") shouldBe plus(10, 11) bind(env, "c1 && c2") shouldBe MethodCallLike(TrueLeaf, "&&", IndexedSeq(FalseLeaf)) - bind(env, "arr1") shouldBe ByteArrayConstant(Array(1, 2)) + bind(env, "arr1") shouldBe ByteArrayConstant(Array[Byte](1, 2)) bind(env, "HEIGHT - 1") shouldBe mkMinus(Height, 1) bind(env, "HEIGHT + 1") shouldBe plus(Height, 1) bind(env, "INPUTS.size > 1") shouldBe GT(Select(Inputs, "size").asIntValue, 1) - // todo: restore in https://github.com/ScorexFoundation/sigmastate-interpreter/issues/324 -// bind(env, "arr1 | arr2") shouldBe Xor(Array[Byte](1, 2), Array[Byte](10, 20)) + bind(env, "xor(arr1, arr2)") shouldBe + Apply(Ident("xor"), IndexedSeq(ByteArrayConstant(Array[Byte](1, 2)), ByteArrayConstant(Array[Byte](10, 20)))) bind(env, "arr1 ++ arr2") shouldBe MethodCallLike(Array[Byte](1, 2), "++", IndexedSeq(Array[Byte](10, 20))) // AppendBytes(Array[Byte](1, 2), Array[Byte](10,20)) bind(env, "col1 ++ col2") shouldBe 
MethodCallLike( ConcreteCollection(LongConstant(1), LongConstant(2)), "++", IndexedSeq(ConcreteCollection(LongConstant(10), LongConstant(20)))) - // todo should be g1.exp(n1) - // ( see https://github.com/ScorexFoundation/sigmastate-interpreter/issues/324 ) -// bind(env, "g1 ^ n1") shouldBe Exponentiate(g1, n1) - bind(env, "g1 * g2") shouldBe MethodCallLike(ecp1, "*", IndexedSeq(ecp2)) + bind(env, "g1.exp(n1)") shouldBe + Apply(Select(GroupElementConstant(g1), "exp"), IndexedSeq(BigIntConstant(n1))) + bind(env, "g1 * g2") shouldBe MethodCallLike(SigmaDsl.GroupElement(ecp1), "*", IndexedSeq(ecp2)) } property("predefined functions") { diff --git a/src/test/scala/sigmastate/lang/SigmaCompilerTest.scala b/src/test/scala/sigmastate/lang/SigmaCompilerTest.scala index b4a30ec4da..16198a1feb 100644 --- a/src/test/scala/sigmastate/lang/SigmaCompilerTest.scala +++ b/src/test/scala/sigmastate/lang/SigmaCompilerTest.scala @@ -7,11 +7,12 @@ import sigmastate.Values._ import sigmastate._ import sigmastate.helpers.SigmaTestingCommons import sigmastate.interpreter.Interpreter.ScriptEnv -import sigmastate.lang.Terms.{Apply, Ident, Lambda, ZKProofBlock} +import sigmastate.lang.Terms.{Lambda, MethodCall, ZKProofBlock, Apply, Ident} import sigmastate.lang.exceptions.{CosterException, InvalidArguments, TyperException} import sigmastate.serialization.ValueSerializer import sigmastate.serialization.generators.ValueGenerators -import sigmastate.utxo.{ByIndex, ExtractAmount, GetVar, SelectField} +import sigmastate.utxo.{GetVar, ExtractAmount, ByIndex, SelectField} +import sigmastate.eval._ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGenerators { import CheckingSigmaBuilder._ @@ -54,7 +55,7 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen ByIndex(ConcreteCollection(IndexedSeq(IntConstant(1)))(SInt), 0) comp(env, "Coll(Coll(1))(0)(0)") shouldBe 
ByIndex(ByIndex(ConcreteCollection(IndexedSeq(ConcreteCollection(IndexedSeq(IntConstant(1)))))(SCollection(SInt)), 0), 0) - comp(env, "arr1(0)") shouldBe ByIndex(ByteArrayConstant(Array(1, 2)), 0) + comp(env, "arr1(0)") shouldBe ByIndex(ByteArrayConstant(Array[Byte](1, 2)), 0) } property("array indexed access with default value") { @@ -68,7 +69,7 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen Some(ConcreteCollection(Vector(IntConstant(2))))), 0) comp(env, "arr1.getOrElse(999, 0.toByte)") shouldBe - ByIndex(ByteArrayConstant(Array(1, 2)), IntConstant(999), Some(ByteConstant(0))) + ByIndex(ByteArrayConstant(Array[Byte](1, 2)), IntConstant(999), Some(ByteConstant(0))) } property("predefined functions") { @@ -83,6 +84,13 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen comp(env, "getVar[Coll[Byte]](10).get") shouldBe GetVarByteArray(10).get } + property("global methods") { + comp(env, "{ groupGenerator }") shouldBe MethodCall(Global, SGlobal.groupGeneratorMethod, IndexedSeq(), SigmaTyper.emptySubst) + comp(env, "{ Global.groupGenerator }") shouldBe MethodCall(Global, SGlobal.groupGeneratorMethod, IndexedSeq(), SigmaTyper.emptySubst) + comp(env, "{ Global.xor(arr1, arr2) }") shouldBe Xor(ByteArrayConstant(arr1), ByteArrayConstant(arr2)) + comp(env, "{ xor(arr1, arr2) }") shouldBe Xor(ByteArrayConstant(arr1), ByteArrayConstant(arr2)) + } + property("user-defined functions") { comp("{ def f(i: Int) = { i + 1 }; f(2) }") shouldBe Apply( FuncValue(Vector((1,SInt)),Plus(ValUse(1,SInt), IntConstant(1))), @@ -126,9 +134,9 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen } property("fromBaseX") { - comp(""" fromBase58("r") """) shouldBe ByteArrayConstant(Array(49)) - comp(""" fromBase64("MQ") """) shouldBe ByteArrayConstant(Array(49)) - comp(""" fromBase64("M" + "Q") """) shouldBe ByteArrayConstant(Array(49)) + comp(""" fromBase58("r") """) shouldBe 
ByteArrayConstant(Array[Byte](49)) + comp(""" fromBase64("MQ") """) shouldBe ByteArrayConstant(Array[Byte](49)) + comp(""" fromBase64("M" + "Q") """) shouldBe ByteArrayConstant(Array[Byte](49)) } property("deserialize") { @@ -183,7 +191,10 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen } property("LogicalXor") { - testMissingCosting("true ^ false", BinXor(TrueLeaf, FalseLeaf)) + comp("false ^ false") shouldBe FalseLeaf + comp("true ^ true") shouldBe FalseLeaf + comp("false ^ true") shouldBe TrueLeaf + comp("true ^ false") shouldBe LogicalNot(FalseLeaf) } property("BitwiseOr") { @@ -210,7 +221,7 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen testMissingCosting("1 >>> 2", mkBitShiftRightZeroed(IntConstant(1), IntConstant(2))) } - // TODO related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/418 + // TODO soft-fork: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/418 ignore("Collection.BitShiftLeft") { comp("Coll(1,2) << 2") shouldBe mkMethodCall( @@ -219,7 +230,7 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen Vector(IntConstant(2)), Map()) } - // TODO related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/418 + // TODO soft-fork: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/418 ignore("Collection.BitShiftRight") { testMissingCosting("Coll(1,2) >> 2", mkMethodCall( @@ -230,7 +241,7 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen ) } - // TODO 1) implement method for special.collection.Coll 2) add rule to CollCoster + // TODO soft-fork: 1) implement method for special.collection.Coll 2) add rule to CollCoster ignore("Collection.BitShiftRightZeroed") { comp("Coll(true, false) >>> 2") shouldBe mkMethodCall( @@ -249,7 +260,7 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen ) } - // TODO 
enable after such lambda is implemented in CollCoster.flatMap + // TODO soft-fork: enable after such lambda is implemented in CollCoster.flatMap ignore("SCollection.flatMap") { comp("OUTPUTS.flatMap({ (out: Box) => Coll(out.value >= 1L) })") shouldBe mkMethodCall(Outputs, @@ -258,19 +269,19 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen ConcreteCollection(Vector(GE(ExtractAmount(ValUse(1, SBox)), LongConstant(1))), SBoolean))), Map()) } - // TODO should be fixed + // TODO soft-fork: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/486 ignore("SNumeric.toBytes") { comp("4.toBytes") shouldBe mkMethodCall(IntConstant(4), SInt.method("toBytes").get, IndexedSeq()) } - // TODO should be fixed + // TODO soft-fork: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/486 ignore("SNumeric.toBits") { comp("4.toBits") shouldBe mkMethodCall(IntConstant(4), SInt.method("toBits").get, IndexedSeq()) } - // TODO should be fixed + // TODO soft-fork: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/327 ignore("SBigInt.multModQ") { comp("1.toBigInt.multModQ(2.toBigInt)") shouldBe mkMethodCall(BigIntConstant(1), SBigInt.MultModQMethod, IndexedSeq(BigIntConstant(2))) @@ -281,12 +292,12 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen mkMethodCall(Self, SBox.tokensMethod, IndexedSeq()) } - // TODO add rule to OptionCoster - ignore("SOption.toColl") { - comp("getVar[Int](1).toColl") shouldBe - mkMethodCall(GetVarInt(1), - SOption.ToCollMethod.withConcreteTypes(Map(SOption.tT -> SInt)), IndexedSeq(), Map()) - } +//TODO soft-fork: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 +// property("SOption.toColl") { +// comp("getVar[Int](1).toColl") shouldBe +// mkMethodCall(GetVarInt(1), +// SOption.ToCollMethod.withConcreteTypes(Map(SOption.tT -> SInt)), IndexedSeq(), Map()) +// } property("SContext.dataInputs") 
{ comp("CONTEXT.dataInputs") shouldBe @@ -298,97 +309,46 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen mkMethodCall(GetVar(1.toByte, SAvlTree).get, SAvlTree.digestMethod, IndexedSeq()) } - // TODO add costing rule - ignore("SGroupElement.exp") { + property("SGroupElement.exp") { comp("g1.exp(1.toBigInt)") shouldBe mkMethodCall(GroupElementConstant(ecp1), SGroupElement.method("exp").get, IndexedSeq(BigIntConstant(1))) } - //TODO: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/422 - // TODO add rule to OptionCoster - ignore("SOption.map") { - testMissingCosting("getVar[Int](1).map({(i: Int) => i + 1})", + property("SOption.map") { + comp("getVar[Int](1).map({(i: Int) => i + 1})") shouldBe mkMethodCall(GetVarInt(1), SOption.MapMethod.withConcreteTypes(Map(SOption.tT -> SInt, SOption.tR -> SInt)), - IndexedSeq(Terms.Lambda( - Vector(("i", SInt)), - SInt, - Some(Plus(Ident("i", SInt).asIntValue, IntConstant(1))))), Map()) - ) + IndexedSeq(FuncValue( + Vector((1, SInt)), + Plus(ValUse(1, SInt), IntConstant(1)))), Map() + ) } - //TODO: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/422 - - // TODO add rule to OptionCoster - ignore("SOption.filter") { - testMissingCosting("getVar[Int](1).filter({(i: Int) => i > 0})", + property("SOption.filter") { + comp("getVar[Int](1).filter({(i: Int) => i > 0})") shouldBe mkMethodCall(GetVarInt(1), SOption.FilterMethod.withConcreteTypes(Map(SOption.tT -> SInt)), - IndexedSeq(Terms.Lambda( - Vector(("i", SInt)), - SBoolean, - Some(GT(Ident("i", SInt).asIntValue, IntConstant(0))))), Map()) - ) - } - - // TODO add rule to OptionCoster - ignore("SOption.flatMap") { - comp("getVar[Int](1).flatMap({(i: Int) => getVar[Int](2)})") shouldBe - mkMethodCall(GetVarInt(1), - SOption.FlatMapMethod.withConcreteTypes(Map(SOption.tT -> SInt, SOption.tR -> SInt)), - IndexedSeq(Terms.Lambda( - Vector(("i", SInt)), - SOption(SInt), - Some(GetVarInt(2)))), - 
Map()) - } - - // TODO 1) implement method for special.collection.Coll 2) add rule to CollCoster - ignore("SCollection.segmentLength") { - comp("OUTPUTS.segmentLength({ (out: Box) => out.value >= 1L }, 0)") shouldBe - mkMethodCall(Outputs, - SCollection.SegmentLengthMethod.withConcreteTypes(Map(SCollection.tIV -> SBox)), - Vector( - FuncValue( - Vector((1, SBox)), - GE(ExtractAmount(ValUse(1, SBox)), LongConstant(1))), - IntConstant(0) - ), - Map()) - } - - // TODO 1) implement method for special.collection.Coll 2) add rule to CollCoster - ignore("SCollection.indexWhere") { - comp("OUTPUTS.indexWhere({ (out: Box) => out.value >= 1L }, 0)") shouldBe - mkMethodCall(Outputs, - SCollection.IndexWhereMethod.withConcreteTypes(Map(SCollection.tIV -> SBox)), - Vector( - FuncValue( - Vector((1, SBox)), - GE(ExtractAmount(ValUse(1, SBox)), LongConstant(1))), - IntConstant(0) - ), - Map()) + IndexedSeq(FuncValue( + Vector((1, SInt)), + GT(ValUse(1, SInt), IntConstant(0)))), Map() + ) } - // TODO 1) implement method for special.collection.Coll 2) add rule to CollCoster - ignore("SCollection.lastIndexWhere") { - comp("OUTPUTS.lastIndexWhere({ (out: Box) => out.value >= 1L }, 1)") shouldBe - mkMethodCall(Outputs, - SCollection.LastIndexWhereMethod.withConcreteTypes(Map(SCollection.tIV -> SBox)), - Vector( - FuncValue( - Vector((1, SBox)), - GE(ExtractAmount(ValUse(1, SBox)), LongConstant(1))), - IntConstant(1) - ), - Map()) - } +// TODO soft-fork: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 +// property("SOption.flatMap") { +// comp("getVar[Int](1).flatMap({(i: Int) => getVar[Int](2)})") shouldBe +// mkMethodCall(GetVarInt(1), +// SOption.FlatMapMethod.withConcreteTypes(Map(SOption.tT -> SInt, SOption.tR -> SInt)), +// IndexedSeq(Terms.Lambda( +// Vector(("i", SInt)), +// SOption(SInt), +// Some(GetVarInt(2)))), +// Map()) +// } - // TODO 1) implement method for special.collection.Coll 2) add rule to CollCoster - ignore("SCollection.patch") { 
+ property("SCollection.patch") { comp("Coll(1, 2).patch(1, Coll(3), 1)") shouldBe mkMethodCall( ConcreteCollection(IntConstant(1), IntConstant(2)), @@ -397,8 +357,7 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen Map()) } - // TODO 1) implement method for special.collection.Coll 2) add rule to CollCoster - ignore("SCollection.updated") { + property("SCollection.updated") { comp("Coll(1, 2).updated(1, 1)") shouldBe mkMethodCall( ConcreteCollection(IntConstant(1), IntConstant(2)), @@ -407,8 +366,7 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen Map()) } - // TODO 1) implement method for special.collection.Coll 2) add rule to CollCoster - ignore("SCollection.updateMany") { + property("SCollection.updateMany") { comp("Coll(1, 2).updateMany(Coll(1), Coll(3))") shouldBe mkMethodCall( ConcreteCollection(IntConstant(1), IntConstant(2)), @@ -417,7 +375,7 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen Map()) } - // TODO 1) implement method for special.collection.Coll 2) add rule to CollCoster + // TODO soft-fork: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 ignore("SCollection.unionSets") { comp("Coll(1, 2).unionSets(Coll(1))") shouldBe mkMethodCall( @@ -427,7 +385,7 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen Map()) } - // TODO 1) implement method for special.collection.Coll 2) add rule to CollCoster + // TODO soft-fork: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 ignore("SCollection.diff") { comp("Coll(1, 2).diff(Coll(1))") shouldBe mkMethodCall( @@ -437,7 +395,7 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen Map()) } - // TODO 1) implement method for special.collection.Coll 2) add rule to CollCoster + // TODO soft-fork: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 
ignore("SCollection.intersect") { comp("Coll(1, 2).intersect(Coll(1))") shouldBe mkMethodCall( @@ -447,7 +405,7 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen Map()) } - // TODO 1) implement method for special.collection.Coll 2) add rule to CollCoster + // TODO soft-fork: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 ignore("SCollection.prefixLength") { comp("OUTPUTS.prefixLength({ (out: Box) => out.value >= 1L })") shouldBe mkMethodCall(Outputs, @@ -461,8 +419,7 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen Map()) } - // TODO 1) implement method for special.collection.Coll 2) add rule to CollCoster - ignore("SCollection.indexOf") { + property("SCollection.indexOf") { comp("Coll(1, 2).indexOf(1, 0)") shouldBe mkMethodCall( ConcreteCollection(IntConstant(1), IntConstant(2)), @@ -471,7 +428,7 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen Map()) } - // TODO 1) implement method for special.collection.Coll 2) add rule to CollCoster + // TODO soft-fork: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 ignore("SCollection.lastIndexOf") { comp("Coll(1, 2).lastIndexOf(1, 0)") shouldBe mkMethodCall( @@ -481,7 +438,7 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen Map()) } - // TODO 1) implement method for special.collection.Coll 2) add rule to CollCoster + // TODO soft-fork: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 ignore("SCollection.find") { comp("OUTPUTS.find({ (out: Box) => out.value >= 1L })") shouldBe mkMethodCall(Outputs, @@ -495,7 +452,7 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen Map()) } - // TODO 1) implement method for special.collection.Coll 2) add rule to CollCoster + // TODO soft-fork: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 
ignore("Collection.distinct") { comp("Coll(true, false).distinct") shouldBe mkMethodCall( @@ -505,7 +462,7 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen ) } - // TODO 1) implement method for special.collection.Coll 2) add rule to CollCoster + // TODO soft-fork: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 ignore("SCollection.startsWith") { comp("Coll(1, 2).startsWith(Coll(1), 1)") shouldBe mkMethodCall( @@ -515,7 +472,7 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen Map()) } - // TODO 1) implement method for special.collection.Coll 2) add rule to CollCoster + // TODO soft-fork: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 ignore("SCollection.endsWith") { comp("Coll(1, 2).endsWith(Coll(1))") shouldBe mkMethodCall( @@ -525,8 +482,7 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen Map()) } - // TODO 1) implement method for special.collection.Coll 2) add rule to CollCoster - ignore("SCollection.zip") { + property("SCollection.zip") { comp("Coll(1, 2).zip(Coll(1, 1))") shouldBe mkMethodCall( ConcreteCollection(IntConstant(1), IntConstant(2)), @@ -535,20 +491,7 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen ) } - // TODO 1) implement method for special.collection.Coll 2) add rule to CollCoster - ignore("SCollection.partition") { - comp("Coll(1, 2).partition({ (i: Int) => i > 0 })") shouldBe - mkMethodCall( - ConcreteCollection(IntConstant(1), IntConstant(2)), - SCollection.PartitionMethod.withConcreteTypes(Map(SCollection.tIV -> SInt)), - Vector(FuncValue( - Vector((1, SInt)), - GT(ValUse(1, SInt), IntConstant(0)) - )), - Map()) - } - - // TODO 1) implement method for special.collection.Coll 2) add rule to CollCoster + // TODO soft-fork: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 ignore("SCollection.mapReduce") { comp( 
"Coll(1, 2).mapReduce({ (i: Int) => (i > 0, i.toLong) }, { (tl: (Long, Long)) => tl._1 + tl._2 })") shouldBe @@ -576,6 +519,16 @@ class SigmaCompilerTest extends SigmaTestingCommons with LangTests with ValueGen Map()) } + property("SCollection.filter") { + comp("OUTPUTS.filter({ (out: Box) => out.value >= 1L })") shouldBe + mkFilter(Outputs, + FuncValue( + Vector((1, SBox)), + GE(ExtractAmount(ValUse(1, SBox)), LongConstant(1)) + ) + ) + } + property("failed option constructors (not supported)") { costerFail("None", 1, 1) costerFail("Some(10)", 1, 1) diff --git a/src/test/scala/sigmastate/lang/SigmaParserTest.scala b/src/test/scala/sigmastate/lang/SigmaParserTest.scala index 6abcd0e8a1..0eab9f2550 100644 --- a/src/test/scala/sigmastate/lang/SigmaParserTest.scala +++ b/src/test/scala/sigmastate/lang/SigmaParserTest.scala @@ -46,6 +46,7 @@ class SigmaParserTest extends PropSpec with PropertyChecks with Matchers with La def and(l: SValue, r: SValue) = MethodCallLike(l, "&&", IndexedSeq(r)) def or(l: SValue, r: SValue) = MethodCallLike(l, "||", IndexedSeq(r)) + def xor(l: SValue, r: SValue) = MethodCallLike(l, "^", IndexedSeq(r)) property("simple expressions") { parse("10") shouldBe IntConstant(10) @@ -70,11 +71,12 @@ class SigmaParserTest extends PropSpec with PropertyChecks with Matchers with La parse("1==1") shouldBe EQ(1, 1) parse("true && true") shouldBe and(TrueLeaf, TrueLeaf) parse("true || false") shouldBe or(TrueLeaf, FalseLeaf) + parse("true ^ false") shouldBe xor(TrueLeaf, FalseLeaf) parse("true || (true && false)") shouldBe or(TrueLeaf, and(TrueLeaf, FalseLeaf)) + parse("true || (true ^ false)") shouldBe or(TrueLeaf, xor(TrueLeaf, FalseLeaf)) parse("false || false || false") shouldBe or(or(FalseLeaf, FalseLeaf), FalseLeaf) + parse("false ^ false ^ false") shouldBe xor(xor(FalseLeaf, FalseLeaf), FalseLeaf) parse("(1>= 0)||(3L >2L)") shouldBe or(GE(1, 0), GT(3L, 2L)) - // todo: restore in https://github.com/ScorexFoundation/sigmastate-interpreter/issues/324 
-// parse("arr1 | arr2") shouldBe Xor(ByteArrayIdent("arr1"), ByteArrayIdent("arr2")) parse("arr1 ++ arr2") shouldBe MethodCallLike(Ident("arr1"), "++", IndexedSeq(Ident("arr2"))) parse("col1 ++ col2") shouldBe MethodCallLike(Ident("col1"), "++", IndexedSeq(Ident("col2"))) parse("ge.exp(n)") shouldBe Apply(Select(GEIdent("ge"), "exp"), Vector(BigIntIdent("n"))) @@ -470,8 +472,8 @@ class SigmaParserTest extends PropSpec with PropertyChecks with Matchers with La } property("function with type args") { - val tA = STypeIdent("A") - val tB = STypeIdent("B") + val tA = STypeVar("A") + val tB = STypeVar("B") parse("{ def f[A, B](x: A, y: B): (A, B) = (x, y) }") shouldBe Block(List(), Val("f", STuple(tA, tB), @@ -489,8 +491,8 @@ class SigmaParserTest extends PropSpec with PropertyChecks with Matchers with La } property("method extension(dotty)(no args) with type args") { - val tA = STypeIdent("A") - val tB = STypeIdent("B") + val tA = STypeVar("A") + val tB = STypeVar("B") parse("{ def (pairs: Coll[(A,B)]) f[A, B]: Coll[(B, A)] = pairs.magicSwap }") shouldBe Block(List(), Val("f", SCollection(STuple(tB, tA)), @@ -503,8 +505,8 @@ class SigmaParserTest extends PropSpec with PropertyChecks with Matchers with La } property("method extension(dotty)(one arg) with type args") { - val tA = STypeIdent("A") - val tB = STypeIdent("B") + val tA = STypeVar("A") + val tB = STypeVar("B") parse("{ def (pairs: Coll[(A,B)]) take[A, B](i: Int): Coll[(A, B)] = pairs.drop(i) }") shouldBe Block(List(), Val("take", SCollection(STuple(tA, tB)), @@ -539,6 +541,7 @@ class SigmaParserTest extends PropSpec with PropertyChecks with Matchers with La parse("X[(Int, Boolean)]") shouldBe ApplyTypes(Ident("X"), Seq(STuple(SInt, SBoolean))) parse("X[Int, Boolean]") shouldBe ApplyTypes(Ident("X"), Seq(SInt, SBoolean)) parse("SELF.R1[Int]") shouldBe ApplyTypes(Select(Ident("SELF"), "R1"), Seq(SInt)) + parse("SELF.getReg[Int](1)") shouldBe Apply(ApplyTypes(Select(Ident("SELF"), "getReg"), Seq(SInt)), 
IndexedSeq(IntConstant(1))) parse("SELF.R1[Int].isDefined") shouldBe Select(ApplyTypes(Select(Ident("SELF"), "R1"), Seq(SInt)),"isDefined") parse("f[Int](10)") shouldBe Apply(ApplyTypes(Ident("f"), Seq(SInt)), IndexedSeq(IntConstant(10))) parse("INPUTS.map[Int]") shouldBe ApplyTypes(Select(Ident("INPUTS"), "map"), Seq(SInt)) diff --git a/src/test/scala/sigmastate/lang/SigmaSpecializerTest.scala b/src/test/scala/sigmastate/lang/SigmaSpecializerTest.scala index ce89578081..8a61c72389 100644 --- a/src/test/scala/sigmastate/lang/SigmaSpecializerTest.scala +++ b/src/test/scala/sigmastate/lang/SigmaSpecializerTest.scala @@ -118,7 +118,7 @@ class SigmaSpecializerTest extends PropSpec spec("OUTPUTS.slice(0, 10)") shouldBe Slice(Outputs, IntConstant(0), IntConstant(10)) spec("OUTPUTS.filter({ (out: Box) => out.value >= 10 })") shouldBe - Filter(Outputs, 21, GE(ExtractAmount(TaggedBox(21)), LongConstant(10))) + Filter(Outputs, Lambda(Vector(("out", SBox)), SBoolean, GE(ExtractAmount(Ident("out", SBox).asBox), LongConstant(10)))) } property("AND flattening predefined") { @@ -152,6 +152,12 @@ class SigmaSpecializerTest extends PropSpec BinOr(BinOr(TrueLeaf, BinOr(TrueLeaf, TrueLeaf)), TrueLeaf) } + property("XOR flattening predefined") { + spec("true ^ true ^ true") shouldBe BinXor(BinXor(TrueLeaf, TrueLeaf), TrueLeaf) + spec("true ^ (true ^ true) ^ true") shouldBe + BinXor(BinXor(TrueLeaf, BinXor(TrueLeaf, TrueLeaf)), TrueLeaf) + } + property("OR flattening, CAND/COR untouched") { val sigmaBooleans1 = OR(Seq(TrueLeaf, CAND(Seq(proveDlogGen.sample.get, proveDHTGen.sample.get)).toSigmaProp.isProven)) spec(Map(), sigmaBooleans1) shouldBe sigmaBooleans1 diff --git a/src/test/scala/sigmastate/lang/SigmaTyperTest.scala b/src/test/scala/sigmastate/lang/SigmaTyperTest.scala index 4f5ce080fe..0f0846d564 100644 --- a/src/test/scala/sigmastate/lang/SigmaTyperTest.scala +++ b/src/test/scala/sigmastate/lang/SigmaTyperTest.scala @@ -12,7 +12,7 @@ import 
sigmastate.interpreter.CryptoConstants import sigmastate.interpreter.Interpreter.ScriptEnv import sigmastate.lang.SigmaPredef._ import sigmastate.lang.Terms.Select -import sigmastate.lang.exceptions.{NonApplicableMethod, TyperException, InvalidBinaryOperationParameters, MethodNotFound} +import sigmastate.lang.exceptions.TyperException import sigmastate.serialization.generators.ValueGenerators import sigmastate.utxo.{ExtractCreationInfo, Append} @@ -28,7 +28,6 @@ class SigmaTyperTest extends PropSpec with PropertyChecks with Matchers with Lan val predefinedFuncRegistry = new PredefinedFuncRegistry(builder) val binder = new SigmaBinder(env, builder, TestnetNetworkPrefix, predefinedFuncRegistry) val bound = binder.bind(parsed) - val st = new SigmaTree(bound) val typer = new SigmaTyper(builder, predefinedFuncRegistry) val typed = typer.typecheck(bound) assertSrcCtxForAllNodes(typed) @@ -45,7 +44,6 @@ class SigmaTyperTest extends PropSpec with PropertyChecks with Matchers with Lan val predefinedFuncRegistry = new PredefinedFuncRegistry(builder) val binder = new SigmaBinder(env, builder, TestnetNetworkPrefix, predefinedFuncRegistry) val bound = binder.bind(parsed) - val st = new SigmaTree(bound) val typer = new SigmaTyper(builder, predefinedFuncRegistry) val exception = the[TyperException] thrownBy typer.typecheck(bound) withClue(s"Exception: $exception, is missing source context:") { exception.source shouldBe defined } @@ -68,13 +66,10 @@ class SigmaTyperTest extends PropSpec with PropertyChecks with Matchers with Lan typecheck(env, "INPUTS", Inputs) shouldBe SCollection(SBox) typecheck(env, "INPUTS.size") shouldBe SInt typecheck(env, "INPUTS.size > 1", GT(Select(Inputs, "size", Some(SInt)), 1)) shouldBe SBoolean - // todo: restore in https://github.com/ScorexFoundation/sigmastate-interpreter/issues/324 - // typecheck(env, "arr1 | arr2", Xor(ByteArrayConstant(arr1), ByteArrayConstant(arr2))) shouldBe SByteArray + typecheck(env, "xor(arr1, arr2)", 
Xor(ByteArrayConstant(arr1), ByteArrayConstant(arr2))) shouldBe SByteArray typecheck(env, "arr1 ++ arr2", Append(ByteArrayConstant(arr1), ByteArrayConstant(arr2))) shouldBe SByteArray typecheck(env, "col1 ++ col2") shouldBe SCollection(SLong) - // todo should be g1.exp(n1) - // ( see https://github.com/ScorexFoundation/sigmastate-interpreter/issues/324 ) - // typecheck(env, "g1 ^ n1") shouldBe SGroupElement + typecheck(env, "g1.exp(n1)") shouldBe SGroupElement typecheck(env, "g1 * g2") shouldBe SGroupElement typecheck(env, "p1 || p2") shouldBe SSigmaProp typecheck(env, "p1 && p2") shouldBe SSigmaProp @@ -164,9 +159,13 @@ class SigmaTyperTest extends PropSpec with PropertyChecks with Matchers with Lan typecheck(env, "(1, 2L).size") shouldBe SInt typecheck(env, "(1, 2L)(0)") shouldBe SInt typecheck(env, "(1, 2L)(1)") shouldBe SLong + typecheck(env, "{ (a: Int) => (1, 2L)(a) }") shouldBe SFunc(IndexedSeq(SInt), SAny) + } + + // TODO soft-fork: https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 + ignore("tuple advanced operations") { typecheck(env, "(1, 2L).getOrElse(2, 3)") shouldBe SAny typecheck(env, "(1, 2L).slice(0, 2)") shouldBe SCollection(SAny) - typecheck(env, "{ (a: Int) => (1, 2L)(a) }") shouldBe SFunc(IndexedSeq(SInt), SAny) } property("types") { @@ -245,8 +244,10 @@ class SigmaTyperTest extends PropSpec with PropertyChecks with Matchers with Lan typecheck(env, "{ (a: Int) => { val b = a + 1; b } }") shouldBe SFunc(IndexedSeq(SInt), SInt) typecheck(env, "{ (a: Int, box: Box) => a + box.value }") shouldBe SFunc(IndexedSeq(SInt, SBox), SLong) + /* TODO soft-fork: https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 typecheck(env, "{ (p: (Int, GroupElement), box: Box) => p._1 > box.value && p._2.isIdentity }") shouldBe SFunc(IndexedSeq(STuple(SInt, SGroupElement), SBox), SBoolean) + */ typecheck(env, "{ (p: (Int, SigmaProp), box: Box) => p._1 > box.value && p._2.isProven }") shouldBe SFunc(IndexedSeq(STuple(SInt, 
SSigmaProp), SBox), SBoolean) @@ -273,8 +274,11 @@ class SigmaTyperTest extends PropSpec with PropertyChecks with Matchers with Lan property("type parameters") { typecheck(env, "SELF.R1[Int]") shouldBe SOption(SInt) typecheck(env, "SELF.R1[Int].isDefined") shouldBe SBoolean - typecheck(env, "SELF.R1[Int].isEmpty") shouldBe SBoolean + // TODO soft-fork: https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 + // typecheck(env, "SELF.R1[Int].isEmpty") shouldBe SBoolean typecheck(env, "SELF.R1[Int].get") shouldBe SInt + // TODO soft-fork: https://github.com/ScorexFoundation/sigmastate-interpreter/issues/416 + // typecheck(env, "SELF.getReg[Int](1)") shouldBe SOption.SIntOption typefail(env, "x[Int]", 1, 1) typefail(env, "arr1[Int]", 1, 1) typecheck(env, "SELF.R1[(Int,Boolean)]") shouldBe SOption(STuple(SInt, SBoolean)) @@ -309,7 +313,7 @@ class SigmaTyperTest extends PropSpec with PropertyChecks with Matchers with Lan val t1 = ty(s1); val t2 = ty(s2) checkTypes(t1, t2, exp) } - def unify(s1: String, s2: String, subst: (STypeIdent, SType)*): Unit = + def unify(s1: String, s2: String, subst: (STypeVar, SType)*): Unit = check(s1, s2, Some(subst.toMap)) unifyTypes(NoType, NoType) shouldBe None @@ -409,7 +413,7 @@ class SigmaTyperTest extends PropSpec with PropertyChecks with Matchers with Lan checkTypes(t1, t2, exp) } def checkAll(ts: Seq[String], exp: Option[SType]): Unit = { - val types = ts.map(ty(_)); + val types = ts.map(ty(_)) checkAllTypes(types, exp) } @@ -587,21 +591,24 @@ class SigmaTyperTest extends PropSpec with PropertyChecks with Matchers with Lan typefail(env, "true >>> false", 1, 1) } - property("Collection.BitShiftLeft") { + // TODO soft-fork: https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 + ignore("Collection.BitShiftLeft") { typecheck(env, "Coll(1,2) << 2") shouldBe SCollection(SInt) an [TyperException] should be thrownBy typecheck(env, "Coll(1,2) << true") an [TyperException] should be thrownBy typecheck(env, 
"Coll(1,2) << 2L") an [TyperException] should be thrownBy typecheck(env, "Coll(1,2) << (2L, 3)") } - property("Collection.BitShiftRight") { + // TODO soft-fork: https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 + ignore("Collection.BitShiftRight") { typecheck(env, "Coll(1,2) >> 2") shouldBe SCollection(SInt) an [TyperException] should be thrownBy typecheck(env, "Coll(1,2) >> 2L") an [TyperException] should be thrownBy typecheck(env, "Coll(1,2) >> true") an [TyperException] should be thrownBy typecheck(env, "Coll(1,2) >> (2L, 3)") } - property("Collection.BitShiftRightZeroed") { + // TODO soft-fork: https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 + ignore("Collection.BitShiftRightZeroed") { typecheck(env, "Coll(true, false) >>> 2") shouldBe SCollection(SBoolean) an [TyperException] should be thrownBy typecheck(env, "Coll(1,2) >>> 2") an [TyperException] should be thrownBy typecheck(env, "Coll(true, false) >>> true") @@ -621,9 +628,10 @@ class SigmaTyperTest extends PropSpec with PropertyChecks with Matchers with Lan typecheck(env, "SELF.tokens") shouldBe ErgoBox.STokensRegType } - property("SOption.toColl") { - typecheck(env, "getVar[Int](1).toColl") shouldBe SIntArray - } +// TODO soft-fork: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 +// property("SOption.toColl") { +// typecheck(env, "getVar[Int](1).toColl") shouldBe SIntArray +// } property("SContext.dataInputs") { typecheck(env, "CONTEXT.dataInputs") shouldBe SCollection(SBox) diff --git a/src/test/scala/sigmastate/serialization/AndSerializerSpecification.scala b/src/test/scala/sigmastate/serialization/AndSerializerSpecification.scala index 0895ce4036..f81b44fda6 100644 --- a/src/test/scala/sigmastate/serialization/AndSerializerSpecification.scala +++ b/src/test/scala/sigmastate/serialization/AndSerializerSpecification.scala @@ -2,6 +2,7 @@ package sigmastate.serialization import sigmastate.Values.{BooleanConstant, Constant, 
IntConstant} import sigmastate._ +import sigmastate.eval.Extensions._ import sigmastate.serialization.OpCodes._ import scorex.util.encode.ZigZagEncoder.encodeZigZagInt @@ -13,7 +14,7 @@ class AndSerializerSpecification extends TableSerializationSpecification { ("object", "bytes"), (AND(boolConst(true), boolConst(false)), Array[Byte](AndCode, ConcreteCollectionBooleanConstantCode, 2, 1)), - (AND(Constant[SCollection[SBoolean.type]](Array[Boolean](false, true), SCollection(SBoolean))), + (AND(Constant[SCollection[SBoolean.type]](Array[Boolean](false, true).toColl, SCollection(SBoolean))), Array[Byte](AndCode, SBoolean.embedIn(SCollectionType.CollectionTypeCode), 2, 2)), (AND(boolConst(true), EQ(IntConstant(1), IntConstant(1))), Array[Byte](AndCode, ConcreteCollectionCode, 2, SBoolean.typeCode, // collection type diff --git a/src/test/scala/sigmastate/serialization/AvlTreeSpecification.scala b/src/test/scala/sigmastate/serialization/AvlTreeSpecification.scala index e8a1e796b7..91a48d98e3 100644 --- a/src/test/scala/sigmastate/serialization/AvlTreeSpecification.scala +++ b/src/test/scala/sigmastate/serialization/AvlTreeSpecification.scala @@ -23,7 +23,7 @@ class AvlTreeSpecification extends SerializationSpecification { } property("roundtrip for an AVL tree") { - forAll(avlTreeDataGen) { t => + forAll(avlTreeGen) { t => val v = AvlTreeConstant(t) roundTripTest(v) } diff --git a/src/test/scala/sigmastate/serialization/ConcreteCollectionSerializerSpecification.scala b/src/test/scala/sigmastate/serialization/ConcreteCollectionSerializerSpecification.scala index 2af3ac39a4..1d7f96638c 100644 --- a/src/test/scala/sigmastate/serialization/ConcreteCollectionSerializerSpecification.scala +++ b/src/test/scala/sigmastate/serialization/ConcreteCollectionSerializerSpecification.scala @@ -1,7 +1,8 @@ package sigmastate.serialization -import sigmastate.Values.{ConcreteCollection, Constant, FalseLeaf, IntConstant, TaggedInt, TrueLeaf} +import sigmastate.Values.{FalseLeaf, Constant, 
TrueLeaf, IntConstant, TaggedInt, ConcreteCollection} import sigmastate._ +import sigmastate.eval.Evaluation import sigmastate.lang.Terms._ import scala.util.Random @@ -10,7 +11,8 @@ class ConcreteCollectionSerializerSpecification extends TableSerializationSpecif private def testCollectionWithConstant[T <: SType](tpe: T) = { implicit val wWrapped = wrappedTypeGen(tpe) - implicit val tag = tpe.classTag[T#WrappedType] + implicit val tT = Evaluation.stypeToRType(tpe) + implicit val tag = tT.classTag forAll { x: Array[T#WrappedType] => roundTripTest(ConcreteCollection[T](x.map(v => Constant(v, tpe)), tpe)) @@ -53,6 +55,6 @@ class ConcreteCollectionSerializerSpecification extends TableSerializationSpecif property("ConcreteCollection: deserialize collection of a crazy size") { val bytes = Array[Byte](OpCodes.ConcreteCollectionCode) ++ SigmaSerializer.startWriter().putUInt(Int.MaxValue).toBytes - an[AssertionError] should be thrownBy ValueSerializer.deserialize(bytes) + an[IllegalArgumentException] should be thrownBy ValueSerializer.deserialize(bytes) } } diff --git a/src/test/scala/sigmastate/serialization/ConstantSerializerSpecification.scala b/src/test/scala/sigmastate/serialization/ConstantSerializerSpecification.scala index 538df8b213..50aab09279 100644 --- a/src/test/scala/sigmastate/serialization/ConstantSerializerSpecification.scala +++ b/src/test/scala/sigmastate/serialization/ConstantSerializerSpecification.scala @@ -4,26 +4,38 @@ import java.math.BigInteger import org.ergoplatform._ import org.scalacheck.Arbitrary._ +import scalan.RType import sigmastate.SCollection.SByteArray -import sigmastate.Values.{FalseLeaf, Constant, SValue, TrueLeaf, BigIntConstant, GroupGenerator, ByteArrayConstant, LongConstant} +import sigmastate.Values.{LongConstant, FalseLeaf, Constant, SValue, TrueLeaf, BigIntConstant, GroupGenerator, ByteArrayConstant} import sigmastate.interpreter.CryptoConstants.EcPointType import sigmastate._ +import sigmastate.eval._ +import 
sigmastate.eval.Extensions._ import sigmastate.Values._ +import sigmastate.eval.Evaluation +import special.sigma.AvlTree +import SType.AnyOps class ConstantSerializerSpecification extends TableSerializationSpecification { private def testCollection[T <: SType](tpe: T) = { implicit val wWrapped = wrappedTypeGen(tpe) - implicit val tag = tpe.classTag[T#WrappedType] + implicit val tT = Evaluation.stypeToRType(tpe) + implicit val tag = tT.classTag forAll { xs: Array[T#WrappedType] => - roundTripTest(Constant[SCollection[T]](xs, SCollection(tpe))) - roundTripTest(Constant[SCollection[STuple]](xs.map(x => Array[Any](x, x)), SCollection(STuple(tpe, tpe)))) - roundTripTest(Constant[SCollection[SCollection[T]]](xs.map(x => Array[T#WrappedType](x, x)), SCollection(SCollection(tpe)))) - roundTripTest(Constant[SCollection[STuple]]( - xs.map { x => - val arr = Array[T#WrappedType](x, x) - Array[Any](arr, arr) - }, + implicit val tAny = RType.AnyType + roundTripTest(Constant[SCollection[T]](xs.toColl, SCollection(tpe))) + roundTripTest(Constant[SType](xs.toColl.map(x => (x, x)).asWrappedType, SCollection(STuple(tpe, tpe)))) // pairs are special case + val triples = xs.toColl.map(x => TupleColl(x, x, x)).asWrappedType + roundTripTest(Constant[SType](triples, SCollection(STuple(tpe, tpe, tpe)))) + val quartets = xs.toColl.map(x => TupleColl(x, x, x, x)).asWrappedType + roundTripTest(Constant[SType](quartets, SCollection(STuple(tpe, tpe, tpe, tpe)))) + roundTripTest(Constant[SCollection[SCollection[T]]](xs.toColl.map(x => Colls.fromItems(x, x)), SCollection(SCollection(tpe)))) + roundTripTest(Constant[SType]( + xs.toColl.map { x => + val arr = Colls.fromItems(x, x) + (arr, arr) + }.asWrappedType, SCollection(STuple(SCollection(tpe), SCollection(tpe))) )) } @@ -31,11 +43,17 @@ class ConstantSerializerSpecification extends TableSerializationSpecification { def testTuples[T <: SType](tpe: T) = { implicit val wWrapped = wrappedTypeGen(tpe) - implicit val tag = 
tpe.classTag[T#WrappedType] + implicit val tT = Evaluation.stypeToRType(tpe) + implicit val tag = tT.classTag + implicit val tAny = RType.AnyType forAll { in: (T#WrappedType, T#WrappedType) => val (x,y) = (in._1, in._2) - roundTripTest(Constant[STuple](Array[Any](x, y), STuple(tpe, tpe))) - roundTripTest(Constant[STuple](Array[Any](x, y, Array[Any](x, y)), STuple(tpe, tpe, STuple(tpe, tpe)))) + roundTripTest(Constant[SType]((x, y).asWrappedType, STuple(tpe, tpe))) + roundTripTest(Constant[SType](TupleColl(x, y, x).asWrappedType, STuple(tpe, tpe, tpe))) + roundTripTest(Constant[SType](TupleColl(x, y, x, y).asWrappedType, STuple(tpe, tpe, tpe, tpe))) + roundTripTest(Constant[STuple](Colls.fromItems[Any](x, y, (x, y)), STuple(tpe, tpe, STuple(tpe, tpe)))) + roundTripTest(Constant[STuple](Colls.fromItems[Any](x, y, TupleColl(x, y, x)), STuple(tpe, tpe, STuple(tpe, tpe, tpe)))) + roundTripTest(Constant[STuple](Colls.fromItems[Any](x, y, TupleColl(x, y, (x, y))), STuple(tpe, tpe, STuple(tpe, tpe, STuple(tpe, tpe))))) } } @@ -48,8 +66,8 @@ class ConstantSerializerSpecification extends TableSerializationSpecification { forAll { x: EcPointType => roundTripTest(Constant[SGroupElement.type](x, SGroupElement)) } forAll { x: SigmaBoolean => roundTripTest(Constant[SSigmaProp.type](x, SSigmaProp)) } forAll { x: ErgoBox => roundTripTest(Constant[SBox.type](x, SBox)) } - forAll { x: AvlTreeData => roundTripTest(Constant[SAvlTree.type](x, SAvlTree)) } - forAll { x: Array[Byte] => roundTripTest(Constant[SByteArray](x, SByteArray)) } + forAll { x: AvlTree => roundTripTest(Constant[SAvlTree.type](x, SAvlTree)) } + forAll { x: Array[Byte] => roundTripTest(Constant[SByteArray](x.toColl, SByteArray)) } forAll { t: SPredefType => testCollection(t) } forAll { t: SPredefType => testTuples(t) } } diff --git a/src/test/scala/sigmastate/serialization/DataSerializerSpecification.scala b/src/test/scala/sigmastate/serialization/DataSerializerSpecification.scala index c031fc12be..4747cef86c 100644 
--- a/src/test/scala/sigmastate/serialization/DataSerializerSpecification.scala +++ b/src/test/scala/sigmastate/serialization/DataSerializerSpecification.scala @@ -4,10 +4,16 @@ import java.math.BigInteger import org.ergoplatform.ErgoBox import org.scalacheck.Arbitrary._ +import scalan.RType import sigmastate.SCollection.SByteArray import sigmastate.Values.SigmaBoolean import sigmastate._ +import sigmastate.eval.Evaluation +import sigmastate.eval._ +import sigmastate.eval.Extensions._ import sigmastate.interpreter.CryptoConstants.EcPointType +import special.sigma.AvlTree +import SType.AnyOps class DataSerializerSpecification extends SerializationSpecification { @@ -26,16 +32,27 @@ class DataSerializerSpecification extends SerializationSpecification { def testCollection[T <: SType](tpe: T) = { implicit val wWrapped = wrappedTypeGen(tpe) - implicit val tag = tpe.classTag[T#WrappedType] + implicit val tT = Evaluation.stypeToRType(tpe) + implicit val tagT = tT.classTag + implicit val tAny = RType.AnyType forAll { xs: Array[T#WrappedType] => - roundtrip[SCollection[T]](xs, SCollection(tpe)) - roundtrip[SCollection[STuple]](xs.map(x => Array[Any](x, x)), SCollection(STuple(tpe, tpe))) - roundtrip[SCollection[SCollection[T]]](xs.map(x => Array[T#WrappedType](x, x)), SCollection(SCollection(tpe))) - roundtrip[SCollection[STuple]]( - xs.map { x => - val arr = Array[T#WrappedType](x, x) - Array[Any](arr, arr) - }, + roundtrip[SCollection[T]](xs.toColl, SCollection(tpe)) + roundtrip[SType](xs.toColl.map(x => (x, x)).asWrappedType, SCollection(STuple(tpe, tpe))) + + val triples = xs.toColl.map(x => TupleColl(x, x, x)).asWrappedType + roundtrip(triples, SCollection(STuple(tpe, tpe, tpe))) + + val quartets = xs.toColl.map(x => TupleColl(x, x, x, x)).asWrappedType + roundtrip(quartets, SCollection(STuple(tpe, tpe, tpe, tpe))) + + val nested = xs.toColl.map(x => Colls.fromItems[T#WrappedType](x, x)) + roundtrip[SCollection[SCollection[T]]](nested, SCollection(SCollection(tpe))) + 
+ roundtrip[SType]( + xs.toColl.map { x => + val arr = Colls.fromItems[T#WrappedType](x, x) + (arr, arr) + }.asWrappedType, SCollection(STuple(SCollection(tpe), SCollection(tpe))) ) } @@ -44,10 +61,15 @@ class DataSerializerSpecification extends SerializationSpecification { def testTuples[T <: SType](tpe: T) = { implicit val wWrapped = wrappedTypeGen(tpe) implicit val tag = tpe.classTag[T#WrappedType] + implicit val tAny = RType.AnyType forAll { in: (T#WrappedType, T#WrappedType) => val (x,y) = (in._1, in._2) - roundtrip[STuple](Array[Any](x, y), STuple(tpe, tpe)) - roundtrip[STuple](Array[Any](x, y, Array[Any](x, y)), STuple(tpe, tpe, STuple(tpe, tpe))) + roundtrip[SType]((x, y).asWrappedType, STuple(tpe, tpe)) + roundtrip[SType](TupleColl(x, y, x).asWrappedType, STuple(tpe, tpe, tpe)) + roundtrip[SType](TupleColl(x, y, x, y).asWrappedType, STuple(tpe, tpe, tpe, tpe)) + roundtrip[STuple](Colls.fromItems[Any](x, y, (x, y)), STuple(tpe, tpe, STuple(tpe, tpe))) + roundtrip[STuple](Colls.fromItems[Any](x, y, TupleColl(x, y, x)), STuple(tpe, tpe, STuple(tpe, tpe, tpe))) + roundtrip[STuple](Colls.fromItems[Any](x, y, TupleColl(x, y, (x, y))), STuple(tpe, tpe, STuple(tpe, tpe, STuple(tpe, tpe)))) } } @@ -60,8 +82,8 @@ class DataSerializerSpecification extends SerializationSpecification { forAll { x: EcPointType => roundtrip[SGroupElement.type](x, SGroupElement) } forAll { x: SigmaBoolean => roundtrip[SSigmaProp.type](x, SSigmaProp) } forAll { x: ErgoBox => roundtrip[SBox.type](x, SBox) } - forAll { x: AvlTreeData => roundtrip[SAvlTree.type](x, SAvlTree) } - forAll { x: Array[Byte] => roundtrip[SByteArray](x, SByteArray) } + forAll { x: AvlTree => roundtrip[SAvlTree.type](x, SAvlTree) } + forAll { x: Array[Byte] => roundtrip[SByteArray](x.toColl, SByteArray) } forAll { t: SPredefType => testCollection(t) } forAll { t: SPredefType => testTuples(t) } } diff --git a/src/test/scala/sigmastate/serialization/DeserializationResilience.scala 
b/src/test/scala/sigmastate/serialization/DeserializationResilience.scala index b47bb2092c..0ef0884b0e 100644 --- a/src/test/scala/sigmastate/serialization/DeserializationResilience.scala +++ b/src/test/scala/sigmastate/serialization/DeserializationResilience.scala @@ -1,12 +1,30 @@ package sigmastate.serialization -import sigmastate.lang.exceptions.{InputSizeLimitExceeded, InvalidOpCode, InvalidTypePrefix, ValueDeserializeCallDepthExceeded} +import java.nio.ByteBuffer + +import org.ergoplatform.Outputs +import org.ergoplatform.validation.ValidationException +import scorex.util.serialization.{VLQByteBufferReader, Reader} +import sigmastate.Values.{SigmaBoolean, Tuple, SValue, IntConstant} +import sigmastate._ +import sigmastate.lang.exceptions.{InvalidTypePrefix, InputSizeLimitExceeded, DeserializeCallDepthExceeded} import sigmastate.serialization.OpCodes._ -import scorex.util.Extensions._ -import sigmastate.{AND, SBoolean} +import sigmastate.utils.SigmaByteReader +import sigmastate.utxo.SizeOf + +import scala.collection.mutable class DeserializationResilience extends SerializationSpecification { + private def reader(bytes: Array[Byte], maxTreeDepth: Int): SigmaByteReader = { + val buf = ByteBuffer.wrap(bytes) + new SigmaByteReader( + new VLQByteBufferReader(buf), + new ConstantStore(), + resolvePlaceholdersToConstants = false, + maxTreeDepth = maxTreeDepth).mark() + } + property("empty") { an[ArrayIndexOutOfBoundsException] should be thrownBy ValueSerializer.deserialize(Array[Byte]()) } @@ -22,15 +40,15 @@ class DeserializationResilience extends SerializationSpecification { an[InvalidTypePrefix] should be thrownBy ValueSerializer.deserialize(Array.fill[Byte](2)(0)) } - property("AND/OR nested crazy deep") { + property("default value for max recursive call depth is checked") { val evilBytes = List.tabulate(SigmaSerializer.MaxTreeDepth + 1)(_ => Array[Byte](AndCode, ConcreteCollectionCode, 2, SBoolean.typeCode)) .toArray.flatten - 
an[ValueDeserializeCallDepthExceeded] should be thrownBy + an[DeserializeCallDepthExceeded] should be thrownBy SigmaSerializer.startReader(evilBytes, 0).getValue() // test other API endpoints - an[ValueDeserializeCallDepthExceeded] should be thrownBy + an[DeserializeCallDepthExceeded] should be thrownBy ValueSerializer.deserialize(evilBytes, 0) - an[ValueDeserializeCallDepthExceeded] should be thrownBy + an[DeserializeCallDepthExceeded] should be thrownBy ValueSerializer.deserialize(SigmaSerializer.startReader(evilBytes, 0)) // guard should not be tripped up by a huge collection @@ -44,7 +62,138 @@ class DeserializationResilience extends SerializationSpecification { } property("invalid op code") { - an[InvalidOpCode] should be thrownBy + an[ValidationException] should be thrownBy ValueSerializer.deserialize(Array.fill[Byte](1)(117.toByte)) } + + private def traceReaderCallDepth(expr: SValue): (IndexedSeq[Int], IndexedSeq[Int]) = { + class LoggingSigmaByteReader(r: Reader) extends + SigmaByteReader(r, + new ConstantStore(), + resolvePlaceholdersToConstants = false, + maxTreeDepth = SigmaSerializer.MaxTreeDepth) { + val levels: mutable.ArrayBuilder[Int] = mutable.ArrayBuilder.make[Int]() + override def level_=(v: Int): Unit = { + if (v >= super.level) { + // going deeper (depth is increasing), save new depth to account added depth level by the caller + levels += v + } else { + // going up (depth is decreasing), save previous depth to account added depth level for the caller + levels += super.level + } + super.level_=(v) + } + } + + class ProbeException extends Exception + + class ThrowingSigmaByteReader(r: Reader, levels: IndexedSeq[Int], throwOnNthLevelCall: Int) extends + SigmaByteReader(r, + new ConstantStore(), + resolvePlaceholdersToConstants = false, + maxTreeDepth = SigmaSerializer.MaxTreeDepth) { + private var levelCall: Int = 0 + override def level_=(v: Int): Unit = { + if (throwOnNthLevelCall == levelCall) throw new ProbeException() + levelCall += 1 + 
super.level_=(v) + } + } + + val bytes = ValueSerializer.serialize(expr) + val loggingR = new LoggingSigmaByteReader(new VLQByteBufferReader(ByteBuffer.wrap(bytes))).mark() + val _ = ValueSerializer.deserialize(loggingR) + val levels = loggingR.levels.result() + levels.nonEmpty shouldBe true + + val callDepthsBuilder = mutable.ArrayBuilder.make[Int]() + levels.zipWithIndex.foreach { case (_, levelIndex) => + val throwingR = new ThrowingSigmaByteReader(new VLQByteBufferReader(ByteBuffer.wrap(bytes)), + levels, + throwOnNthLevelCall = levelIndex).mark() + try { + val _ = ValueSerializer.deserialize(throwingR) + } catch { + case e: Exception => + e.isInstanceOf[ProbeException] shouldBe true + val stackTrace = e.getStackTrace + val depth = stackTrace.count { se => + (se.getClassName == ValueSerializer.getClass.getName && se.getMethodName == "deserialize") || + (se.getClassName == DataSerializer.getClass.getName && se.getMethodName == "deserialize") || + (se.getClassName == SigmaBoolean.serializer.getClass.getName && se.getMethodName == "parse") + } + callDepthsBuilder += depth + } + } + (levels, callDepthsBuilder.result()) + } + + property("reader.level correspondence to the serializer recursive call depth") { + forAll(logicalExprTreeNodeGen(Seq(AND.apply, OR.apply))) { expr => + val (callDepths, levels) = traceReaderCallDepth(expr) + callDepths shouldEqual levels + } + forAll(numExprTreeNodeGen) { numExpr => + val expr = EQ(numExpr, IntConstant(1)) + val (callDepths, levels) = traceReaderCallDepth(expr) + callDepths shouldEqual levels + } + forAll(sigmaBooleanGen) { sigmaBool => + val (callDepths, levels) = traceReaderCallDepth(sigmaBool) + callDepths shouldEqual levels + } + } + + property("reader.level is updated in ValueSerializer.deserialize") { + val expr = SizeOf(Outputs) + val (callDepths, levels) = traceReaderCallDepth(expr) + callDepths shouldEqual levels + callDepths shouldEqual IndexedSeq(1, 2, 2, 1) + } + + property("max recursive call depth is checked in 
reader.level for ValueSerializer calls") { + val expr = SizeOf(Outputs) + an[DeserializeCallDepthExceeded] should be thrownBy + ValueSerializer.deserialize(reader(ValueSerializer.serialize(expr), maxTreeDepth = 1)) + } + + property("reader.level is updated in DataSerializer.deserialize") { + val expr = IntConstant(1) + val (callDepths, levels) = traceReaderCallDepth(expr) + callDepths shouldEqual levels + callDepths shouldEqual IndexedSeq(1, 2, 2, 1) + } + + property("max recursive call depth is checked in reader.level for DataSerializer calls") { + val expr = IntConstant(1) + an[DeserializeCallDepthExceeded] should be thrownBy + ValueSerializer.deserialize(reader(ValueSerializer.serialize(expr), maxTreeDepth = 1)) + } + + property("reader.level is updated in SigmaBoolean.serializer.parse") { + val expr = CAND(Seq(proveDlogGen.sample.get, proveDHTGen.sample.get)) + val (callDepths, levels) = traceReaderCallDepth(expr) + callDepths shouldEqual levels + callDepths shouldEqual IndexedSeq(1, 2, 3, 4, 4, 4, 4, 3, 2, 1) + } + + property("max recursive call depth is checked in reader.level for SigmaBoolean.serializer calls") { + val expr = CAND(Seq(proveDlogGen.sample.get, proveDHTGen.sample.get)) + an[DeserializeCallDepthExceeded] should be thrownBy + ValueSerializer.deserialize(reader(ValueSerializer.serialize(expr), maxTreeDepth = 1)) + } + + property("reader.level is updated in TypeSerializer") { + val expr = Tuple(Tuple(IntConstant(1), IntConstant(1)), IntConstant(1)) + val (callDepths, levels) = traceReaderCallDepth(expr) + callDepths shouldEqual levels + callDepths shouldEqual IndexedSeq(1, 2, 3, 4, 4, 3, 3, 4, 4, 3, 2, 2, 3, 3, 2, 1) + } + + property("max recursive call depth is checked in reader.level for TypeSerializer") { + val expr = Tuple(Tuple(IntConstant(1), IntConstant(1)), IntConstant(1)) + an[DeserializeCallDepthExceeded] should be thrownBy + ValueSerializer.deserialize(reader(ValueSerializer.serialize(expr), maxTreeDepth = 3)) + } + } diff --git 
a/src/test/scala/sigmastate/serialization/ErgoTreeSerializerSpecification.scala b/src/test/scala/sigmastate/serialization/ErgoTreeSerializerSpecification.scala index abd11b3c58..d99df4da0f 100644 --- a/src/test/scala/sigmastate/serialization/ErgoTreeSerializerSpecification.scala +++ b/src/test/scala/sigmastate/serialization/ErgoTreeSerializerSpecification.scala @@ -1,30 +1,20 @@ package sigmastate.serialization -import java.nio.ByteBuffer - -import org.ergoplatform.Self -import sigmastate.Values.ErgoTree.DefaultHeader -import sigmastate.Values.{BlockValue, Constant, ConstantPlaceholder, ErgoTree, IntConstant, LongConstant, ValDef, ValUse, Value} +import sigmastate.Values.{ErgoTree, IntConstant, SigmaPropValue} import sigmastate._ import sigmastate.eval.IRContext import sigmastate.helpers.SigmaTestingCommons -import sigmastate.utils.SigmaByteReader -import sigmastate.utxo.ExtractAmount +import sigmastate.lang.exceptions.SerializerException +import sigmastate.serialization.ErgoTreeSerializer.DefaultSerializer -class ErgoTreeSerializerSpecification extends SerializationSpecification with SigmaTestingCommons { +class ErgoTreeSerializerSpecification extends SerializationSpecification + with SigmaTestingCommons { implicit lazy val IR: TestingIRContext = new TestingIRContext { beginPass(noConstPropagationPass) } - private def passThroughTreeBuilder(tree: Value[SType]): Value[SType] = { - val env = Map[String, Any]() - val IR.Pair(calcF, _) = IR.doCosting(env, tree) - val outTree = IR.buildTree(calcF, None) - outTree - } - - private def extractConstants(tree: Value[SType])(implicit IR: IRContext): ErgoTree = { + private def extractConstants(tree: SigmaPropValue)(implicit IR: IRContext): ErgoTree = { import ErgoTree._ val env = Map[String, Any]() val IR.Pair(calcF, _) = IR.doCosting(env, tree) @@ -37,10 +27,10 @@ class ErgoTreeSerializerSpecification extends SerializationSpecification with Si } property("(de)serialization round trip using treeBytes()") { - val tree = 
Plus(10, 20) + val tree = EQ(Plus(10, 20), IntConstant(30)).toSigmaProp val ergoTree = extractConstants(tree) - val bytes = ErgoTreeSerializer.DefaultSerializer.serializeErgoTree(ergoTree) - val (_, deserializedConstants, treeBytes) = ErgoTreeSerializer.DefaultSerializer + val bytes = DefaultSerializer.serializeErgoTree(ergoTree) + val (_, _, deserializedConstants, treeBytes) = DefaultSerializer .deserializeHeaderWithTreeBytes(SigmaSerializer.startReader(bytes)) deserializedConstants shouldEqual ergoTree.constants val r = SigmaSerializer.startReader(treeBytes, new ConstantStore(deserializedConstants), @@ -50,63 +40,50 @@ class ErgoTreeSerializerSpecification extends SerializationSpecification with Si } property("Constant extraction via compiler pass: (de)serialization round trip") { - val prop = Plus(10, 20) + val prop = EQ(Plus(10, 20), IntConstant(30)).toSigmaProp val ergoTree = extractConstants(prop) - val bytes = ErgoTreeSerializer.DefaultSerializer.serializeErgoTree(ergoTree) - val deserializedTree = ErgoTreeSerializer.DefaultSerializer.deserializeErgoTree(bytes) + val bytes = DefaultSerializer.serializeErgoTree(ergoTree) + val deserializedTree = DefaultSerializer.deserializeErgoTree(bytes) deserializedTree shouldEqual ergoTree } + property("failed type check on tree deserialization") { + forAll(numExprTreeNodeGen) { numProp => + val bytes = DefaultSerializer.serializeErgoTree(extractConstants(numProp.asInstanceOf[SigmaPropValue])) + an[SerializerException] should be thrownBy DefaultSerializer.deserializeErgoTree(bytes) + an[SerializerException] should be thrownBy DefaultSerializer.deserializeErgoTree(bytes) + } + } + property("Constant extraction during serialization: (de)serialization round trip") { - val tree = Plus(10, 20) - val bytes = ErgoTreeSerializer.DefaultSerializer.serializeWithSegregation(tree) - val (_, deserializedConstants, _) = ErgoTreeSerializer.DefaultSerializer. 
+ val tree = EQ(Plus(10, 20), IntConstant(30)).toSigmaProp.treeWithSegregation + val bytes = DefaultSerializer.serializeErgoTree(tree) + val (_, _, deserializedConstants, _) = DefaultSerializer. deserializeHeaderWithTreeBytes(SigmaSerializer.startReader(bytes)) - deserializedConstants.length shouldBe 2 - val deserializedTree = ErgoTreeSerializer.DefaultSerializer.deserialize(bytes) + deserializedConstants.length shouldBe 3 + val deserializedTree = DefaultSerializer.deserializeErgoTree(bytes) deserializedTree shouldEqual tree } property("tree with placeholders bytes should be equal if only constants are different") { - val tree1 = Plus(10, 20) - val tree2 = Plus(30, 40) - val bytes1 = ErgoTreeSerializer.DefaultSerializer.serializeWithSegregation(tree1) - val bytes2 = ErgoTreeSerializer.DefaultSerializer.serializeWithSegregation(tree2) - val (_, _, treeBytes1) = ErgoTreeSerializer.DefaultSerializer + val tree1 = EQ(Plus(10, 20), IntConstant(30)).toSigmaProp.treeWithSegregation + val tree2 = EQ(Plus(30, 40), IntConstant(70)).toSigmaProp.treeWithSegregation + val bytes1 = DefaultSerializer.serializeErgoTree(tree1) + val bytes2 = DefaultSerializer.serializeErgoTree(tree2) + val (_, _, _, treeBytes1) = DefaultSerializer .deserializeHeaderWithTreeBytes(SigmaSerializer.startReader(bytes1)) - val (_, _, treeBytes2) = ErgoTreeSerializer.DefaultSerializer + val (_, _, _, treeBytes2) = DefaultSerializer .deserializeHeaderWithTreeBytes(SigmaSerializer.startReader(bytes2)) treeBytes1 shouldEqual treeBytes2 } - property("(de)serialize round trip (without constants)") { - val prop = EQ(ExtractAmount(Self), LongConstant(0)).toSigmaProp - val tree = ErgoTree(DefaultHeader, IndexedSeq(), prop, prop) - val bytes = ErgoTreeSerializer.DefaultSerializer.serializeErgoTree(tree) - val deserializedProp = ErgoTreeSerializer.DefaultSerializer.deserializeErgoTree(bytes).proposition - deserializedProp shouldEqual prop - } - - property("AND expr gen: (de)serializer round trip") { - 
forAll(logicalExprTreeNodeGen(Seq(AND.apply))) { tree => - val processedTree = passThroughTreeBuilder(tree) - val ergoTree = extractConstants(processedTree) - val bytes = ErgoTreeSerializer.DefaultSerializer.serializeErgoTree(ergoTree) - val deserializedTree = ErgoTreeSerializer.DefaultSerializer.deserializeErgoTree(bytes) - deserializedTree shouldEqual ergoTree - } - } - - property("AND expr gen: deserialization round trip with constant injection") { - forAll(logicalExprTreeNodeGen(Seq(AND.apply))) { tree => - val processedTree = passThroughTreeBuilder(tree) - val ergoTree = extractConstants(processedTree) - val bytes = ErgoTreeSerializer.DefaultSerializer.serializeErgoTree(ergoTree) - val (_, deserializedConstants, treeBytes) = ErgoTreeSerializer.DefaultSerializer - .deserializeHeaderWithTreeBytes(SigmaSerializer.startReader(bytes)) - val c = new ConstantStore(deserializedConstants) - val deserializedTree = ErgoTreeSerializer.DefaultSerializer.deserializeWithConstantInjection(c, treeBytes) - deserializedTree shouldEqual processedTree + property("(de)serialize round trip") { + // increased minimum number of successes + // for better coverage of all possible combinations (with/without constants, segregation option, etc.) 
+ forAll(ergoTreeGen, minSuccessful(500)) { tree: ErgoTree => + val bytes = DefaultSerializer.serializeErgoTree(tree) + val deserializedTree = DefaultSerializer.deserializeErgoTree(bytes) + deserializedTree shouldEqual tree } } diff --git a/src/test/scala/sigmastate/serialization/GroupElementSerializerSpecification.scala b/src/test/scala/sigmastate/serialization/GroupElementSerializerSpecification.scala index d4860358d4..dd17ce069f 100644 --- a/src/test/scala/sigmastate/serialization/GroupElementSerializerSpecification.scala +++ b/src/test/scala/sigmastate/serialization/GroupElementSerializerSpecification.scala @@ -1,7 +1,7 @@ package sigmastate.serialization import sigmastate.interpreter.CryptoConstants - +import sigmastate.eval._ class GroupElementSerializerSpecification extends SerializationSpecification { diff --git a/src/test/scala/sigmastate/serialization/MethodCallSerializerSpecification.scala b/src/test/scala/sigmastate/serialization/MethodCallSerializerSpecification.scala index 0124cbfb74..e84f2f64a4 100644 --- a/src/test/scala/sigmastate/serialization/MethodCallSerializerSpecification.scala +++ b/src/test/scala/sigmastate/serialization/MethodCallSerializerSpecification.scala @@ -19,7 +19,7 @@ class MethodCallSerializerSpecification extends SerializationSpecification { property("MethodCall deserialization round trip (non-generic method)") { val expr = MethodCall(Outputs, - SMethod(SCollection, "size", SFunc(SCollection[SBox.type], SInt), 1), + SCollection.SizeMethod.withConcreteTypes(Map(SCollection.tIV -> SBox)), Vector(), Map() ) diff --git a/src/test/scala/sigmastate/serialization/OrSerializerSpecification.scala b/src/test/scala/sigmastate/serialization/OrSerializerSpecification.scala index b65a0b79e3..124bdfb637 100644 --- a/src/test/scala/sigmastate/serialization/OrSerializerSpecification.scala +++ b/src/test/scala/sigmastate/serialization/OrSerializerSpecification.scala @@ -2,6 +2,7 @@ package sigmastate.serialization import 
sigmastate.Values.{BooleanConstant, Constant, IntConstant} import sigmastate._ +import sigmastate.eval.Extensions._ import sigmastate.serialization.OpCodes._ import scorex.util.encode.ZigZagEncoder.encodeZigZagInt @@ -13,7 +14,7 @@ class OrSerializerSpecification extends TableSerializationSpecification { ("object", "bytes"), (OR(boolConst(true), boolConst(false)), Array[Byte](OrCode, ConcreteCollectionBooleanConstantCode, 2, 1)), - (OR(Constant[SCollection[SBoolean.type]](Array[Boolean](false, true), SCollection(SBoolean))), + (OR(Constant[SCollection[SBoolean.type]](Array[Boolean](false, true).toColl, SCollection(SBoolean))), Array[Byte](OrCode, SBoolean.embedIn(SCollectionType.CollectionTypeCode), 2, 2)), (OR(boolConst(true), EQ(IntConstant(1), IntConstant(1))), Array[Byte](OrCode, ConcreteCollectionCode, 2, SBoolean.typeCode, // collection type diff --git a/src/test/scala/sigmastate/serialization/PDHTSerializerSpecification.scala b/src/test/scala/sigmastate/serialization/PDHTSerializerSpecification.scala index cd00750883..8490580c37 100644 --- a/src/test/scala/sigmastate/serialization/PDHTSerializerSpecification.scala +++ b/src/test/scala/sigmastate/serialization/PDHTSerializerSpecification.scala @@ -1,6 +1,7 @@ package sigmastate.serialization import sigmastate.basics.ProveDHTuple +import sigmastate.utils.GenSerializers import sigmastate.utxo.CostTableStat class PDHTSerializerSpecification extends SerializationSpecification { @@ -11,7 +12,9 @@ class PDHTSerializerSpecification extends SerializationSpecification { } // In IntelliJ IDEA this test is executed last, at this point all statistics has been collected // We output it here in the console - println(CostTableStat.costTableString) +// println(CostTableStat.costTableString) +// println(ValueSerializer.printSerInfo()) +// GenSerializers.generateSerSpec() } } diff --git a/src/test/scala/sigmastate/serialization/SerializationSpecification.scala 
b/src/test/scala/sigmastate/serialization/SerializationSpecification.scala index 659e3bd61d..85d7d92cbb 100644 --- a/src/test/scala/sigmastate/serialization/SerializationSpecification.scala +++ b/src/test/scala/sigmastate/serialization/SerializationSpecification.scala @@ -1,8 +1,9 @@ package sigmastate.serialization +import org.ergoplatform.validation.ValidationSpecification import org.scalacheck.Gen -import org.scalatest.prop.{GeneratorDrivenPropertyChecks, PropertyChecks, TableDrivenPropertyChecks} -import org.scalatest.{Assertion, Matchers, PropSpec} +import org.scalatest.prop.{PropertyChecks, TableDrivenPropertyChecks, GeneratorDrivenPropertyChecks} +import org.scalatest.{PropSpec, Assertion, Matchers} import org.scalacheck.Arbitrary._ import sigmastate.Values._ import sigmastate.SType @@ -17,7 +18,8 @@ trait SerializationSpecification extends PropSpec with ConcreteCollectionGenerators with OpcodesGen with TransformerGenerators - with RelationGenerators { + with RelationGenerators + with ValidationSpecification { protected def roundTripTest[V <: Value[_ <: SType]](v: V): Assertion = { val bytes = ValueSerializer.serialize(v) @@ -27,7 +29,8 @@ trait SerializationSpecification extends PropSpec protected def predefinedBytesTest[V <: Value[_ <: SType]](v: V, bytes: Array[Byte]): Assertion = { ValueSerializer.serialize(v) shouldEqual bytes - ValueSerializer.deserialize(bytes) shouldEqual v + val dv = ValueSerializer.deserialize(bytes) + dv shouldEqual v } //check that pos and consumed are being implented correctly diff --git a/src/test/scala/sigmastate/serialization/SubstConstantsSerializerSpecification.scala b/src/test/scala/sigmastate/serialization/SubstConstantsSerializerSpecification.scala index 4cc1a57cd3..b573d06758 100644 --- a/src/test/scala/sigmastate/serialization/SubstConstantsSerializerSpecification.scala +++ b/src/test/scala/sigmastate/serialization/SubstConstantsSerializerSpecification.scala @@ -1,13 +1,15 @@ package sigmastate.serialization -import 
sigmastate.Values.{ConcreteCollection, IntArrayConstant, IntValue} -import sigmastate.{SInt, SubstConstants} +import sigmastate.Values.{ConcreteCollection, IntArrayConstant, IntConstant, IntValue} +import sigmastate.serialization.ErgoTreeSerializer.DefaultSerializer +import sigmastate.{EQ, SInt, SubstConstants} class SubstConstantsSerializerSpecification extends SerializationSpecification { property("SubstConstant deserialization round trip") { - forAll(numExprTreeNodeGen) { tree => - val bytes = ErgoTreeSerializer.DefaultSerializer.serializeWithSegregation(tree) + forAll(numExprTreeNodeGen) { prop => + val tree = EQ(prop, IntConstant(1)).toSigmaProp.treeWithSegregation + val bytes = DefaultSerializer.serializeErgoTree(tree) val newVals = ConcreteCollection(Vector[IntValue](1), SInt) val expr = SubstConstants(bytes, IntArrayConstant(Array(0)), newVals) roundTripTest(expr) diff --git a/src/test/scala/sigmastate/serialization/TwoArgumentSerializerSpecification.scala b/src/test/scala/sigmastate/serialization/TwoArgumentSerializerSpecification.scala index bd2c6fd3de..8d7a4dfa88 100644 --- a/src/test/scala/sigmastate/serialization/TwoArgumentSerializerSpecification.scala +++ b/src/test/scala/sigmastate/serialization/TwoArgumentSerializerSpecification.scala @@ -23,9 +23,9 @@ class TwoArgumentSerializerSpecification extends TableSerializationSpecification Array[Byte](MinCode, SLong.typeCode, encodeZigZagLong(2).toByte, SLong.typeCode, encodeZigZagLong(3).toByte)), (Max(LongConstant(2), LongConstant(3)), Array[Byte](MaxCode, SLong.typeCode, encodeZigZagLong(2).toByte, SLong.typeCode, encodeZigZagLong(3).toByte)), - (Xor(ByteArrayConstant(Array(1, 2, 3)), ByteArrayConstant(Array(3, 4))), + (Xor(ByteArrayConstant(Array[Byte](1, 2, 3)), ByteArrayConstant(Array[Byte](3, 4))), Array[Byte](XorCode, ByteArrayTypeCode, 3, 1, 2, 3, ByteArrayTypeCode, 2, 3, 4)), - (Append(ByteArrayConstant(Array(1, 2, 3)), ByteArrayConstant(Array(3, 4))), + (Append(ByteArrayConstant(Array[Byte](1, 
2, 3)), ByteArrayConstant(Array[Byte](3, 4))), Array[Byte](AppendCode, ByteArrayTypeCode, 3, 1, 2, 3, ByteArrayTypeCode, 2, 3, 4)) ) diff --git a/src/test/scala/sigmastate/serialization/TypeSerializerSpecification.scala b/src/test/scala/sigmastate/serialization/TypeSerializerSpecification.scala index 92a19ddd37..0cc2ef75b7 100644 --- a/src/test/scala/sigmastate/serialization/TypeSerializerSpecification.scala +++ b/src/test/scala/sigmastate/serialization/TypeSerializerSpecification.scala @@ -3,8 +3,6 @@ package sigmastate.serialization import org.scalacheck.Arbitrary._ import org.scalatest.Assertion import sigmastate._ -import sigmastate.lang.exceptions.TypeDeserializeCallDepthExceeded -import scorex.util.Extensions._ class TypeSerializerSpecification extends SerializationSpecification { @@ -60,7 +58,6 @@ class TypeSerializerSpecification extends SerializationSpecification { forAll { t: SPredefType => whenever(t.isInstanceOf[SEmbeddable]) { val e = t.asInstanceOf[SEmbeddable] - val tCode = e.typeCode val tupCode = e.embedIn(PairSymmetricTypeCode) roundtrip(SCollection(STuple(e, e)), Array[Byte](CollectionTypeCode, tupCode)) roundtrip(SCollection(SOption(e)), Array[Byte](CollectionTypeCode, e.embedIn(OptionTypeCode))) @@ -94,12 +91,6 @@ class TypeSerializerSpecification extends SerializationSpecification { Array[Byte](TupleTypeCode, 5, SLong.typeCode, SLong.typeCode, SByte.typeCode, SBoolean.typeCode, SInt.typeCode)) } - property("tuple of tuples crazy deep") { - val bytes = List.tabulate(SigmaSerializer.MaxTreeDepth + 1)(_ => Array[Byte](TupleTypeCode, 2)) - .toArray.flatten - an[TypeDeserializeCallDepthExceeded] should be thrownBy SigmaSerializer.startReader(bytes, 0).getType() - } - property("STypeIdent serialization roundtrip") { forAll(sTypeIdentGen) { ti => roundtrip(ti) diff --git a/src/test/scala/sigmastate/serialization/generators/ConcreteCollectionGenerators.scala b/src/test/scala/sigmastate/serialization/generators/ConcreteCollectionGenerators.scala index 
a2400abe8f..8cc292e640 100644 --- a/src/test/scala/sigmastate/serialization/generators/ConcreteCollectionGenerators.scala +++ b/src/test/scala/sigmastate/serialization/generators/ConcreteCollectionGenerators.scala @@ -22,5 +22,5 @@ trait ConcreteCollectionGenerators { self: ValueGenerators => implicit val arbCCOfIntConstant: Arbitrary[ConcreteCollection[SInt.type]] = Arbitrary(intConstCollectionGen) implicit val arbCCOfBoolConstant: Arbitrary[ConcreteCollection[SBoolean.type]] = Arbitrary(concreteCollectionGen[SBoolean.type](booleanConstGen)) - implicit val arbCCOfSigmaPropConstant: Arbitrary[ConcreteCollection[SSigmaProp.type]] = Arbitrary(concreteCollectionGen[SSigmaProp.type](sigmaPropGen)) + implicit val arbCCOfSigmaPropConstant: Arbitrary[ConcreteCollection[SSigmaProp.type]] = Arbitrary(concreteCollectionGen[SSigmaProp.type](sigmaPropValueGen)) } diff --git a/src/test/scala/sigmastate/serialization/generators/TransformerGenerators.scala b/src/test/scala/sigmastate/serialization/generators/TransformerGenerators.scala index 2432531b44..b7fc2b2103 100644 --- a/src/test/scala/sigmastate/serialization/generators/TransformerGenerators.scala +++ b/src/test/scala/sigmastate/serialization/generators/TransformerGenerators.scala @@ -1,17 +1,16 @@ package sigmastate.serialization.generators -import org.ergoplatform.{ErgoAddressEncoder, P2PKAddress} +import org.ergoplatform._ +import org.ergoplatform.validation.ValidationSpecification import org.scalacheck.Arbitrary._ import org.scalacheck.{Arbitrary, Gen} -import scorex.util.encode.{Base58, Base64} -import sigmastate.Values.{FalseLeaf, IntConstant, TrueLeaf, Value} +import scorex.util.encode.{Base64, Base58} import sigmastate._ -import sigmastate.Values._ +import sigmastate.Values.{TrueLeaf, Value, IntConstant, _} import sigmastate.lang.TransformingSigmaBuilder import sigmastate.utxo._ -import sigmastate.lang.Terms._ -trait TransformerGenerators { +trait TransformerGenerators extends ValidationSpecification { self: 
ValueGenerators with ConcreteCollectionGenerators => import TransformingSigmaBuilder._ @@ -74,9 +73,8 @@ trait TransformerGenerators { val filterGen: Gen[Filter[SInt.type]] = for { col1 <- arbCCOfIntConstant.arbitrary - id <- Arbitrary.arbitrary[Byte] - condition <- booleanConstGen - } yield mkFilter(col1, id, condition).asInstanceOf[Filter[SInt.type]] + condition <- funcValueGen + } yield mkFilter(col1, condition).asInstanceOf[Filter[SInt.type]] val appendGen: Gen[Append[SInt.type]] = for { col1 <- arbCCOfIntConstant.arbitrary @@ -102,7 +100,6 @@ trait TransformerGenerators { val extractRegisterAsGen: Gen[ExtractRegisterAs[SInt.type]] = for { input <- arbTaggedBox.arbitrary r <- arbRegisterIdentifier.arbitrary - dvInt <- arbIntConstants.arbitrary } yield ExtractRegisterAs(input, r)(SInt) val extractCreationInfoGen: Gen[ExtractCreationInfo] = arbTaggedBox.arbitrary.map { b => mkExtractCreationInfo(b).asInstanceOf[ExtractCreationInfo] } @@ -262,8 +259,6 @@ trait TransformerGenerators { body <- logicalExprTreeNodeGen(Seq(AND.apply)) } yield FuncValue(args, body) - val sigmaPropValueGen: Gen[SigmaPropValue] = sigmaBooleanGen.map(SigmaPropConstant(_)) - val sigmaAndGen: Gen[SigmaAnd] = for { num <- Gen.chooseNum(1, 10) items <- Gen.listOfN(num, sigmaPropValueGen) @@ -281,4 +276,12 @@ trait TransformerGenerators { val byteArrayToLongGen: Gen[ByteArrayToLong] = arbByteArrayConstant.arbitrary.map { v => mkByteArrayToLong(v).asInstanceOf[ByteArrayToLong] } + + val ergoTreeGen: Gen[ErgoTree] = for { + propWithConstants <- logicalExprTreeNodeGen(Seq(AND.apply)).map(_.toSigmaProp) + propWithoutConstants <- Gen.oneOf(Seq[SigmaPropValue](EQ(SizeOf(Inputs), SizeOf(Outputs)).toSigmaProp)) + prop <- Gen.oneOf(propWithConstants, propWithoutConstants) + treeBuilder <- Gen.oneOf(Seq[SigmaPropValue => ErgoTree](ErgoTree.withSegregation, + ErgoTree.withoutSegregation)) + } yield treeBuilder(prop) } diff --git a/src/test/scala/sigmastate/serialization/generators/TypeGenerators.scala 
b/src/test/scala/sigmastate/serialization/generators/TypeGenerators.scala index e6e48af120..02d0a4883f 100644 --- a/src/test/scala/sigmastate/serialization/generators/TypeGenerators.scala +++ b/src/test/scala/sigmastate/serialization/generators/TypeGenerators.scala @@ -43,7 +43,7 @@ trait TypeGenerators { )) } yield STuple(values.toIndexedSeq) - val sTypeIdentGen: Gen[STypeIdent] = for { + val sTypeIdentGen: Gen[STypeVar] = for { name <- arbString.arbitrary.suchThat(_.length <= 50) - } yield STypeIdent(name) + } yield STypeVar(name) } diff --git a/src/test/scala/sigmastate/serialization/generators/ValueGenerators.scala b/src/test/scala/sigmastate/serialization/generators/ValueGenerators.scala index 7f5e17afce..f4f0c5131b 100644 --- a/src/test/scala/sigmastate/serialization/generators/ValueGenerators.scala +++ b/src/test/scala/sigmastate/serialization/generators/ValueGenerators.scala @@ -3,7 +3,8 @@ package sigmastate.serialization.generators import org.ergoplatform import org.ergoplatform._ import org.ergoplatform.ErgoBox._ -import org.ergoplatform.ErgoScriptPredef.{TrueProp, FalseProp} +import org.ergoplatform.ErgoScriptPredef.{FalseProp, TrueProp} +import org.ergoplatform.validation._ import org.scalacheck.Arbitrary._ import org.scalacheck.{Arbitrary, Gen} import scorex.crypto.authds.{ADDigest, ADKey} @@ -15,12 +16,16 @@ import sigmastate.basics.DLogProtocol.ProveDlog import sigmastate.basics.ProveDHTuple import sigmastate.interpreter.CryptoConstants.EcPointType import sigmastate.interpreter.{ProverResult, ContextExtension, CryptoConstants} +import sigmastate.eval._ +import sigmastate.eval.Extensions._ +import special.collection.Coll +import special.sigma._ import scala.collection.JavaConverters._ import scala.collection.mutable.ListBuffer import scala.reflect.ClassTag -trait ValueGenerators extends TypeGenerators { +trait ValueGenerators extends TypeGenerators with ValidationSpecification { import sigmastate.lang.TransformingSigmaBuilder._ @@ -43,12 +48,17 @@ 
trait ValueGenerators extends TypeGenerators { implicit val arbRegisterIdentifier: Arbitrary[RegisterId] = Arbitrary(registerIdentifierGen) - implicit val arbBigInteger = Arbitrary(arbBigInt.arbitrary.map(_.bigInteger)) - implicit val arbGroupElement = Arbitrary(Gen.const(()).flatMap(_ => CryptoConstants.dlogGroup.createRandomGenerator())) + implicit val arbBigInteger = Arbitrary(Arbitrary.arbBigInt.arbitrary.map(_.bigInteger)) + implicit val arbBigInt = Arbitrary(arbBigInteger.arbitrary.map(SigmaDsl.BigInt(_))) + implicit val arbEcPointType = Arbitrary(Gen.const(()).flatMap(_ => CryptoConstants.dlogGroup.createRandomGenerator())) + implicit val arbGroupElement = Arbitrary(arbEcPointType.arbitrary.map(SigmaDsl.GroupElement(_))) implicit val arbSigmaBoolean: Arbitrary[SigmaBoolean] = Arbitrary(Gen.oneOf(proveDHTGen, proveDHTGen)) - implicit val arbSigmaProp: Arbitrary[SigmaPropValue] = Arbitrary(sigmaPropGen) - implicit val arbBox = Arbitrary(ergoBoxGen) + implicit val arbSigmaProp: Arbitrary[SigmaProp] = Arbitrary(sigmaPropGen) + implicit val arbSigmaPropValue: Arbitrary[SigmaPropValue] = Arbitrary(sigmaPropValueGen) + implicit val arbErgoBox = Arbitrary(ergoBoxGen) + implicit val arbBox = Arbitrary(ergoBoxGen.map(SigmaDsl.Box)) implicit val arbAvlTreeData = Arbitrary(avlTreeDataGen) + implicit val arbAvlTree = Arbitrary(avlTreeGen) implicit val arbBoxCandidate = Arbitrary(ergoBoxCandidateGen(tokensGen.sample.get)) implicit val arbTransaction = Arbitrary(ergoTransactionGen) implicit val arbContextExtension = Arbitrary(contextExtensionGen) @@ -68,7 +78,7 @@ trait ValueGenerators extends TypeGenerators { val stringConstGen: Gen[StringConstant] = arbString.arbitrary.map { v => mkConstant[SString.type](v, SString) } val bigIntConstGen: Gen[BigIntConstant] = - arbBigInt.arbitrary.map { v => mkConstant[SBigInt.type](v.bigInteger, SBigInt) } + arbBigInt.arbitrary.map { v => mkConstant[SBigInt.type](v, SBigInt) } val byteArrayConstGen: Gen[CollectionConstant[SByte.type]] = 
for { length <- Gen.chooseNum(1, 100) bytes <- Gen.listOfN(length, arbByte.arbitrary) @@ -100,8 +110,18 @@ trait ValueGenerators extends TypeGenerators { vv <- groupElementGen } yield ProveDHTuple(gv, hv, uv, vv) - val sigmaBooleanGen: Gen[SigmaBoolean] = Gen.oneOf(proveDlogGen, proveDHTGen) - val sigmaPropGen: Gen[SigmaPropValue] = + def sigmaTreeNodeGen: Gen[SigmaBoolean] = for { + left <- sigmaBooleanGen + right <- sigmaBooleanGen + node <- Gen.oneOf( + COR(Seq(left, right)), + CAND(Seq(left, right)) + ) + } yield node + + val sigmaBooleanGen: Gen[SigmaBoolean] = Gen.oneOf(proveDlogGen, proveDHTGen, Gen.delay(sigmaTreeNodeGen)) + val sigmaPropGen: Gen[SigmaProp] = sigmaBooleanGen.map(SigmaDsl.SigmaProp) + val sigmaPropValueGen: Gen[SigmaPropValue] = Gen.oneOf(proveDlogGen.map(SigmaPropConstant(_)), proveDHTGen.map(SigmaPropConstant(_))) val registerIdentifierGen: Gen[RegisterId] = Gen.oneOf(R0, R1, R2, R3, R4, R5, R6, R7, R8, R9) @@ -121,7 +141,7 @@ trait ValueGenerators extends TypeGenerators { } } - def additionalTokensGen(cnt: Byte): Seq[Gen[(TokenId, Long)]] = + def additionalTokensGen(cnt: Byte): Seq[Gen[(Digest32, Long)]] = (0 until cnt).map { _ => for { id <- Digest32 @@ boxIdGen @@ -190,7 +210,9 @@ trait ValueGenerators extends TypeGenerators { vl <- arbOption[Int](Arbitrary(unsignedIntGen)).arbitrary } yield AvlTreeData(ADDigest @@ digest, flags, keyLength, vl) - def avlTreeConstantGen: Gen[AvlTreeConstant] = avlTreeDataGen.map { v => AvlTreeConstant(v) } + def avlTreeGen: Gen[AvlTree] = avlTreeDataGen.map(SigmaDsl.avlTree) + + def avlTreeConstantGen: Gen[AvlTreeConstant] = avlTreeGen.map { v => AvlTreeConstant(v) } implicit def arrayGen[T: Arbitrary : ClassTag]: Gen[Array[T]] = for { length <- Gen.chooseNum(1, 100) @@ -203,11 +225,11 @@ trait ValueGenerators extends TypeGenerators { case SShort => arbShort case SInt => arbInt case SLong => arbLong - case SBigInt => arbBigInteger + case SBigInt => arbBigInt case SGroupElement => arbGroupElement - case 
SSigmaProp => arbSigmaBoolean + case SSigmaProp => arbSigmaProp case SBox => arbBox - case SAvlTree => arbAvlTreeData + case SAvlTree => arbAvlTree case SAny => arbAnyVal case SUnit => arbUnit }).asInstanceOf[Arbitrary[T#WrappedType]].arbitrary @@ -243,7 +265,7 @@ trait ValueGenerators extends TypeGenerators { creationHeight <- Gen.choose(-1, Int.MaxValue) } yield ergoplatform.ErgoBox(l, b, creationHeight, tokens.asScala, ar.asScala.toMap, tId.toArray.toModifierId, boxId) - def ergoBoxCandidateGen(availableTokens: Seq[TokenId]): Gen[ErgoBoxCandidate] = for { + def ergoBoxCandidateGen(availableTokens: Seq[Digest32]): Gen[ErgoBoxCandidate] = for { l <- arbLong.arbitrary p <- proveDlogGen b <- Gen.oneOf(TrueProp, FalseProp, ErgoTree.fromSigmaBoolean(p)) @@ -253,15 +275,15 @@ trait ValueGenerators extends TypeGenerators { tokens <- Gen.listOfN(tokensCount, Gen.oneOf(availableTokens)) tokenAmounts <- Gen.listOfN(tokensCount, Gen.oneOf(1, 500, 20000, 10000000, Long.MaxValue)) creationHeight <- Gen.chooseNum(0, 100000) - } yield new ErgoBoxCandidate(l, b, creationHeight, tokens.zip(tokenAmounts), ar.asScala.toMap) + } yield new ErgoBoxCandidate(l, b, creationHeight, tokens.toColl.zip(tokenAmounts.toColl), ar.asScala.toMap) - val boxConstantGen: Gen[BoxConstant] = ergoBoxGen.map { v => BoxConstant(v) } + val boxConstantGen: Gen[BoxConstant] = ergoBoxGen.map { v => BoxConstant(CostingBox(false, v)) } - val tokenIdGen: Gen[TokenId] = for { + val tokenIdGen: Gen[Digest32] = for { bytes <- Gen.listOfN(TokenId.size, arbByte.arbitrary).map(_.toArray) } yield Digest32 @@ bytes - val tokensGen: Gen[Seq[TokenId]] = for { + val tokensGen: Gen[Seq[Digest32]] = for { count <- Gen.chooseNum(10, 50) tokens <- Gen.listOfN(count, tokenIdGen) } yield tokens @@ -294,4 +316,26 @@ trait ValueGenerators extends TypeGenerators { } } } + + val byteArrayGen: Gen[Array[Byte]] = for { + length <- Gen.chooseNum(1, 10) + bytes <- Gen.listOfN(length, arbByte.arbitrary) + } yield bytes.toArray + + 
import ValidationRules._ + + val numRules = currentSettings.size + + val replacedRuleIdGen = Gen.chooseNum((FirstRuleId + numRules).toShort, Short.MaxValue) + + val ruleIdGen = Gen.chooseNum(FirstRuleId, (FirstRuleId + numRules - 1).toShort) + + val statusGen: Gen[RuleStatus] = Gen.oneOf( + Gen.oneOf(EnabledRule, DisabledRule), + replacedRuleIdGen.map(id => ReplacedRule(id)), + byteArrayGen.map(xs => ChangedRule(xs)) + ) + + implicit val statusArb: Arbitrary[RuleStatus] = Arbitrary(statusGen) + } diff --git a/src/test/scala/sigmastate/utils/GenInfoObjects.scala b/src/test/scala/sigmastate/utils/GenInfoObjects.scala new file mode 100644 index 0000000000..51b7383acf --- /dev/null +++ b/src/test/scala/sigmastate/utils/GenInfoObjects.scala @@ -0,0 +1,72 @@ +package sigmastate.utils + +import sigmastate.SMethod +import scalan.meta.PrintExtensions._ +import scala.util.Try + +/** Generate as a console output all InfoObject objects. + * Those provide stable identifiers to access metadata information. + * This should be regenerated each time metadata is changed. + * Example: + * object AppendInfo extends InfoObject { + * private val method = SMethod.fromIds(12, 9) + * val thisArg = method.argInfo("this") + * val otherArg = method.argInfo("other") + * ... + * } + * The following consistency checks are performed: + * 1) every func/method argument has attached ArgInfo + * 2) method is resolvable by ids (e.g. as SMethod.fromIds(12, 9)) + * 3) func is resolvable by name (e.g. 
predefinedOps.funcs("sigmaProp")) + */ +object GenInfoObjects extends SpecGen { + + def main(args: Array[String]) = { + val table = collectOpsTable() + val infos = table.map { case (d, m, f) => getOpInfo(d, m, f) } + + val infoStrings = infos.sortBy(_.opDesc.typeName).map { info => + val opName = info.opDesc.typeName + + val res = info.op match { + case Right(m) => + val typeId = m.objType.typeId + assert(m.stype.tDom.length == info.args.length, + s"Method $m has ${m.stype.tDom} arguments, but ${info.args} descriptions attached.") + Try{SMethod.fromIds(typeId, m.methodId)} + .fold(t => throw new RuntimeException(s"Cannot resolve method $m using SMethod.fromIds(${typeId}, ${m.methodId})"), _ => ()) + val args = info.args.map { a => + s"""val ${a.name}Arg: ArgInfo = method.argInfo("${a.name}")""" + } + s""" + | object ${opName}Info extends InfoObject { + | private val method = SMethod.fromIds(${typeId}, ${m.methodId}) + | ${args.rep(sep = "\n ")} + | val argInfos: Seq[ArgInfo] = Seq(${info.args.rep(a => s"${a.name}Arg")}) + | } + """.stripMargin + case Left(f) => + assert(f.declaration.args.length == info.args.length, + s"Predefined function $f has ${f.declaration.args} arguments, but ${info.args} descriptions attached.") + val isSpecialFunc = predefFuncRegistry.specialFuncs.get(f.name).isDefined + Try{assert(predefFuncRegistry.funcs.get(f.name).isDefined || isSpecialFunc)} + .fold(t => throw new RuntimeException(s"Cannot resolve func $f using predefFuncRegistry.funcs(${f.name})", t), _ => ()) + val args = info.args.map { a => + s"""val ${a.name}Arg: ArgInfo = func.argInfo("${a.name}")""" + } + s""" + | object ${opName}Info extends InfoObject { + | private val func = predefinedOps.${ if (isSpecialFunc) "specialFuncs" else "funcs" }("${f.name}") + | ${args.rep(sep = "\n ")} + | val argInfos: Seq[ArgInfo] = Seq(${info.args.rep(a => s"${a.name}Arg")}) + | } + """.stripMargin + } + res + } + val infoText = infoStrings.rep(sep = "") + println(infoText) + println(s"Total 
infos: ${infoStrings.length}") + } + +} diff --git a/src/test/scala/sigmastate/utils/GenPredefFuncsApp.scala b/src/test/scala/sigmastate/utils/GenPredefFuncsApp.scala new file mode 100644 index 0000000000..9272bbb844 --- /dev/null +++ b/src/test/scala/sigmastate/utils/GenPredefFuncsApp.scala @@ -0,0 +1,43 @@ +package sigmastate.utils + +import scalan.util.FileUtil +import scalan.meta.PrintExtensions._ +import sigmastate.serialization.OpCodes.OpCode +import sigma.util.Extensions.ByteOps + +object GenPredefFuncsApp extends SpecGen { + + def main(args: Array[String]) = { + val rowsFile = FileUtil.file("docs/spec/generated/predeffunc_rows.tex") + val sectionsFile = FileUtil.file(s"docs/spec/generated/predeffunc_sections.tex") + + val opsTable = collectOpsTable() + val opInfos = opsTable.collect { case (d, m, optF @ Some(f)) => + val info = getOpInfo(d, m, optF) + (d, f, info) + }.sortBy(i => toDisplayCode(i._1.opCode)) + + val funcRows = StringBuilder.newBuilder + val sections = StringBuilder.newBuilder + + for (row @ (d, f, info) <- opInfos) { + val opCode = toDisplayCode(d.opCode) + val mnemonic = d.typeName + val opName = toTexName(f.name) + val argsTpe = f.declaration.tpe.tDom.rep(_.toTermString) + val resTpe = f.declaration.tpe.tRange.toTermString + val serRef = s"\\hyperref[sec:serialization:operation:$mnemonic]{\\lst{$mnemonic}}" + val desc = if (info.description.length > 150) "..." 
else info.description + funcRows.append( + s""" $opCode & $serRef & \\parbox{4cm}{\\lst{$opName:} \\\\ \\lst{($argsTpe)} \\\\ \\lst{ => $resTpe}} & $desc \\\\ + | \\hline + """.stripMargin) + + val subsection = funcSubsection(f) + sections.append(subsection) + } + println(s"Total ops: ${opInfos.length}") + FileUtil.write(rowsFile, funcRows.result()) + FileUtil.write(sectionsFile, sections.result()) + } +} diff --git a/src/test/scala/sigmastate/utils/GenPredefTypesApp.scala b/src/test/scala/sigmastate/utils/GenPredefTypesApp.scala new file mode 100644 index 0000000000..b889abff2b --- /dev/null +++ b/src/test/scala/sigmastate/utils/GenPredefTypesApp.scala @@ -0,0 +1,22 @@ +package sigmastate.utils + +import scalan.util.FileUtil + +object GenPredefTypesApp extends SpecGen { + import SpecGenUtils._ + + def main(args: Array[String]) = { + val table = printTypes(companions) + val fPrimOps = FileUtil.file("docs/spec/generated/predeftypes.tex") + FileUtil.write(fPrimOps, table) + + for (tc <- typesWithMethods) { + val typeName = tc.typeName + val methodsRows = printMethods(tc) + val fMethods = FileUtil.file(s"docs/spec/generated/${typeName}_methods.tex") + FileUtil.write(fMethods, methodsRows) + + println(s"\\input{generated/${typeName}_methods.tex}") + } + } +} diff --git a/src/test/scala/sigmastate/utils/GenSerializers.scala b/src/test/scala/sigmastate/utils/GenSerializers.scala new file mode 100644 index 0000000000..2b565a2da4 --- /dev/null +++ b/src/test/scala/sigmastate/utils/GenSerializers.scala @@ -0,0 +1,172 @@ +package sigmastate.utils + +import scalan.util.FileUtil +import scalan.meta.PrintExtensions._ +import sigmastate.serialization.ValueSerializer._ +import sigma.util.Extensions.ByteOps +import sigmastate.lang.Terms.{PropertyCall, MethodCall} + +/** Generate contents of ErgoTree serializer format specification. 
+ */ +object GenSerializers extends SpecGen { + + def printDataScope(dataScope: DataScope, level: Int, sb: StringBuilder) = { + val prefix = "~~" * level + val name = dataScope.name + val fmt = dataScope.data.format + val size = fmt.size + val desc = dataScope.data.info.description + val row = + s""" $prefix $$ $name $$ & \\lst{$fmt} & $size & $desc \\\\ + | \\hline + """.stripMargin + sb.append(row) + openRow = false + } + + def printForScope(scope: ForScope, level: Int, sb: StringBuilder) = { + val prefix = "~~" * level + val header = + s""" \\multicolumn{4}{l}{${prefix}\\lst{for}~$$i=1$$~\\lst{to}~$$${scope.limitVar}$$} \\\\ + | \\hline + """.stripMargin + sb.append(header) + + for ((_, s) <- scope.children) { + printScope(s, level + 1, sb) + } + + val footer = s" \\multicolumn{4}{l}{${prefix}\\lst{end for}} \\\\" + sb.append(footer) + openRow = true + } + + def printOptionScope(scope: OptionScope, level: Int, sb: StringBuilder) = { + val prefix = "~~" * level + val header = + s""" \\multicolumn{4}{l}{${prefix}\\lst{optional}~$$${scope.name}$$} \\\\ + | \\hline + | ${prefix}~~$$tag$$ & \\lst{Byte} & 1 & 0 - no value; 1 - has value \\\\ + | \\hline + | \\multicolumn{4}{l}{${prefix}~~\\lst{when}~$$tag == 1$$} \\\\ + | \\hline + """.stripMargin + sb.append(header) + + for ((_, s) <- scope.children) { + printScope(s, level + 2, sb) + } + + val footer = s" \\multicolumn{4}{l}{${prefix}\\lst{end optional}} \\\\" + sb.append(footer) + openRow = true + } + + def printCasesScope(scope: CasesScope, level: Int, sb: StringBuilder) = { + val prefix = "~~" * level + val header = + s""" \\multicolumn{4}{l}{${prefix}\\lst{match}~$$ ${scope.matchExpr} $$} \\\\ + """.stripMargin + sb.append(header) + + for (when <- scope.cases) { + val pattern = + s""" + | \\multicolumn{4}{l}{${prefix}~~${ if(when.isOtherwise) s"\\lst{$otherwiseCondition}" else s"\\lst{with}~$$${when.condition}$$" } } \\\\ + | \\hline + """.stripMargin + sb.append(pattern) + for((_,s) <- when.children) { + 
printScope(s, level + 2, sb) + } + } + + val footer = s" \\multicolumn{4}{l}{${prefix}\\lst{end match}} \\\\" + sb.append(footer) + openRow = true + } + + def printScope(scope: Scope, level: Int, sb: StringBuilder): Unit = { + if (openRow) { + sb.append(s"\\hline\n") + openRow = false // close the table row with a horizontal line + } + scope match { + case scope: DataScope => + printDataScope(scope, level, sb) + case scope: ForScope => + printForScope(scope, level, sb) + case scope: OptionScope => + printOptionScope(scope, level, sb) + case scope: CasesScope => + printCasesScope(scope, level, sb) + case _ => + sb.append(s"% skipped $scope\n") + } + } + + var openRow: Boolean = false + + def printSerScopeSlots(serScope: SerScope) = { + val rows = StringBuilder.newBuilder + openRow = false + serScope.children.map { case (name, scope) => + printScope(scope, 0, rows) + } + rows.result() + } + + def printSerializerSections() = { + val opsTable = collectOpsTable() + val opInfos = opsTable.map { case (d, m, f) => getOpInfo(d, m, f) } + val scopes = serializerInfo + .filter(_._2.children.nonEmpty).toSeq + .sortBy(_._1).map(_._2) + scopes.map { s => + val ser = getSerializer(s.opCode) + val opCode = ser.opCode.toUByte + val opName = ser.opDesc.typeName + val rows = printSerScopeSlots(s) + val opRow = opsTable.find(r => r._1.opCode == s.opCode) + val opInfo = opInfos.find(i => i.opDesc.opCode == s.opCode) + val desc = opInfo.map(_.description) + val opRef = opRow + .filterNot { case (d,_,_) => d == PropertyCall || d == MethodCall } + .opt { case (d, m, f) => + m.fold(f.opt { f => + val refName = f.docInfo.opDesc.typeName + val opName = f.name.replace("%", "\\%") + s"See~\\hyperref[sec:appendix:primops:$refName]{\\lst{${opName}}}" + })({ m => + val typeName = m.objType.typeName + s"See~\\hyperref[sec:type:$typeName:${m.name}]{\\lst{$typeName.${m.name}}}" + }) + } + + s""" + |\\subsubsection{\\lst{$opName} operation (OpCode $opCode)} + 
|\\label{sec:serialization:operation:$opName} + | + |${desc.opt(_.toString)} $opRef + | + |\\noindent + |\\(\\begin{tabularx}{\\textwidth}{| l | l | l | X |} + | \\hline + | \\bf{Slot} & \\bf{Format} & \\bf{\\#bytes} & \\bf{Description} \\\\ + | \\hline + | $rows + |\\end{tabularx}\\) + """.stripMargin + }.mkString("\n") + } + + def generateSerSpec() = { + val fileName = "ergotree_serialization1.tex" + val formatsTex = printSerializerSections() + val file = FileUtil.file(s"docs/spec/generated/$fileName") + FileUtil.write(file, formatsTex) + + println(s"\\input{generated/$fileName}") + } + +} diff --git a/src/test/scala/sigmastate/utils/SparseArrayContainerSpecification.scala b/src/test/scala/sigmastate/utils/SparseArrayContainerSpecification.scala index f653b5023f..d2ed605821 100644 --- a/src/test/scala/sigmastate/utils/SparseArrayContainerSpecification.scala +++ b/src/test/scala/sigmastate/utils/SparseArrayContainerSpecification.scala @@ -23,14 +23,14 @@ class SparseArrayContainerSpecification extends PropSpec property("get") { forAll(distinctCodeValuePairsGen) { codeValuePairs => - val c = new SparseArrayContainer(codeValuePairs) + val cont = new SparseArrayContainer(codeValuePairs) codeValuePairs.foreach { case (code, v) => - c.get(code) shouldBe v + cont(code) shouldBe v } val mappedValues = codeValuePairs.toMap (Byte.MinValue to Byte.MaxValue).foreach { i => if (mappedValues.get(i.toByte).isEmpty) - c.get(i.toByte) shouldBe null.asInstanceOf[Long] + cont(i.toByte) shouldBe null.asInstanceOf[Long] } } } diff --git a/src/test/scala/sigmastate/utils/SpecGen.scala b/src/test/scala/sigmastate/utils/SpecGen.scala new file mode 100644 index 0000000000..224abeb6f3 --- /dev/null +++ b/src/test/scala/sigmastate/utils/SpecGen.scala @@ -0,0 +1,299 @@ +package sigmastate.utils + +import sigmastate._ +import sigmastate.eval.Evaluation._ +import sigmastate.eval.{Zero, Sized} +import sigma.util.Extensions.ByteOps +import scalan.util.CollectionUtil +import 
scalan.meta.PrintExtensions._ +import sigmastate.Values.{FalseLeaf, Constant, TrueLeaf, BlockValue, ConstantPlaceholder, Tuple, ValDef, FunDef, ValUse, ValueCompanion, TaggedVariable, ConcreteCollection, ConcreteCollectionBooleanConstant} +import sigmastate.lang.SigmaPredef.{PredefinedFuncRegistry, PredefinedFunc} +import sigmastate.lang.StdSigmaBuilder +import sigmastate.lang.Terms.MethodCall +import sigmastate.serialization.OpCodes.OpCode +import sigmastate.serialization.{ValueSerializer, OpCodes} +import sigmastate.utxo.{SigmaPropIsProven, SelectField} + +object SpecGenUtils { + val types = SType.allPredefTypes.diff(Seq(SString)) + val companions: Seq[STypeCompanion] = types.collect { case tc: STypeCompanion => tc } + val typesWithMethods = companions ++ Seq(SCollection, SOption) +} + +trait SpecGen { + import SpecGenUtils._ + val tT = STypeVar("T") + + case class OpInfo( + opDesc: ValueCompanion, + description: String, + args: Seq[ArgInfo], op: Either[PredefinedFunc, SMethod]) + + def collectSerializers(): Seq[ValueSerializer[_ <: Values.Value[SType]]] = { + ((OpCodes.LastConstantCode + 1) to 255).collect { + case i if ValueSerializer.serializers(i.toByte) != null => + val ser = ValueSerializer.serializers(i.toByte) + assert(i == ser.opDesc.opCode.toUByte) + ser + } + } + + def collectFreeCodes(): Seq[Int] = { + ((OpCodes.LastConstantCode + 1) to 255).collect { + case i if ValueSerializer.serializers(i.toByte) == null => i + } + } + + def collectMethods(): Seq[SMethod] = { + for { + tc <- typesWithMethods.sortBy(_.typeId) + m <- tc.methods.sortBy(_.methodId) + } yield m + } + + def collectSerializableOperations(): Seq[(OpCode, ValueCompanion)] = { + val sers = collectSerializers() + sers.map(s => (s.opCode, s.opDesc)) + } + + protected val predefFuncRegistry = new PredefinedFuncRegistry(StdSigmaBuilder) + val noFuncs: Set[ValueCompanion] = Set(Constant, MethodCall) + val predefFuncs: Seq[PredefinedFunc] = predefFuncRegistry.funcs.values + .filterNot { f => 
noFuncs.contains(f.docInfo.opDesc) }.toSeq + val specialFuncs: Seq[PredefinedFunc] = predefFuncRegistry.specialFuncs.values.toSeq + + def collectOpsTable() = { + val ops = collectSerializableOperations().filterNot { case (_, opDesc) => noFuncs.contains(opDesc) } + val methods = collectMethods() + val funcs = predefFuncs ++ specialFuncs + + val methodsByOpCode = methods + .groupBy(_.docInfo.flatMap(i => Option(i.opDesc).map(_.opCode))) + // .map { case p @ (k, xs) => p.ensuring({ k.isEmpty || xs.length == 1}, p) } + + val funcsByOpCode = funcs + .groupBy(_.docInfo.opDesc.opCode) + .ensuring(g => g.forall{ case (k, xs) => xs.length <= 1}) + + val table = ops.map { case (opCode, opDesc) => + val methodOpt = methodsByOpCode.get(Some(opCode)).map(_.head) + val funcOpt = funcsByOpCode.get(opCode).map(_.head) + (opCode, opDesc, methodOpt, funcOpt) + } + val rowsWithInfo = + for ((opCode, opDesc, optM, optF) <- table if optM.nonEmpty || optF.nonEmpty) + yield (opDesc, optM, optF) + rowsWithInfo + } + + def getOpInfo(opDesc: ValueCompanion, optM: Option[SMethod], optF: Option[PredefinedFunc]): OpInfo = { + (optM, optF) match { + case (Some(m), _) => + val description = m.docInfo.map(i => i.description).opt() + val args = m.docInfo.map(i => i.args).getOrElse(Seq()) + OpInfo(opDesc, description, args, Right(m)) + case (_, Some(f)) => + val description = f.docInfo.description + val args = f.docInfo.args + OpInfo(opDesc, description, args, Left(f)) + case p => sys.error(s"Unexpected $opDesc with $p") + } + } + + def toTexName(name: String) = name.replace("%", "\\%") + + def toDisplayCode(opCode: OpCode): Int = opCode.toUByte + + def printTypes(companions: Seq[STypeCompanion]) = { + val lines = for { tc <- companions.sortBy(_.typeId) } yield { + val t = tc match { + case t: SType => t + case SCollection => SCollection(tT) + case SOption => SOption(tT) + } + val rtype = stypeToRType(t) + val name = t match { case SGlobal => "Global" case _ => rtype.name } + val isConst = 
t.isConstantSize + val isPrim = t.isInstanceOf[SPrimType] + val isEmbed = t.isInstanceOf[SEmbeddable] + val isNum = t.isInstanceOf[SNumericType] + val valRange = t match { + case SBoolean => s"$$\\Set{\\lst{true}, \\lst{false}}$$" + case n: SNumericType => + val s = Sized.typeToSized(rtype) + val z = Zero.typeToZero(rtype).zero + val bits = s.size(z).dataSize * 8 - 1 + s"$$\\Set{-2^{$bits} \\dots 2^{$bits}-1}$$~\\ref{sec:type:${name}}" + case SGroupElement => s"$$\\Set{p \\in \\lst{SecP256K1Point}}$$" + case _ => s"Sec.~\\ref{sec:type:${name}}" + } + val line = + s"""\\lst{$name} & $$${tc.typeId}$$ & \\lst{$isConst} & \\lst{$isPrim} & \\lst{$isEmbed} & \\lst{$isNum} & $valRange \\\\""" + line + } + val table = lines.mkString("\n\\hline\n") + table + } + + def methodSubsection(typeName: String, m: SMethod) = { + val argTypes = m.stype.tDom + val resTpe = m.stype.tRange.toTermString + val types = argTypes.map(_.toTermString) + val argInfos = m.docInfo.fold( + Range(0, types.length).map(i => ArgInfo("arg" + i, "")))(info => info.args.toIndexedSeq) + + val serializedAs = m.docInfo.flatMap(i => Option(i.opDesc)).opt { d => + val opName = d.typeName + val opCode = d.opCode.toUByte + s""" + | \\bf{Serialized as} & \\hyperref[sec:serialization:operation:$opName]{\\lst{$opName}} \\\\ + | \\hline + """.stripMargin + } + subsectionTempl( + opName = s"$typeName.${m.name}", + opCode = s"${m.objType.typeId}.${m.methodId}", + label = s"sec:type:$typeName:${m.name}", + desc = m.docInfo.opt(i => i.description + i.isFrontendOnly.opt(" (FRONTEND ONLY)")), + types = types, + argInfos = argInfos, + resTpe = resTpe, + serializedAs = serializedAs + ) + } + + def funcSubsection(f: PredefinedFunc) = { + val argTypes = f.declaration.tpe.tDom + val resTpe = f.declaration.tpe.tRange.toTermString + val types = argTypes.map(_.toTermString) + val argInfos = f.docInfo.args + val opDesc = f.docInfo.opDesc + val opCode = opDesc.opCode.toUByte + + val serializedAs = { + val nodeName = 
opDesc.typeName + s""" + | \\bf{Serialized as} & \\hyperref[sec:serialization:operation:$nodeName]{\\lst{$nodeName}} \\\\ + | \\hline + """.stripMargin + } + subsectionTempl( + opName = toTexName(f.name), + opCode = opCode.toString, + label = s"sec:appendix:primops:${f.docInfo.opDesc.typeName}", + desc = f.docInfo.description + f.docInfo.isFrontendOnly.opt(" (FRONTEND ONLY)"), + types = types, + argInfos = argInfos, + resTpe = resTpe, + serializedAs = serializedAs + ) + } + + def subsectionTempl(opName: String, opCode: String, label: String, desc: String, types: Seq[String], argInfos: Seq[ArgInfo], resTpe: String, serializedAs: String) = { + val params = types.opt { ts => + val args = argInfos.zip(ts).filter { case (a, _) => a.name != "this" } + s""" + | \\hline + | \\bf{Parameters} & + | \\(\\begin{array}{l l l} + | ${args.rep({ case (info, t) => + s"\\lst{${info.name}} & \\lst{: $t} & \\text{// ${info.description}} \\\\" + }, "\n")} + | \\end{array}\\) \\\\ + """.stripMargin + } + s""" + |\\subsubsection{\\lst{$opName} method (Code $opCode)} + |\\label{$label} + |\\noindent + |\\begin{tabularx}{\\textwidth}{| l | X |} + | \\hline + | \\bf{Description} & $desc \\\\ + | $params + | \\hline + | \\bf{Result} & \\lst{${resTpe}} \\\\ + | \\hline + | $serializedAs + |\\end{tabularx} + |""".stripMargin + } + + def printMethods(tc: STypeCompanion) = { + val methodSubsections = for { m <- tc.methods.sortBy(_.methodId) } yield { + methodSubsection(tc.typeName, m) + } + val res = methodSubsections.mkString("\n\n") + res + } +} + +object PrintSerializersInfoApp extends SpecGen { + def main(args: Array[String]) = { + val sers = collectSerializers() + + val serTypes = sers.groupBy(s => s.getClass.getSimpleName) + for ((name, group) <- serTypes) { + println(name + ":") + for (ser <- group) { + val line = s"\t${ser.opCode.toUByte} -> serializer: ${ser.getClass.getSimpleName}; opDesc: ${ser.opDesc}" + println(line) + } + } + println(s"Total Serializable Ops: ${sers.length}") + 
println(s"Total Serializer Types: ${serTypes.keys.size}") + + val ops = ValueCompanion.allOperations + for ((k,v) <- ops.toArray.sortBy(_._1.toUByte)) { + println(s"${k.toUByte} -> $v") + } + println(s"Total ops: ${ops.size}") + } +} + +object GenPrimOpsApp extends SpecGen { + + def main(args: Array[String]) = { + val methods = collectMethods() + val ops = collectSerializableOperations() + val noOps = Set( + TaggedVariable, ValUse, ConstantPlaceholder, TrueLeaf, FalseLeaf, + ConcreteCollection, ConcreteCollectionBooleanConstant, Tuple, SelectField, SigmaPropIsProven, ValDef, FunDef, BlockValue + ) + + // join collection of all operations with all methods by optional opCode + val primOps = CollectionUtil.outerJoinSeqs(ops, methods)( + o => Some(o._1), m => m.docInfo.map(_.opDesc.opCode) + )( + (k, o) => Some(o), // left without right + (k,i) => None, // right without left + (k,i,o) => None // left and right + ).map(_._2).collect { case Some(op) if !noOps.contains(op._2) => op } + + // primOps is collection of operations which are not referenced by any method m.irInfo.opDesc + for (p <- primOps) { + println(s"$p") + } + + println(s"Total ops: ${primOps.size}\n") + + + // join collection of operations with all predef functions by opCode + val danglingOps = CollectionUtil.outerJoinSeqs(primOps, predefFuncs)( + o => Some(o._1), f => Some(f.docInfo.opDesc.opCode) + )( + (k, o) => Some(o), // left without right + (k,i) => None, // right without left + (k,i,o) => None // left and right + ).map(_._2).collect { case Some(op) => op } + + // danglingOps are the operations which are not referenced by any predef funcs + for (p <- danglingOps) { + println(s"$p") + } + + println(s"Total dangling: ${danglingOps.size}") + } +} + + + diff --git a/src/test/scala/sigmastate/utxo/AVLTreeScriptsSpecification.scala b/src/test/scala/sigmastate/utxo/AVLTreeScriptsSpecification.scala index 60b2b180ec..f70f972259 100644 --- a/src/test/scala/sigmastate/utxo/AVLTreeScriptsSpecification.scala 
+++ b/src/test/scala/sigmastate/utxo/AVLTreeScriptsSpecification.scala @@ -11,10 +11,10 @@ import sigmastate.SCollection.SByteArray import sigmastate.Values._ import sigmastate._ import sigmastate.eval.{IRContext, CSigmaProp} +import sigmastate.eval._ import sigmastate.eval.Extensions._ -import sigmastate.helpers.{ErgoLikeTestProvingInterpreter, SigmaTestingCommons} import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, ErgoLikeTestInterpreter, SigmaTestingCommons} -import sigmastate.interpreter.Interpreter.{ScriptNameProp, emptyEnv} +import sigmastate.interpreter.Interpreter.ScriptNameProp import sigmastate.lang.Terms._ import special.collection.Coll import special.sigma.{Context, AvlTree} @@ -75,7 +75,6 @@ class AVLTreeScriptsSpecification extends SigmaTestingCommons { suite => .withRegs(reg1 -> tree, reg2 -> endTree) val spendingTx = candidateBlock(50).newTransaction().spending(s) - val newBox1 = spendingTx.outBox(10, contract.proverSig) val in1 = spendingTx.inputs(0) val res = in1.runDsl() @@ -125,7 +124,6 @@ class AVLTreeScriptsSpecification extends SigmaTestingCommons { suite => .withRegs(reg1 -> tree, reg2 -> endTree) val spendingTx = candidateBlock(50).newTransaction().spending(s) - val newBox1 = spendingTx.outBox(10, contract.proverSig) val in1 = spendingTx.inputs(0) val res = in1.runDsl() @@ -158,12 +156,12 @@ class AVLTreeScriptsSpecification extends SigmaTestingCommons { suite => val key = genKey("key") val value = genValue("value") - val (tree, avlProver) = createAvlTree(AvlTreeFlags.AllOperationsAllowed, key -> value, genKey("key2") -> genValue("value2")) + val (_, avlProver) = createAvlTree(AvlTreeFlags.AllOperationsAllowed, key -> value, genKey("key2") -> genValue("value2")) avlProver.performOneOperation(Lookup(genKey("key"))) val digest = avlProver.digest val proof = avlProver.generateProof().toColl - val treeData = new AvlTreeData(digest, AvlTreeFlags.ReadOnly, 32, None) + val treeData = SigmaDsl.avlTree(new AvlTreeData(digest, 
AvlTreeFlags.ReadOnly, 32, None)) val contract = AvlTreeContract[spec.type](key.toColl, proof, value.toColl, prover)(spec) import contract.spec._ @@ -174,7 +172,6 @@ class AVLTreeScriptsSpecification extends SigmaTestingCommons { suite => .withRegs(reg1 -> treeData) val spendingTx = candidateBlock(50).newTransaction().spending(s) - val newBox1 = spendingTx.outBox(10, contract.proverSig) val in1 = spendingTx.inputs(0) val res = in1.runDsl() @@ -218,7 +215,7 @@ class AVLTreeScriptsSpecification extends SigmaTestingCommons { suite => val spendingTransaction = createTransaction(newBoxes) - val s = ErgoBox(20, TrueProp, 0, Seq(), Map(reg1 -> AvlTreeConstant(treeData))) + val s = ErgoBox(20, TrueProp, 0, Seq(), Map(reg1 -> AvlTreeConstant(SigmaDsl.avlTree(treeData)))) val ctx = ErgoLikeContext( currentHeight = 50, @@ -242,27 +239,18 @@ class AVLTreeScriptsSpecification extends SigmaTestingCommons { suite => val proofId = 0: Byte val elementId = 1: Byte - val prop = AND( - GE(GetVarLong(elementId).get, LongConstant(120)), - IR.builder.mkMethodCall( - ExtractRegisterAs[SAvlTree.type](Self, reg1).get, SAvlTree.containsMethod, - IndexedSeq(CalcBlake2b256(LongToByteArray(GetVarLong(elementId).get)), GetVarByteArray(proofId).get) - ).asBoolValue - ).toSigmaProp val env = Map("proofId" -> proofId.toLong, "elementId" -> elementId.toLong) - val propCompiled = compile(env, + val prop = compile(env, """{ - | val tree = SELF.R3[AvlTree].get + | val tree = SELF.R4[AvlTree].get | val proof = getVar[Coll[Byte]](proofId).get | val element = getVar[Long](elementId).get | val elementKey = blake2b256(longToByteArray(element)) - | element >= 120 && tree.contains(elementKey, proof) + | element >= 120L && tree.contains(elementKey, proof) |}""".stripMargin).asBoolValue.toSigmaProp - //TODO: propCompiled shouldBe prop - val recipientProposition = new ContextEnrichingTestProvingInterpreter().dlogSecrets.head.publicImage - val selfBox = ErgoBox(20, TrueProp, 0, Seq(), Map(reg1 -> 
AvlTreeConstant(treeData))) + val selfBox = ErgoBox(20, TrueProp, 0, Seq(), Map(reg1 -> AvlTreeConstant(SigmaDsl.avlTree(treeData)))) val ctx = ErgoLikeContext( currentHeight = 50, lastBlockUtxoRoot = AvlTreeData.dummy, @@ -286,7 +274,7 @@ class AVLTreeScriptsSpecification extends SigmaTestingCommons { suite => .withContextExtender(proofId, ByteArrayConstant(smallLeafTreeProof)) .withContextExtender(elementId, LongConstant(elements.head)) smallProver.prove(prop, ctx, fakeMessage).isSuccess shouldBe false - // TODO check that verifier return false for incorrect proofs? + // TODO coverage: check that verifier return false for incorrect proofs? } property("avl tree - prover provides proof") { @@ -302,7 +290,7 @@ class AVLTreeScriptsSpecification extends SigmaTestingCommons { suite => val digest = avlProver.digest val proof = avlProver.generateProof() - val treeData = new AvlTreeData(digest, AvlTreeFlags.ReadOnly, 32, None) + val treeData = SigmaDsl.avlTree(new AvlTreeData(digest, AvlTreeFlags.ReadOnly, 32, None)) val proofId = 31: Byte @@ -367,7 +355,7 @@ class AVLTreeScriptsSpecification extends SigmaTestingCommons { suite => val verifier = new ErgoLikeTestInterpreter val pubkey = prover.dlogSecrets.head.publicImage - val treeData = new AvlTreeData(digest, AvlTreeFlags.ReadOnly, 32, None) + val treeData = SigmaDsl.avlTree(new AvlTreeData(digest, AvlTreeFlags.ReadOnly, 32, None)) val env = Map("proofId" -> proofId.toLong, "keys" -> ConcreteCollection(genKey("3"), genKey("4"), genKey("5"))) diff --git a/src/test/scala/sigmastate/utxo/BasicOpsSpecification.scala b/src/test/scala/sigmastate/utxo/BasicOpsSpecification.scala index a20c119f21..3472d2a37a 100644 --- a/src/test/scala/sigmastate/utxo/BasicOpsSpecification.scala +++ b/src/test/scala/sigmastate/utxo/BasicOpsSpecification.scala @@ -3,14 +3,16 @@ package sigmastate.utxo import org.ergoplatform.ErgoBox.{R6, R8} import org.ergoplatform.ErgoLikeContext.dummyPubkey import org.ergoplatform._ +import scalan.RType import 
sigmastate.SCollection.SByteArray import sigmastate.Values._ import sigmastate._ -import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, ErgoLikeTestInterpreter, SigmaTestingCommons} +import sigmastate.eval.Extensions._ +import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, SigmaTestingCommons, ErgoLikeTestInterpreter} import sigmastate.interpreter.Interpreter._ import sigmastate.lang.Terms._ import special.sigma.InvalidType - +import SType.AnyOps class BasicOpsSpecification extends SigmaTestingCommons { implicit lazy val IR = new TestingIRContext { @@ -244,49 +246,50 @@ class BasicOpsSpecification extends SigmaTestingCommons { } property("Tuple as Collection operations") { -// test("TupColl1", env, ext, -// """{ val p = (getVar[Int](intVar1).get, getVar[Byte](byteVar2).get) -// | p.size == 2 }""".stripMargin, -// { -// TrueLeaf -// }, true) -// test("TupColl2", env, ext, -// """{ val p = (getVar[Int](intVar1).get, getVar[Byte](byteVar2).get) -// | p(0) == 1 }""".stripMargin, -// { -// EQ(GetVarInt(intVar1).get, IntConstant(1)) -// }) + test("TupColl1", env, ext, + """{ val p = (getVar[Int](intVar1).get, getVar[Byte](byteVar2).get) + | p.size == 2 }""".stripMargin, + { + TrueLeaf.toSigmaProp + }, true) + test("TupColl2", env, ext, + """{ val p = (getVar[Int](intVar1).get, getVar[Byte](byteVar2).get) + | p(0) == 1 }""".stripMargin, + { + EQ(GetVarInt(intVar1).get, IntConstant(1)).toSigmaProp + }) val dataVar = (lastExtVar + 1).toByte val Colls = IR.sigmaDslBuilderValue.Colls - val data = Array(Array[Any](Array[Byte](1,2,3), 10L)) + implicit val eAny = RType.AnyType + val data = Colls.fromItems((Array[Byte](1,2,3).toColl, 10L)) val env1 = env + ("dataVar" -> dataVar) val dataType = SCollection(STuple(SByteArray, SLong)) - val ext1 = ext :+ ((dataVar, Constant[SCollection[STuple]](data, dataType))) -// test("TupColl3", env1, ext1, -// """{ -// | val data = getVar[Coll[(Coll[Byte], Long)]](dataVar).get -// | data.size == 1 -// 
|}""".stripMargin, -// { -// val data = GetVar(dataVar, dataType).get -// EQ(SizeOf(data), IntConstant(1)) -// } -// ) -// test("TupColl4", env1, ext1, -// """{ -// | val data = getVar[Coll[(Coll[Byte], Long)]](dataVar).get -// | data.exists({ (p: (Coll[Byte], Long)) => p._2 == 10L }) -// |}""".stripMargin, -// { -// val data = GetVar(dataVar, dataType).get -// Exists(data, -// FuncValue( -// Vector((1, STuple(SByteArray, SLong))), -// EQ(SelectField(ValUse(1, STuple(SByteArray, SLong)), 2), LongConstant(10))) -// ) -// } -// ) + val ext1 = ext :+ ((dataVar, Constant[SType](data.asWrappedType, dataType))) + test("TupColl3", env1, ext1, + """{ + | val data = getVar[Coll[(Coll[Byte], Long)]](dataVar).get + | data.size == 1 + |}""".stripMargin, + { + val data = GetVar(dataVar, dataType).get + EQ(SizeOf(data), IntConstant(1)).toSigmaProp + } + ) + test("TupColl4", env1, ext1, + """{ + | val data = getVar[Coll[(Coll[Byte], Long)]](dataVar).get + | data.exists({ (p: (Coll[Byte], Long)) => p._2 == 10L }) + |}""".stripMargin, + { + val data = GetVar(dataVar, dataType).get + Exists(data, + FuncValue( + Vector((1, STuple(SByteArray, SLong))), + EQ(SelectField(ValUse(1, STuple(SByteArray, SLong)), 2), LongConstant(10))) + ).toSigmaProp + } + ) test("TupColl5", env1, ext1, """{ | val data = getVar[Coll[(Coll[Byte], Long)]](dataVar).get @@ -311,15 +314,6 @@ class BasicOpsSpecification extends SigmaTestingCommons { EQ(SizeOf(data), IntConstant(1)).toSigmaProp } ) - -// TODO uncomment after operations over Any are implemented -// test(env, ext, -// """{ val p = (getVar[Int](intVar1).get, getVar[Byte](byteVar2).get) -// | p.getOrElse(2, 3).isInstanceOf[Int] }""".stripMargin, -// { -// val p = Tuple(GetVarInt(intVar1).get, GetVarByte(byteVar2).get) -// EQ(ByIndex[SAny.type](p, IntConstant(2), Some(IntConstant(3).asValue[SAny.type])), IntConstant(3)) -// }) } property("GetVar") { @@ -338,11 +332,11 @@ class BasicOpsSpecification extends SigmaTestingCommons { } 
property("ExtractRegisterAs") { -// test("Extract1", env, ext, -// "{ SELF.R4[SigmaProp].get.isProven }", -// ExtractRegisterAs[SSigmaProp.type](Self, reg1).get, -// true -// ) + test("Extract1", env, ext, + "{ SELF.R4[SigmaProp].get.isProven }", + ExtractRegisterAs[SSigmaProp.type](Self, reg1).get, + true + ) // wrong type assertExceptionThrown( test("Extract2", env, ext, @@ -356,7 +350,7 @@ class BasicOpsSpecification extends SigmaTestingCommons { // TODO related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/416 ignore("Box.getReg") { test("Extract1", env, ext, - "{ SELF.getReg[Int]( (getVar[Int](intVar1).get + 4).toByte ).get == 1}", + "{ SELF.getReg[Int]( (getVar[Int](intVar1).get + 4)).get == 1}", BoolToSigmaProp( EQ( MethodCall(Self, SBox.getRegMethod, @@ -490,10 +484,11 @@ class BasicOpsSpecification extends SigmaTestingCommons { // println(CostTableStat.costTableString) } -// property("ZKProof") { -// test("zk1", env, ext, "ZKProof { sigmaProp(HEIGHT >= 0) }", -// ZKProofBlock(BoolToSigmaProp(GE(Height, LongConstant(0)))), true) -// } + //TODO soft-fork: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/236 + ignore("ZKProof") { + test("zk1", env, ext, "ZKProof { sigmaProp(HEIGHT >= 0) }", + ZKProofBlock(BoolToSigmaProp(GE(Height, LongConstant(0)))), true) + } property("numeric cast") { test("downcast", env, ext, @@ -576,8 +571,7 @@ class BasicOpsSpecification extends SigmaTestingCommons { ) } - //TODO: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/425 - ignore("Option.map") { + property("Option.map") { test("Option.map", env, ext, "getVar[Int](intVar1).map({(i: Int) => i + 1}).get == 2", null, @@ -585,8 +579,7 @@ class BasicOpsSpecification extends SigmaTestingCommons { ) } - //TODO: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/425 - ignore("Option.filter") { + property("Option.filter") { test("Option.filter", env, ext, 
"getVar[Int](intVar1).filter({(i: Int) => i > 0}).get == 1", null, diff --git a/src/test/scala/sigmastate/utxo/BlockchainSimulationSpecification.scala b/src/test/scala/sigmastate/utxo/BlockchainSimulationSpecification.scala index eabd8e2b00..110e55a41c 100644 --- a/src/test/scala/sigmastate/utxo/BlockchainSimulationSpecification.scala +++ b/src/test/scala/sigmastate/utxo/BlockchainSimulationSpecification.scala @@ -1,22 +1,21 @@ package sigmastate.utxo -import java.io.{FileWriter, File} +import java.io.{File, FileWriter} import org.ergoplatform +import org.ergoplatform.ErgoBox.TokenId import org.ergoplatform._ import org.scalacheck.Gen -import org.scalatest.prop.{PropertyChecks, GeneratorDrivenPropertyChecks} -import org.scalatest.{PropSpec, Matchers} -import scorex.crypto.authds.avltree.batch.{Remove, BatchAVLProver, Insert} +import scorex.crypto.authds.avltree.batch.{BatchAVLProver, Insert, Remove} import scorex.crypto.authds.{ADDigest, ADKey, ADValue} -import scorex.crypto.hash.{Digest32, Blake2b256} +import scorex.crypto.hash.{Blake2b256, Digest32} import scorex.util._ -import sigmastate.Values.LongConstant -import sigmastate.helpers.{ErgoLikeTestProvingInterpreter, SigmaTestingCommons, ErgoTransactionValidator} +import sigmastate.Values.{IntConstant, LongConstant} +import sigmastate.helpers.{ErgoLikeTestProvingInterpreter, ErgoTransactionValidator, SigmaTestingCommons} import sigmastate.interpreter.ContextExtension -import sigmastate.eval.IRContext +import sigmastate.eval._ import sigmastate.interpreter.Interpreter.{ScriptNameProp, emptyEnv} -import sigmastate.{GE, AvlTreeData, AvlTreeFlags} +import sigmastate.{AvlTreeData, AvlTreeFlags, GE} import scala.annotation.tailrec import scala.collection.concurrent.TrieMap @@ -34,7 +33,7 @@ class BlockchainSimulationSpecification extends SigmaTestingCommons { val txs = boxesToSpend.map { box => val newBoxCandidate = - new ErgoBoxCandidate(10, minerPubKey, height, Seq(), Map(heightReg -> LongConstant(height + 
windowSize))) + new ErgoBoxCandidate(10, minerPubKey, height, Colls.emptyColl[(TokenId, Long)], Map(heightReg -> IntConstant(height + windowSize))) val unsignedInput = new UnsignedInput(box.id) val tx = UnsignedErgoLikeTransaction(IndexedSeq(unsignedInput), IndexedSeq(newBoxCandidate)) val context = ErgoLikeContext(height + 1, @@ -158,7 +157,7 @@ object BlockchainSimulationSpecification { def byId(boxId: KeyType): Try[ErgoBox] = Try(boxes(boxId)) def byHeightRegValue(i: Int): Iterable[ErgoBox] = - boxes.values.filter(_.get(heightReg).getOrElse(LongConstant(i + 1)) == LongConstant(i)) + boxes.values.filter(_.get(heightReg).getOrElse(IntConstant(i + 1)) == IntConstant(i)) def byTwoInts(r1Id: ErgoBox.RegisterId, int1: Int, r2Id: ErgoBox.RegisterId, int2: Int): Option[ErgoBox] = @@ -213,7 +212,7 @@ object BlockchainSimulationSpecification { val initBlock = Block( (0 until windowSize).map { i => val txId = hash.hash(i.toString.getBytes ++ scala.util.Random.nextString(12).getBytes).toModifierId - val boxes = (1 to 50).map(_ => ErgoBox(10, GE(Height, LongConstant(i)).toSigmaProp, 0, Seq(), Map(heightReg -> LongConstant(i)), txId)) + val boxes = (1 to 50).map(_ => ErgoBox(10, GE(Height, IntConstant(i)).toSigmaProp, 0, Seq(), Map(heightReg -> IntConstant(i)), txId)) ergoplatform.ErgoLikeTransaction(IndexedSeq(), boxes) }, ErgoLikeContext.dummyPubkey diff --git a/src/test/scala/sigmastate/utxo/CollectionOperationsSpecification.scala b/src/test/scala/sigmastate/utxo/CollectionOperationsSpecification.scala index e2af51afd3..7b6c266ac6 100644 --- a/src/test/scala/sigmastate/utxo/CollectionOperationsSpecification.scala +++ b/src/test/scala/sigmastate/utxo/CollectionOperationsSpecification.scala @@ -18,7 +18,6 @@ class CollectionOperationsSpecification extends SigmaTestingCommons { private def context(boxesToSpend: IndexedSeq[ErgoBox] = IndexedSeq(), outputs: IndexedSeq[ErgoBox]): ErgoLikeContext = { - // TODO this means the context is not totally correct val (selfBox, toSpend) = 
if (boxesToSpend.isEmpty) (fakeSelf, IndexedSeq(fakeSelf)) else (boxesToSpend(0), boxesToSpend) ergoplatform.ErgoLikeContext( currentHeight = 50, @@ -38,6 +37,14 @@ class CollectionOperationsSpecification extends SigmaTestingCommons { verifier.verify(emptyEnv + (ScriptNameProp -> "verify"), prop, ctx, pr, fakeMessage).get._1 shouldBe true } + private def assertProof(code: String, + outputBoxValues: IndexedSeq[Long], + boxesToSpendValues: IndexedSeq[Long]) = { + val (prover, verifier, prop, ctx) = buildEnv(code, None, outputBoxValues, boxesToSpendValues) + val pr = prover.prove(emptyEnv + (ScriptNameProp -> "prove"), prop, ctx, fakeMessage).fold(t => throw t, x => x) + verifier.verify(emptyEnv + (ScriptNameProp -> "verify"), prop, ctx, pr, fakeMessage).get._1 shouldBe true + } + private def assertProverFail(code: String, expectedComp: SigmaPropValue, outputBoxValues: IndexedSeq[Long], @@ -49,14 +56,22 @@ class CollectionOperationsSpecification extends SigmaTestingCommons { private def buildEnv(code: String, expectedComp: Value[SType], outputBoxValues: IndexedSeq[Long], - boxesToSpendValues: IndexedSeq[Long]) = { + boxesToSpendValues: IndexedSeq[Long]): + (ContextEnrichingTestProvingInterpreter, ErgoLikeTestInterpreter, SigmaPropValue, ErgoLikeContext) = + buildEnv(code, Some(expectedComp), outputBoxValues, boxesToSpendValues) + + private def buildEnv(code: String, + expectedComp: Option[Value[SType]], + outputBoxValues: IndexedSeq[Long], + boxesToSpendValues: IndexedSeq[Long]): + (ContextEnrichingTestProvingInterpreter, ErgoLikeTestInterpreter, SigmaPropValue, ErgoLikeContext) = { val prover = new ContextEnrichingTestProvingInterpreter val verifier = new ErgoLikeTestInterpreter val pubkey = prover.dlogSecrets.head.publicImage val prop = compile(Map(), code).asBoolValue.toSigmaProp - prop shouldBe expectedComp + expectedComp.foreach(prop shouldBe _) val ctx = context(boxesToSpendValues.map(ErgoBox(_, pubkey, 0)), outputBoxValues.map(ErgoBox(_, pubkey, 0))) (prover, 
verifier, prop, ctx) @@ -92,7 +107,8 @@ class CollectionOperationsSpecification extends SigmaTestingCommons { val pr = prover.prove(prop, ctx, fakeMessage).get verifier.verify(prop, ctx, pr, fakeMessage).get._1 shouldBe true - //todo: finish + + //TODO coverage: add negative case for `exists` } property("forall") { @@ -123,7 +139,6 @@ class CollectionOperationsSpecification extends SigmaTestingCommons { val pr = prover.prove(prop, ctx, fakeMessage).get verifier.verify(prop, ctx, pr, fakeMessage).get._1 shouldBe true - //todo: finish } @@ -440,9 +455,7 @@ class CollectionOperationsSpecification extends SigmaTestingCommons { assertProof(code, expectedPropTree, outputBoxValues) } - //TODO: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/423 - // TODO costing rule in CollCoster - ignore("flatMap") { + property("flatMap") { assertProof("OUTPUTS.flatMap({ (out: Box) => out.propositionBytes })(0) == 0.toByte", EQ( ByIndex( @@ -472,84 +485,30 @@ class CollectionOperationsSpecification extends SigmaTestingCommons { IndexedSeq(1L, 1L)) } - //TODO: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/421 property("indices") { assertProof("OUTPUTS.indices == Coll(0, 1)", EQ(MethodCall(Outputs, IndicesMethod.withConcreteTypes(Map(tIV -> SBox)), Vector(), Map()), ConcreteCollection(IntConstant(0), IntConstant(1))), IndexedSeq(1L, 1L)) } - property("segmentLength") { - assertProof("OUTPUTS.segmentLength({ (out: Box) => out.value == 1L }, 0) == 1", - EQ( - MethodCall(Outputs, - SegmentLengthMethod.withConcreteTypes(Map(tIV -> SBox)), - Vector( - FuncValue(Vector((1, SBox)),EQ(ExtractAmount(ValUse(1, SBox)), LongConstant(1))), - IntConstant(0) - ), - Map()), - IntConstant(1)), - IndexedSeq(1L, 2L)) - } - - property("indexWhere") { - assertProof("OUTPUTS.indexWhere({ (out: Box) => out.value == 1L }, 0) == 0", - EQ( - MethodCall(Outputs, - IndexWhereMethod.withConcreteTypes(Map(tIV -> SBox)), - Vector( - FuncValue(Vector((1, 
SBox)), EQ(ExtractAmount(ValUse(1, SBox)), LongConstant(1))), - IntConstant(0) - ), - Map()), - IntConstant(0)), - IndexedSeq(1L, 2L)) - } - - property("lastIndexWhere") { - assertProof("OUTPUTS.lastIndexWhere({ (out: Box) => out.value == 1L }, 1) == 0", - EQ( - MethodCall(Outputs, - LastIndexWhereMethod.withConcreteTypes(Map(tIV -> SBox)), - Vector( - FuncValue(Vector((1, SBox)), EQ(ExtractAmount(ValUse(1, SBox)), LongConstant(1))), - IntConstant(1) - ), - Map()), - IntConstant(0)), - IndexedSeq(1L, 2L)) - } - property("zip") { - assertProof("OUTPUTS.zip(Coll(1,2)).size == 2", + assertProof("OUTPUTS.zip(INPUTS).size == 2", EQ( SizeOf(MethodCall(Outputs, - SCollection.ZipMethod.withConcreteTypes(Map(SCollection.tIV -> SBox, SCollection.tOV -> SInt)), - Vector( - ConcreteCollection(IntConstant(1), IntConstant(2)) - ), + SCollection.ZipMethod.withConcreteTypes(Map(SCollection.tIV -> SBox, SCollection.tOV -> SBox)), + Vector(Inputs), Map()).asCollection[STuple]), IntConstant(2)), - IndexedSeq(1L, 2L)) + IndexedSeq(1L, 2L), IndexedSeq(3L, 4L)) } - property("partition") { - assertProof("OUTPUTS.partition({ (box: Box) => box.value < 2L})._1.size == 1", - EQ( - SizeOf( - SelectField( - MethodCall(Outputs, - PartitionMethod.withConcreteTypes(Map(tIV -> SBox)), - Vector( - FuncValue(Vector((1, SBox)), LT(ExtractAmount(ValUse(1, SBox)), LongConstant(2))) - ), - Map()).asValue[STuple], - 1 - ).asCollection[SType] - ), - IntConstant(1)), - IndexedSeq(1L, 2L)) + property("zip (nested)") { + assertProof( + """OUTPUTS.zip(INPUTS).zip(OUTPUTS).zip(INPUTS) + | .map({ (t: (((Box, Box), Box), Box)) => + | t._1._2.value + t._2.value + | }).fold(0L, { (a: Long, v: Long) => a + v }) == 10""".stripMargin, + IndexedSeq(1L, 2L), IndexedSeq(3L, 4L)) } property("patch") { diff --git a/src/test/scala/sigmastate/utxo/ContextEnrichingSpecification.scala b/src/test/scala/sigmastate/utxo/ContextEnrichingSpecification.scala index 8751fcc6f6..4143faac81 100644 --- 
a/src/test/scala/sigmastate/utxo/ContextEnrichingSpecification.scala +++ b/src/test/scala/sigmastate/utxo/ContextEnrichingSpecification.scala @@ -6,7 +6,8 @@ import scorex.crypto.hash.Blake2b256 import sigmastate.Values._ import sigmastate._ import sigmastate.lang.Terms._ -import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, ErgoLikeTestInterpreter, SigmaTestingCommons} +import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, SigmaTestingCommons, ErgoLikeTestInterpreter} +import special.collection.Coll class ContextEnrichingSpecification extends SigmaTestingCommons { @@ -15,10 +16,10 @@ class ContextEnrichingSpecification extends SigmaTestingCommons { property("context enriching mixed w. crypto") { val prover = new ContextEnrichingTestProvingInterpreter - val preimage = prover.contextExtenders(1).value.asInstanceOf[Array[Byte]] + val preimage = prover.contextExtenders(1).value.asInstanceOf[Coll[Byte]] val pubkey = prover.dlogSecrets.head.publicImage - val env = Map("blake" -> Blake2b256(preimage), "pubkey" -> pubkey) + val env = Map("blake" -> Blake2b256(preimage.toArray), "pubkey" -> pubkey) val compiledScript = compile(env, """{ | pubkey && blake2b256(getVar[Coll[Byte]](1).get) == blake @@ -26,7 +27,7 @@ class ContextEnrichingSpecification extends SigmaTestingCommons { """.stripMargin).asSigmaProp val prop = SigmaAnd( pubkey, - EQ(CalcBlake2b256(GetVarByteArray(1).get), ByteArrayConstant(Blake2b256(preimage))).toSigmaProp + EQ(CalcBlake2b256(GetVarByteArray(1).get), ByteArrayConstant(Blake2b256(preimage.toArray))).toSigmaProp ) compiledScript shouldBe prop @@ -42,11 +43,11 @@ class ContextEnrichingSpecification extends SigmaTestingCommons { property("context enriching mixed w. 
crypto 2") { val prover = new ContextEnrichingTestProvingInterpreter - val preimage1 = prover.contextExtenders(1).value.asInstanceOf[Array[Byte]] - val preimage2 = prover.contextExtenders(2).value.asInstanceOf[Array[Byte]] + val preimage1 = prover.contextExtenders(1).value.asInstanceOf[Coll[Byte]] + val preimage2 = prover.contextExtenders(2).value.asInstanceOf[Coll[Byte]] val pubkey = prover.dlogSecrets.head.publicImage - val env = Map("blake" -> Blake2b256(preimage1 ++ preimage2), "pubkey" -> pubkey) + val env = Map("blake" -> Blake2b256(preimage1.append(preimage2).toArray), "pubkey" -> pubkey) val compiledScript = compile(env, """{ | pubkey && blake2b256(getVar[Coll[Byte]](1).get ++ getVar[Coll[Byte]](2).get) == blake @@ -57,7 +58,7 @@ class ContextEnrichingSpecification extends SigmaTestingCommons { pubkey, EQ( CalcBlake2b256(Append(GetVarByteArray(1).get, GetVarByteArray(2).get)), - ByteArrayConstant(Blake2b256(preimage1 ++ preimage2)) + ByteArrayConstant(Blake2b256(preimage1.append(preimage2).toArray)) ).toSigmaProp ) compiledScript shouldBe prop @@ -72,9 +73,7 @@ class ContextEnrichingSpecification extends SigmaTestingCommons { verifier.verify(env, compiledScript, ctxv, pr.proof, fakeMessage).get._1 shouldBe true } - // todo: ignored because of https://github.com/ScorexFoundation/sigmastate-interpreter/issues/419 - // todo: and https://github.com/ScorexFoundation/sigmastate-interpreter/issues/420 - ignore("prover enriching context - xor") { + property("prover enriching context - xor") { val v1 = Base16.decode("abcdef7865").get val k1 = 21: Byte @@ -89,15 +88,7 @@ class ContextEnrichingSpecification extends SigmaTestingCommons { val env = Map("k1" -> k1.toInt, "k2" -> k2.toInt, "r" -> r) val compiledScript = compile(env, - """{ - | - | // def Xor(c1: Coll[Byte], c2: Coll[Byte]): Coll[Byte] = c1.zipWith(c2, { (x, y) => x ^ y }) - | - | def Xor(c1: Coll[Byte], c2: Coll[Byte]): Coll[Byte] = c1.zip(c2).map({ (t : (Byte, Byte)) => t._1 ^ t._2 }) - | - | 
Xor(getVar[Coll[Byte]](k1).get, getVar[Coll[Byte]](k2).get) == r - |} - """.stripMargin).asBoolValue.toSigmaProp + "{ xor(getVar[Coll[Byte]](k1).get, getVar[Coll[Byte]](k2).get) == r }").asBoolValue.toSigmaProp val prop = EQ(Xor(GetVarByteArray(k1).get, GetVarByteArray(k2).get), ByteArrayConstant(r)).toSigmaProp compiledScript shouldBe prop @@ -111,7 +102,7 @@ class ContextEnrichingSpecification extends SigmaTestingCommons { //context w/out extensions assertExceptionThrown( verifier.verify(env, prop, ctx, pr.proof, fakeMessage).get, - rootCause(_).isInstanceOf[NoSuchElementException] + rootCause(_).isInstanceOf[ArrayIndexOutOfBoundsException] ) verifier.verify(env, prop, ctxv, pr.proof, fakeMessage).get._1 shouldBe true } @@ -121,16 +112,16 @@ class ContextEnrichingSpecification extends SigmaTestingCommons { */ property("prover enriching context") { val prover = new ContextEnrichingTestProvingInterpreter - val preimage = prover.contextExtenders(1: Byte).value.asInstanceOf[Array[Byte]] + val preimage = prover.contextExtenders(1).value.asInstanceOf[Coll[Byte]] - val env = Map("blake" -> Blake2b256(preimage)) + val env = Map("blake" -> Blake2b256(preimage.toArray)) val compiledScript = compile(env, """{ | blake2b256(getVar[Coll[Byte]](1).get) == blake |} """.stripMargin).asBoolValue.toSigmaProp - val prop = EQ(CalcBlake2b256(GetVarByteArray(1).get), ByteArrayConstant(Blake2b256(preimage))).toSigmaProp + val prop = EQ(CalcBlake2b256(GetVarByteArray(1).get), ByteArrayConstant(Blake2b256(preimage.toArray))).toSigmaProp compiledScript shouldBe prop val ctx = ErgoLikeContext.dummy(fakeSelf) @@ -147,10 +138,10 @@ class ContextEnrichingSpecification extends SigmaTestingCommons { property("prover enriching context 2") { val prover = new ContextEnrichingTestProvingInterpreter - val preimage1 = prover.contextExtenders(1).value.asInstanceOf[Array[Byte]] - val preimage2 = prover.contextExtenders(2).value.asInstanceOf[Array[Byte]] + val preimage1 = 
prover.contextExtenders(1).value.asInstanceOf[Coll[Byte]] + val preimage2 = prover.contextExtenders(2).value.asInstanceOf[Coll[Byte]] - val env = Map("blake" -> Blake2b256(preimage2 ++ preimage1)) + val env = Map("blake" -> Blake2b256(preimage2.append(preimage1).toArray)) val compiledScript = compile(env, """{ | blake2b256(getVar[Coll[Byte]](2).get ++ getVar[Coll[Byte]](1).get) == blake @@ -158,7 +149,7 @@ class ContextEnrichingSpecification extends SigmaTestingCommons { """.stripMargin).asBoolValue.toSigmaProp val prop = EQ(CalcBlake2b256(Append(GetVarByteArray(2).get, GetVarByteArray(1).get)), - ByteArrayConstant(Blake2b256(preimage2 ++ preimage1))).toSigmaProp + ByteArrayConstant(Blake2b256(preimage2.append(preimage1).toArray))).toSigmaProp compiledScript shouldBe prop val ctx = ErgoLikeContext.dummy(fakeSelf) diff --git a/src/test/scala/sigmastate/utxo/ErgoLikeInterpreterSpecification.scala b/src/test/scala/sigmastate/utxo/ErgoLikeInterpreterSpecification.scala index 27d9a1c176..ae1c5da8b0 100644 --- a/src/test/scala/sigmastate/utxo/ErgoLikeInterpreterSpecification.scala +++ b/src/test/scala/sigmastate/utxo/ErgoLikeInterpreterSpecification.scala @@ -3,20 +3,25 @@ package sigmastate.utxo import com.google.common.primitives.Bytes import org.ergoplatform.ErgoBox.R4 import org.ergoplatform._ +import org.ergoplatform.validation.ValidationException import org.scalatest.TryValues._ import scorex.crypto.hash.Blake2b256 import sigmastate.SCollection.SByteArray import sigmastate.TrivialProp.{FalseProp, TrueProp} import sigmastate.Values._ import sigmastate._ +import sigmastate.eval._ import sigmastate.interpreter.Interpreter._ import sigmastate.basics.DLogProtocol.ProveDlog import sigmastate.basics.ProveDHTuple -import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, ErgoLikeTestInterpreter, SigmaTestingCommons} +import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, SigmaTestingCommons, ErgoLikeTestInterpreter} import sigmastate.lang.Terms._ 
-import sigmastate.serialization.ValueSerializer +import sigmastate.lang.exceptions.InterpreterException +import sigmastate.serialization.{ValueSerializer, SerializationSpecification} + +class ErgoLikeInterpreterSpecification extends SigmaTestingCommons + with SerializationSpecification { -class ErgoLikeInterpreterSpecification extends SigmaTestingCommons { implicit lazy val IR: TestingIRContext = new TestingIRContext private val reg1 = ErgoBox.nonMandatoryRegisters.head @@ -31,14 +36,16 @@ class ErgoLikeInterpreterSpecification extends SigmaTestingCommons { val ctx = ErgoLikeContext.dummy(fakeSelf) - val e = compile(Map("h1" -> h1.bytes, "h2" -> h2.bytes), "h1 == h1") + val e = compile(Map("h1" -> h1.treeWithSegregation.bytes, "h2" -> h2.treeWithSegregation.bytes), "h1 == h1") val exp = TrueLeaf e shouldBe exp val res = verifier.reduceToCrypto(ctx, exp).get._1 res shouldBe TrueProp - val res2 = verifier.reduceToCrypto(ctx, EQ(ByteArrayConstant(h1.bytes), ByteArrayConstant(h2.bytes))).get._1 + val res2 = verifier.reduceToCrypto(ctx, + EQ(ByteArrayConstant(h1.treeWithSegregation.bytes), + ByteArrayConstant(h2.treeWithSegregation.bytes))).get._1 res2 shouldBe FalseProp } @@ -605,7 +612,7 @@ class ErgoLikeInterpreterSpecification extends SigmaTestingCommons { val pr = prover.prove(emptyEnv + (ScriptNameProp -> "prove"), prop, ctx, fakeMessage).get verifier.verify(emptyEnv + (ScriptNameProp -> "verify"), prop, ctx, pr, fakeMessage).get._1 shouldBe true - //todo: check failing branches + //TODO coverage: check failing branches } property("DeserializeRegister value type mismatch") { @@ -631,6 +638,38 @@ class ErgoLikeInterpreterSpecification extends SigmaTestingCommons { prover.prove(emptyEnv + (ScriptNameProp -> "prove"), prop, ctx, fakeMessage).fold(t => throw t, x => x) } + property("DeserializeContext value(script) type mismatch") { + forAll(logicalExprTreeNodeGen(Seq(AND.apply))) { scriptProp => + val verifier = new ErgoLikeTestInterpreter + val scriptId = 21.toByte 
+ val prover0 = new ContextEnrichingTestProvingInterpreter() + // serialize boolean expression + val prover = prover0.withContextExtender(scriptId, ByteArrayConstant(ValueSerializer.serialize(scriptProp))) + val prop = OR( + LogicalNot(DeserializeContext(scriptId, scriptProp.tpe)), + DeserializeContext(scriptId, scriptProp.tpe) + ).toSigmaProp + + val box = ErgoBox(20, ErgoScriptPredef.TrueProp, 0, Seq(), Map()) + val ctx = ErgoLikeContext( + currentHeight = 50, + lastBlockUtxoRoot = AvlTreeData.dummy, + minerPubkey = ErgoLikeContext.dummyPubkey, + boxesToSpend = IndexedSeq(box), + createTransaction(IndexedSeq(ErgoBox(10, TrueProp, 0))), + self = box) + + val pr = prover.prove(prop, ctx, fakeMessage).get + // make sure verifier will fail on deserializing context with mismatched type + // try to deserialize it as an expression with integer type + val prop1 = EQ(DeserializeContext(scriptId, SInt), IntConstant(1)).toSigmaProp + an[ValidationException] should be thrownBy + verifier.verify(emptyEnv + (ScriptNameProp -> "verify"), prop1, ctx, pr, fakeMessage).get + // make sure prover fails as well on deserializing context with mismatched type + an[ValidationException] should be thrownBy prover.prove(prop1, ctx, fakeMessage).get + } + } + property("non-const ProveDHT") { import sigmastate.interpreter.CryptoConstants.dlogGroup compile(Map("gA" -> dlogGroup.generator), diff --git a/src/test/scala/sigmastate/utxo/FuncVarSpecification.scala b/src/test/scala/sigmastate/utxo/FuncVarSpecification.scala index 0a89571dff..6576a1d77e 100644 --- a/src/test/scala/sigmastate/utxo/FuncVarSpecification.scala +++ b/src/test/scala/sigmastate/utxo/FuncVarSpecification.scala @@ -14,7 +14,7 @@ class FuncVarSpecification extends SigmaTestingCommons { override val okPrintEvaluatedEntries: Boolean = false } - // TODO : related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/417 + // TODO soft-fork: related to 
https://github.com/ScorexFoundation/sigmastate-interpreter/issues/417 ignore("Func context variable") { // val scriptId = 21.toByte // val code = compileWithCosting(emptyEnv, s"{ (x: Int) => x + 1 }") diff --git a/src/test/scala/sigmastate/utxo/SigmaCompilerSpecification.scala b/src/test/scala/sigmastate/utxo/SigmaCompilerSpecification.scala index fafd4818f0..3d6f7aed45 100644 --- a/src/test/scala/sigmastate/utxo/SigmaCompilerSpecification.scala +++ b/src/test/scala/sigmastate/utxo/SigmaCompilerSpecification.scala @@ -30,8 +30,7 @@ class SigmaCompilerSpecification extends SigmaTestingCommons { compile("10.toBigInt.modQ") shouldEqual ModQ(BigIntConstant(10)) } - // TODO: enable after https://github.com/ScorexFoundation/sigmastate-interpreter/issues/324 is done - ignore("modular arithmetic ops: BinOps") { + property("modular arithmetic ops: BinOps") { compile("10.toBigInt.plusModQ(2.toBigInt)") shouldEqual PlusModQ(BigIntConstant(10), BigIntConstant(2)) compile("10.toBigInt.minusModQ(2.toBigInt)") shouldEqual diff --git a/src/test/scala/sigmastate/utxo/SpamSpecification.scala b/src/test/scala/sigmastate/utxo/SpamSpecification.scala index b3756163fd..5179a192c1 100644 --- a/src/test/scala/sigmastate/utxo/SpamSpecification.scala +++ b/src/test/scala/sigmastate/utxo/SpamSpecification.scala @@ -1,6 +1,7 @@ package sigmastate.utxo import org.ergoplatform._ +import org.ergoplatform.validation.{ValidationRules, ValidationException} import org.scalacheck.Gen import scalan.util.BenchmarkUtil import scorex.crypto.authds.{ADKey, ADValue} @@ -11,6 +12,7 @@ import sigmastate.SCollection.SByteArray import sigmastate.Values._ import sigmastate.lang.Terms._ import sigmastate._ +import sigmastate.eval._ import sigmastate.interpreter.Interpreter._ import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, SigmaTestingCommons, ErgoLikeTestInterpreter} import sigmastate.lang.exceptions.CosterException @@ -31,7 +33,7 @@ class SpamSpecification extends SigmaTestingCommons { (1 to 
1000000).foreach(_ => hf(block)) val t0 = System.currentTimeMillis() - (1 to 3000000).foreach(_ => hf(block)) + (1 to 6000000).foreach(_ => hf(block)) val t = System.currentTimeMillis() t - t0 } @@ -44,7 +46,7 @@ class SpamSpecification extends SigmaTestingCommons { } property("huge byte array") { - //todo: make value dependent on CostTable constants, not magic constant + //TODO coverage: make value dependent on CostTable constants, not magic constant val ba = Random.randomBytes(10000000) val id = 11: Byte @@ -119,9 +121,7 @@ class SpamSpecification extends SigmaTestingCommons { val publicImages = secret.publicImage +: simulated val prop = OR(publicImages.map(image => SigmaPropConstant(image).isProven)).toSigmaProp - val pt0 = System.currentTimeMillis() val proof = prover.prove(emptyEnv + (ScriptNameProp -> "prove"), prop, ctx, fakeMessage).get - val pt = System.currentTimeMillis() val (_, terminated) = termination(() => verifier.verify(emptyEnv + (ScriptNameProp -> "verify"), prop, ctx, proof, fakeMessage) @@ -146,7 +146,9 @@ class SpamSpecification extends SigmaTestingCommons { FuncValue(Vector((1, SBox)), AND( GE(ExtractAmount(ValUse(1, SBox)), LongConstant(10)), - EQ(ExtractScriptBytes(ValUse(1, SBox)), ByteArrayConstant(propToCompare.bytes)) + EQ( + ExtractScriptBytes(ValUse(1, SBox)), + ByteArrayConstant(propToCompare.treeWithSegregation.bytes)) ) ) ).toSigmaProp @@ -171,7 +173,6 @@ class SpamSpecification extends SigmaTestingCommons { property("transaction with many inputs and outputs") { implicit lazy val IR = new TestingIRContext { - this.useAlphaEquality = true override val okPrintEvaluatedEntries = false override def onEvaluatedGraphNode(env: DataEnv, sym: Sym, value: AnyRef): Unit = { if (okPrintEvaluatedEntries) @@ -214,15 +215,17 @@ class SpamSpecification extends SigmaTestingCommons { terminated shouldBe true assertExceptionThrown( res.fold(t => throw t, identity), - t => { - rootCause(t).isInstanceOf[CosterException] && t.getMessage.contains("Script 
cannot be executed") + { + case ve: ValidationException => + ve.rule == ValidationRules.CheckCostWithContext && + rootCause(ve).isInstanceOf[CosterException] } ) // measure time required to execute the script itself and it is more then timeout val (_, calcTime) = BenchmarkUtil.measureTime { import limitlessProver.IR._ - val costingRes @ Pair(calcF, costF) = doCostingEx(emptyEnv, prop, true) + val Pair(calcF, _) = doCostingEx(emptyEnv, prop, true) val calcCtx = ctx.toSigmaContext(limitlessProver.IR, isCost = false) limitlessProver.calcResult(calcCtx, calcF) } @@ -257,7 +260,7 @@ class SpamSpecification extends SigmaTestingCommons { println("proof size: " + proof.length) - val treeData = new AvlTreeData(digest, AvlTreeFlags.ReadOnly, 32, None) + val treeData = SigmaDsl.avlTree(new AvlTreeData(digest, AvlTreeFlags.ReadOnly, 32, None)) val key1 = genKey("key1") val value1 = genValue("value1") diff --git a/src/test/scala/sigmastate/utxo/ThresholdSpecification.scala b/src/test/scala/sigmastate/utxo/ThresholdSpecification.scala index 1833431704..9ba01a676a 100644 --- a/src/test/scala/sigmastate/utxo/ThresholdSpecification.scala +++ b/src/test/scala/sigmastate/utxo/ThresholdSpecification.scala @@ -169,7 +169,7 @@ class ThresholdSpecification extends SigmaTestingCommons { for (t <- testCaseSeq) { for (bound <- 0 to testCaseSeq.length + 1) { val pReduced = prover.reduceToCrypto(ctx, AtLeast(bound, t.vector)) - pReduced.fold(t => throw t, x => true) shouldBe true + pReduced.fold(t => throw t, _ => true) shouldBe true if (t.dlogOnlyVector.v.isEmpty) { // Case 0: no ProveDlogs in the test vector -- just booleans if (t.numTrue >= bound) { pReduced.get._1 shouldBe TrueProp diff --git a/src/test/scala/sigmastate/utxo/benchmarks/BenchmarkUtils.scala b/src/test/scala/sigmastate/utxo/benchmarks/BenchmarkUtils.scala deleted file mode 100644 index 53bac6af16..0000000000 --- a/src/test/scala/sigmastate/utxo/benchmarks/BenchmarkUtils.scala +++ /dev/null @@ -1,30 +0,0 @@ -package 
sigmastate.utxo.benchmarks - -import scala.concurrent.duration.Duration -import scala.concurrent.{Await, Future} -import scala.concurrent.ExecutionContext.Implicits.global - -object BenchmarkUtils { -// def measureTime[T](action: => T): (T, Long) = { -// val t0 = System.currentTimeMillis() -// val res = action -// val t = System.currentTimeMillis() -// (res, t - t0) -// } -// -// def runTasks(nTasks: Int)(block: Int => Unit) = { -// val (_, total) = measureTime { -// val tasks = (1 to nTasks).map(iTask => Future(block(iTask))) -// val res = Await.result(Future.sequence(tasks), Duration.Inf) -// } -// println(s"Completed $nTasks tasks in $total msec") -// } -// -// def measure[T](nIters: Int, okShow: Boolean = true)(action: Int => Unit): Unit = { -// for (i <- 0 until nIters) { -// val (res, iterTime) = measureTime(action(i)) -// if (okShow) -// println(s"Iter $i: $iterTime ms") -// } -// } -} diff --git a/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingKernelContract.scala b/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingKernelContract.scala index 9bca79221c..bb52ae09f2 100644 --- a/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingKernelContract.scala +++ b/src/test/scala/sigmastate/utxo/benchmarks/CrowdFundingKernelContract.scala @@ -8,6 +8,7 @@ import sigmastate.basics.DLogProtocol.{DLogInteractiveProver, DLogProverInput, F import sigmastate.basics.VerifierMessage.Challenge import scorex.crypto.hash.Blake2b256 import sigmastate._ +import sigmastate.lang.Terms._ import sigmastate.helpers.ContextEnrichingTestProvingInterpreter import sigmastate.interpreter.{CryptoConstants, Interpreter} import sigmastate.utils.Helpers @@ -57,7 +58,8 @@ class CrowdFundingKernelContract( val c2 = Array( ctx.currentHeight < timeout, ctx.spendingTransaction.outputs.exists(out => { - out.value >= minToRaise && util.Arrays.equals(out.propositionBytes, projectPubKey.toSigmaProp.bytes) + out.value >= minToRaise && + util.Arrays.equals(out.propositionBytes, 
projectPubKey.toSigmaProp.treeWithSegregation.bytes) }) ).forall(identity) var proof: projectProver.ProofT = null diff --git a/src/test/scala/sigmastate/utxo/benchmarks/CrowdfundingBenchmark.scala b/src/test/scala/sigmastate/utxo/benchmarks/CrowdfundingBenchmark.scala index 7309f546b5..bc2bf4dd57 100644 --- a/src/test/scala/sigmastate/utxo/benchmarks/CrowdfundingBenchmark.scala +++ b/src/test/scala/sigmastate/utxo/benchmarks/CrowdfundingBenchmark.scala @@ -42,7 +42,7 @@ class CrowdfundingBenchmark extends SigmaTestingCommons { val (ok, time) = measureTime { var res = true - for (i <- 1 to nIters) { + for (_ <- 1 to nIters) { val proof = contract.prove(ctx, fakeMessage) res = contract.verify(proof, ctx, fakeMessage).get._1 res shouldBe true @@ -65,7 +65,7 @@ class CrowdfundingBenchmark extends SigmaTestingCommons { val (ok, time) = measureTime { var res = true - for (i <- 1 to nIters) { + for (_ <- 1 to nIters) { val proof = contract.prove(ctx, fakeMessage) res = contract.verify(proof, ctx, fakeMessage).get._1 res shouldBe true diff --git a/src/test/scala/sigmastate/utxo/blockchain/BlockchainSimulationTestingCommons.scala b/src/test/scala/sigmastate/utxo/blockchain/BlockchainSimulationTestingCommons.scala index 320ef74bd0..0cc674b270 100644 --- a/src/test/scala/sigmastate/utxo/blockchain/BlockchainSimulationTestingCommons.scala +++ b/src/test/scala/sigmastate/utxo/blockchain/BlockchainSimulationTestingCommons.scala @@ -1,13 +1,13 @@ package sigmastate.utxo.blockchain -import org.ergoplatform +import org.ergoplatform.ErgoBox.TokenId import org.ergoplatform._ import scorex.crypto.authds.{ADDigest, ADKey, ADValue} import scorex.crypto.authds.avltree.batch.{Remove, BatchAVLProver, Insert} import scorex.crypto.hash.{Digest32, Blake2b256} -import sigmastate.{GE, AvlTreeData, Values, AvlTreeFlags} +import sigmastate.{GE, AvlTreeData, AvlTreeFlags, Values} import sigmastate.Values.{LongConstant, ErgoTree} -import sigmastate.eval.IRContext +import sigmastate.eval._ import 
sigmastate.helpers.{ErgoLikeTestProvingInterpreter, SigmaTestingCommons, ErgoTransactionValidator} import scala.collection.mutable @@ -48,7 +48,7 @@ trait BlockchainSimulationTestingCommons extends SigmaTestingCommons { val txs = boxesToSpend.map { box => val newBoxCandidate = - new ErgoBoxCandidate(10, prop, height, Seq(), Map()) + new ErgoBoxCandidate(10, prop, height, Colls.emptyColl[(TokenId, Long)], Map()) val unsignedInput = new UnsignedInput(box.id) val tx = UnsignedErgoLikeTransaction(IndexedSeq(unsignedInput), IndexedSeq(newBoxCandidate)) val context = ErgoLikeContext(height + 1, diff --git a/src/test/scala/sigmastate/utxo/examples/AssetsAtomicExchange.scala b/src/test/scala/sigmastate/utxo/examples/AssetsAtomicExchange.scala index 3cee860569..ab18f0ce99 100644 --- a/src/test/scala/sigmastate/utxo/examples/AssetsAtomicExchange.scala +++ b/src/test/scala/sigmastate/utxo/examples/AssetsAtomicExchange.scala @@ -41,13 +41,12 @@ case class AssetsAtomicExchange[Spec <: ContractSpec] """{ | (HEIGHT > deadline && pkA) || { | val tokenData = OUTPUTS(0).R2[Coll[(Coll[Byte], Long)]].get(0) - | val c = allOf(Coll( + | allOf(Coll( | tokenData._1 == tokenId, | tokenData._2 >= 60L, | OUTPUTS(0).propositionBytes == pkA.propBytes, | OUTPUTS(0).R4[Coll[Byte]].get == SELF.id | )) - | c | } |} """.stripMargin) @@ -57,7 +56,7 @@ case class AssetsAtomicExchange[Spec <: ContractSpec] (HEIGHT > deadline && pkB) || { val knownBoxId = OUTPUTS(1).R4[Coll[Byte]].get == SELF.id allOf(Coll( - OUTPUTS(1).value >= 100, + OUTPUTS(1).value >= 100L, knownBoxId, OUTPUTS(1).propositionBytes == pkB.propBytes )) @@ -66,9 +65,9 @@ case class AssetsAtomicExchange[Spec <: ContractSpec] """{ | (HEIGHT > deadline && pkB) || | allOf(Coll( - | OUTPUTS(1).value >= 100, - | OUTPUTS(1).R4[Coll[Byte]].get == SELF.id, - | OUTPUTS(1).propositionBytes == pkB.propBytes + | OUTPUTS(1).value >= 100L, + | OUTPUTS(1).propositionBytes == pkB.propBytes, + | OUTPUTS(1).R4[Coll[Byte]].get == SELF.id | )) |} 
""".stripMargin) diff --git a/src/test/scala/sigmastate/utxo/examples/AssetsAtomicExchangeTests.scala b/src/test/scala/sigmastate/utxo/examples/AssetsAtomicExchangeTests.scala index ae3c1b527c..112d492b14 100644 --- a/src/test/scala/sigmastate/utxo/examples/AssetsAtomicExchangeTests.scala +++ b/src/test/scala/sigmastate/utxo/examples/AssetsAtomicExchangeTests.scala @@ -8,7 +8,7 @@ import org.ergoplatform.dsl.TestContractSpec import scorex.crypto.hash.Blake2b256 import sigmastate.SCollection.SByteArray import sigmastate._ -import sigmastate.Values.{LongConstant, BlockValue, Value, ByteArrayConstant, ValDef, ValUse} +import sigmastate.Values.{LongConstant, BlockValue, Value, ByteArrayConstant, ErgoTree, ValDef, ValUse} import sigmastate.eval.CSigmaProp import sigmastate.eval.Extensions._ import sigmastate.lang.Terms.ValueOps @@ -72,8 +72,8 @@ class AssetsAtomicExchangeTests extends SigmaTestingCommons { suite => EQ(ExtractRegisterAs(ValUse(1, SBox), R4, SOption(SCollection(SByte))).get, ExtractId(Self)) ).toSigmaProp )) - ).asBoolValue - buyerProp.ergoTree.proposition shouldBe expectedBuyerTree + ).asSigmaProp + buyerProp.ergoTree shouldBe ErgoTree.fromProposition(expectedBuyerTree) } import contract.spec._ diff --git a/src/test/scala/sigmastate/utxo/examples/AtomicSwapExampleSpecification.scala b/src/test/scala/sigmastate/utxo/examples/AtomicSwapExampleSpecification.scala index 89f27fdfb2..fb8f846b3a 100644 --- a/src/test/scala/sigmastate/utxo/examples/AtomicSwapExampleSpecification.scala +++ b/src/test/scala/sigmastate/utxo/examples/AtomicSwapExampleSpecification.scala @@ -8,10 +8,11 @@ import sigmastate._ import interpreter.Interpreter._ import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, ErgoLikeTestInterpreter, SigmaTestingCommons} import sigmastate.lang.Terms._ -import sigmastate.utxo.{GetVar, SizeOf} +import sigmastate.utxo.SizeOf class AtomicSwapExampleSpecification extends SigmaTestingCommons { - implicit lazy val IR = new TestingIRContext + 
private implicit lazy val IR: TestingIRContext = new TestingIRContext + /** * Atomic cross-chain trading example: * Alice(A) has coins in chain 1, Bob(B) has coins in chain 2, they want to exchange them atomically and with no @@ -40,19 +41,19 @@ class AtomicSwapExampleSpecification extends SigmaTestingCommons { val env = Map( ScriptNameProp -> "atomic", "height1" -> height1, "height2" -> height2, - "deadlineA" -> deadlineA, "deadlineB" -> deadlineB, + "deadlineBob" -> deadlineB, "deadlineAlice" -> deadlineA, "pubkeyA" -> pubkeyA, "pubkeyB" -> pubkeyB, "hx" -> hx) val prop1 = compile(env, """{ | anyOf(Coll( - | HEIGHT > height1 + deadlineA && pubkeyA, + | HEIGHT > height1 + deadlineBob && pubkeyA, | pubkeyB && blake2b256(getVar[Coll[Byte]](1).get) == hx | )) |}""".stripMargin).asSigmaProp //chain1 script val prop1Tree = SigmaOr( - SigmaAnd(GT(Height, IntConstant(height1 + deadlineA)).toSigmaProp, pubkeyA), + SigmaAnd(GT(Height, IntConstant(height1 + deadlineB)).toSigmaProp, pubkeyA), SigmaAnd(pubkeyB, EQ(CalcBlake2b256(GetVarByteArray(1).get), hx).toSigmaProp) ) prop1 shouldBe prop1Tree @@ -60,10 +61,10 @@ class AtomicSwapExampleSpecification extends SigmaTestingCommons { val script2 = """{ | anyOf(Coll( - | HEIGHT > height2 + deadlineB && pubkeyB, + | HEIGHT > height2 + deadlineAlice && pubkeyB, | allOf(Coll( | pubkeyA, - | getVar[Coll[Byte]](1).get.size<33, + | getVar[Coll[Byte]](1).get.size < 33, | blake2b256(getVar[Coll[Byte]](1).get) == hx | )) | )) @@ -76,7 +77,7 @@ class AtomicSwapExampleSpecification extends SigmaTestingCommons { Vector(ValDef(1, GetVarByteArray(1).get)), SigmaOr( SigmaAnd( - GT(Height, IntConstant(height2 + deadlineB)).toSigmaProp, + GT(Height, IntConstant(height2 + deadlineA)).toSigmaProp, pubkeyB), SigmaAnd( AND( diff --git a/src/test/scala/sigmastate/utxo/examples/CoinEmissionSpecification.scala b/src/test/scala/sigmastate/utxo/examples/CoinEmissionSpecification.scala index c5709a5644..731ccb0a10 100644 --- 
a/src/test/scala/sigmastate/utxo/examples/CoinEmissionSpecification.scala +++ b/src/test/scala/sigmastate/utxo/examples/CoinEmissionSpecification.scala @@ -10,6 +10,7 @@ import sigmastate.lang.Terms._ import sigmastate.utxo.blockchain.BlockchainSimulationTestingCommons._ import sigmastate.utxo._ import sigmastate._ +import sigmastate.eval._ /** * An example of currency emission contract. @@ -21,7 +22,9 @@ import sigmastate._ class CoinEmissionSpecification extends SigmaTestingCommons with ScorexLogging { // don't use TestingIRContext, this suite also serves the purpose of testing the RuntimeIRContext implicit lazy val IR: TestingIRContext = new TestingIRContext { + // uncomment if you want to log script evaluation // override val okPrintEvaluatedEntries = true + saveGraphsInFile = false } private val reg1 = ErgoBox.nonMandatoryRegisters.head @@ -81,7 +84,7 @@ class CoinEmissionSpecification extends SigmaTestingCommons with ScorexLogging { val lastCoins = LE(ExtractAmount(Self), s.oneEpochReduction) val prop = BinOr( - AND(heightIncreased, sameScriptRule, correctCoinsConsumed, heightCorrect), + AND(heightCorrect, heightIncreased, sameScriptRule, correctCoinsConsumed), BinAnd(heightIncreased, lastCoins) ).toSigmaProp @@ -100,7 +103,7 @@ class CoinEmissionSpecification extends SigmaTestingCommons with ScorexLogging { | val heightIncreased = HEIGHT > SELF.R4[Int].get | val heightCorrect = out.R4[Int].get == HEIGHT | val lastCoins = SELF.value <= oneEpochReduction - | allOf(Coll(heightIncreased, sameScriptRule, correctCoinsConsumed, heightCorrect)) || (heightIncreased && lastCoins) + | allOf(Coll(heightCorrect, heightIncreased, sameScriptRule, correctCoinsConsumed)) || (heightIncreased && lastCoins) |}""".stripMargin).asBoolValue.toSigmaProp prop1 shouldEqual prop @@ -113,8 +116,9 @@ class CoinEmissionSpecification extends SigmaTestingCommons with ScorexLogging { val initBlock = FullBlock(IndexedSeq(createTransaction(initialBoxCandidate)), minerPubkey) val genesisState 
= ValidationState.initialState(initBlock) val fromState = genesisState.boxesReader.byId(genesisState.boxesReader.allIds.head).get - val initialBox = ErgoBox(initialBoxCandidate.value, initialBoxCandidate.ergoTree, 0, - initialBoxCandidate.additionalTokens, initialBoxCandidate.additionalRegisters, initBlock.txs.head.id) + val initialBox = new ErgoBox(initialBoxCandidate.value, initialBoxCandidate.ergoTree, + initialBoxCandidate.additionalTokens, initialBoxCandidate.additionalRegisters, + initBlock.txs.head.id, 0, 0) initialBox shouldBe fromState def genCoinbaseLikeTransaction(state: ValidationState, @@ -122,17 +126,17 @@ class CoinEmissionSpecification extends SigmaTestingCommons with ScorexLogging { height: Int): ErgoLikeTransaction = { assert(state.state.currentHeight == height - 1) val ut = if (emissionBox.value > s.oneEpochReduction) { - val minerBox = new ErgoBoxCandidate(emissionAtHeight(height), minerProp, height, Seq(), Map()) + val minerBox = new ErgoBoxCandidate(emissionAtHeight(height), minerProp, height, Colls.emptyColl, Map()) val newEmissionBox: ErgoBoxCandidate = - new ErgoBoxCandidate(emissionBox.value - minerBox.value, prop, height, Seq(), Map(register -> IntConstant(height))) + new ErgoBoxCandidate(emissionBox.value - minerBox.value, prop, height, Colls.emptyColl, Map(register -> IntConstant(height))) UnsignedErgoLikeTransaction( IndexedSeq(new UnsignedInput(emissionBox.id)), IndexedSeq(newEmissionBox, minerBox) ) } else { - val minerBox1 = new ErgoBoxCandidate(emissionBox.value - 1, minerProp, height, Seq(), Map(register -> IntConstant(height))) - val minerBox2 = new ErgoBoxCandidate(1, minerProp, height, Seq(), Map(register -> IntConstant(height))) + val minerBox1 = new ErgoBoxCandidate(emissionBox.value - 1, minerProp, height, Colls.emptyColl, Map(register -> IntConstant(height))) + val minerBox2 = new ErgoBoxCandidate(1, minerProp, height, Colls.emptyColl, Map(register -> IntConstant(height))) UnsignedErgoLikeTransaction( IndexedSeq(new 
UnsignedInput(emissionBox.id)), IndexedSeq(minerBox1, minerBox2) diff --git a/src/test/scala/sigmastate/utxo/examples/CoopExampleSpecification.scala b/src/test/scala/sigmastate/utxo/examples/CoopExampleSpecification.scala index 0a5d9f77fd..cbf578a0ef 100644 --- a/src/test/scala/sigmastate/utxo/examples/CoopExampleSpecification.scala +++ b/src/test/scala/sigmastate/utxo/examples/CoopExampleSpecification.scala @@ -287,8 +287,8 @@ class CoopExampleSpecification extends SigmaTestingCommons { "pubkeyB" -> pubkeyB, "pubkeyC" -> pubkeyC, "pubkeyD" -> pubkeyD, - "spendingContract1Hash" -> ByteArrayConstant(Blake2b256(spendingProp1.bytes)), - "spendingContract2Hash" -> ByteArrayConstant(Blake2b256(spendingProp3.bytes)) + "spendingContract1Hash" -> ByteArrayConstant(Blake2b256(spendingProp1.treeWithSegregation.bytes)), + "spendingContract2Hash" -> ByteArrayConstant(Blake2b256(spendingProp3.treeWithSegregation.bytes)) ) // Basic compilation @@ -357,7 +357,7 @@ class CoopExampleSpecification extends SigmaTestingCommons { val inputEnv = Map( - "thresholdProp" -> ByteArrayConstant(Blake2b256(thresholdProp.bytes)), + "thresholdProp" -> ByteArrayConstant(Blake2b256(thresholdProp.treeWithSegregation.bytes)), "pubkeyA" -> pubkeyA ) diff --git a/src/test/scala/sigmastate/utxo/examples/DHTupleExampleSpecification.scala b/src/test/scala/sigmastate/utxo/examples/DHTupleExampleSpecification.scala index c98e97c777..fa551e6a20 100644 --- a/src/test/scala/sigmastate/utxo/examples/DHTupleExampleSpecification.scala +++ b/src/test/scala/sigmastate/utxo/examples/DHTupleExampleSpecification.scala @@ -13,6 +13,7 @@ import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, ErgoLikeTestI import sigmastate.interpreter.CryptoConstants import sigmastate.interpreter.Interpreter._ import sigmastate.lang.Terms._ +import sigmastate.eval._ class DHTupleExampleSpecification extends SigmaTestingCommons { private implicit lazy val IR = new TestingIRContext diff --git 
a/src/test/scala/sigmastate/utxo/examples/DemurrageExampleSpecification.scala b/src/test/scala/sigmastate/utxo/examples/DemurrageExampleSpecification.scala index 1e8bbf1c8d..65877b7013 100644 --- a/src/test/scala/sigmastate/utxo/examples/DemurrageExampleSpecification.scala +++ b/src/test/scala/sigmastate/utxo/examples/DemurrageExampleSpecification.scala @@ -1,7 +1,5 @@ package sigmastate.utxo.examples -import sigmastate.Values.{LongConstant, SigmaPropConstant, TaggedBox} -import sigmastate._ import sigmastate.interpreter.Interpreter._ import org.ergoplatform._ import sigmastate.Values.ShortConstant @@ -12,7 +10,6 @@ import sigmastate.lang.Terms._ class DemurrageExampleSpecification extends SigmaTestingCommons { implicit lazy val IR = new TestingIRContext - private val reg1 = ErgoBox.nonMandatoryRegisters.head /** * Demurrage currency example. @@ -54,7 +51,6 @@ class DemurrageExampleSpecification extends SigmaTestingCommons { "regScript" -> regScript ) - //todo: add condition on val prop = compile(env, """{ | val outIdx = getVar[Short](127).get @@ -76,26 +72,6 @@ class DemurrageExampleSpecification extends SigmaTestingCommons { | } """.stripMargin).asSigmaProp - /* - todo: fix / uncomment - val reg1 = ErgoBox.nonMandatoryRegisters.head - val propTree = BinOr( - SigmaPropConstant(regScript).isProven, - AND( - GE(Height, Plus(ExtractRegisterAs[STuple](Self, reg1).get.asTuple. 
, LongConstant(demurragePeriod))), - Exists(Outputs, 21, - BinAnd( - GE(ExtractAmount(TaggedBox(21)), Minus(ExtractAmount(Self), LongConstant(demurrageCost))), - EQ(ExtractScriptBytes(TaggedBox(21)), ExtractScriptBytes(Self)), - LE(ExtractRegisterAs[SLong.type](TaggedBox(21), reg1).get, Height), - GE(ExtractRegisterAs[SLong.type](TaggedBox(21), reg1).get, Minus(Height, 50L)) - ) - ) - ) - ) - prop shouldBe propTree - */ - val inHeight = 0 val outValue = 100 val approxSize = createBox(outValue, prop, inHeight).bytes.length + 2 diff --git a/src/test/scala/sigmastate/utxo/examples/DummyExamplesSpecification.scala b/src/test/scala/sigmastate/utxo/examples/DummyExamplesSpecification.scala index 6b760b3607..2a2f2b2f7b 100644 --- a/src/test/scala/sigmastate/utxo/examples/DummyExamplesSpecification.scala +++ b/src/test/scala/sigmastate/utxo/examples/DummyExamplesSpecification.scala @@ -5,7 +5,8 @@ import org.ergoplatform.dsl.{ContractSpec, SigmaContractSyntax, StdContracts, Te import scorex.crypto.hash import sigmastate.helpers.SigmaTestingCommons import special.collection.Coll -import special.sigma.{Box, Context, Extensions} +import special.sigma.{Box, Context} +import sigmastate.eval.Extensions class DummyExamplesSpecification extends SigmaTestingCommons { suite => implicit lazy val IR = new TestingIRContext diff --git a/src/test/scala/sigmastate/utxo/examples/ExecuteFromExamplesSpecification.scala b/src/test/scala/sigmastate/utxo/examples/ExecuteFromExamplesSpecification.scala index 94e06a5a0c..3c0130bf2b 100644 --- a/src/test/scala/sigmastate/utxo/examples/ExecuteFromExamplesSpecification.scala +++ b/src/test/scala/sigmastate/utxo/examples/ExecuteFromExamplesSpecification.scala @@ -4,7 +4,7 @@ import org.ergoplatform._ import org.ergoplatform.dsl.{ContractSpec, SigmaContractSyntax, StdContracts, TestContractSpec} import sigmastate.helpers.SigmaTestingCommons import special.sigma.Context -import special.sigma.Extensions._ +import sigmastate.eval.Extensions._ class 
ExecuteFromExamplesSpecification extends SigmaTestingCommons { suite => implicit lazy val IR = new TestingIRContext @@ -41,7 +41,7 @@ class ExecuteFromExamplesSpecification extends SigmaTestingCommons { suite => lazy val alice = spec.ProvingParty("Alice") - //todo: fix the test + // TODO soft-fork: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/443 ignore("Execute from var example (ErgoDsl)") { val contract = OracleContract[spec.type](alice)(spec) import contract.spec._ diff --git a/src/test/scala/sigmastate/utxo/examples/FsmExampleSpecification.scala b/src/test/scala/sigmastate/utxo/examples/FsmExampleSpecification.scala index f1fc217e7c..af80dc5e35 100644 --- a/src/test/scala/sigmastate/utxo/examples/FsmExampleSpecification.scala +++ b/src/test/scala/sigmastate/utxo/examples/FsmExampleSpecification.scala @@ -8,6 +8,7 @@ import scorex.crypto.hash.{Digest32, Blake2b256} import sigmastate.SCollection.SByteArray import sigmastate.Values._ import sigmastate._ +import sigmastate.eval._ import sigmastate.lang.Terms._ import sigmastate.helpers.{ErgoLikeTestProvingInterpreter, SigmaTestingCommons} import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, ErgoLikeTestInterpreter, SigmaTestingCommons} @@ -74,7 +75,7 @@ class FsmExampleSpecification extends SigmaTestingCommons { avlProver.generateProof() val digest = avlProver.digest - val treeData = new AvlTreeData(digest, AvlTreeFlags.ReadOnly, 34, Some(0)) + val treeData = SigmaDsl.avlTree(new AvlTreeData(digest, AvlTreeFlags.ReadOnly, 34, Some(0))) val fsmDescRegister = ErgoBox.nonMandatoryRegisters.head val currentStateRegister = ErgoBox.nonMandatoryRegisters(1) @@ -101,7 +102,7 @@ class FsmExampleSpecification extends SigmaTestingCommons { val treePreservation = EQ( ExtractRegisterAs[SAvlTree.type](ByIndex(Outputs, IntConstant.Zero), - fsmDescRegister).getOrElse(AvlTreeConstant(AvlTreeData.dummy)), + fsmDescRegister).getOrElse(AvlTreeConstant(SigmaDsl.avlTree(AvlTreeData.dummy))), 
ExtractRegisterAs[SAvlTree.type](Self, fsmDescRegister).get) val preservation = AND(scriptPreservation, treePreservation) diff --git a/src/test/scala/sigmastate/utxo/examples/IcoExample.scala b/src/test/scala/sigmastate/utxo/examples/IcoExample.scala index 735f5906fb..1a5a8bb890 100644 --- a/src/test/scala/sigmastate/utxo/examples/IcoExample.scala +++ b/src/test/scala/sigmastate/utxo/examples/IcoExample.scala @@ -3,74 +3,407 @@ package sigmastate.utxo.examples import com.google.common.primitives.Longs import org.ergoplatform.ErgoBox.{R4, R5} import org.ergoplatform.dsl.TestContractSpec -import org.ergoplatform.{ErgoBox, ErgoLikeContext, ErgoLikeTransaction} +import org.ergoplatform._ import scorex.crypto.authds.{ADKey, ADValue} -import scorex.crypto.authds.avltree.batch.{BatchAVLProver, Insert} +import scorex.crypto.authds.avltree.batch._ import scorex.crypto.hash.{Blake2b256, Digest32} -import sigmastate.Values.{AvlTreeConstant, ByteArrayConstant, CollectionConstant} -import sigmastate.{AvlTreeData, AvlTreeFlags, SByte} +import sigmastate.Values.{AvlTreeConstant, ByteArrayConstant, CollectionConstant, IntArrayConstant, SigmaPropValue} +import sigmastate._ import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, ErgoLikeTestProvingInterpreter, SigmaTestingCommons} import sigmastate.interpreter.Interpreter.ScriptNameProp import sigmastate.lang.Terms._ +import sigmastate.serialization.ErgoTreeSerializer +import ErgoTreeSerializer.DefaultSerializer +import sigmastate.eval.{CompiletimeCosting, IRContext} +import sigmastate.interpreter.CryptoConstants + +import scala.util.Random +import sigmastate.eval._ + +/** + * + * An ICO Example On Top Of Ergo + ============================== + +We cover such complex example as an ICO (Initial Coin Offering) here covering several important novel features of the Ergo Platform. + +## Part 1. Preliminaries + + +A cryptocurrency protocol needs to specify what a spending transaction actually spends. 
There are two possibilities here. +In Bitcoin, a transaction is spending one-time digital coins and creates new ones. In Nxt, Ethereum, or Waves, a transaction is deducting +a transaction amount from long-living account or establishing a new one~(with possible side-effects on the way, like contract execution in Waves or Ethereum). Ergo is similar to Bitcoin, it is spending one-time coins (Ergo transaction also can have data inputs which are not to be spent, rather, they are providing information from current set of unspent coins). + +It is not trivial to do an ICO in model of one-time coins, as, in opposite to account-based cryptocurrencies, there is no explicit +persistent storage here. However, Ergo brings spending transaction into execution context of a script which is protecting a coin. +With this small change it becomes possible to express dependencies between transaction outputs and inputs. In turn, by setting dependencies we can execute even arbitrarily complex Turing-complete programs on top of blockchain (see ...). In this article we will define a concrete scenario of a multi-stage contract, as for ICO we need for different stages (funding, token issuance, withdrawal). + +Now imagine an ICO for thousands of participants. Unlike Ethereum, Ergo does not provide possibility to store large sets of data and carry them over throughout contract execution. Rather, it allows to store only about 40-bytes header of a data +structure, represented as key -> value dictionary, authenticated similarly to Merkle tree. To access some elements +in the dictionary, or to modify it, a spending transaction which is triggering protecting script execution should +provide lookup or modification proofs. This gives possibility for a contract to authenticate potentially huge datasets without requiring much memory to store contract state. 
However, storing space in the state (of active contracts) would mean bigger transactions, but this problem is easier from scalability point of view, and scalability is a top priority for Ergo. + + +## Part 2. The ICO Contract + +There could be many possible scenarios associated with the Initial Coin Offering (ICO). In this article we are working with the ICO contract steps briefly described below with details provided further: + - first, funding epoch takes place. It starts with a project's coin authenticating an empty dictionary. The dictionary is intended for holding (investor, balance) pairs, where the investor is associated with script of its withdrawing coin. For the balance, we assume that 1 token is equal to 1 Ergo during the ICO. During the funding epoch, it is possible to put Ergs into the project's coin. + For that, a spending transaction for the project's coin provides inputs which holding investor withdrawing scripts. Investor scripts and input values should be added to the tree. There could be many chained funding transactions. + - second, the funding period should be finished with closing the tree holding investors data. An authenticated tree could have different modification operations allowed individually: inserts, deletes, updates, or all the operations could be disallowed (so the tree could be in the read-only mode). Also, this transaction creates tokens of the ICO project which will be withdrawn in the next stage. The project can withdraw Ergs at this stage. + - third, investors are withdrawing their tokens. For that, a spending transaction creates outputs with guarding conditions and token values from the tree. The tree should be cleared from withdrawn pairs. There could be many chained funding transactions. + +This three stages should be linked together, and form logical order. To fulfill these goals, we are using the same coin, + +## Part 3. 
The ICO Contract Details + +Now it is the time to provide details and ErgoScript code on the ICO contract stages. + +### The Funding Stage + +First, the funding stage. We assume that initially a project creates a coin which is committing to an empty dictionary +(stored in the register R5) and also the scripts. This stage is lasts for height 2,000 at least, more concretely, the +first transaction with height of 2,000 at least should change the coin's script. + +The project's coin is considering and checking that it is always input number zero, and also output number zero. The +other inputs are considered investors' inputs. An investor's input contains hash of a withdrawing coin guarding script +in the register R4. The hashes as well as a monetary values of investing inputs should be added to the dictionary. The +spending transaction should provide a proof that investor data are indeed added to the dictionary, +and the proof is checked in the contract. + +It is not checked in the funding sub-contract, rather, investors should check that the dictionary allows insertions +only, not updating existing values or removals (it is not hard to add an explicit check though). + +The spending transaction should pay a fee, otherwise, it it unlikely that it would be included in a block. Thus the +funding contract is checking that the spending transaction has two outputs (one for itself, another to pay a fee), +the fee is to be no more than a certain limit (just one nanoErg in our example), and the guarding proposition should +be such that only a miner can spend the output (we use just a variable "feeProp" from compilation environment in our +example without providing any details, but this "feeProp" is corresponding to standard though not required by +consensus guarding script). + +The code below basically checks all that described above, in the form of compilable code. 
Please note that the +"nextStageScriptHash" environment variable contains hash of the issuance stage serialized script. + + val selfIndexIsZero = INPUTS(0).id == SELF.id + + val proof = getVar[Coll[Byte]](1).get + + val inputsCount = INPUTS.size + + val toAdd: Coll[(Coll[Byte], Coll[Byte])] = INPUTS.slice(1, inputsCount).map({ (b: Box) => + val pk = b.R4[Coll[Byte]].get + val value = longToByteArray(b.value) + (pk, value) + }) + + val modifiedTree = SELF.R5[AvlTree].get.insert(toAdd, proof).get + + val expectedTree = OUTPUTS(0).R5[AvlTree].get + + val properTreeModification = modifiedTree == expectedTree + + val outputsCount = OUTPUTS.size == 2 + + val selfOutputCorrect = if(HEIGHT < 2000) { + OUTPUTS(0).propositionBytes == SELF.propositionBytes + } else { + blake2b256(OUTPUTS(0).propositionBytes) == nextStageScriptHash + } + + val feeOutputCorrect = (OUTPUTS(1).value <= 1) && (OUTPUTS(1).propositionBytes == feeBytes) + + val outputsCorrect = outputsCount && feeOutputCorrect && selfOutputCorrect + + selfIndexIsZero && outputsCorrect && properTreeModification + +### The Issuance Stage + +This stage is about only one spending transaction to get to the next stage, which is the withdrawal stage. At the +issuance stage, the main things should happen. As in the previous case, the issuance sub-contract assumes + +In the first place, the tree should change a list of allowed operations from "inserts only" to "removals only", as the +next stage, the withdrawal one, is about removing from the dictionary. + +In the second place, the contract is checking that proper amount of ICO tokens are issued. In Ergo, it is allowed to +issue one new kind of token per transaction, and the identifier of the token should be equal to the (unique) +identifier of the first input coin. The issuance sub-contract is checking that a new token has been issued, and the +amount of it is equal to the amount of nanoErgs collected by the ICO contract coin to the moment. 
+ +In the third place, the contract is checking that a spending transaction is indeed re-creating the coin with a +guarding script corresponding to the next stage, the withdrawal stage. + +At this stage a project can withdraw collected Ergs. And, of course, the spending transaction should pay a fee. Thus +the sub-contract is checking that the spending transaction has indeed 3 outputs (for the contract itself, for the +project withdrawal, and for the fee), and that the only first output is carrying the tokens issued. As we do not +specify project money withdrawal details, we require a project signature on the spending transaction. + + val openTree = SELF.R5[AvlTree].get + + val closedTree = OUTPUTS(0).R5[AvlTree].get + + val digestPreserved = openTree.digest == closedTree.digest + val keyLengthPreserved = openTree.keyLength == closedTree.keyLength + val valueLengthPreserved = openTree.valueLengthOpt == closedTree.valueLengthOpt + val treeIsClosed = closedTree.enabledOperations == 4 + + val tokenId: Coll[Byte] = INPUTS(0).id + + val tokensIssued = OUTPUTS(0).tokens(0)._2 + + val outputsCountCorrect = OUTPUTS.size == 3 + val secondOutputNoTokens = OUTPUTS(0).tokens.size == 1 && OUTPUTS(1).tokens.size == 0 && OUTPUTS(2).tokens.size == 0 + + val correctTokensIssued = SELF.value == tokensIssued + + val correctTokenId = OUTPUTS(0).R4[Coll[Byte]].get == tokenId && OUTPUTS(0).tokens(0)._1 == tokenId + + val valuePreserved = outputsCountCorrect && secondOutputNoTokens && correctTokensIssued && correctTokenId + val stateChanged = blake2b256(OUTPUTS(0).propositionBytes) == nextStageScriptHash + + val treeIsCorrect = digestPreserved && valueLengthPreserved && keyLengthPreserved && treeIsClosed + + projectPubKey && treeIsCorrect && valuePreserved && stateChanged + +### The Withdrawal Stage + +At this stage, it is allowed for an investor to withdraw money to predefined guarding script (which hash is written down +into the dictionary). 
A withdrawing transaction thus is having N + 2 outputs, where the first output should carry over the +withdrawal sub-contract, the following N outputs have guarding scripts and token values according to the dictionary, +and for the the last output there are no any conditions, aside of that it is not allowed to carry away tokens with it +(supposedly, this output pays a fee). The contract is requiring two proofs for the dictionary elements: one proof +is showing that values to be withdrawn are indeed in the dictionary, and the second proof is proving that a resulting +dictionary is free of the withdrawn values. The sub-contract is below. + + val removeProof = getVar[Coll[Byte]](2).get + val lookupProof = getVar[Coll[Byte]](3).get + val withdrawIndexes = getVar[Coll[Int]](4).get + + val out0 = OUTPUTS(0) + + val tokenId: Coll[Byte] = SELF.R4[Coll[Byte]].get + + val withdrawals = withdrawIndexes.map({(idx: Int) => + val b = OUTPUTS(idx) + if(b.tokens(0)._1 == tokenId) { + (blake2b256(b.propositionBytes), b.tokens(0)._2) + } else { + (blake2b256(b.propositionBytes), 0L) + } + }) + + val withdrawValues = withdrawals.map({(t: (Coll[Byte], Long)) => t._2}) + + val withdrawTotal = withdrawValues.fold(0L, { (l1: Long, l2: Long) => l1 + l2 }) + + val toRemove = withdrawals.map({(t: (Coll[Byte], Long)) => t._1}) + + val initialTree = SELF.R5[AvlTree].get + + val removedValues = initialTree.getMany(toRemove, lookupProof).map({(o: Option[Coll[Byte]]) => byteArrayToLong(o.get)}) + val valuesCorrect = removedValues == withdrawValues + + val modifiedTree = initialTree.remove(toRemove, removeProof).get + + val expectedTree = out0.R5[AvlTree].get + + val selfTokensCorrect = SELF.tokens(0)._1 == tokenId + val selfOutTokensAmount = SELF.tokens(0)._2 + val soutTokensCorrect = out0.tokens(0)._1 == tokenId + val soutTokensAmount = out0.tokens(0)._2 + + val tokensPreserved = selfTokensCorrect && soutTokensCorrect && (soutTokensAmount + withdrawTotal == selfOutTokensAmount) + + val 
properTreeModification = modifiedTree == expectedTree + + val selfOutputCorrect = out0.propositionBytes == SELF.propositionBytes + + properTreeModification && valuesCorrect && selfOutputCorrect && tokensPreserved + +## Possible Enhancements + +Please note that there are many nuances our example contract is ignoring. For example, it is allowed to execute the +contract to anyone who is able to construct proper spending transactions (in out example, anyone listening to the +blockchain) during funding and withdrawal stages. In the real-world cases, additional signature from the project or +a trusted arbiter could be needed. +Also, there is no self-destruction case considered in the withdrawal contract, so it will live before being destroyed +by miners via storage rent mechanism, potentially for decades or even centuries. For the funding stage, it would be +reasonable to have an additional input from the project with the value equal to the value of the fee output. And so on. + */ + +class IcoExample extends SigmaTestingCommons { suite => + + // Not mixed with TestContext since it is not possible to call commpiler.compile outside tests if mixed + implicit lazy val IR: IRContext = new IRContext with CompiletimeCosting -class IcoExample extends SigmaTestingCommons { - suite => - implicit lazy val IR: TestingIRContext = new TestingIRContext() lazy val spec = TestContractSpec(suite) - lazy val backer = spec.ProvingParty("Alice") - lazy val project = spec.ProvingParty("Bob") + lazy val project = new ErgoLikeTestProvingInterpreter() - /** - * Simplest ICO example - */ - property("simple ico example - fundraising stage only") { - val fundingEnv = Map( - ScriptNameProp -> "fundingScriptEnv" - ) - - val fundingScript = compile(fundingEnv, - """{ - | val proof = getVar[Coll[Byte]](1).get - | - | // val funders: Coll[Box] = INPUTS.filter({(b: Box) => b.R5[Int].isEmpty}) - | - | val toAdd: Coll[(Coll[Byte], Coll[Byte])] = INPUTS.map({ (b: Box) => - | val pk = b.R4[Coll[Byte]].get - 
| val value = longToByteArray(b.value) - | (pk, value) - | }) - | - | val modifiedTree = SELF.R5[AvlTree].get.insert(toAdd, proof).get - | - | val expectedTree = OUTPUTS(0).R5[AvlTree].get - | - | modifiedTree == expectedTree - | - |}""".stripMargin - ).asBoolValue.toSigmaProp + private val miningRewardsDelay = 720 + private val feeProp = ErgoScriptPredef.feeProposition(miningRewardsDelay) + private val feeBytes = feeProp.bytes + + val env = Map( + ScriptNameProp -> "withdrawalScriptEnv", + "feeBytes" -> feeBytes, + "projectPubKey" -> project.secrets.head.publicImage + ) + + val withdrawalScript: SigmaPropValue = compiler.compile(env, + """{ + | val removeProof = getVar[Coll[Byte]](2).get + | val lookupProof = getVar[Coll[Byte]](3).get + | val withdrawIndexes = getVar[Coll[Int]](4).get + | + | val out0 = OUTPUTS(0) + | + | val tokenId: Coll[Byte] = SELF.R4[Coll[Byte]].get + | + | val withdrawals = withdrawIndexes.map({(idx: Int) => + | val b = OUTPUTS(idx) + | if(b.tokens(0)._1 == tokenId) { + | (blake2b256(b.propositionBytes), b.tokens(0)._2) + | } else { + | (blake2b256(b.propositionBytes), 0L) + | } + | }) + | + | //val withdrawals = OUTPUTS.slice(1, OUTPUTS.size-1).map(...) 
+ | + | val withdrawValues = withdrawals.map({(t: (Coll[Byte], Long)) => t._2}) + | + | val withdrawTotal = withdrawValues.fold(0L, { (l1: Long, l2: Long) => l1 + l2 }) + | + | val toRemove = withdrawals.map({(t: (Coll[Byte], Long)) => t._1}) + | + | val initialTree = SELF.R5[AvlTree].get + | + | val removedValues = initialTree.getMany(toRemove, lookupProof).map({(o: Option[Coll[Byte]]) => byteArrayToLong(o.get)}) + | val valuesCorrect = removedValues == withdrawValues + | + | val modifiedTree = initialTree.remove(toRemove, removeProof).get + | + | val expectedTree = out0.R5[AvlTree].get + | + | val selfTokensCorrect = SELF.tokens(0)._1 == tokenId + | val selfOutTokensAmount = SELF.tokens(0)._2 + | val soutTokensCorrect = out0.tokens(0)._1 == tokenId + | val soutTokensAmount = out0.tokens(0)._2 + | + | val tokensPreserved = selfTokensCorrect && soutTokensCorrect && (soutTokensAmount + withdrawTotal == selfOutTokensAmount) + | + | val properTreeModification = modifiedTree == expectedTree + | + | val selfOutputCorrect = out0.propositionBytes == SELF.propositionBytes + | + | selfOutputCorrect + | // properTreeModification && valuesCorrect && selfOutputCorrect && tokensPreserved + |}""".stripMargin + ).asSigmaProp + + val wsHash = Blake2b256(ErgoTreeSerializer.DefaultSerializer.serializeErgoTree(withdrawalScript)) + + val issuanceScript: SigmaPropValue = compile(env.updated("nextStageScriptHash", wsHash), + """{ + | val openTree = SELF.R5[AvlTree].get + | + | val closedTree = OUTPUTS(0).R5[AvlTree].get + | + | val digestPreserved = openTree.digest == closedTree.digest + | val keyLengthPreserved = openTree.keyLength == closedTree.keyLength + | val valueLengthPreserved = openTree.valueLengthOpt == closedTree.valueLengthOpt + | val treeIsClosed = closedTree.enabledOperations == 4 + | + | val tokenId: Coll[Byte] = INPUTS(0).id + | + | val tokensIssued = OUTPUTS(0).tokens(0)._2 + | + | val outputsCountCorrect = OUTPUTS.size == 3 + | val secondOutputNoTokens = 
OUTPUTS(0).tokens.size == 1 && OUTPUTS(1).tokens.size == 0 && OUTPUTS(2).tokens.size == 0 + | + | val correctTokensIssued = SELF.value == tokensIssued + | + | val correctTokenId = OUTPUTS(0).R4[Coll[Byte]].get == tokenId && OUTPUTS(0).tokens(0)._1 == tokenId + | + | val valuePreserved = outputsCountCorrect && secondOutputNoTokens && correctTokensIssued && correctTokenId + | val stateChanged = blake2b256(OUTPUTS(0).propositionBytes) == nextStageScriptHash + | + | val treeIsCorrect = digestPreserved && valueLengthPreserved && keyLengthPreserved && treeIsClosed + | + | projectPubKey && treeIsCorrect && valuePreserved && stateChanged + |}""".stripMargin + ).asSigmaProp + + val issuanceHash = Blake2b256(ErgoTreeSerializer.DefaultSerializer.serializeErgoTree(issuanceScript)) + val fundingScript: SigmaPropValue = compile(env.updated("nextStageScriptHash", issuanceHash), + """{ + | + | val selfIndexIsZero = INPUTS(0).id == SELF.id + | + | val proof = getVar[Coll[Byte]](1).get + | + | val inputsCount = INPUTS.size + | + | val toAdd: Coll[(Coll[Byte], Coll[Byte])] = INPUTS.slice(1, inputsCount).map({ (b: Box) => + | val pk = b.R4[Coll[Byte]].get + | val value = longToByteArray(b.value) + | (pk, value) + | }) + | + | val modifiedTree = SELF.R5[AvlTree].get.insert(toAdd, proof).get + | + | val expectedTree = OUTPUTS(0).R5[AvlTree].get + | + | val properTreeModification = modifiedTree == expectedTree + | + | val outputsCount = OUTPUTS.size == 2 + | + | val selfOutputCorrect = if(HEIGHT < 2000) { + | OUTPUTS(0).propositionBytes == SELF.propositionBytes + | } else { + | blake2b256(OUTPUTS(0).propositionBytes) == nextStageScriptHash + | } + | + | val feeOutputCorrect = (OUTPUTS(1).value <= 1) && (OUTPUTS(1).propositionBytes == feeBytes) + | + | val outputsCorrect = outputsCount && feeOutputCorrect && selfOutputCorrect + | + | selfIndexIsZero && outputsCorrect && properTreeModification + |}""".stripMargin + ).asSigmaProp + + + + property("simple ico example - fundraising stage 
only") { val avlProver = new BatchAVLProver[Digest32, Blake2b256.type](keyLength = 32, None) val digest = avlProver.digest - val initTreeData = new AvlTreeData(digest, AvlTreeFlags.AllOperationsAllowed, 32, None) + val initTreeData = SigmaDsl.avlTree(new AvlTreeData(digest, AvlTreeFlags.AllOperationsAllowed, 32, None)) val projectBoxBefore = ErgoBox(10, fundingScript, 0, Seq(), - Map(R4 -> ByteArrayConstant(Array.fill(16)(0: Byte) ++ Array.fill(16)(1: Byte)), R5 -> AvlTreeConstant(initTreeData))) + Map(R4 -> ByteArrayConstant(Array.fill(1)(0: Byte)), R5 -> AvlTreeConstant(initTreeData))) - val inputBoxes = IndexedSeq(projectBoxBefore) + val funderBoxCount = 2000 - inputBoxes.foreach { b => - val k = b.get(R4).get.asInstanceOf[CollectionConstant[SByte.type]].value + val funderBoxes = (1 to funderBoxCount).map { _ => + ErgoBox(10, Values.TrueLeaf.asSigmaProp, 0, Seq(), + Map(R4 -> ByteArrayConstant(Array.fill(32)(Random.nextInt(Byte.MaxValue).toByte)))) + } + + val inputBoxes = IndexedSeq(projectBoxBefore) ++ funderBoxes + + inputBoxes.tail.foreach { b => + val k = b.get(R4).get.asInstanceOf[CollectionConstant[SByte.type]].value.toArray val v = Longs.toByteArray(b.value) avlProver.performOneOperation(Insert(ADKey @@ k, ADValue @@ v)) } val proof = avlProver.generateProof() - val endTree = new AvlTreeData(avlProver.digest, AvlTreeFlags.AllOperationsAllowed, 32, None) + val endTree = SigmaDsl.avlTree(new AvlTreeData(avlProver.digest, AvlTreeFlags.AllOperationsAllowed, 32, None)) - val projectBoxAfter = ErgoBox(10, fundingScript, 0, Seq(), - Map(R4 -> ByteArrayConstant(Array.fill(32)(0: Byte)), R5 -> AvlTreeConstant(endTree))) + val projectBoxAfter = ErgoBox(funderBoxCount * 10 - 1, fundingScript, 0, Seq(), + Map(R4 -> ByteArrayConstant(Array.fill(1)(0: Byte)), R5 -> AvlTreeConstant(endTree))) + val feeBox = ErgoBox(1, feeProp, 0, Seq(), Map()) - val fundingTx = ErgoLikeTransaction(IndexedSeq(), IndexedSeq(projectBoxAfter)) + val fundingTx = 
ErgoLikeTransaction(IndexedSeq(), IndexedSeq(projectBoxAfter, feeBox)) val fundingContext = ErgoLikeContext( currentHeight = 1000, @@ -83,53 +416,122 @@ class IcoExample extends SigmaTestingCommons { val projectProver = new ContextEnrichingTestProvingInterpreter() .withContextExtender(1, ByteArrayConstant(proof)) - projectProver.prove(fundingEnv + (ScriptNameProp -> "fundingScriptEnv"), fundingScript, fundingContext, fakeMessage).get + val res = projectProver.prove(env, fundingScript, fundingContext, fakeMessage).get + println("funding script cost: " + res.cost) + println("lookup proof size: " + proof.length) + + //todo: test switching to fixing stage } - property("simple ico example - fixing stage") { - - val fixingEnv = Map( - ScriptNameProp -> "fixingScriptEnv" - ) - - val fixingProp = compile(fixingEnv, - """{ - | val openTree = SELF.R4[AvlTree].get - | - | val closedTree = OUTPUTS(0).R4[AvlTree].get - | - | val digestPreserved = openTree.digest == closedTree.digest - | val keyLengthPreserved = openTree.keyLength == closedTree.keyLength - | val valueLengthPreserved = openTree.valueLengthOpt == closedTree.valueLengthOpt - | val treeIsClosed = closedTree.enabledOperations == 0 - | - | sigmaProp(digestPreserved && valueLengthPreserved && keyLengthPreserved && treeIsClosed) - |}""".stripMargin - ).asSigmaProp - - val projectProver = new ErgoLikeTestProvingInterpreter + property("simple ico example - issuance stage") { val avlProver = new BatchAVLProver[Digest32, Blake2b256.type](keyLength = 32, None) val digest = avlProver.digest - val openTreeData = new AvlTreeData(digest, AvlTreeFlags.AllOperationsAllowed, 32, None) + val openTreeData = SigmaDsl.avlTree(new AvlTreeData(digest, AvlTreeFlags.AllOperationsAllowed, 32, None)) - val projectBoxBeforeClosing = ErgoBox(10, fixingProp, 0, Seq(), - Map(R4 -> AvlTreeConstant(openTreeData))) + val projectBoxBeforeClosing = ErgoBox(10, issuanceScript, 0, Seq(), + Map(R4 -> ByteArrayConstant(Array.emptyByteArray), R5 -> 
AvlTreeConstant(openTreeData))) - val closedTreeData = new AvlTreeData(digest, AvlTreeFlags.ReadOnly, 32, None) + val tokenId = Digest32 @@ projectBoxBeforeClosing.id + val closedTreeData = SigmaDsl.avlTree(new AvlTreeData(digest, AvlTreeFlags.RemoveOnly, 32, None)) - val projectBoxAfterClosing = ErgoBox(10, fixingProp, 0, Seq(), - Map(R4 -> AvlTreeConstant(closedTreeData))) + val projectBoxAfterClosing = ErgoBox(1, withdrawalScript, 0, Seq(tokenId -> projectBoxBeforeClosing.value), + Map(R4 -> ByteArrayConstant(tokenId), R5 -> AvlTreeConstant(closedTreeData))) - val fixingTx = ErgoLikeTransaction(IndexedSeq(), IndexedSeq(projectBoxAfterClosing)) + val ergoWithdrawalBox = ErgoBox(8, Values.TrueLeaf.asSigmaProp, 0, Seq(), Map()) + val feeBox = ErgoBox(1, feeProp, 0, Seq(), Map()) - val fundingContext = ErgoLikeContext( + val issuanceTx = ErgoLikeTransaction(IndexedSeq(), IndexedSeq(projectBoxAfterClosing, ergoWithdrawalBox, feeBox)) + + val issuanceContext = ErgoLikeContext( currentHeight = 1000, lastBlockUtxoRoot = AvlTreeData.dummy, minerPubkey = ErgoLikeContext.dummyPubkey, boxesToSpend = IndexedSeq(projectBoxBeforeClosing), - spendingTransaction = fixingTx, + spendingTransaction = issuanceTx, self = projectBoxBeforeClosing) - projectProver.prove(fixingEnv, fixingProp, fundingContext, fakeMessage).get + val res = project.prove(env, issuanceScript, issuanceContext, fakeMessage).get + println("token issuance script cost: " + res.cost) } -} \ No newline at end of file + + property("simple ico example - withdrawal stage") { + val avlProver = new BatchAVLProver[Digest32, Blake2b256.type](keyLength = 32, None) + + val funderBoxCount = 2000 + val funderProps = (1 to funderBoxCount).map { _ => + val keyPoint = CryptoConstants.dlogGroup.createRandomElement() + val prop = CreateProveDlog(SGroupElement.mkConstant(keyPoint)).asSigmaProp + val propBytes = DefaultSerializer.serializeErgoTree(prop) + propBytes -> Longs.toByteArray(Random.nextInt(Int.MaxValue).toLong) + } + val 
funderKvs = funderProps.map { case (prop, v) => + val k = Blake2b256(prop) + k -> v + } + + funderKvs.foreach { case (k, v) => + avlProver.performOneOperation(Insert(ADKey @@ k, ADValue @@ v)) + } + val digest = avlProver.digest + val fundersTree = new AvlTreeData(digest, AvlTreeFlags.RemoveOnly, 32, None) + + val withdrawalsCount = 8 + val withdrawals = funderKvs.take(withdrawalsCount) + + avlProver.generateProof() + + withdrawals.foreach { case (k, _) => + avlProver.performOneOperation(Lookup(ADKey @@ k)) + } + val lookupProof = avlProver.generateProof() + + withdrawals.foreach { case (k, _) => + avlProver.performOneOperation(Remove(ADKey @@ k)) + } + val removalProof = avlProver.generateProof() + + val finalTree = new AvlTreeData(avlProver.digest, AvlTreeFlags.RemoveOnly, 32, None) + + val tokenId = Digest32 @@ Array.fill(32)(Random.nextInt(100).toByte) + + val withdrawalAmounts = funderProps.take(withdrawalsCount).map { case (prop, v) => + val tv = Longs.fromByteArray(v) + prop -> tv + } + + val withdrawBoxes = withdrawalAmounts.map { case (prop, tv) => + ErgoBox(1, DefaultSerializer.deserializeErgoTree(prop), 0, Seq(tokenId -> tv)) + } + + val totalTokenAmount = withdrawalAmounts.map(_._2).sum + 1 + + val projectBoxBefore = ErgoBox(11, withdrawalScript, 0, Seq(tokenId -> totalTokenAmount), + Map(R4 -> ByteArrayConstant(tokenId), R5 -> AvlTreeConstant(fundersTree))) + val projectBoxAfter = ErgoBox(1, withdrawalScript, 0, Seq(tokenId -> 1), + Map(R4 -> ByteArrayConstant(tokenId), R5 -> AvlTreeConstant(finalTree))) + val feeBox = ErgoBox(1, feeProp, 0, Seq(), Map()) + + val outputs = IndexedSeq(projectBoxAfter) ++ withdrawBoxes ++ IndexedSeq(feeBox) + val fundingTx = ErgoLikeTransaction(IndexedSeq(), outputs) + + val fundingContext = ErgoLikeContext( + currentHeight = 1000, + lastBlockUtxoRoot = AvlTreeData.dummy, + minerPubkey = ErgoLikeContext.dummyPubkey, + boxesToSpend = IndexedSeq(projectBoxBefore), + spendingTransaction = fundingTx, + self = 
projectBoxBefore) + + val projectProver = + new ContextEnrichingTestProvingInterpreter() + .withContextExtender(2, ByteArrayConstant(removalProof)) + .withContextExtender(3, ByteArrayConstant(lookupProof)) + .withContextExtender(4, IntArrayConstant((1 to withdrawalsCount).toArray)) + + val res = projectProver.prove(env, withdrawalScript, fundingContext, fakeMessage).get + println("withdrawal script cost: " + res.cost) + println("remove proof size: " + removalProof.length) + println("lookup proof size: " + lookupProof.length) + } + +} diff --git a/src/test/scala/sigmastate/utxo/examples/LetsSpecification.scala b/src/test/scala/sigmastate/utxo/examples/LetsSpecification.scala new file mode 100644 index 0000000000..79528bd6c8 --- /dev/null +++ b/src/test/scala/sigmastate/utxo/examples/LetsSpecification.scala @@ -0,0 +1,368 @@ +package sigmastate.utxo.examples + +import org.ergoplatform._ +import org.ergoplatform.ErgoBox.{R4, R5} +import scorex.crypto.authds.{ADKey, ADValue} +import scorex.crypto.authds.avltree.batch.{BatchAVLProver, Insert, Lookup} +import scorex.crypto.hash.{Blake2b256, Digest32} +import sigmastate.{AvlTreeData, AvlTreeFlags, TrivialProp} +import sigmastate.Values.{AvlTreeConstant, ByteArrayConstant, LongConstant, SigmaPropConstant} +import sigmastate.eval.{CompiletimeCosting, IRContext, SigmaDsl} +import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, ErgoLikeTestProvingInterpreter, SigmaTestingCommons} +import sigmastate.interpreter.Interpreter.ScriptNameProp +import sigmastate.serialization.ErgoTreeSerializer +import sigmastate.lang.Terms._ + +import scala.util.Random + +/** + * A Local Exchange Trading System On Top Of Ergo +============================================== + + A local exchange trading system (LETS) is a local mutual credit association which members are allowed to create common + credit money individually, with all the deals in the system written into a common ledger. 
+ As an example, assume that Alice with zero balance is willing to buy a liter of raw milk from Bob. + First, they agree on a price, for example, assume that the price is about 2 Euro (as Alice and Bob + are living in Ireland). After the deal being written into a ledger, Alice's balance becomes -2 (minus + two) Euro, and Bob's balance becomes 2 Euro. Then Bob may spend his 2 Euro, for example, on + home-made beer from Charlie. Often, such systems impose limits on negative balances, and sometimes + even on positive ones, in order to promote exchange in the community. + + Historically, such systems become popular during crisis times. The first system was established by Michael Linton in + a Canadian town stuck in depression back in 1981. Local exchange trading systems were extremely popular during + 1998-2002 Argentine Great Depression. Most LETS groups range from 50 to 250 members, with paper-based credit notes and + ledger maintained by a core committee. However, paper-based LETS currencies have shown some problems, such as + counterfeit notes, possible rogue behavior of system managers, and so on. Therefore, blockchain-based LETS could be superior + to the old systems. More information on LETS could be found in + ["The Ecology of Money" book (by Richard Douthwaite)](http://feasta.org/documents/moneyecology/chaptertwo.htm) and + [Wikipedia](https://en.wikipedia.org/wiki/Local_exchange_trading_system). + + In this article we show how LETS could be implemented on top of Ergo. To the best of our knowledge, this is + the first implementation of such kind of community currency on top of a blockchain. + Our reference implementation + is simple and consists of two contracts, namely, a management contract and an exchange contract. 
+ We skip Ergo preliminaries, so please read + [the ICO article](https://github.com/ergoplatform/ergo/wiki/An-ICO-Example-On-Top-Of-Ergo) and + ErgoScript tutorials([basic](https://docs.ergoplatform.com/ErgoScript.pdf) and + [advanced](https://docs.ergoplatform.com/sigmastate_protocols.pdf)) for starters. + Nevertheless, we are going to introduce a couple of new terms in following sentences. + If a token is issued with amount equal to one, we call it the singleton token. Similarly, + a box which contains the singleton token is called the singleton box. + + The management contract is controlling a singleton box which holds members of the LETS system. + The contract enables the adding of new members at the pace of one member per one transaction. The box + is not storing members, but only a small digest of authenticated data structure built on top of + the members' directory. A member is associated with a singleton token issued in a transaction which + is adding the member to the directory. The transaction creates a new member's box which contains + the member's singleton token. The member's box is protected by the exchange contract. Also, the newly + created member's box has initial balance written down into the R4 register, and the balance is + equal to zero in our example. The transaction creating a new member must provide a proof of correctness for + directory transformation. + + The management contract box is controlled usually by a committee, and the committee could evolve over time. To support + that, we allow committee logic to reside in the register R5. + For example, assume that a new committee member has been added along with a new LETS member, + the input management contract box is requiring 2-out-of-3 signatures, and the output box requires 3-out-of-4 signatures. + In this case contents of the R5 register in the input and the output box would differ. + + The management contract code in ErgoScript with comments is provided below. 
Please note that + "userContractHash" is about exchange contract hash. + + val selfOut = OUTPUTS(0) + + // Management script + val managementScript = selfOut.R5[SigmaProp].get + + // The management script template is replicating self, and management script is satisfied + val scriptCorrect = (selfOut.propositionBytes == SELF.propositionBytes) && managementScript + + // A spending transaction is creating boxes for directory, user, fee. + val outsSizeCorrect = OUTPUTS.size == 3 + + // Checks that the management label token is replicating self + val outTokenCorrect = (selfOut.tokens.size == 1) && (selfOut.tokens(0)._1 == letsToken) + + // Checks that new token is issued, and its amount is correct + // OUTPUTS(0) tokens already checked via outtokenCorrect + val issuedTokenId = INPUTS(0).id + val userOut = OUTPUTS(1) + val correctTokenAmounts = + (userOut.tokens.size == 1 && + userOut.tokens(0)._1 == issuedTokenId && + userOut.tokens(0)._2 == 1 && + OUTPUTS(2).tokens.size == 0 && + outTokenCorrect) + + // Checks that the new user has been created with the zero balance + val zeroUserBalance = userOut.R4[Long].get == 0 + + val properUserScript = blake2b256(userOut.propositionBytes) == userContractHash + + // Checks that the new token identifier has been added to the directory + val selfTree = SELF.R4[AvlTree].get + val toAdd: Coll[(Coll[Byte], Coll[Byte])] = Coll((issuedTokenId, Coll[Byte]())) + val proof = getVar[Coll[Byte]](1).get + val modifiedTree = selfTree.insert(toAdd, proof).get + val expectedTree = selfOut.R4[AvlTree].get + val treeCorrect = modifiedTree == expectedTree + + correctTokenAmounts && scriptCorrect && treeCorrect && zeroUserBalance && properUserScript + + + The exchange contract script is fairly straightforward and provided below along with comments describing its logic. 
In the + contract, it is assumed that a spending transaction for an exchange contract box is receiving at least two inputs, + and the first two inputs should be protected by the exchange contract script and contain LETS member tokens. To check + that singleton member tokens in the inputs do indeed belong to the LETS system, a spending transaction provides the management + contract box as the first read-only data input, and also should provide a proof that the member tokens do belong to + the directory authenticated via the R4 register of the management contract box. "letsToken" in the script is about + the singleton token of the management box. + + // Minimal balance allowed for LETS trader + val minBalance = -20000 + + val lookupProof = getVar[Coll[Byte]](1).get + + // The read-only box which contains directory of LETS members + val treeHolderBox = CONTEXT.dataInputs(0) + val properLetsToken = treeHolderBox.tokens(0)._1 == letsToken + val membersTree = treeHolderBox.R4[AvlTree].get + + // A spending transaction is taking two boxes of LETS members willing to make a deal, + // and returns boxes with modified balances. 
+ val participant0 = INPUTS(0) + val participant1 = INPUTS(1) + val participantOut0 = OUTPUTS(0) + val participantOut1 = OUTPUTS(1) + + //Check that members do indeed belong to the LETS + val token0 = participant0.tokens(0)._1 + val token1 = participant1.tokens(0)._1 + val memberTokens = Coll(token0, token1) + val membersExist = membersTree.getMany(memberTokens, lookupProof).forall({ (o: Option[Coll[Byte]]) => o.isDefined }) + + // Check that LETS member balance changes during the deal are correct + val initialBalance0 = participant0.R4[Long].get + val initialBalance1 = participant1.R4[Long].get + val finishBalance0 = participantOut0.R4[Long].get + val finishBalance1 = participantOut1.R4[Long].get + val diff0 = finishBalance0 - initialBalance0 + val diff1 = finishBalance1 - initialBalance1 + val diffCorrect = diff0 == -diff1 + val balancesCorrect = (finishBalance0 > minBalance) && (finishBalance1 > minBalance) && diffCorrect + + // Check that member boxes save their scripts. + // todo: optimization could be made here + val script0Saved = participantOut0.propositionBytes == participant0.propositionBytes + val script1Saved = participantOut1.propositionBytes == participant1.propositionBytes + val scriptsSaved = script0Saved && script1Saved + + // Member-specific box protection + val selfPubKey = SELF.R5[SigmaProp].get + + selfPubKey && properLetsToken && membersExist && diffCorrect && scriptsSaved + + Note that both contracts could be modified in many ways to get new systems with different properties. So hopefully + some day this article will be continued! 
+ */ + +class LetsSpecification extends SigmaTestingCommons { + suite => + // Not mixed with TestContext since it is not possible to call compiler.compile outside tests if mixed + implicit lazy val IR: IRContext = new IRContext with CompiletimeCosting + + lazy val project = new ErgoLikeTestProvingInterpreter() + + val letsTokenId = Digest32 @@ Array.fill(32)(Random.nextInt(100).toByte) + + val env = Map(ScriptNameProp -> "withdrawalScriptEnv", "letsToken" -> ByteArrayConstant(letsTokenId)) + + private val miningRewardsDelay = 720 + private val feeProp = ErgoScriptPredef.feeProposition(miningRewardsDelay) + + val exchangeScript = compiler.compile(env, + """{ + | + | // Minimal balance allowed for LETS trader + | val minBalance = -20000 + | + | val lookupProof = getVar[Coll[Byte]](1).get + | + | // The read-only box which contains directory of LETS members + | val treeHolderBox = CONTEXT.dataInputs(0) + | val properLetsToken = treeHolderBox.tokens(0)._1 == letsToken + | val membersTree = treeHolderBox.R4[AvlTree].get + | + | // A spending transaction is taking two boxes of LETS members willing to make a deal, + | // and returns boxes with modified balances. 
+ | val participant0 = INPUTS(0) + | val participant1 = INPUTS(1) + | val participantOut0 = OUTPUTS(0) + | val participantOut1 = OUTPUTS(1) + | + | //Check that members do indeed belong to the LETS + | val token0 = participant0.tokens(0)._1 + | val token1 = participant1.tokens(0)._1 + | val memberTokens = Coll(token0, token1) + | val membersExist = membersTree.getMany(memberTokens, lookupProof).forall({ (o: Option[Coll[Byte]]) => o.isDefined }) + | + | // Check that LETS member balance changes during the deal are correct + | val initialBalance0 = participant0.R4[Long].get + | val initialBalance1 = participant1.R4[Long].get + | val finishBalance0 = participantOut0.R4[Long].get + | val finishBalance1 = participantOut1.R4[Long].get + | val diff0 = finishBalance0 - initialBalance0 + | val diff1 = finishBalance1 - initialBalance1 + | val diffCorrect = diff0 == -diff1 + | val balancesCorrect = (finishBalance0 > minBalance) && (finishBalance1 > minBalance) && diffCorrect + | + | // Check that member boxes save their scripts. 
+ | // todo: optimization could be made here + | val script0Saved = participantOut0.propositionBytes == participant0.propositionBytes + | val script1Saved = participantOut1.propositionBytes == participant1.propositionBytes + | val scriptsSaved = script0Saved && script1Saved + | + | // Member-specific box protection + | val selfPubKey = SELF.R5[SigmaProp].get + | + | selfPubKey && properLetsToken && membersExist && diffCorrect && scriptsSaved + |}""".stripMargin + ).asSigmaProp + + val userContractHash = Blake2b256(ErgoTreeSerializer.DefaultSerializer.serializeErgoTree(exchangeScript)) + + val managementScript = compiler.compile(env.updated("userContractHash", userContractHash), + """{ + | + | val selfOut = OUTPUTS(0) + | + | // Management script + | val managementScript = selfOut.R5[SigmaProp].get + | + | // The management script template is replicating self, and management script is satisfied + | val scriptCorrect = (selfOut.propositionBytes == SELF.propositionBytes) && managementScript + | + | // A spending transaction is creating boxes for directory, user, fee. 
+ | val outsSizeCorrect = OUTPUTS.size == 3 + | + | // Checks that the management label token is replicating self + | val outTokenCorrect = (selfOut.tokens.size == 1) && (selfOut.tokens(0)._1 == letsToken) + | + | // Checks that new token is issued, and its amount is correct + | // OUTPUTS(0) tokens already checked via outtokenCorrect + | val issuedTokenId = INPUTS(0).id + | val userOut = OUTPUTS(1) + | val correctTokenAmounts = + | (userOut.tokens.size == 1 && + | userOut.tokens(0)._1 == issuedTokenId && + | userOut.tokens(0)._2 == 1 && + | OUTPUTS(2).tokens.size == 0 && + | outTokenCorrect) + | + | // Checks that the new user has been created with the zero balance + | val zeroUserBalance = userOut.R4[Long].get == 0 + | + | val properUserScript = blake2b256(userOut.propositionBytes) == userContractHash + | + | // Checks that the new token identifier has been added to the directory + | val selfTree = SELF.R4[AvlTree].get + | val toAdd: Coll[(Coll[Byte], Coll[Byte])] = Coll((issuedTokenId, Coll[Byte]())) + | val proof = getVar[Coll[Byte]](1).get + | val modifiedTree = selfTree.insert(toAdd, proof).get + | val expectedTree = selfOut.R4[AvlTree].get + | val treeCorrect = modifiedTree == expectedTree + | + | correctTokenAmounts && scriptCorrect && treeCorrect && zeroUserBalance && properUserScript + |}""".stripMargin + ).asSigmaProp + + println(exchangeScript) + println(managementScript) + + property("adding new member") { + val avlProver = new BatchAVLProver[Digest32, Blake2b256.type](keyLength = 32, None) + val digest = avlProver.digest + val initTreeData = new AvlTreeData(digest, AvlTreeFlags.InsertOnly, 32, None) + + val projectBoxBefore = ErgoBox(10, managementScript, 0, + Seq(letsTokenId -> 1L), + Map(R4 -> AvlTreeConstant(SigmaDsl.avlTree(initTreeData)), R5 -> SigmaPropConstant(TrivialProp.TrueProp))) + + val userTokenId = Digest32 @@ projectBoxBefore.id + + avlProver.performOneOperation(Insert(ADKey @@ userTokenId, ADValue @@ Array.emptyByteArray)) + + val 
proof = avlProver.generateProof() + val endTree = new AvlTreeData(avlProver.digest, AvlTreeFlags.InsertOnly, 32, None) + + val projectBoxAfter = ErgoBox(9, managementScript, 0, + Seq(letsTokenId -> 1L), + Map(R4 -> AvlTreeConstant(SigmaDsl.avlTree(endTree)), R5 -> SigmaPropConstant(TrivialProp.TrueProp))) + val feeBox = ErgoBox(1, feeProp, 0, Seq(), Map()) + val userBox = ErgoBox(1, exchangeScript, 0, Seq(userTokenId -> 1L), Map(R4 -> LongConstant(0))) + + val issuanceTx = ErgoLikeTransaction(IndexedSeq(), IndexedSeq(projectBoxAfter, userBox, feeBox)) + + val fundingContext = ErgoLikeContext( + currentHeight = 1000, + lastBlockUtxoRoot = AvlTreeData.dummy, + minerPubkey = ErgoLikeContext.dummyPubkey, + boxesToSpend = IndexedSeq(projectBoxBefore), + spendingTransaction = issuanceTx, + self = projectBoxBefore) + + val managementProver = new ContextEnrichingTestProvingInterpreter() + .withContextExtender(1, ByteArrayConstant(proof)) + + val res = managementProver.prove(env, managementScript, fundingContext, fakeMessage).get + println("new user script cost: " + res.cost) + } + + property("exchange") { + + val userTokenId0 = Digest32 @@ Array.fill(32)(Random.nextInt(100).toByte) + val userTokenId1 = Digest32 @@ Array.fill(32)(Random.nextInt(100).toByte) + + val avlProver = new BatchAVLProver[Digest32, Blake2b256.type](keyLength = 32, None) + avlProver.performOneOperation(Insert(ADKey @@ userTokenId0, ADValue @@ Array.emptyByteArray)) + avlProver.performOneOperation(Insert(ADKey @@ userTokenId1, ADValue @@ Array.emptyByteArray)) + val digest = avlProver.digest + avlProver.generateProof() + val initTreeData = new AvlTreeData(digest, AvlTreeFlags.InsertOnly, 32, None) + + avlProver.performOneOperation(Lookup(ADKey @@ userTokenId0)) + avlProver.performOneOperation(Lookup(ADKey @@ userTokenId1)) + val proof = avlProver.generateProof() + + val directoryBox = ErgoBox(10, managementScript, 0, + Seq(letsTokenId -> 1L), + Map(R4 -> AvlTreeConstant(SigmaDsl.avlTree(initTreeData)), 
R5 -> SigmaPropConstant(TrivialProp.TrueProp))) + + val directoryDataInput = DataInput(directoryBox.id) + + val userBoxBefore0 = ErgoBox(1, exchangeScript, 0, Seq(userTokenId0 -> 1L), + Map(R4 -> LongConstant(0), R5 -> SigmaPropConstant(TrivialProp.TrueProp))) + val userBoxBefore1 = ErgoBox(1, exchangeScript, 0, Seq(userTokenId1 -> 1L), + Map(R4 -> LongConstant(0), R5 -> SigmaPropConstant(TrivialProp.TrueProp))) + + val userBoxAfter0 = ErgoBox(1, exchangeScript, 0, Seq(userTokenId0 -> 1L), Map(R4 -> LongConstant(-5))) + val userBoxAfter1 = ErgoBox(1, exchangeScript, 0, Seq(userTokenId1 -> 1L), Map(R4 -> LongConstant(5))) + + val issuanceTx = new ErgoLikeTransaction(IndexedSeq(), IndexedSeq(directoryDataInput), IndexedSeq(userBoxAfter0, userBoxAfter1)) + + val exchangeContext = ErgoLikeContext( + currentHeight = 1000, + lastBlockUtxoRoot = AvlTreeData.dummy, + minerPubkey = ErgoLikeContext.dummyPubkey, + dataBoxes = IndexedSeq(directoryBox), + boxesToSpend = IndexedSeq(userBoxBefore0, userBoxBefore1), + spendingTransaction = issuanceTx, + self = userBoxBefore0) + + val managementProver = new ContextEnrichingTestProvingInterpreter() + .withContextExtender(1, ByteArrayConstant(proof)) + + val res = managementProver.prove(env, exchangeScript, exchangeContext, fakeMessage).get + println("exchange script cost: " + res.cost) + } + +} diff --git a/src/test/scala/sigmastate/utxo/examples/MASTExampleSpecification.scala b/src/test/scala/sigmastate/utxo/examples/MASTExampleSpecification.scala index 447eb2c626..8fad6e8025 100644 --- a/src/test/scala/sigmastate/utxo/examples/MASTExampleSpecification.scala +++ b/src/test/scala/sigmastate/utxo/examples/MASTExampleSpecification.scala @@ -12,6 +12,7 @@ import sigmastate.lang.Terms._ import sigmastate.interpreter.Interpreter._ import sigmastate.serialization.ValueSerializer import sigmastate.utxo._ +import sigmastate.eval._ import scala.util.Random @@ -86,7 +87,7 @@ class MASTExampleSpecification extends SigmaTestingCommons { val 
avlProver = new BatchAVLProver[Digest32, Blake2b256.type](keyLength = 32, None) treeElements.foreach(s => avlProver.performOneOperation(Insert(s._1, s._2))) avlProver.generateProof() - val treeData = new AvlTreeData(avlProver.digest, AvlTreeFlags.ReadOnly, 32, None) + val treeData = SigmaDsl.avlTree(new AvlTreeData(avlProver.digest, AvlTreeFlags.ReadOnly, 32, None)) val merklePathToScript = OptionIsDefined( IR.builder.mkMethodCall( diff --git a/src/test/scala/sigmastate/utxo/examples/MixExampleSpecification.scala b/src/test/scala/sigmastate/utxo/examples/MixExampleSpecification.scala index c87c71c84f..f3c0998420 100644 --- a/src/test/scala/sigmastate/utxo/examples/MixExampleSpecification.scala +++ b/src/test/scala/sigmastate/utxo/examples/MixExampleSpecification.scala @@ -13,6 +13,7 @@ import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, ErgoLikeTestI import sigmastate.interpreter.CryptoConstants import sigmastate.interpreter.Interpreter._ import sigmastate.lang.Terms._ +import sigmastate.eval._ class MixExampleSpecification extends SigmaTestingCommons { private implicit lazy val IR: TestingIRContext = new TestingIRContext @@ -24,9 +25,9 @@ class MixExampleSpecification extends SigmaTestingCommons { // Alice is first player, who initiates the mix val alice = new ContextEnrichingTestProvingInterpreter - val alicePubKey:ProveDlog = alice.dlogSecrets.head.publicImage + val alicePubKey: ProveDlog = alice.dlogSecrets.head.publicImage - val x:BigInteger = alice.dlogSecrets.head.w // x is Alice's private key + val x: BigInteger = alice.dlogSecrets.head.w // x is Alice's private key val gX = alicePubKey.h // g_x is Alice's public key (g_x = g^x) // Alternative 1: @@ -60,7 +61,7 @@ class MixExampleSpecification extends SigmaTestingCommons { ScriptNameProp -> "halfMixEnv", "g" -> g, "gX" -> gX, - "fullMixScriptHash" -> Blake2b256(fullMixScript.bytes) + "fullMixScriptHash" -> Blake2b256(fullMixScript.treeWithSegregation.bytes) ) // Note that below script allows 
Alice to spend the half-mix output anytime before Bob spends it. @@ -112,9 +113,9 @@ class MixExampleSpecification extends SigmaTestingCommons { // If Alice wants to abort the mix, she can take Bob's role and spend her Half-Mix output val bob = new ContextEnrichingTestProvingInterpreter - val bobPubKey:ProveDlog = bob.dlogSecrets.head.publicImage + val bobPubKey: ProveDlog = bob.dlogSecrets.head.publicImage - val y:BigInteger = bob.dlogSecrets.head.w // y is Bob's private key + val y: BigInteger = bob.dlogSecrets.head.w // y is Bob's private key val gY = GroupElementConstant(bobPubKey.h) // g^y val gY_alt = GroupElementConstant(dlogGroup.exponentiate(g, y)) @@ -180,7 +181,7 @@ class MixExampleSpecification extends SigmaTestingCommons { // some 3rd person that will be paid val carol = new ContextEnrichingTestProvingInterpreter - val carolPubKey:ProveDlog = carol.dlogSecrets.head.publicImage + val carolPubKey: ProveDlog = carol.dlogSecrets.head.publicImage val spendHeight = 90 val carolOutput = ErgoBox(mixAmount, carolPubKey, spendHeight) @@ -198,7 +199,7 @@ class MixExampleSpecification extends SigmaTestingCommons { fullMixOutput0_R4 shouldBe c0 fullMixOutput0_R5 shouldBe c1 - val r4X = dlogGroup.exponentiate(fullMixOutput0_R4.asInstanceOf[GroupElementConstant], x) // R4^x + val r4X = SigmaDsl.GroupElement(dlogGroup.exponentiate(fullMixOutput0_R4.asInstanceOf[GroupElementConstant], x)) // R4^x // if R4^x == R5 then this fullMixOutput0 is Alice's output else its Bob's output. 
val (aliceAnonBox, bobAnonBox) = if (r4X == fullMixOutput0_R5.asInstanceOf[GroupElementConstant].value) { @@ -206,7 +207,7 @@ class MixExampleSpecification extends SigmaTestingCommons { (fullMixOutput0, fullMixOutput1) } else { println("First output is Bob's") - dlogGroup.exponentiate(fullMixOutput0_R5.asInstanceOf[GroupElementConstant], x) shouldBe fullMixOutput0_R4.asInstanceOf[GroupElementConstant].value + SigmaDsl.GroupElement(dlogGroup.exponentiate(fullMixOutput0_R5.asInstanceOf[GroupElementConstant], x)) shouldBe fullMixOutput0_R4.asInstanceOf[GroupElementConstant].value (fullMixOutput1, fullMixOutput0) } diff --git a/src/test/scala/sigmastate/utxo/examples/OracleDataInputsExamplesSpecification.scala b/src/test/scala/sigmastate/utxo/examples/OracleDataInputsExamplesSpecification.scala index 6ca2b59d14..5915781b15 100644 --- a/src/test/scala/sigmastate/utxo/examples/OracleDataInputsExamplesSpecification.scala +++ b/src/test/scala/sigmastate/utxo/examples/OracleDataInputsExamplesSpecification.scala @@ -12,6 +12,7 @@ class OracleDataInputsExamplesSpecification extends SigmaTestingCommons { suite implicit lazy val IR: TestingIRContext = new TestingIRContext private val reg1 = ErgoBox.nonMandatoryRegisters(0) + private val reg2 = ErgoBox.nonMandatoryRegisters(1) private lazy val tokenId: Coll[Byte] = spec.Coll(Blake2b256("token1")) @@ -32,15 +33,17 @@ class OracleDataInputsExamplesSpecification extends SigmaTestingCommons { suite import CONTEXT._ val dataInput = CONTEXT.dataInputs(0) val inReg = dataInput.R4[Long].get + val inTime = dataInput.R5[Long].get val inToken = dataInput.R2[Coll[(Coll[Byte], Long)]].get(0)._1 == tokenId - val okContractLogic = (inReg > 15L && pkA) || (inReg <= 15L && pkB) + val okContractLogic = (inTime > 1556089223) && ((inReg > 15L && pkA) || (inReg <= 15L && pkB)) inToken && okContractLogic }, """{ | val dataInput = CONTEXT.dataInputs(0) | val inReg = dataInput.R4[Long].get - | val inToken = dataInput.R2[Coll[(Coll[Byte], 
Long)]].get(0)._1 == tokenId - | val okContractLogic = (inReg > 15L && pkA) || (inReg <= 15L && pkB) + | val inTime = dataInput.R5[Long].get + | val inToken = dataInput.tokens(0)._1 == tokenId + | val okContractLogic = (inTime > 1556089223) && ((inReg > 15L && pkA) || (inReg <= 15L && pkB)) | inToken && okContractLogic |} """.stripMargin) @@ -57,6 +60,7 @@ class OracleDataInputsExamplesSpecification extends SigmaTestingCommons { suite property("lightweight oracle token example (ErgoDsl)") { val temperature: Long = 18 + val time = 1556089423L val contract = OracleContract[spec.type](temperature, tokenId, alice, bob)(spec) import contract.spec._ @@ -68,7 +72,7 @@ class OracleDataInputsExamplesSpecification extends SigmaTestingCommons { suite val sOracle = mockTx // in real world, this must be protected by pkOracle .outBox(value = 1L, contract.dummySignature) - .withRegs(reg1 -> temperature) + .withRegs(reg1 -> temperature, reg2 -> time) .withTokens(Token(tokenId, 1)) val sAlice = mockTx.outBox(10, contract.prop) @@ -84,3 +88,8 @@ class OracleDataInputsExamplesSpecification extends SigmaTestingCommons { suite contract.verifier.verify(in, pr) shouldBe true } } + + +object TimePrinter extends App { + println(System.currentTimeMillis() / 1000) +} diff --git a/src/test/scala/sigmastate/utxo/examples/OracleExamplesSpecification.scala b/src/test/scala/sigmastate/utxo/examples/OracleExamplesSpecification.scala index 4d69d25d97..aada39aa4d 100644 --- a/src/test/scala/sigmastate/utxo/examples/OracleExamplesSpecification.scala +++ b/src/test/scala/sigmastate/utxo/examples/OracleExamplesSpecification.scala @@ -3,30 +3,26 @@ package sigmastate.utxo.examples import java.security.SecureRandom import com.google.common.primitives.Longs -import org.ergoplatform.ErgoBox.{R4, RegisterId} +import org.ergoplatform.ErgoBox.RegisterId import scorex.crypto.authds.avltree.batch.{BatchAVLProver, Insert, Lookup} import scorex.crypto.authds.{ADKey, ADValue} -import 
scorex.crypto.hash.{Blake2b256, Digest32} +import scorex.crypto.hash.{Digest32, Blake2b256} import sigmastate.SCollection.SByteArray import sigmastate.Values._ import sigmastate._ +import sigmastate.eval._ import sigmastate.lang.Terms._ -import sigmastate.helpers.{ErgoLikeTestProvingInterpreter, SigmaTestingCommons} -import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, ErgoLikeTestInterpreter, SigmaTestingCommons} +import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, SigmaTestingCommons, ErgoLikeTestInterpreter} import sigmastate.interpreter.CryptoConstants import org.ergoplatform._ -import org.ergoplatform.dsl.ContractSyntax.Token -import org.ergoplatform.dsl.{ContractSpec, SigmaContractSyntax, StdContracts, TestContractSpec} -import sigmastate.TrivialProp.TrueProp -import sigmastate.eval.CSigmaProp +import org.ergoplatform.dsl.{SigmaContractSyntax, ContractSpec, TestContractSpec, StdContracts} import sigmastate.interpreter.Interpreter.{ScriptNameProp, emptyEnv} import sigmastate.utxo._ -import special.collection.Coll import special.sigma.Context class OracleExamplesSpecification extends SigmaTestingCommons { suite => - implicit lazy val IR = new TestingIRContext + implicit lazy val IR: TestingIRContext = new TestingIRContext private val reg1 = ErgoBox.nonMandatoryRegisters(0) private val reg2 = ErgoBox.nonMandatoryRegisters(1) @@ -100,7 +96,7 @@ class OracleExamplesSpecification extends SigmaTestingCommons { suite => val reducedHashSize = 31 val e = BigInt(1, Blake2b256.hash(Longs.toByteArray(temperature) ++ Longs.toByteArray(ts)).take(reducedHashSize)) - val z = (r + e.bigInteger.multiply(oraclePrivKey.w)).mod(group.order).bigInteger // todo : check + val z = (r + e.bigInteger.multiply(oraclePrivKey.w)).mod(group.order).bigInteger val oracleBox = ErgoBox( value = 1L, diff --git a/src/test/scala/sigmastate/utxo/examples/RPSGameExampleSpecification.scala b/src/test/scala/sigmastate/utxo/examples/RPSGameExampleSpecification.scala index 
3575813884..b80fb1f36b 100644 --- a/src/test/scala/sigmastate/utxo/examples/RPSGameExampleSpecification.scala +++ b/src/test/scala/sigmastate/utxo/examples/RPSGameExampleSpecification.scala @@ -80,7 +80,7 @@ class RPSGameExampleSpecification extends SigmaTestingCommons { val halfGameEnv = Map( ScriptNameProp -> "halfGameScript", "alice" -> alicePubKey, - "fullGameScriptHash" -> Blake2b256(fullGameScript.bytes) + "fullGameScriptHash" -> Blake2b256(fullGameScript.treeWithSegregation.bytes) ) // Note that below script allows Alice to spend the half-game output anytime before Bob spends it. diff --git a/src/test/scala/sigmastate/utxo/examples/RevenueSharingExamplesSpecification.scala b/src/test/scala/sigmastate/utxo/examples/RevenueSharingExamplesSpecification.scala index c909069489..c7a3ecbe08 100644 --- a/src/test/scala/sigmastate/utxo/examples/RevenueSharingExamplesSpecification.scala +++ b/src/test/scala/sigmastate/utxo/examples/RevenueSharingExamplesSpecification.scala @@ -4,7 +4,8 @@ import org.ergoplatform.ErgoBox.R4 import org.ergoplatform.dsl.{ContractSpec, SigmaContractSyntax, StdContracts, TestContractSpec} import sigmastate.helpers.SigmaTestingCommons import special.collection.Coll -import special.sigma.{Context, Extensions, SigmaProp} +import special.sigma.{Context,SigmaProp} +import sigmastate.eval.Extensions class RevenueSharingExamplesSpecification extends SigmaTestingCommons { suite => implicit lazy val IR = new TestingIRContext diff --git a/src/test/scala/sigmastate/utxo/examples/ReversibleTxExampleSpecification.scala b/src/test/scala/sigmastate/utxo/examples/ReversibleTxExampleSpecification.scala index 744a346f58..564389d24e 100644 --- a/src/test/scala/sigmastate/utxo/examples/ReversibleTxExampleSpecification.scala +++ b/src/test/scala/sigmastate/utxo/examples/ReversibleTxExampleSpecification.scala @@ -96,7 +96,7 @@ class ReversibleTxExampleSpecification extends SigmaTestingCommons { "blocksIn24h" -> blocksIn24h, "maxFee" -> 10L, 
"feePropositionBytes" -> feeProposition.bytes, - "withdrawScriptHash" -> Blake2b256(withdrawScript.bytes) + "withdrawScriptHash" -> Blake2b256(withdrawScript.treeWithSegregation.bytes) ) val depositScript = compile(depositEnv, @@ -107,11 +107,9 @@ class ReversibleTxExampleSpecification extends SigmaTestingCommons { | val isFee = {(b:Box) => b.propositionBytes == feePropositionBytes} | val isValidOut = {(b:Box) => isChange(b) || isWithdraw(b) || isFee(b)} | - | val totalFee = OUTPUTS.fold(0L, {(acc:Long, b:Box) => if (b.propositionBytes == feePropositionBytes) acc + b.value else acc }) | val totalFeeAlt = OUTPUTS.fold(0L, {(acc:Long, b:Box) => if (isFee(b)) acc + b.value else acc }) | - | alice && OUTPUTS.forall(isValidOut) && totalFee <= maxFee // works - | //alice && OUTPUTS.forall(isValidOut) && totalFeeAlt <= maxFee // gives error + | alice && OUTPUTS.forall(isValidOut) && totalFeeAlt <= maxFee |}""".stripMargin ).asSigmaProp // Note: in above bobDeadline is stored in R5. After this height, Bob gets to spend unconditionally diff --git a/src/test/scala/sigmastate/utxo/examples/Rule110Specification.scala b/src/test/scala/sigmastate/utxo/examples/Rule110Specification.scala index 54d68919eb..2e9d124eef 100644 --- a/src/test/scala/sigmastate/utxo/examples/Rule110Specification.scala +++ b/src/test/scala/sigmastate/utxo/examples/Rule110Specification.scala @@ -3,9 +3,10 @@ package sigmastate.utxo.examples import org.ergoplatform._ import scorex.crypto.hash.Blake2b256 import scorex.util._ -import sigmastate.Values.{BooleanConstant, ByteArrayConstant, ByteConstant, FalseLeaf, GetVarByteArray, IntConstant, LongConstant, TrueLeaf, Value} +import sigmastate.Values.{LongConstant, FalseLeaf, TrueLeaf, GetVarByteArray, Value, ByteArrayConstant, IntConstant, BooleanConstant, ByteConstant} import sigmastate._ -import sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, ErgoLikeTestInterpreter, SigmaTestingCommons} +import sigmastate.eval._ +import 
sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, SigmaTestingCommons, ErgoLikeTestInterpreter} import sigmastate.interpreter.ContextExtension import sigmastate.lang.Terms._ import sigmastate.serialization.ValueSerializer @@ -51,8 +52,8 @@ class Rule110Specification extends SigmaTestingCommons { | (OUTPUTS(0).propositionBytes == SELF.propositionBytes) }""".stripMargin).asBoolValue.toSigmaProp - val input = ErgoBox(1, prop, 0, Seq(), Map(reg1 -> ByteArrayConstant(Array(0, 1, 1, 0, 1, 0)))) - val output = ErgoBox(1, prop, 0, Seq(), Map(reg1 -> ByteArrayConstant(Array(1, 1, 1, 1, 1, 0)))) + val input = ErgoBox(1, prop, 0, Seq(), Map(reg1 -> ByteArrayConstant(Array[Byte](0, 1, 1, 0, 1, 0)))) + val output = ErgoBox(1, prop, 0, Seq(), Map(reg1 -> ByteArrayConstant(Array[Byte](1, 1, 1, 1, 1, 0)))) val tx = UnsignedErgoLikeTransaction(IndexedSeq(new UnsignedInput(input.id)), IndexedSeq(output)) val ctx = ErgoLikeContext( @@ -431,7 +432,7 @@ class Rule110Specification extends SigmaTestingCommons { val value = ValueReg -> BooleanConstant.fromBoolean(calcRule110(lv, cv, rv)) - val c = new ErgoBoxCandidate(0L, prop, row, Seq(), Map(RowReg -> LongConstant(row), ColumnReg -> LongConstant(col), value)) + val c = new ErgoBoxCandidate(0L, prop, row, Colls.emptyColl, Map(RowReg -> LongConstant(row), ColumnReg -> LongConstant(col), value)) val ut = UnsignedErgoLikeTransaction( IndexedSeq(new UnsignedInput(left.id), new UnsignedInput(center.id), new UnsignedInput(right.id)), diff --git a/src/test/scala/sigmastate/utxo/examples/XorGameExampleSpecification.scala b/src/test/scala/sigmastate/utxo/examples/XorGameExampleSpecification.scala index 4cfab52b2f..05f1fca36a 100644 --- a/src/test/scala/sigmastate/utxo/examples/XorGameExampleSpecification.scala +++ b/src/test/scala/sigmastate/utxo/examples/XorGameExampleSpecification.scala @@ -74,7 +74,7 @@ class XorGameExampleSpecification extends SigmaTestingCommons { val halfGameEnv = Map( ScriptNameProp -> "halfGameScript", "alice" 
-> alicePubKey, - "fullGameScriptHash" -> Blake2b256(fullGameScript.bytes) + "fullGameScriptHash" -> Blake2b256(fullGameScript.treeWithSegregation.bytes) ) // Note that below script allows Alice to spend the half-game output anytime before Bob spends it. diff --git a/src/test/scala/special/sigma/ContractsTestkit.scala b/src/test/scala/special/sigma/ContractsTestkit.scala index 8ca4a0f4f7..116177380d 100644 --- a/src/test/scala/special/sigma/ContractsTestkit.scala +++ b/src/test/scala/special/sigma/ContractsTestkit.scala @@ -1,10 +1,11 @@ package special.sigma +import org.ergoplatform.ErgoBox import scalan._ import special.collection.{Coll, CollOverArrayBuilder} import scalan.RType -import sigmastate.AvlTreeData -import sigmastate.eval.{CAvlTree, CostingDataContext, CostingSigmaDslBuilder} +import sigmastate.{AvlTreeData, TrivialProp} +import sigmastate.eval._ import sigmastate.eval.Extensions._ trait ContractsTestkit { @@ -55,11 +56,11 @@ trait ContractsTestkit { } val AliceId = Array[Byte](1) // 0x0001 - def newAliceBox(id: Byte, value: Long, registers: Map[Int, AnyValue] = Map()): Box = new TestBox( - Colls.fromArray(Array[Byte](0, id)), value, - Colls.fromArray(AliceId), noBytes, noBytes, - regs(registers.map { case (k, v) => (k.toByte, v) }) - ) + def newAliceBox(id: Byte, value: Long): Box = { + val ergoBox = ErgoBox(value, TrivialProp.TrueProp.toSigmaProp, 0, Seq(), Map()) + new CostingBox(false, ergoBox) + } + def testContext(inputs: Array[Box], outputs: Array[Box], height: Int, self: Box, tree: AvlTree, minerPk: Array[Byte], vars: Array[AnyValue]) = diff --git a/src/test/scala/special/sigma/SigmaDslCostedTests.scala b/src/test/scala/special/sigma/SigmaDslCostedTests.scala deleted file mode 100644 index ea6026a9e6..0000000000 --- a/src/test/scala/special/sigma/SigmaDslCostedTests.scala +++ /dev/null @@ -1,22 +0,0 @@ -package special.sigma - -import org.scalatest.{Matchers, FunSuite} -import special.sigma.Extensions._ - -class SigmaDslCostedTests extends 
FunSuite with ContractsTestkit with Matchers { - val boxA1 = newAliceBox(1, 100, Map(1 -> toAnyValue(20))) - val boxA2 = newAliceBox(2, 200) - val ctx = newContext(10, boxA1) - .withInputs(boxA2) - .withVariables(Map(1 -> toAnyValue(30), 2 -> toAnyValue(40))) - val p1: SigmaProp = new special.sigma.MockSigma(true) - val p2: SigmaProp = new special.sigma.MockSigma(false) - val dsl: SigmaDslBuilder = SigmaDsl - -// test("CostedContext") { -// val ctxC = new CCostedContext(ctx) -// ctx.cost shouldBe 4 -// ctxC.INPUTS.cost shouldBe 2 -// ctxC.OUTPUTS.cost shouldBe 1 -// } -} diff --git a/src/test/scala/special/sigma/SigmaDslStaginTests.scala b/src/test/scala/special/sigma/SigmaDslStaginTests.scala index f3851b201e..ba4047b1b2 100644 --- a/src/test/scala/special/sigma/SigmaDslStaginTests.scala +++ b/src/test/scala/special/sigma/SigmaDslStaginTests.scala @@ -1,15 +1,15 @@ package special.sigma import special.wrappers.WrappersTests - +import special.collection._ import scala.language.reflectiveCalls -import scalan.SigmaLibrary -import special.sigma.Extensions._ +import scalan.{SigmaLibrary, BaseCtxTests, BaseLiftableTests} +import sigmastate.eval.Extensions._ +import sigmastate.eval.{IRContext, ErgoScriptTestkit} +import sigmastate.helpers.SigmaTestingCommons -class SigmaDslStaginTests extends WrappersTests with ContractsTestkit { - class Ctx extends WrappersCtx with SigmaLibrary { - import TestSigmaDslBuilder._ - val sigmaDslBuilder = RTestSigmaDslBuilder() +class SigmaDslStaginTests extends BaseCtxTests with ErgoScriptTestkit with BaseLiftableTests { + class Ctx extends TestContext with IRContext with LiftableTestKit { } test("invokeUnlifted") { @@ -17,17 +17,18 @@ class SigmaDslStaginTests extends WrappersTests with ContractsTestkit { import cake._ import Liftables._ import Context._ + import Coll._ import Box._ import SigmaProp._ import SigmaDslBuilder._ import EnvRep._ - val dsl: SSigmaDslBuilder = SigmaDsl + val dsl: SSigmaDslBuilder = sigmastate.eval.SigmaDsl type 
RSigmaDslBuilder = cake.SigmaDslBuilder type RContext = cake.Context type RBox = cake.Box type RSigmaProp = cake.SigmaProp - val boxA1 = newAliceBox(1, 100, Map(1 -> toAnyValue(20), 3 -> toAnyValue((10 -> SigmaDsl.Colls.fromArray(Array.emptyByteArray))))) + val boxA1 = newAliceBox(1, 100) val boxA2 = newAliceBox(2, 200) val ctx: SContext = newContext(10, boxA1) .withInputs(boxA2) @@ -35,22 +36,19 @@ class SigmaDslStaginTests extends WrappersTests with ContractsTestkit { val p1: SSigmaProp = new special.sigma.MockSigma(true) val p2: SSigmaProp = new special.sigma.MockSigma(false) - check(dsl, { env: EnvRep[RSigmaDslBuilder] => + cake.check(dsl, { env: EnvRep[RSigmaDslBuilder] => for { dsl <- env; arg <- lifted(true) } yield dsl.sigmaProp(arg) }, dsl.sigmaProp(true)) - check(ctx, { env: EnvRep[RContext] => for { obj <- env } yield obj.SELF }, ctx.SELF) - check(ctx, { env: EnvRep[RContext] => + cake.check(ctx, { env: EnvRep[RContext] => for { obj <- env } yield obj.SELF }, ctx.SELF) + cake.check(ctx, { env: EnvRep[RContext] => for { obj <- env; id <- lifted(1.toByte) } yield obj.getVar[Int](id) }, ctx.getVar[Int](1)) - check(boxA1, { env: EnvRep[RBox] => for { obj <- env } yield obj.value }, boxA1.value) - check(boxA1, { env: EnvRep[RBox] => for { obj <- env } yield obj.creationInfo }, boxA1.creationInfo) - check(boxA1, { env: EnvRep[RBox] => for { obj <- env; arg <- lifted(1) } yield obj.getReg[Int](arg) }, boxA1.getReg[Int](1)) - check(boxA1, { env: EnvRep[RBox] => for { obj <- env } yield obj.registers }, boxA1.registers) - - check(p1, { env: EnvRep[RSigmaProp] => for { p1 <- env; arg <- lifted(true) } yield p1 && arg }, p1 && true) - check(p1, { env: EnvRep[RSigmaProp] => for { p1 <- env; arg <- lifted(p2) } yield p1 && arg }, p1 && p2) + cake.check(boxA1, { env: EnvRep[RBox] => for { obj <- env } yield obj.value }, boxA1.value) + cake.check(boxA1, { env: EnvRep[RBox] => for { obj <- env } yield obj.creationInfo }, boxA1.creationInfo) + cake.check(boxA1, { env: 
EnvRep[RBox] => for { obj <- env; arg <- lifted(1) } yield obj.getReg[Coll[Byte]](arg) }, boxA1.getReg[special.collection.Coll[Byte]](1)) + cake.check(boxA1, { env: EnvRep[RBox] => for { obj <- env } yield obj.registers }, boxA1.registers) - val th = () => p2 -// check(p1, { env: EnvRep[RSigmaProp] => for { p1 <- env; thL <- lifted(th) } yield p1.lazyAnd(thL) }, p1.lazyAnd(th())) + cake.check(p1, { env: EnvRep[RSigmaProp] => for { p1 <- env; arg <- lifted(true) } yield p1 && arg }, p1 && true) + cake.check(p1, { env: EnvRep[RSigmaProp] => for { p1 <- env; arg <- lifted(p2) } yield p1 && arg }, p1 && p2) } } diff --git a/src/test/scala/special/sigma/SigmaDslTest.scala b/src/test/scala/special/sigma/SigmaDslTest.scala index e4e9eec50f..21b96900e7 100644 --- a/src/test/scala/special/sigma/SigmaDslTest.scala +++ b/src/test/scala/special/sigma/SigmaDslTest.scala @@ -2,25 +2,28 @@ package special.sigma import java.math.BigInteger +import org.ergoplatform.ErgoLikeContext.dummyPubkey +import org.ergoplatform.ErgoScriptPredef.TrueProp import org.ergoplatform.dsl.{SigmaContractSyntax, TestContractSpec} -import org.ergoplatform.{ErgoLikeContext, ErgoLikeTransaction, ErgoBox} +import org.ergoplatform._ import org.scalacheck.Gen.containerOfN import org.scalacheck.{Arbitrary, Gen} import org.scalatest.prop.PropertyChecks -import org.scalatest.{PropSpec, Matchers} +import org.scalatest.{Matchers, PropSpec} import scalan.RType import scorex.crypto.authds.avltree.batch._ import scorex.crypto.authds.{ADKey, ADValue} -import scorex.crypto.hash.{Digest32, Blake2b256} +import scorex.crypto.hash.{Blake2b256, Digest32} import sigma.util.Extensions._ -import sigmastate.Values.{BooleanConstant, IntConstant} +import sigmastate.Values.{BooleanConstant, EvaluatedValue, IntConstant} import sigmastate._ +import sigmastate.Values._ import sigmastate.eval.Extensions._ import sigmastate.eval._ -import sigmastate.helpers.SigmaTestingCommons +import 
sigmastate.helpers.{ContextEnrichingTestProvingInterpreter, ErgoLikeTestInterpreter, SigmaTestingCommons} import sigmastate.interpreter.ContextExtension -import sigmastate.interpreter.Interpreter.ScriptEnv -import special.collection.{Coll, Builder} +import sigmastate.interpreter.Interpreter.{ScriptEnv, ScriptNameProp} +import special.collection.{Builder, Coll} /** This suite tests every method of every SigmaDsl type to be equivalent to @@ -28,7 +31,7 @@ import special.collection.{Coll, Builder} class SigmaDslTest extends PropSpec with PropertyChecks with Matchers - with SigmaTestingCommons with SigmaContractSyntax + with SigmaTestingData with SigmaContractSyntax with SigmaTypeGens { suite => lazy val spec = TestContractSpec(suite)(new TestingIRContext) @@ -41,7 +44,6 @@ class SigmaDslTest extends PropSpec def checkEq[A,B](f: A => B)(g: A => B): A => Unit = { x: A => val b1 = f(x); val b2 = g(x) -// assert(b1.getClass == b2.getClass) assert(b1 == b2) } @@ -51,38 +53,60 @@ class SigmaDslTest extends PropSpec assert(r1 == r2) } + def getRandomIndex(size: Int): Int = { + val r = scala.util.Random + if (size > 1) r.nextInt(size) else 0 + } + + def makeSlicePair(size: Int): (Int, Int) = { + val r = scala.util.Random + val rBorder = getRandomIndex(size) + val lBorder = getRandomIndex(rBorder) + (lBorder, rBorder) + } + + // TODO: make more effective + def generateIndexColl(maxSize: Int): Coll[Int] = { + var ret: Coll[Int] = Colls.emptyColl + var index = getRandomIndex(maxSize) + while (index > 0) { + ret = ret.append(Colls.fromArray(Array(index))) + index = getRandomIndex(index) + } + ret + } + case class EqualityChecker[T: RType](obj: T) { def apply[R: RType](dslFunc: T => R)(script: String) = checkEq(func[T, R](script))(dslFunc)(obj) } - property("Boolean methods equivalence") { - lazy val toByte = checkEq(func[Boolean,Byte]("{ (x: Boolean) => x.toByte }"))(x => x.toByte) + ignore("Boolean methods equivalence") { + lazy val toByte = checkEq(func[Boolean,Byte]("{ (x: 
Boolean) => x.toByte }"))((x: Boolean) => x.toByte) forAll { x: Boolean => - x.toByte + //TODO soft-fork: for new operation below + Seq(toByte).foreach(_(x)) } } property("Byte methods equivalence") { + val toByte = checkEq(func[Byte, Byte]("{ (x: Byte) => x.toByte }"))(x => x.toByte) val toShort = checkEq(func[Byte,Short]("{ (x: Byte) => x.toShort }"))(x => x.toShort) val toInt = checkEq(func[Byte,Int]("{ (x: Byte) => x.toInt }"))(x => x.toInt) val toLong = checkEq(func[Byte,Long]("{ (x: Byte) => x.toLong }"))(x => x.toLong) val toBigInt = checkEq(func[Byte,BigInt]("{ (x: Byte) => x.toBigInt }"))(x => x.toBigInt) + + //TODO soft-fork: for new 4 operations below lazy val toBytes = checkEq(func[Byte,Coll[Byte]]("{ (x: Byte) => x.toBytes }"))(x => x.toBytes) lazy val toBits = checkEq(func[Byte,Coll[Boolean]]("{ (x: Byte) => x.toBits }"))(x => x.toBits) lazy val toAbs = checkEq(func[Byte,Byte]("{ (x: Byte) => x.toAbs }"))(x => x.toAbs) lazy val compareTo = checkEq(func[(Byte, Byte), Int]("{ (x: (Byte, Byte)) => x._1.compareTo(x._2) }"))({ (x: (Byte, Byte)) => x._1.compareTo(x._2) }) forAll { x: Byte => - Seq(toInt, toLong, toBigInt).foreach(_(x)) -//TODO toBytes, toBits, toAbs - } - forAll { x: (Byte, Byte) => -//TODO compareTo(x) + Seq(toByte, toShort, toInt, toLong, toBigInt).foreach(_(x)) } } - property("Int methods equivalence") { val toByte = checkEq(func[Int,Byte]("{ (x: Int) => x.toByte }"))(x => x.toByte) val toShort = checkEq(func[Int,Short]("{ (x: Int) => x.toShort }"))(x => x.toShort) @@ -102,13 +126,111 @@ class SigmaDslTest extends PropSpec toShort(x) } Seq(toInt, toLong, toBigInt).foreach(_(x)) - //TODO toBytes, toBits, toAbs + //TODO soft-fork: toBytes, toBits, toAbs } forAll { x: (Int, Int) => - //TODO compareTo(x) + //TODO soft-fork: compareTo(x) + } + } + + property("Short methods equivalence") { + val toByte = checkEq(func[Short,Byte]("{ (x: Short) => x.toByte }"))(x => x.toByte) + val toShort = checkEq(func[Short,Short]("{ (x: Short) => x.toShort 
}"))(x => x.toShort) + val toInt = checkEq(func[Short,Int]("{ (x: Short) => x.toInt }"))(x => x.toInt) + val toLong = checkEq(func[Short,Long]("{ (x: Short) => x.toLong }"))(x => x.toLong) + val toBigInt = checkEq(func[Short,BigInt]("{ (x: Short) => x.toBigInt }"))(x => x.toBigInt) + lazy val toBytes = checkEq(func[Short,Coll[Byte]]("{ (x: Short) => x.toBytes }"))(x => x.toBytes) + lazy val toBits = checkEq(func[Short,Coll[Boolean]]("{ (x: Short) => x.toBits }"))(x => x.toBits) + // TODO: Implement Short.toAbs + lazy val toAbs = checkEq(func[Short,Short]("{ (x: Short) => x.toAbs }"))((x: Short) => if (x >= 0.toShort) x else (-x).toShort) + lazy val compareTo = checkEq(func[(Short, Short), Int]("{ (x: (Short, Short)) => x._1.compareTo(x._2) }"))(x => x._1.compareTo(x._2)) + + forAll { x: Short => + whenever(Byte.MinValue <= x && x <= scala.Byte.MaxValue) { + toByte(x) + } + Seq(toShort, toInt, toLong, toBigInt).foreach(_(x)) + //TODO soft-fork: toBytes, toBits, toAbs + } + forAll { x: (Short, Short) => + //TODO soft-fork: compareTo(x) + } + } + + property("Long methods equivalence") { + val toByte = checkEq(func[Long,Byte]("{ (x: Long) => x.toByte }"))(x => x.toByte) + val toShort = checkEq(func[Long,Short]("{ (x: Long) => x.toShort }"))(x => x.toShort) + val toInt = checkEq(func[Long,Int]("{ (x: Long) => x.toInt }"))(x => x.toInt) + val toLong = checkEq(func[Long,Long]("{ (x: Long) => x.toLong }"))(x => x.toLong) + val toBigInt = checkEq(func[Long,BigInt]("{ (x: Long) => x.toBigInt }"))(x => BigInt(x).bigInteger) + /* + lazy val toBytes = checkEq(func[Long,Coll[Byte]]("{ (x: Long) => x.toBytes }"))(x => x.toBytes) + lazy val toBits = checkEq(func[Long,Coll[Boolean]]("{ (x: Long) => x.toBits }"))(x => x.toBits) + lazy val toAbs = checkEq(func[Long,Long]("{ (x: Long) => x.toAbs }"))(x => x.toAbs) + */ + lazy val compareTo = checkEq(func[(Long, Long), Int]("{ (x: (Long, Long)) => x._1.compareTo(x._2) }"))(x => x._1.compareTo(x._2)) + + forAll { x: Long => + 
whenever(Byte.MinValue <= x && x <= scala.Byte.MaxValue) { + toByte(x) + } + whenever(Short.MinValue <= x && x <= Short.MaxValue) { + toShort(x) + } + whenever(Int.MinValue <= x && x <= Int.MaxValue) { + toInt(x) + } + Seq(toLong, toBigInt).foreach(_(x)) + //TODO soft-fork: toBytes, toBits, toAbs + } + forAll { x: (Long, Long) => + //TODO soft-fork: compareTo(x) + } + } + property("BigInt methods equivalence") { + val toByte = checkEq(func[(Byte, BigInt),Boolean]("{ (x: (Byte, BigInt)) => x._2.toByte == x._1 }")) { x => + x._2.toByte == x._1 + } + val toShort = checkEq(func[(Short, BigInt),Boolean]("{ (x: (Short, BigInt)) => x._2.toShort == x._1 }")) { x => + x._2.toShort == x._1 + } + val toInt = checkEq(func[(Int, BigInt),Boolean]("{ (x: (Int, BigInt)) => x._2.toInt == x._1 }")) { x => + x._2.toInt == x._1 + } + val toLong = checkEq(func[(Long, BigInt),Boolean]("{ (x: (Long, BigInt)) => x._2.toLong == x._1 }")) { x => + x._2.toLong == x._1 + } + val toBigInt = checkEq(func[(BigInt, BigInt),Boolean]("{ (x: (BigInt, BigInt)) => x._2.toBigInt == x._1 }")) { x => + x._2 == x._1 + } + + lazy val toBytes = checkEq(func[BigInt,Coll[Byte]]("{ (x: BigInt) => x.toBytes }"))(x => x.toBytes) + lazy val toBits = checkEq(func[BigInt,Coll[Boolean]]("{ (x: BigInt) => x.toBits }"))(x => x.toBits) + lazy val toAbs = checkEq(func[BigInt,BigInt]("{ (x: BigInt) => x.toAbs }"))(x => x.toAbs) + lazy val compareTo = checkEq(func[(BigInt, BigInt), Int]("{ (x: (BigInt, BigInt)) => x._1.compareTo(x._2) }"))(x => x._1.compareTo(x._2)) + + /* + forAll { x: Byte => + toByte((x, x.toBigInt)) + } + forAll { x: Short => + toShort((x, x.toBigInt)) + } + forAll { x: Int => + toInt((x, x.toBigInt)) + } + forAll { x: Long => + toLong((x, BigInt(x).bigInteger)) + } + */ + forAll { x: BigInt => + Seq(toBigInt).foreach(_((x, x))) + //TODO soft-fork: toBytes, toBits, toAbs + } + forAll { x: (BigInt, BigInt) => + //TODO soft-fork: compareTo(x) } } - // TODO add tests for Short, Long, BigInt operations 
property("GroupElement operations equivalence") { val ge = SigmaDsl.groupGenerator @@ -117,10 +239,12 @@ class SigmaDslTest extends PropSpec { val eq = EqualityChecker(ge) -// eq({ (x: GroupElement) => x.isIdentity })("{ (x: GroupElement) => x.isIdentity }") eq({ (x: GroupElement) => x.getEncoded })("{ (x: GroupElement) => x.getEncoded }") eq({ (x: GroupElement) => decodePoint(x.getEncoded) == x })("{ (x: GroupElement) => decodePoint(x.getEncoded) == x }") eq({ (x: GroupElement) => x.negate })("{ (x: GroupElement) => x.negate }") + + //TODO soft-fork: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 + // eq({ (x: GroupElement) => x.isIdentity })("{ (x: GroupElement) => x.isIdentity }") } { @@ -134,46 +258,6 @@ class SigmaDslTest extends PropSpec } } -// property("sigma.types.Byte methods equivalence") { -// import sigma.types._ -// val toInt = checkEq(func[Byte,Int]("{ (x: Byte) => x.toInt }"))(x => x.toInt) -// forAll { x: Byte => -// Seq(toInt).foreach(_(x)) -// } -// } -// -// property("sigma.types.Int methods equivalence") { -// import sigma.types._ -// val toByte = checkEq(func[Int,Byte]("{ (x: Int) => x.toByte }"))(x => x.toByte) -// lazy val compareTo = checkEq(func[(Int, Int), Int]("{ (x: (Int, Int)) => x._1.compareTo(x._2) }"))(x => x._1.compareTo(x._2)) -// forAll { in: scala.Int => -// whenever(scala.Byte.MinValue <= in && in <= scala.Byte.MaxValue) { -// val x = CInt(in) -// toByte(x) -// } -// } -// } - - val bytesGen: Gen[Array[Byte]] = containerOfN[Array, Byte](100, Arbitrary.arbByte.arbitrary) - val bytesCollGen = bytesGen.map(Colls.fromArray(_)) - implicit val arbBytes = Arbitrary(bytesCollGen) - val keyCollGen = bytesCollGen.map(_.slice(0, 32)) - import org.ergoplatform.dsl.AvlTreeHelpers._ - - private def sampleAvlProver = { - val key = keyCollGen.sample.get - val value = bytesCollGen.sample.get - val (_, avlProver) = createAvlTree(AvlTreeFlags.AllOperationsAllowed, ADKey @@ key.toArray -> ADValue @@ value.toArray) 
- (key, value, avlProver) - } - - private def sampleAvlTree:CAvlTree = { - val (key, _, avlProver) = sampleAvlProver - val digest = avlProver.digest.toColl - val tree = SigmaDsl.avlTree(AvlTreeFlags.ReadOnly.serializeToByte, digest, 32, None) - tree - } - property("AvlTree properties equivalence") { val doDigest = checkEq(func[AvlTree, Coll[Byte]]("{ (t: AvlTree) => t.digest }")) { (t: AvlTree) => t.digest } val doEnabledOps = checkEq(func[AvlTree, Byte]( @@ -184,15 +268,18 @@ class SigmaDslTest extends PropSpec val updateAllowed = checkEq(func[AvlTree, Boolean]("{ (t: AvlTree) => t.isUpdateAllowed }")) { (t: AvlTree) => t.isUpdateAllowed } val removeAllowed = checkEq(func[AvlTree, Boolean]("{ (t: AvlTree) => t.isRemoveAllowed }")) { (t: AvlTree) => t.isRemoveAllowed } - val tree = sampleAvlTree - - doDigest(tree) - doEnabledOps(tree) - doKeyLength(tree) - doValueLength(tree) - insertAllowed(tree) - updateAllowed(tree) - removeAllowed(tree) + val newTree = sampleAvlTree.updateOperations(1.toByte) + val trees = Array(sampleAvlTree, newTree) + + for (tree <- trees) { + doDigest(tree) + doEnabledOps(tree) + doKeyLength(tree) + doValueLength(tree) + insertAllowed(tree) + updateAllowed(tree) + removeAllowed(tree) + } } property("AvlTree.{contains, get, getMany} equivalence") { @@ -275,6 +362,17 @@ class SigmaDslTest extends PropSpec forAll { x: Long => eq(x) } } + property("byteArrayToBigInt equivalence") { + val eq = checkEq(func[Coll[Byte], BigInt]("{ (x: Coll[Byte]) => byteArrayToBigInt(x) }")){ x => + byteArrayToBigInt(x) + } + forAll { x: Array[Byte] => + whenever(x.length <= ErgoConstants.MaxBigIntSizeInBytes.get) { + eq(Builder.DefaultCollBuilder.fromArray(x)) + } + } + } + property("byteArrayToLong equivalence") { val eq = checkEq(func[Coll[Byte],Long]("{ (x: Coll[Byte]) => byteArrayToLong(x) }")){ x => byteArrayToLong(x) @@ -286,78 +384,15 @@ class SigmaDslTest extends PropSpec } } - // TODO: related to 
https://github.com/ScorexFoundation/sigmastate-interpreter/issues/427 + // TODO soft-fork: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/427 // TODO costing: expression t._1(t._2) cannot be costed because t is lambda argument - ignore("Func context variable") { -// val doApply = checkEq(func[(Int => Int, Int), Int]("{ (t: (Int => Int, Int)) => t._1(t._2) }")) { (t: (Int => Int, Int)) => t._1(t._2) } -// val code = compileWithCosting(emptyEnv, s"{ (x: Int) => x + 1 }") -// val ctx = ErgoLikeContext.dummy(fakeSelf) -// doApply((CFunc[Int, Int](ctx, code), 10)) - } - - val tokenId1: Digest32 = Blake2b256("id1") - val tokenId2: Digest32 = Blake2b256("id2") - val inBox = createBox(10, TrivialProp.TrueProp, - Seq(tokenId1 -> 10L, tokenId2 -> 20L), - Map(ErgoBox.R4 -> IntConstant(100), ErgoBox.R5 -> BooleanConstant(true))) - - val dataBox = createBox(1000, TrivialProp.TrueProp, - Seq(tokenId1 -> 10L, tokenId2 -> 20L), - Map(ErgoBox.R4 -> IntConstant(100), ErgoBox.R5 -> BooleanConstant(true))) - - val outBox = createBox(10, TrivialProp.TrueProp, - Seq(tokenId1 -> 10L, tokenId2 -> 20L), - Map(ErgoBox.R4 -> IntConstant(100), ErgoBox.R5 -> BooleanConstant(true))) - - val header1: Header = CHeader(Blake2b256("Header.id").toColl, - 0, - Blake2b256("Header.parentId").toColl, - Blake2b256("ADProofsRoot").toColl, - sampleAvlTree, - Blake2b256("transactionsRoot").toColl, - timestamp = 0, - nBits = 0, - height = 0, - extensionRoot = Blake2b256("transactionsRoot").toColl, - minerPk = SigmaDsl.groupGenerator, - powOnetimePk = SigmaDsl.groupGenerator, - powNonce = Colls.fromArray(Array[Byte](0, 1, 2, 3, 4, 5, 6, 7)), - powDistance = SigmaDsl.BigInt(BigInt("1405498250268750867257727119510201256371618473728619086008183115260323").bigInteger), - votes = Colls.fromArray(Array[Byte](0, 1, 2)) - ) - val header2: Header = CHeader(Blake2b256("Header2.id").toColl, - 0, - header1.id, - Blake2b256("ADProofsRoot2").toColl, - sampleAvlTree, - 
Blake2b256("transactionsRoot2").toColl, - timestamp = 2, - nBits = 0, - height = 1, - extensionRoot = Blake2b256("transactionsRoot2").toColl, - minerPk = SigmaDsl.groupGenerator, - powOnetimePk = SigmaDsl.groupGenerator, - powNonce = Colls.fromArray(Array.fill(0.toByte)(8)), - powDistance = SigmaDsl.BigInt(BigInt("19306206489815517413186395405558417825367537880571815686937307203793939").bigInteger), - votes = Colls.fromArray(Array[Byte](0, 1, 0)) - ) - val headers = Colls.fromItems(header2, header1) - val preHeader: PreHeader = CPreHeader(0, - header2.id, - timestamp = 3, - nBits = 0, - height = 2, - minerPk = SigmaDsl.groupGenerator, - votes = Colls.emptyColl[Byte] - ) - val ergoCtx = new ErgoLikeContext( - currentHeight = preHeader.height, - lastBlockUtxoRoot = header2.stateRoot.asInstanceOf[CAvlTree].treeData, - preHeader.minerPk.getEncoded.toArray, - boxesToSpend = IndexedSeq(inBox), - spendingTransaction = ErgoLikeTransaction(IndexedSeq(), IndexedSeq(outBox)), - self = inBox, headers = headers, preHeader = preHeader, dataBoxes = IndexedSeq(dataBox), - extension = ContextExtension(Map())) + // ignore("Func context variable") { + // val doApply = checkEq(func[(Int => Int, Int), Int]("{ (t: (Int => Int, Int)) => t._1(t._2) }")) { (t: (Int => Int, Int)) => t._1(t._2) } + // val code = compileWithCosting(emptyEnv, s"{ (x: Int) => x + 1 }") + // val ctx = ErgoLikeContext.dummy(fakeSelf) + // doApply((CFunc[Int, Int](ctx, code), 10)) + // } + lazy val ctx = ergoCtx.toSigmaContext(IR, false) property("Box properties equivalence") { @@ -370,10 +405,35 @@ class SigmaDslTest extends PropSpec eq({ (x: Box) => x.bytesWithoutRef })("{ (x: Box) => x.bytesWithoutRef }") eq({ (x: Box) => x.creationInfo })("{ (x: Box) => x.creationInfo }") eq({ (x: Box) => x.tokens })("{ (x: Box) => x.tokens }") -// TODO -// checkEq(func[Box, Coll[(Coll[Byte], Long)]]("{ (x: Box) => x.registers }"))({ (x: Box) => x.registers })(box) } + property("Advanced Box test") { + val avlProver = new 
BatchAVLProver[Digest32, Blake2b256.type](keyLength = 32, None) + avlProver.generateProof() + + val digest = avlProver.digest + val treeData = SigmaDsl.avlTree(new AvlTreeData(digest, AvlTreeFlags.ReadOnly, 32, None)) + val box = ctx.dataInputs(0) + + val s = ErgoBox(20, TrueProp, 0, Seq(),Map( + ErgoBox.nonMandatoryRegisters(0) -> ByteConstant(1.toByte), + ErgoBox.nonMandatoryRegisters(1) -> ShortConstant(1024.toShort), + ErgoBox.nonMandatoryRegisters(2) -> IntConstant(1024 * 1024), + ErgoBox.nonMandatoryRegisters(3) -> LongConstant(1024.toLong), + ErgoBox.nonMandatoryRegisters(4) -> BigIntConstant(222L), + ErgoBox.nonMandatoryRegisters(5) -> AvlTreeConstant(treeData) + )) + lazy val byteCheck = checkEq(func[Box,Byte]("{ (x: Box) => x.R4[Byte].get }"))((x: Box) => x.R4[Byte].get) + lazy val shortCheck = checkEq(func[Box,Short]("{ (x: Box) => x.R5[Short].get }"))((x: Box) => x.R5[Short].get) + lazy val intCheck = checkEq(func[Box,Int]("{ (x: Box) => x.R6[Int].get }"))((x: Box) => x.R6[Int].get) + lazy val longCheck = checkEq(func[Box,Long]("{ (x: Box) => x.R7[Long].get }"))((x: Box) => x.R7[Long].get) + lazy val BigIntCheck = checkEq(func[Box,BigInt]("{ (x: Box) => x.R8[BigInt].get }"))((x: Box) => x.R8[BigInt].get) + byteCheck(s) + shortCheck(s) + intCheck(s) + longCheck(s) + BigIntCheck(s) + } property("PreHeader properties equivalence") { val h = ctx.preHeader @@ -390,7 +450,7 @@ class SigmaDslTest extends PropSpec property("Header properties equivalence") { val h = ctx.headers(0) val eq = EqualityChecker(h) -// TODO costing for eq({ (x: Header) => x.id })("{ (x: Header) => x.id }") + eq({ (x: Header) => x.id })("{ (x: Header) => x.id }") eq({ (x: Header) => x.version })("{ (x: Header) => x.version }") eq({ (x: Header) => x.parentId })("{ (x: Header) => x.parentId }") eq({ (x: Header) => x.ADProofsRoot})("{ (x: Header) => x.ADProofsRoot}") @@ -419,6 +479,9 @@ class SigmaDslTest extends PropSpec eq({ (x: Context) => x.HEIGHT })("{ (x: Context) => x.HEIGHT }") 
eq({ (x: Context) => x.SELF })("{ (x: Context) => x.SELF }") eq({ (x: Context) => x.INPUTS.map { (b: Box) => b.value } })("{ (x: Context) => x.INPUTS.map { (b: Box) => b.value } }") + eq({ (x: Context) => x.selfBoxIndex })("{ (x: Context) => x.selfBoxIndex }") + eq({ (x: Context) => x.LastBlockUtxoRootHash.isUpdateAllowed })("{ (x: Context) => x.LastBlockUtxoRootHash.isUpdateAllowed }") + eq({ (x: Context) => x.minerPubKey })("{ (x: Context) => x.minerPubKey }") eq({ (x: Context) => x.INPUTS.map { (b: Box) => (b.value, b.value) } })( @@ -441,9 +504,13 @@ class SigmaDslTest extends PropSpec | (pk, value) | } |}""".stripMargin) + } -// TODO -// checkEq(func[Context, Coll[Box]]("{ (x: Context) => INPUTS }"))({ (x: Context) => x.INPUTS })(ctx) + property("getVar equivalence") { + val eq = checkEq(func[Int,Int]("{ (x: Int) => getVar[Int](2).get }", 2.toByte -> IntConstant(10))) { x => + 10 + } + eq(1) } property("xorOf equivalence") { @@ -463,34 +530,287 @@ class SigmaDslTest extends PropSpec property("Negation equivalence") { // TODO make a prefix method - val negByte = checkEq(func[Byte, Byte]("{ (x: Byte) => -x }")) { x => (-x).toByte } + val negByte = checkEq(func[Byte, Byte]("{ (x: Byte) => -x }")) { (x: Byte) => (-x).toByte } forAll { x: Byte => negByte(x) } - val negShort = checkEq(func[Short, Short]("{ (x: Short) => -x }")) { x => (-x).toShort } + val negShort = checkEq(func[Short, Short]("{ (x: Short) => -x }")) { (x: Short) => (-x).toShort } forAll { x: Short => negShort(x) } - val negInt = checkEq(func[Int, Int]("{ (x: Int) => -x }")) { x => -x } + val negInt = checkEq(func[Int, Int]("{ (x: Int) => -x }")) { (x: Int) => -x } forAll { x: Int => negInt(x) } - val negLong = checkEq(func[Long, Long]("{ (x: Long) => -x }")) { x => -x } + val negLong = checkEq(func[Long, Long]("{ (x: Long) => -x }")) { (x: Long) => -x } forAll { x: Long => negLong(x) } } property("special.sigma.BigInt Negation equivalence") { // TODO make negate() into a prefix method - val 
negBigInteger = checkEq(func[BigInt, BigInt]("{ (x: BigInt) => -x }")) { x => x.negate() } + val negBigInteger = checkEq(func[BigInt, BigInt]("{ (x: BigInt) => -x }")) { (x: BigInt) => x.negate() } forAll { x: BigInt => negBigInteger(x) } } - //TODO: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/424 - ignore("BinXor(logical XOR) equivalence") { + property("BinXor(logical XOR) equivalence") { val eq = checkEq(func[(Boolean, Boolean), Boolean]("{ (x: (Boolean, Boolean)) => x._1 ^ x._2 }")) { x => x._1 ^ x._2 } forAll { x: (Boolean, Boolean) => eq(x) } } + property("BinXor(logical XOR) test") { + val eq = checkEq(func[(Int, Boolean), Boolean]("{ (x: (Int, Boolean)) => (x._1 == 0) ^ x._2 }")) { + x => (x._1 == 0) ^ x._2 + } + forAll { x: (Int, Boolean) => eq(x) } + } + // TODO: related to https://github.com/ScorexFoundation/sigmastate-interpreter/issues/416 ignore("Box.getReg equivalence") { - // TODO implement in SigmaDsl (interpreter test passes in BasicOpsSpec.Box.getReg test) // val eq = checkEq(func[Box, Int]("{ (x: Box) => x.getReg[Int](1).get }")) { x => x.getReg(1).get } // forAll { x: Box => eq(x) } } + + property("global functions equivalence") { + val n = SigmaDsl.BigInt(BigInteger.TEN) + val Global = SigmaDsl + + { + val eq = EqualityChecker(1) + eq({ (x: Int) => groupGenerator })("{ (x: Int) => groupGenerator }") + eq({ (x: Int) => Global.groupGenerator })("{ (x: Int) => Global.groupGenerator }") + } + + { + val eq = EqualityChecker(n) + eq({ (n: BigInt) => groupGenerator.exp(n) })("{ (n: BigInt) => groupGenerator.exp(n) }") + } + + { + val eq = checkEq(func[(Coll[Byte], Coll[Byte]), Coll[Byte]]( + "{ (x: (Coll[Byte], Coll[Byte])) => xor(x._1, x._2) }")) + { x => Global.xor(x._1, x._2) } + forAll(bytesGen, bytesGen) { (l, r) => + eq(Builder.DefaultCollBuilder.fromArray(l), Builder.DefaultCollBuilder.fromArray(r)) + } + } + } + + property("Coll methods equivalence") { + val coll = ctx.OUTPUTS + val eq = EqualityChecker(coll) + 
eq({ (x: Coll[Box]) => x.filter({ (b: Box) => b.value > 1 }) })("{ (x: Coll[Box]) => x.filter({(b: Box) => b.value > 1 }) }") + eq({ (x: Coll[Box]) => x.flatMap({ (b: Box) => b.propositionBytes }) })("{ (x: Coll[Box]) => x.flatMap({(b: Box) => b.propositionBytes }) }") + eq({ (x: Coll[Box]) => x.zip(x) })("{ (x: Coll[Box]) => x.zip(x) }") + eq({ (x: Coll[Box]) => x.size })("{ (x: Coll[Box]) => x.size }") + eq({ (x: Coll[Box]) => x.indices })("{ (x: Coll[Box]) => x.indices }") + eq({ (x: Coll[Box]) => x.forall({ (b: Box) => b.value > 1 }) })("{ (x: Coll[Box]) => x.forall({(b: Box) => b.value > 1 }) }") + eq({ (x: Coll[Box]) => x.exists({ (b: Box) => b.value > 1 }) })("{ (x: Coll[Box]) => x.exists({(b: Box) => b.value > 1 }) }") + } + + property("Coll size method equivalnce") { + val eq = checkEq(func[Coll[Int],Int]("{ (x: Coll[Int]) => x.size }")){ x => + x.size + } + forAll { x: Array[Int] => + eq(Builder.DefaultCollBuilder.fromArray(x)) + } + } + + property("Coll patch method equivalnce") { + val eq = checkEq(func[(Coll[Int], (Int, Int)),Coll[Int]]("{ (x: (Coll[Int], (Int, Int))) => x._1.patch(x._2._1, x._1, x._2._2) }")){ x => + x._1.patch(x._2._1, x._1, x._2._2) + } + forAll { x: Array[Int] => + whenever (x.size > 1) { + eq(Builder.DefaultCollBuilder.fromArray(x), makeSlicePair(x.size)) + } + } + } + + property("Coll updated method equivalnce") { + val eq = checkEq(func[(Coll[Int], (Int, Int)),Coll[Int]]("{ (x: (Coll[Int], (Int, Int))) => x._1.updated(x._2._1, x._2._2) }")){ x => + x._1.updated(x._2._1, x._2._2) + } + forAll { x: (Array[Int], Int) => + val size = x._1.size + whenever (size > 1) { + val index = getRandomIndex(size) + eq(Builder.DefaultCollBuilder.fromArray(x._1), (index, x._2)) + } + } + } + + property("Coll updateMany method equivalnce") { + val eq = checkEq(func[(Coll[Int], (Coll[Int], Coll[Int])),Coll[Int]]("{ (x: (Coll[Int], (Coll[Int], Coll[Int]))) => x._1.updateMany(x._2._1, x._2._2) }")){ x => + x._1.updateMany(x._2._1, x._2._2) + } + 
forAll { x: (Array[Int], Int) => + val size = x._1.size + whenever (size > 1) { + val fromColl = Builder.DefaultCollBuilder.fromArray(x._1) + val indexColl = generateIndexColl(size) + eq(fromColl, (indexColl, fromColl.reverse.slice(0, indexColl.size))) + } + } + } + + // TODO soft-fork: https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 + ignore("Coll find method equivalnce") { + val eq = checkEq(func[Coll[Int],Option[Int]]("{ (x: Coll[Int]) => x.find({(v: Int) => v > 0})}")){ x => + x.find(v => v > 0) + } + forAll { x: Array[Int] => + eq(Builder.DefaultCollBuilder.fromArray(x)) + } + } + + // https://github.com/ScorexFoundation/sigmastate-interpreter/issues/418 + ignore("Coll bitwise methods equivalnce") { + val eq = checkEq(func[Coll[Boolean],Coll[Boolean]]("{ (x: Coll[Boolean]) => x >> 2 }")){ x => + if (x.size > 2) x.slice(0, x.size - 2) else Colls.emptyColl + } + forAll { x: Array[Boolean] => + eq(Builder.DefaultCollBuilder.fromArray(x)) + } + } + + // TODO soft-fork: https://github.com/ScorexFoundation/sigmastate-interpreter/issues/479 + ignore("Coll diff methods equivalnce") { + val eq = checkEq(func[Coll[Int],Coll[Int]]("{ (x: Coll[Int]) => x.diff(x) }")){ x => + x.diff(x) + } + forAll { x: Array[Int] => + eq(Builder.DefaultCollBuilder.fromArray(x)) + } + } + + property("Coll fold method equivalnce") { + val monoid = Builder.DefaultCollBuilder.Monoids.intPlusMonoid + val eq = checkEq(func[(Coll[Int], Int),Int]("{ (x: (Coll[Int], Int)) => x._1.fold(x._2, { (i1: Int, i2: Int) => i1 + i2 }) }")) + { x => + x._1.sum(monoid) + x._2 + } + val eqIndexOf = checkEq(func[(Coll[Int], Int),Int]("{ (x: (Coll[Int], Int)) => x._1.indexOf(x._2, 0) }")) + { x => + x._1.indexOf(x._2, 0) + } + forAll { x: (Array[Int], Int) => + eq(Builder.DefaultCollBuilder.fromArray(x._1), x._2) + eqIndexOf(Builder.DefaultCollBuilder.fromArray(x._1), x._2) + } + } + + property("Coll indexOf method equivalnce") { + val eqIndexOf = checkEq(func[(Coll[Int], (Int, 
Int)),Int]("{ (x: (Coll[Int], (Int, Int))) => x._1.indexOf(x._2._1, x._2._2) }")) + { x => + x._1.indexOf(x._2._1, x._2._2) + } + forAll { x: (Array[Int], Int) => + eqIndexOf(Builder.DefaultCollBuilder.fromArray(x._1), (getRandomIndex(x._1.size), x._2)) + } + } + + property("Coll apply method equivalnce") { + val eqApply = checkEq(func[(Coll[Int], Int),Int]("{ (x: (Coll[Int], Int)) => x._1(x._2) }")) + { x => + x._1(x._2) + } + forAll { x: Array[Int] => + whenever (0 < x.size) { + eqApply(Builder.DefaultCollBuilder.fromArray(x), getRandomIndex(x.size)) + } + } + } + + property("Coll getOrElse method equivalnce") { + val eqGetOrElse = checkEq(func[(Coll[Int], (Int, Int)),Int]("{ (x: (Coll[Int], (Int, Int))) => x._1.getOrElse(x._2._1, x._2._2) }")) + { x => + x._1.getOrElse(x._2._1, x._2._2) + } + forAll { x: (Array[Int], (Int, Int)) => + eqGetOrElse(Builder.DefaultCollBuilder.fromArray(x._1), x._2) + } + } + + property("Tuple size method equivalence") { + val eq = checkEq(func[(Int, Int),Int]("{ (x: (Int, Int)) => x.size }")) { x => 2 } + eq((-1, 1)) + } + + property("Tuple apply method equivalence") { + val eq1 = checkEq(func[(Int, Int),Int]("{ (x: (Int, Int)) => x(0) }")) { x => -1 } + val eq2 = checkEq(func[(Int, Int),Int]("{ (x: (Int, Int)) => x(1) }")) { x => 1 } + eq1((-1, 1)) + eq2((-1, 1)) + } + + property("Coll map method equivalnce") { + val eq = checkEq(func[Coll[Int],Coll[Int]]("{ (x: Coll[Int]) => x.map({ (v: Int) => v + 1 }) }")) + { x => + x.map(v => v + 1) + } + forAll { x: Array[Int] => + eq(Builder.DefaultCollBuilder.fromArray(x)) + } + } + + property("Coll slice method equivalnce") { + val eq = checkEq(func[(Coll[Int], (Int, Int)),Coll[Int]]("{ (x: (Coll[Int], (Int, Int))) => x._1.slice(x._2._1, x._2._2) }")) + { x => + x._1.slice(x._2._1, x._2._2) + } + forAll { x: Array[Int] => + val size = x.size + whenever (size > 0) { + eq(Builder.DefaultCollBuilder.fromArray(x), makeSlicePair(size)) + } + } + val arr = Array[Int](1, 2, 3, 4, 5) + 
eq(Builder.DefaultCollBuilder.fromArray(arr), (0, 2)) + } + + property("Coll append method equivalence") { + val eq = checkEq(func[(Coll[Int], (Int, Int)),Coll[Int]]( + """{ (x: (Coll[Int], (Int, Int))) => + |val sliced: Coll[Int] = x._1.slice(x._2._1, x._2._2) + |val toAppend: Coll[Int] = x._1 + |sliced.append(toAppend) + |}""".stripMargin)) + { x => + val sliced: Coll[Int] = x._1.slice(x._2._1, x._2._2) + val toAppend: Coll[Int] = x._1 + sliced.append(toAppend) + } + forAll { x: Array[Int] => + val size = x.size + whenever (size > 0) { + eq(Builder.DefaultCollBuilder.fromArray(x), makeSlicePair(size)) + } + } + } + + property("Option methods equivalence") { + val opt: Option[Long] = ctx.dataInputs(0).R0[Long] + val eq = EqualityChecker(opt) + eq({ (x: Option[Long]) => x.get })("{ (x: Option[Long]) => x.get }") + // TODO implement Option.isEmpty + // eq({ (x: Option[Long]) => x.isEmpty })("{ (x: Option[Long]) => x.isEmpty }") + eq({ (x: Option[Long]) => x.isDefined })("{ (x: Option[Long]) => x.isDefined }") + eq({ (x: Option[Long]) => x.getOrElse(1L) })("{ (x: Option[Long]) => x.getOrElse(1L) }") + eq({ (x: Option[Long]) => x.filter({ (v: Long) => v == 1} ) })("{ (x: Option[Long]) => x.filter({ (v: Long) => v == 1 }) }") + eq({ (x: Option[Long]) => x.map( (v: Long) => v + 1 ) })("{ (x: Option[Long]) => x.map({ (v: Long) => v + 1 }) }") + } + + // TODO implement Option.fold + ignore("Option fold method") { + val opt: Option[Long] = ctx.dataInputs(0).R0[Long] + val eq = EqualityChecker(opt) + eq({ (x: Option[Long]) => x.fold(5.toLong)( (v: Long) => v + 1 ) })("{ (x: Option[Long]) => x.fold(5, { (v: Long) => v + 1 }) }") + } + + property("Option fold workaround method") { + val opt: Option[Long] = ctx.dataInputs(0).R0[Long] + val eq = EqualityChecker(opt) + eq({ (x: Option[Long]) => x.fold(5.toLong)( (v: Long) => v + 1 ) })( + """{(x: Option[Long]) => + | def f(opt: Long): Long = opt + 1 + | if (x.isDefined) f(x.get) else 5L + |}""".stripMargin) + } } diff --git 
a/src/test/scala/special/sigma/SigmaExamplesTests.scala b/src/test/scala/special/sigma/SigmaExamplesTests.scala index 8e344e7730..e69de29bb2 100644 --- a/src/test/scala/special/sigma/SigmaExamplesTests.scala +++ b/src/test/scala/special/sigma/SigmaExamplesTests.scala @@ -1,110 +0,0 @@ -package special.sigma - -import org.scalatest.FunSuite -import special.sigma.Extensions._ - -class SigmaExamplesTests extends FunSuite with ContractsTestkit { - - val backer = MockProveDlog(true, noBytes) - val project = MockProveDlog(true, noBytes) - val selfId = collection[Byte](0, 1) - val outId = collection[Byte](0, 2) - - test("crowd funding") { - val timeout = 100 - val minToRaise = 1000 - val contract = new CrowdFundingContract(timeout, minToRaise, backer, project) - val bytes = Colls.fromArray(Array[Byte]()) - val self = new TestBox(selfId, 10, noBytes, noBytes, noBytes, noRegisters) - - { // when backer can open - val ctxForBacker = testContext(noInputs, noOutputs, height = 200, self, emptyAvlTree, dummyPubkey, Array()) - val ok = contract.canOpen(ctxForBacker) - assert(ok) - } - - { // then project can open - val out = new TestBox(outId, minToRaise, noBytes, noBytes, project.propBytes, noRegisters) - val ctxForProject = testContext(Array(), Array(out), height = 50, self, emptyAvlTree, dummyPubkey, Array()) - val ok = contract.canOpen(ctxForProject) - assert(ok) - } - } - - test("demurrage") { - val demurragePeriod = 100 - val demurrageCost = 2 - val userProof = new MockProveDlog(isValid = true, noBytes) - val contract = new DemurrageCurrencyContract(demurragePeriod, demurrageCost, userProof) - - val prop = Colls.fromArray(Array[Byte](1, 2)) - val outHeight = 100 - val outValue = 10L - val curHeight = outHeight + demurragePeriod - val out = new TestBox(outId, outValue, noBytes, noBytes, prop, regs(Map(R4 -> toAnyValue(curHeight)))) - - { //case 1: demurrage time hasn't come yet - val ctxForProject = testContext( - inputs = Array(), - outputs = Array(out), - height = 
outHeight + demurragePeriod - 1, - self = new TestBox( - selfId, outValue, - noBytes, noBytes, - prop, - regs(Map(R4 -> toAnyValue(outHeight)))), - emptyAvlTree, - dummyPubkey, - vars = Array() - ) - userProof.isValid = true - val userCan = contract.canOpen(ctxForProject) - assert(userCan) - - userProof.isValid = false - val minerCan = contract.canOpen(ctxForProject) - assert(!minerCan) - } - - { //case 2: demurrage time has come (user can spend all the money) - val ctxForProject = testContext( - inputs = Array(), - outputs = Array(out), - height = outHeight + demurragePeriod, - self = new TestBox( - selfId, outValue, - noBytes, noBytes, - prop, - regs(Map(R4 -> toAnyValue(outHeight)))), - emptyAvlTree, - dummyPubkey, - vars = Array() - ) - userProof.isValid = true - val userCan = contract.canOpen(ctxForProject) - assert(userCan) - } - - { //case 3: demurrage time has come (miner can spend "demurrageCost" tokens) - val minerOut = new TestBox(outId, outValue - demurrageCost, - noBytes, noBytes, - prop, regs(Map(R4 -> toAnyValue(curHeight)))) - val ctxForMiner = testContext( - inputs = Array(), - outputs = Array(minerOut), - height = outHeight + demurragePeriod, - self = new TestBox( - selfId, outValue, - noBytes, noBytes, - prop, - regs(Map(R4 -> toAnyValue(outHeight)))), - emptyAvlTree, - dummyPubkey, - vars = Array() - ) - userProof.isValid = false - val minerCan = contract.canOpen(ctxForMiner) - assert(minerCan) - } - } -} diff --git a/src/test/scala/special/sigma/SigmaTestingData.scala b/src/test/scala/special/sigma/SigmaTestingData.scala new file mode 100644 index 0000000000..2a86a83779 --- /dev/null +++ b/src/test/scala/special/sigma/SigmaTestingData.scala @@ -0,0 +1,99 @@ +package special.sigma + +import sigmastate.interpreter.ContextExtension +import org.scalacheck.Gen.containerOfN +import sigmastate.{AvlTreeFlags, TrivialProp} +import sigmastate.Values.{BooleanConstant, IntConstant} +import org.scalacheck.{Arbitrary, Gen} +import 
sigmastate.helpers.SigmaTestingCommons +import sigmastate.eval._ +import sigmastate.eval.Extensions._ +import org.ergoplatform.{ErgoLikeContext, ErgoLikeTransaction, ErgoBox} +import scorex.crypto.hash.{Digest32, Blake2b256} +import scorex.crypto.authds.{ADKey, ADValue} + +trait SigmaTestingData extends SigmaTestingCommons with SigmaTypeGens { + val bytesGen: Gen[Array[Byte]] = containerOfN[Array, Byte](100, Arbitrary.arbByte.arbitrary) + val bytesCollGen = bytesGen.map(Colls.fromArray(_)) + implicit val arbBytes = Arbitrary(bytesCollGen) + val keyCollGen = bytesCollGen.map(_.slice(0, 32)) + import org.ergoplatform.dsl.AvlTreeHelpers._ + + protected def sampleAvlProver = { + val key = keyCollGen.sample.get + val value = bytesCollGen.sample.get + val (_, avlProver) = createAvlTree(AvlTreeFlags.AllOperationsAllowed, ADKey @@ key.toArray -> ADValue @@ value.toArray) + (key, value, avlProver) + } + + protected def sampleAvlTree: AvlTree = { + val (key, _, avlProver) = sampleAvlProver + val digest = avlProver.digest.toColl + val tree = SigmaDsl.avlTree(AvlTreeFlags.ReadOnly.serializeToByte, digest, 32, None) + tree + } + + val tokenId1: Digest32 = Blake2b256("id1") + val tokenId2: Digest32 = Blake2b256("id2") + val inBox = createBox(10, TrivialProp.TrueProp, + Seq(tokenId1 -> 10L, tokenId2 -> 20L), + Map(ErgoBox.R4 -> IntConstant(100), ErgoBox.R5 -> BooleanConstant(true))) + + val dataBox = createBox(1000, TrivialProp.TrueProp, + Seq(tokenId1 -> 10L, tokenId2 -> 20L), + Map(ErgoBox.R4 -> IntConstant(100), ErgoBox.R5 -> BooleanConstant(true))) + + val outBox = createBox(10, TrivialProp.TrueProp, + Seq(tokenId1 -> 10L, tokenId2 -> 20L), + Map(ErgoBox.R4 -> IntConstant(100), ErgoBox.R5 -> BooleanConstant(true))) + + val header1: Header = CHeader(Blake2b256("Header.id").toColl, + 0, + Blake2b256("Header.parentId").toColl, + Blake2b256("ADProofsRoot").toColl, + sampleAvlTree, + Blake2b256("transactionsRoot").toColl, + timestamp = 0, + nBits = 0, + height = 0, + extensionRoot 
= Blake2b256("transactionsRoot").toColl, + minerPk = SigmaDsl.groupGenerator, + powOnetimePk = SigmaDsl.groupGenerator, + powNonce = Colls.fromArray(Array[Byte](0, 1, 2, 3, 4, 5, 6, 7)), + powDistance = SigmaDsl.BigInt(BigInt("1405498250268750867257727119510201256371618473728619086008183115260323").bigInteger), + votes = Colls.fromArray(Array[Byte](0, 1, 2)) + ) + val header2: Header = CHeader(Blake2b256("Header2.id").toColl, + 0, + header1.id, + Blake2b256("ADProofsRoot2").toColl, + sampleAvlTree, + Blake2b256("transactionsRoot2").toColl, + timestamp = 2, + nBits = 0, + height = 1, + extensionRoot = Blake2b256("transactionsRoot2").toColl, + minerPk = SigmaDsl.groupGenerator, + powOnetimePk = SigmaDsl.groupGenerator, + powNonce = Colls.fromArray(Array.fill(0.toByte)(8)), + powDistance = SigmaDsl.BigInt(BigInt("19306206489815517413186395405558417825367537880571815686937307203793939").bigInteger), + votes = Colls.fromArray(Array[Byte](0, 1, 0)) + ) + val headers = Colls.fromItems(header2, header1) + val preHeader: PreHeader = CPreHeader(0, + header2.id, + timestamp = 3, + nBits = 0, + height = 2, + minerPk = SigmaDsl.groupGenerator, + votes = Colls.emptyColl[Byte] + ) + val ergoCtx = new ErgoLikeContext( + currentHeight = preHeader.height, + lastBlockUtxoRoot = header2.stateRoot.asInstanceOf[CAvlTree].treeData, + preHeader.minerPk.getEncoded.toArray, + boxesToSpend = IndexedSeq(inBox), + spendingTransaction = ErgoLikeTransaction(IndexedSeq(), IndexedSeq(outBox)), + self = inBox, headers = headers, preHeader = preHeader, dataBoxes = IndexedSeq(dataBox), + extension = ContextExtension(Map())) +} diff --git a/src/test/scala/special/sigma/SigmaTypeGens.scala b/src/test/scala/special/sigma/SigmaTypeGens.scala index fbdd4955b9..32abc3a2c2 100644 --- a/src/test/scala/special/sigma/SigmaTypeGens.scala +++ b/src/test/scala/special/sigma/SigmaTypeGens.scala @@ -1,11 +1,9 @@ package special.sigma import org.scalacheck.{Arbitrary, Gen} -import sigmastate.eval.CBigInt import 
sigmastate.serialization.generators.ValueGenerators trait SigmaTypeGens extends ValueGenerators { - import Gen._; import Arbitrary._ import sigma.types._ val genBoolean = Arbitrary.arbBool.arbitrary.map(CBoolean(_): Boolean) implicit val arbBoolean = Arbitrary(genBoolean) @@ -15,8 +13,5 @@ trait SigmaTypeGens extends ValueGenerators { val genInt = Arbitrary.arbInt.arbitrary.map(CInt(_): Int) implicit val arbInt = Arbitrary(genInt) - - val genBigInt = arbBigInteger.arbitrary.map(CBigInt(_): BigInt) - implicit val arbBigInt = Arbitrary(genBigInt) }