From 72a88432dbc52a5730a2c1e14aa0a9c17bc84d76 Mon Sep 17 00:00:00 2001
From: Danny Willems
Date: Mon, 4 Nov 2024 16:07:07 +0100
Subject: [PATCH 1/3] o1vm/riscv32i: define interpreter interface

---
 o1vm/src/interpreters/riscv32i/interpreter.rs | 803 ++++++++++++++++++
 1 file changed, 803 insertions(+)

diff --git a/o1vm/src/interpreters/riscv32i/interpreter.rs b/o1vm/src/interpreters/riscv32i/interpreter.rs
index 833451418f..19a97428dc 100644
--- a/o1vm/src/interpreters/riscv32i/interpreter.rs
+++ b/o1vm/src/interpreters/riscv32i/interpreter.rs
@@ -1,3 +1,6 @@
+use super::registers::{REGISTER_CURRENT_IP, REGISTER_HEAP_POINTER, REGISTER_NEXT_IP};
+use crate::lookups::{Lookup, LookupTableIDs};
+use ark_ff::{One, Zero};
 use strum::{EnumCount, IntoEnumIterator};
 use strum_macros::{EnumCount, EnumIter};
 
@@ -243,3 +246,803 @@ impl std::fmt::Display for UJInstruction {
     }
 }
+
+pub trait InterpreterEnv {
+    /// A position can be seen as an indexed variable
+    type Position;
+
+    /// Allocate a new abstract variable for the current step.
+    /// The variable can be used to store temporary values.
+    /// The variables are "freed" after each step/instruction.
+    /// The variable allocation can be seen as an allocation on a stack that is
+    /// popped after each step execution.
+    /// At the moment, [crate::interpreters::riscv32i::SCRATCH_SIZE]
+    /// elements can be allocated. If more temporary variables are required for
+    /// an instruction, increase the value
+    /// [crate::interpreters::riscv32i::SCRATCH_SIZE].
+    fn alloc_scratch(&mut self) -> Self::Position;
+
+    type Variable: Clone
+        + std::ops::Add<Self::Variable, Output = Self::Variable>
+        + std::ops::Sub<Self::Variable, Output = Self::Variable>
+        + std::ops::Mul<Self::Variable, Output = Self::Variable>
+        + std::fmt::Debug
+        + Zero
+        + One;
+
+    /// Returns the variable in the current row corresponding to a given column alias.
+    fn variable(&self, column: Self::Position) -> Self::Variable;
+
+    /// Add a constraint to the proof system, asserting that
+    /// `assert_equals_zero` is 0.
+    fn add_constraint(&mut self, assert_equals_zero: Self::Variable);
+
+    /// Activate the selector for the given instruction.
+    fn activate_selector(&mut self, selector: Instruction);
+
+    /// Check that the witness value in `assert_equals_zero` is 0; otherwise abort.
+    fn check_is_zero(assert_equals_zero: &Self::Variable);
+
+    /// Assert that the value `assert_equals_zero` is 0, and add a constraint in the proof system.
+    fn assert_is_zero(&mut self, assert_equals_zero: Self::Variable) {
+        Self::check_is_zero(&assert_equals_zero);
+        self.add_constraint(assert_equals_zero);
+    }
+
+    /// Check that the witness values in `x` and `y` are equal; otherwise abort.
+    fn check_equal(x: &Self::Variable, y: &Self::Variable);
+
+    /// Assert that the values `x` and `y` are equal, and add a constraint in the proof system.
+    fn assert_equal(&mut self, x: Self::Variable, y: Self::Variable) {
+        // NB: We use a different function to give a better error message for debugging.
+        Self::check_equal(&x, &y);
+        self.add_constraint(x - y);
+    }
+
+    /// Check that the witness value `x` is a boolean (`0` or `1`); otherwise abort.
+    fn check_boolean(x: &Self::Variable);
+
+    /// Assert that the value `x` is boolean, and add a constraint in the proof system.
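+    /// The emitted constraint is the standard booleanity check
+    /// `x * (x - 1) = 0`, which is satisfied exactly when `x` is `0` or `1`.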
+ fn assert_boolean(&mut self, x: Self::Variable) { + Self::check_boolean(&x); + self.add_constraint(x.clone() * x.clone() - x); + } + + fn add_lookup(&mut self, lookup: Lookup); + + fn instruction_counter(&self) -> Self::Variable; + + fn increase_instruction_counter(&mut self); + + /// Fetch the value of the general purpose register with index `idx` and store it in local + /// position `output`. + /// + /// # Safety + /// + /// No lookups or other constraints are added as part of this operation. The caller must + /// manually add the lookups for this operation. + unsafe fn fetch_register( + &mut self, + idx: &Self::Variable, + output: Self::Position, + ) -> Self::Variable; + + /// Set the general purpose register with index `idx` to `value` if `if_is_true` is true. + /// + /// # Safety + /// + /// No lookups or other constraints are added as part of this operation. The caller must + /// manually add the lookups for this operation. + unsafe fn push_register_if( + &mut self, + idx: &Self::Variable, + value: Self::Variable, + if_is_true: &Self::Variable, + ); + + /// Set the general purpose register with index `idx` to `value`. + /// + /// # Safety + /// + /// No lookups or other constraints are added as part of this operation. The caller must + /// manually add the lookups for this operation. + unsafe fn push_register(&mut self, idx: &Self::Variable, value: Self::Variable) { + self.push_register_if(idx, value, &Self::constant(1)) + } + + /// Fetch the last 'access index' for the general purpose register with index `idx`, and store + /// it in local position `output`. + /// + /// # Safety + /// + /// No lookups or other constraints are added as part of this operation. The caller must + /// manually add the lookups for this operation. + unsafe fn fetch_register_access( + &mut self, + idx: &Self::Variable, + output: Self::Position, + ) -> Self::Variable; + + /// Set the last 'access index' for the general purpose register with index `idx` to `value` if + /// `if_is_true` is true. + /// + /// # Safety + /// + /// No lookups or other constraints are added as part of this operation. The caller must + /// manually add the lookups for this operation. + unsafe fn push_register_access_if( + &mut self, + idx: &Self::Variable, + value: Self::Variable, + if_is_true: &Self::Variable, + ); + + /// Set the last 'access index' for the general purpose register with index `idx` to `value`. + /// + /// # Safety + /// + /// No lookups or other constraints are added as part of this operation. The caller must + /// manually add the lookups for this operation. + unsafe fn push_register_access(&mut self, idx: &Self::Variable, value: Self::Variable) { + self.push_register_access_if(idx, value, &Self::constant(1)) + } + + /// Access the general purpose register with index `idx`, adding constraints asserting that the + /// old value was `old_value` and that the new value will be `new_value`, if `if_is_true` is + /// true. + /// + /// # Safety + /// + /// Callers of this function must manually update the registers if required, this function will + /// only update the access counter. 
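+    ///
+    /// A minimal sketch of the intended call pattern (the helper name
+    /// `bump_register_if` is illustrative, not part of this interface):
+    ///
+    /// ```ignore
+    /// fn bump_register_if<E: InterpreterEnv>(env: &mut E, idx: &E::Variable, flag: &E::Variable) {
+    ///     // Fetch the current value into a scratch column.
+    ///     let old = {
+    ///         let pos = env.alloc_scratch();
+    ///         unsafe { env.fetch_register(idx, pos) }
+    ///     };
+    ///     let new = old.clone() + E::constant(1);
+    ///     // Record the access (lookups + access counter), then write the value.
+    ///     unsafe { env.access_register_if(idx, &old, &new, flag) };
+    ///     unsafe { env.push_register_if(idx, new, flag) };
+    /// }
+    /// ```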
+ unsafe fn access_register_if( + &mut self, + idx: &Self::Variable, + old_value: &Self::Variable, + new_value: &Self::Variable, + if_is_true: &Self::Variable, + ) { + let last_accessed = { + let last_accessed_location = self.alloc_scratch(); + unsafe { self.fetch_register_access(idx, last_accessed_location) } + }; + let instruction_counter = self.instruction_counter(); + let elapsed_time = instruction_counter.clone() - last_accessed.clone(); + let new_accessed = { + // Here, we write as if the register had been written *at the start of the next + // instruction*. This ensures that we can't 'time travel' within this + // instruction, and claim to read the value that we're about to write! + instruction_counter + Self::constant(1) + // A register should allow multiple accesses to the same register within the same instruction. + // In order to allow this, we always increase the instruction counter by 1. + }; + unsafe { self.push_register_access_if(idx, new_accessed.clone(), if_is_true) }; + self.add_lookup(Lookup::write_if( + if_is_true.clone(), + LookupTableIDs::RegisterLookup, + vec![idx.clone(), last_accessed, old_value.clone()], + )); + self.add_lookup(Lookup::read_if( + if_is_true.clone(), + LookupTableIDs::RegisterLookup, + vec![idx.clone(), new_accessed, new_value.clone()], + )); + self.range_check64(&elapsed_time); + + // Update instruction counter after accessing a register. + self.increase_instruction_counter(); + } + + fn read_register(&mut self, idx: &Self::Variable) -> Self::Variable { + let value = { + let value_location = self.alloc_scratch(); + unsafe { self.fetch_register(idx, value_location) } + }; + unsafe { + self.access_register(idx, &value, &value); + }; + value + } + + /// Access the general purpose register with index `idx`, adding constraints asserting that the + /// old value was `old_value` and that the new value will be `new_value`. + /// + /// # Safety + /// + /// Callers of this function must manually update the registers if required, this function will + /// only update the access counter. + unsafe fn access_register( + &mut self, + idx: &Self::Variable, + old_value: &Self::Variable, + new_value: &Self::Variable, + ) { + self.access_register_if(idx, old_value, new_value, &Self::constant(1)) + } + + fn write_register_if( + &mut self, + idx: &Self::Variable, + new_value: Self::Variable, + if_is_true: &Self::Variable, + ) { + let old_value = { + let value_location = self.alloc_scratch(); + unsafe { self.fetch_register(idx, value_location) } + }; + // Ensure that we only write 0 to the 0 register. + let actual_new_value = { + let idx_is_zero = self.is_zero(idx); + let pos = self.alloc_scratch(); + self.copy(&((Self::constant(1) - idx_is_zero) * new_value), pos) + }; + unsafe { + self.access_register_if(idx, &old_value, &actual_new_value, if_is_true); + }; + unsafe { + self.push_register_if(idx, actual_new_value, if_is_true); + }; + } + + fn write_register(&mut self, idx: &Self::Variable, new_value: Self::Variable) { + self.write_register_if(idx, new_value, &Self::constant(1)) + } + + /// Fetch the memory value at address `addr` and store it in local position `output`. + /// + /// # Safety + /// + /// No lookups or other constraints are added as part of this operation. The caller must + /// manually add the lookups for this memory operation. + unsafe fn fetch_memory( + &mut self, + addr: &Self::Variable, + output: Self::Position, + ) -> Self::Variable; + + /// Set the memory value at address `addr` to `value`. 
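+    /// The memory is byte-addressed, so `value` is expected to fit in a single
+    /// byte (the witness implementation in this series stores it as a `u8`).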
+ /// + /// # Safety + /// + /// No lookups or other constraints are added as part of this operation. The caller must + /// manually add the lookups for this memory operation. + unsafe fn push_memory(&mut self, addr: &Self::Variable, value: Self::Variable); + + /// Fetch the last 'access index' that the memory at address `addr` was written at, and store + /// it in local position `output`. + /// + /// # Safety + /// + /// No lookups or other constraints are added as part of this operation. The caller must + /// manually add the lookups for this memory operation. + unsafe fn fetch_memory_access( + &mut self, + addr: &Self::Variable, + output: Self::Position, + ) -> Self::Variable; + + /// Set the last 'access index' for the memory at address `addr` to `value`. + /// + /// # Safety + /// + /// No lookups or other constraints are added as part of this operation. The caller must + /// manually add the lookups for this memory operation. + unsafe fn push_memory_access(&mut self, addr: &Self::Variable, value: Self::Variable); + + /// Access the memory address `addr`, adding constraints asserting that the old value was + /// `old_value` and that the new value will be `new_value`. + /// + /// # Safety + /// + /// Callers of this function must manually update the memory if required, this function will + /// only update the access counter. + unsafe fn access_memory( + &mut self, + addr: &Self::Variable, + old_value: &Self::Variable, + new_value: &Self::Variable, + ) { + let last_accessed = { + let last_accessed_location = self.alloc_scratch(); + unsafe { self.fetch_memory_access(addr, last_accessed_location) } + }; + let instruction_counter = self.instruction_counter(); + let elapsed_time = instruction_counter.clone() - last_accessed.clone(); + let new_accessed = { + // Here, we write as if the memory had been written *at the start of the next + // instruction*. This ensures that we can't 'time travel' within this + // instruction, and claim to read the value that we're about to write! + instruction_counter + Self::constant(1) + }; + unsafe { self.push_memory_access(addr, new_accessed.clone()) }; + self.add_lookup(Lookup::write_one( + LookupTableIDs::MemoryLookup, + vec![addr.clone(), last_accessed, old_value.clone()], + )); + self.add_lookup(Lookup::read_one( + LookupTableIDs::MemoryLookup, + vec![addr.clone(), new_accessed, new_value.clone()], + )); + self.range_check64(&elapsed_time); + + // Update instruction counter after accessing a memory address. + self.increase_instruction_counter(); + } + + fn read_memory(&mut self, addr: &Self::Variable) -> Self::Variable { + let value = { + let value_location = self.alloc_scratch(); + unsafe { self.fetch_memory(addr, value_location) } + }; + unsafe { + self.access_memory(addr, &value, &value); + }; + value + } + + fn write_memory(&mut self, addr: &Self::Variable, new_value: Self::Variable) { + let old_value = { + let value_location = self.alloc_scratch(); + unsafe { self.fetch_memory(addr, value_location) } + }; + unsafe { + self.access_memory(addr, &old_value, &new_value); + }; + unsafe { + self.push_memory(addr, new_value); + }; + } + + /// Adds a lookup to the RangeCheck16Lookup table + fn lookup_16bits(&mut self, value: &Self::Variable) { + self.add_lookup(Lookup::read_one( + LookupTableIDs::RangeCheck16Lookup, + vec![value.clone()], + )); + } + + /// Range checks with 2 lookups to the RangeCheck16Lookup table that a value + /// is at most 2^`bits`-1 (bits <= 16). 
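+    ///
+    /// The idea: given the first lookup, `value < 2^bits` holds iff
+    /// `value + 2^16 - 2^bits` also passes the 16-bit lookup. E.g. for
+    /// `bits = 12`, `value = 3000` gives `3000 + 61440 = 64440 < 2^16`
+    /// (accepted), while `value = 5000` gives `5000 + 61440 = 66440 >= 2^16`
+    /// (rejected by the second lookup).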
+ fn range_check16(&mut self, value: &Self::Variable, bits: u32) { + assert!(bits <= 16); + // 0 <= value < 2^bits + // First, check lowerbound: 0 <= value < 2^16 + self.lookup_16bits(value); + // Second, check upperbound: value + 2^16 - 2^bits < 2^16 + self.lookup_16bits(&(value.clone() + Self::constant(1 << 16) - Self::constant(1 << bits))); + } + + /// Adds a lookup to the ByteLookup table + fn lookup_8bits(&mut self, value: &Self::Variable) { + self.add_lookup(Lookup::read_one( + LookupTableIDs::ByteLookup, + vec![value.clone()], + )); + } + + /// Range checks with 2 lookups to the ByteLookup table that a value + /// is at most 2^`bits`-1 (bits <= 8). + fn range_check8(&mut self, value: &Self::Variable, bits: u32) { + assert!(bits <= 8); + // 0 <= value < 2^bits + // First, check lowerbound: 0 <= value < 2^8 + self.lookup_8bits(value); + // Second, check upperbound: value + 2^8 - 2^bits < 2^8 + self.lookup_8bits(&(value.clone() + Self::constant(1 << 8) - Self::constant(1 << bits))); + } + + /// Adds a lookup to the AtMost4Lookup table + fn lookup_2bits(&mut self, value: &Self::Variable) { + self.add_lookup(Lookup::read_one( + LookupTableIDs::AtMost4Lookup, + vec![value.clone()], + )); + } + + fn range_check64(&mut self, _value: &Self::Variable) { + // TODO + } + + fn set_instruction_pointer(&mut self, ip: Self::Variable) { + let idx = Self::constant(REGISTER_CURRENT_IP as u32); + let new_accessed = self.instruction_counter() + Self::constant(1); + unsafe { + self.push_register_access(&idx, new_accessed.clone()); + } + unsafe { + self.push_register(&idx, ip.clone()); + } + self.add_lookup(Lookup::read_one( + LookupTableIDs::RegisterLookup, + vec![idx, new_accessed, ip], + )); + } + + fn get_instruction_pointer(&mut self) -> Self::Variable { + let idx = Self::constant(REGISTER_CURRENT_IP as u32); + let ip = { + let value_location = self.alloc_scratch(); + unsafe { self.fetch_register(&idx, value_location) } + }; + self.add_lookup(Lookup::write_one( + LookupTableIDs::RegisterLookup, + vec![idx, self.instruction_counter(), ip.clone()], + )); + ip + } + + fn set_next_instruction_pointer(&mut self, ip: Self::Variable) { + let idx = Self::constant(REGISTER_NEXT_IP as u32); + let new_accessed = self.instruction_counter() + Self::constant(1); + unsafe { + self.push_register_access(&idx, new_accessed.clone()); + } + unsafe { + self.push_register(&idx, ip.clone()); + } + self.add_lookup(Lookup::read_one( + LookupTableIDs::RegisterLookup, + vec![idx, new_accessed, ip], + )); + } + + fn get_next_instruction_pointer(&mut self) -> Self::Variable { + let idx = Self::constant(REGISTER_NEXT_IP as u32); + let ip = { + let value_location = self.alloc_scratch(); + unsafe { self.fetch_register(&idx, value_location) } + }; + self.add_lookup(Lookup::write_one( + LookupTableIDs::RegisterLookup, + vec![idx, self.instruction_counter(), ip.clone()], + )); + ip + } + + fn constant(x: u32) -> Self::Variable; + + /// Extract the bits from the variable `x` between `highest_bit` and `lowest_bit`, and store + /// the result in `position`. + /// `lowest_bit` becomes the least-significant bit of the resulting value. + /// + /// # Safety + /// + /// There are no constraints on the returned value; callers must assert the relationship with + /// the source variable `x` and that the returned value fits in `highest_bit - lowest_bit` + /// bits. + /// + /// Do not call this function with highest_bit - lowest_bit >= 32. + // TODO: embed the range check in the function when highest_bit - lowest_bit <= 16? 
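+    // For example, `bitmask(x, 8, 0, pos)` returns `x & 0xFF`, while
+    // `bitmask(x, 32, 16, pos)` returns the upper half-word of `x`.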
+    unsafe fn bitmask(
+        &mut self,
+        x: &Self::Variable,
+        highest_bit: u32,
+        lowest_bit: u32,
+        position: Self::Position,
+    ) -> Self::Variable;
+
+    /// Return the result of shifting `x` left by `by`, storing the result in `position`.
+    ///
+    /// # Safety
+    ///
+    /// There are no constraints on the returned value; callers must assert the relationship with
+    /// the source variable `x` and the shift amount `by`.
+    unsafe fn shift_left(
+        &mut self,
+        x: &Self::Variable,
+        by: &Self::Variable,
+        position: Self::Position,
+    ) -> Self::Variable;
+
+    /// Return the result of shifting `x` right by `by` (logical shift, filling with zero bits),
+    /// storing the result in `position`.
+    ///
+    /// # Safety
+    ///
+    /// There are no constraints on the returned value; callers must assert the relationship with
+    /// the source variable `x` and the shift amount `by`.
+    unsafe fn shift_right(
+        &mut self,
+        x: &Self::Variable,
+        by: &Self::Variable,
+        position: Self::Position,
+    ) -> Self::Variable;
+
+    /// Return the result of shifting `x` right by `by` (arithmetic shift, propagating the sign
+    /// bit), storing the result in `position`.
+    ///
+    /// # Safety
+    ///
+    /// There are no constraints on the returned value; callers must assert the relationship with
+    /// the source variable `x` and the shift amount `by`.
+    unsafe fn shift_right_arithmetic(
+        &mut self,
+        x: &Self::Variable,
+        by: &Self::Variable,
+        position: Self::Position,
+    ) -> Self::Variable;
+
+    /// Returns 1 if `x` is 0, or 0 otherwise, storing the result in `position`.
+    ///
+    /// # Safety
+    ///
+    /// There are no constraints on the returned value; callers must assert the relationship with
+    /// `x`.
+    unsafe fn test_zero(&mut self, x: &Self::Variable, position: Self::Position) -> Self::Variable;
+
+    /// Returns `x^(-1)`, or `0` if `x` is `0`, storing the result in `position`.
+    ///
+    /// # Safety
+    ///
+    /// There are no constraints on the returned value; callers must assert the relationship with
+    /// `x`.
+    ///
+    /// The value returned may be a placeholder; callers should be careful not to depend directly
+    /// on the value stored in the variable.
+    unsafe fn inverse_or_zero(
+        &mut self,
+        x: &Self::Variable,
+        position: Self::Position,
+    ) -> Self::Variable;
+
+    /// Returns 1 if `x` is 0, or 0 otherwise, adding the constraints that establish the result.
+    fn is_zero(&mut self, x: &Self::Variable) -> Self::Variable;
+
+    /// Returns 1 if `x` is equal to `y`, or 0 otherwise.
+    fn equal(&mut self, x: &Self::Variable, y: &Self::Variable) -> Self::Variable;
+
+    /// Returns 1 if `x < y` as unsigned integers, or 0 otherwise, storing the result in
+    /// `position`.
+    ///
+    /// # Safety
+    ///
+    /// There are no constraints on the returned value; callers must assert that the value
+    /// correctly represents the relationship between `x` and `y`.
+    unsafe fn test_less_than(
+        &mut self,
+        x: &Self::Variable,
+        y: &Self::Variable,
+        position: Self::Position,
+    ) -> Self::Variable;
+
+    /// Returns 1 if `x < y` as signed integers, or 0 otherwise, storing the result in `position`.
+    ///
+    /// # Safety
+    ///
+    /// There are no constraints on the returned value; callers must assert that the value
+    /// correctly represents the relationship between `x` and `y`.
+    unsafe fn test_less_than_signed(
+        &mut self,
+        x: &Self::Variable,
+        y: &Self::Variable,
+        position: Self::Position,
+    ) -> Self::Variable;
+
+    /// Returns `x and y`, storing the result in `position`.
+    ///
+    /// # Safety
+    ///
+    /// There are no constraints on the returned value; callers must manually add constraints to
+    /// ensure that it is correctly constructed.
+    unsafe fn and_witness(
+        &mut self,
+        x: &Self::Variable,
+        y: &Self::Variable,
+        position: Self::Position,
+    ) -> Self::Variable;
+
+    /// Returns `x or y`, storing the result in `position`.
+    ///
+    /// # Safety
+    ///
+    /// There are no constraints on the returned value; callers must manually add constraints to
+    /// ensure that it is correctly constructed.
+    unsafe fn or_witness(
+        &mut self,
+        x: &Self::Variable,
+        y: &Self::Variable,
+        position: Self::Position,
+    ) -> Self::Variable;
+
+    /// Returns `x nor y`, storing the result in `position`.
+    ///
+    /// # Safety
+    ///
+    /// There are no constraints on the returned value; callers must manually add constraints to
+    /// ensure that it is correctly constructed.
+    unsafe fn nor_witness(
+        &mut self,
+        x: &Self::Variable,
+        y: &Self::Variable,
+        position: Self::Position,
+    ) -> Self::Variable;
+
+    /// Returns `x xor y`, storing the result in `position`.
+    ///
+    /// # Safety
+    ///
+    /// There are no constraints on the returned value; callers must manually add constraints to
+    /// ensure that it is correctly constructed.
+    unsafe fn xor_witness(
+        &mut self,
+        x: &Self::Variable,
+        y: &Self::Variable,
+        position: Self::Position,
+    ) -> Self::Variable;
+
+    /// Returns `x + y` and the overflow bit, storing the results in `out_position` and
+    /// `overflow_position` respectively.
+    ///
+    /// # Safety
+    ///
+    /// There are no constraints on the returned values; callers must manually add constraints to
+    /// ensure that they are correctly constructed.
+    unsafe fn add_witness(
+        &mut self,
+        y: &Self::Variable,
+        x: &Self::Variable,
+        out_position: Self::Position,
+        overflow_position: Self::Position,
+    ) -> (Self::Variable, Self::Variable);
+
+    /// Returns `x - y` and the underflow bit, storing the results in `out_position` and
+    /// `underflow_position` respectively.
+    ///
+    /// # Safety
+    ///
+    /// There are no constraints on the returned values; callers must manually add constraints to
+    /// ensure that they are correctly constructed.
+    unsafe fn sub_witness(
+        &mut self,
+        y: &Self::Variable,
+        x: &Self::Variable,
+        out_position: Self::Position,
+        underflow_position: Self::Position,
+    ) -> (Self::Variable, Self::Variable);
+
+    /// Returns `x * y`, where `x` and `y` are treated as signed 32-bit integers, storing the
+    /// result in `position`.
+    ///
+    /// # Safety
+    ///
+    /// There are no constraints on the returned value; callers must manually add constraints to
+    /// ensure that it is correctly constructed.
+    unsafe fn mul_signed_witness(
+        &mut self,
+        x: &Self::Variable,
+        y: &Self::Variable,
+        position: Self::Position,
+    ) -> Self::Variable;
+
+    /// Returns `((x * y) >> 32, (x * y) & ((1 << 32) - 1))`, where the product is formed from
+    /// `x` and `y` interpreted as signed 32-bit integers, storing the results in `position_hi`
+    /// and `position_lo` respectively.
+    ///
+    /// # Safety
+    ///
+    /// There are no constraints on the returned values; callers must manually add constraints to
+    /// ensure that the pair of returned values correspond to the given values `x` and `y`, and
+    /// that they fall within the desired range.
+    unsafe fn mul_hi_lo_signed(
+        &mut self,
+        x: &Self::Variable,
+        y: &Self::Variable,
+        position_hi: Self::Position,
+        position_lo: Self::Position,
+    ) -> (Self::Variable, Self::Variable);
+
+    /// Returns `((x * y) >> 32, (x * y) & ((1 << 32) - 1))`, storing the results in `position_hi`
+    /// and `position_lo` respectively.
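+    /// Here the product is the full 64-bit product of `x` and `y` as unsigned
+    /// 32-bit integers (contrast with `mul_hi_lo_signed` above).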
+ /// + /// # Safety + /// + /// There are no constraints on the returned values; callers must manually add constraints to + /// ensure that the pair of returned values correspond to the given values `x` and `y`, and + /// that they fall within the desired range. + unsafe fn mul_hi_lo( + &mut self, + x: &Self::Variable, + y: &Self::Variable, + position_hi: Self::Position, + position_lo: Self::Position, + ) -> (Self::Variable, Self::Variable); + + /// Returns `(x / y, x % y)`, storing the results in `position_quotient` and + /// `position_remainder` respectively. + /// + /// # Safety + /// + /// There are no constraints on the returned values; callers must manually add constraints to + /// ensure that the pair of returned values correspond to the given values `x` and `y`, and + /// that they fall within the desired range. + unsafe fn divmod_signed( + &mut self, + x: &Self::Variable, + y: &Self::Variable, + position_quotient: Self::Position, + position_remainder: Self::Position, + ) -> (Self::Variable, Self::Variable); + + /// Returns `(x / y, x % y)`, storing the results in `position_quotient` and + /// `position_remainder` respectively. + /// + /// # Safety + /// + /// There are no constraints on the returned values; callers must manually add constraints to + /// ensure that the pair of returned values correspond to the given values `x` and `y`, and + /// that they fall within the desired range. + unsafe fn divmod( + &mut self, + x: &Self::Variable, + y: &Self::Variable, + position_quotient: Self::Position, + position_remainder: Self::Position, + ) -> (Self::Variable, Self::Variable); + + /// Returns the number of leading 0s in `x`, storing the result in `position`. + /// + /// # Safety + /// + /// There are no constraints on the returned value; callers must manually add constraints to + /// ensure that it is correctly constructed. + unsafe fn count_leading_zeros( + &mut self, + x: &Self::Variable, + position: Self::Position, + ) -> Self::Variable; + + /// Returns the number of leading 1s in `x`, storing the result in `position`. + /// + /// # Safety + /// + /// There are no constraints on the returned value; callers must manually add constraints to + /// ensure that it is correctly constructed. + unsafe fn count_leading_ones( + &mut self, + x: &Self::Variable, + position: Self::Position, + ) -> Self::Variable; + + fn copy(&mut self, x: &Self::Variable, position: Self::Position) -> Self::Variable; + + /// Increases the heap pointer by `by_amount` if `if_is_true` is `1`, and returns the previous + /// value of the heap pointer. + fn increase_heap_pointer( + &mut self, + by_amount: &Self::Variable, + if_is_true: &Self::Variable, + ) -> Self::Variable { + let idx = Self::constant(REGISTER_HEAP_POINTER as u32); + let old_ptr = { + let value_location = self.alloc_scratch(); + unsafe { self.fetch_register(&idx, value_location) } + }; + let new_ptr = old_ptr.clone() + by_amount.clone(); + unsafe { + self.access_register_if(&idx, &old_ptr, &new_ptr, if_is_true); + }; + unsafe { + self.push_register_if(&idx, new_ptr, if_is_true); + }; + old_ptr + } + + fn set_halted(&mut self, flag: Self::Variable); + + /// Given a variable `x`, this function extends it to a signed integer of + /// `bitlength` bits. 
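+    /// That is, the lower `bitlength` bits of `x` are reinterpreted as a
+    /// two's-complement value over the full 32 bits. For example, with
+    /// `bitlength = 8`, `x = 0x80` extends to `0xFFFF_FF80`, while `x = 0x7F`
+    /// is returned unchanged.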
+    fn sign_extend(&mut self, x: &Self::Variable, bitlength: u32) -> Self::Variable {
+        // FIXME: Constrain `high_bit`
+        let high_bit = {
+            let pos = self.alloc_scratch();
+            unsafe { self.bitmask(x, bitlength, bitlength - 1, pos) }
+        };
+        high_bit * Self::constant(((1 << (32 - bitlength)) - 1) << bitlength) + x.clone()
+    }
+
+    fn report_exit(&mut self, exit_code: &Self::Variable);
+
+    fn reset(&mut self);
+}
+
+pub fn interpret_instruction<Env: InterpreterEnv>(env: &mut Env, instr: Instruction) {
+    env.activate_selector(instr);
+
+    // TODO: match on instructions
+    unimplemented!("TODO");
+}

From 0b02dda5636515d1474489dbb1de3ed942c143a4 Mon Sep 17 00:00:00 2001
From: Danny Willems
Date: Mon, 4 Nov 2024 16:12:06 +0100
Subject: [PATCH 2/3] o1vm/riscv32i: sketch the witness environment

---
 o1vm/src/interpreters/riscv32i/mod.rs     |   2 +
 o1vm/src/interpreters/riscv32i/witness.rs | 656 ++++++++++++++++++
 2 files changed, 658 insertions(+)
 create mode 100644 o1vm/src/interpreters/riscv32i/witness.rs

diff --git a/o1vm/src/interpreters/riscv32i/mod.rs b/o1vm/src/interpreters/riscv32i/mod.rs
index 2c7abc7ab4..a23ce8e256 100644
--- a/o1vm/src/interpreters/riscv32i/mod.rs
+++ b/o1vm/src/interpreters/riscv32i/mod.rs
@@ -15,3 +15,5 @@ pub mod interpreter;
 
 /// All the registers used by the ISA
 pub mod registers;
+
+pub mod witness;
diff --git a/o1vm/src/interpreters/riscv32i/witness.rs b/o1vm/src/interpreters/riscv32i/witness.rs
new file mode 100644
index 0000000000..3a36c96f65
--- /dev/null
+++ b/o1vm/src/interpreters/riscv32i/witness.rs
@@ -0,0 +1,656 @@
+// TODO: do we want to be more restrictive and refer to the number of accesses
+// to the SAME register/memory address?
+use super::{
+    column::Column,
+    interpreter::{Instruction, InterpreterEnv},
+    registers::Registers,
+    INSTRUCTION_SET_SIZE, SCRATCH_SIZE,
+};
+use crate::{
+    cannon::{PAGE_ADDRESS_MASK, PAGE_ADDRESS_SIZE, PAGE_SIZE},
+    lookups::Lookup,
+};
+use ark_ff::Field;
+use std::array;
+
+/// Maximum number of register accesses per instruction (based on demo)
+// FIXME: can be different
+pub const MAX_NB_REG_ACC: u64 = 7;
+/// Maximum number of memory accesses per instruction (based on demo)
+// FIXME: can be different
+pub const MAX_NB_MEM_ACC: u64 = 12;
+/// Maximum number of memory or register accesses per instruction
+pub const MAX_ACC: u64 = MAX_NB_REG_ACC + MAX_NB_MEM_ACC;
+
+pub const NUM_GLOBAL_LOOKUP_TERMS: usize = 1;
+pub const NUM_DECODING_LOOKUP_TERMS: usize = 2;
+pub const NUM_INSTRUCTION_LOOKUP_TERMS: usize = 5;
+pub const NUM_LOOKUP_TERMS: usize =
+    NUM_GLOBAL_LOOKUP_TERMS + NUM_DECODING_LOOKUP_TERMS + NUM_INSTRUCTION_LOOKUP_TERMS;
+
+/// This structure represents the environment the virtual machine state will use
+/// to transition. This environment will be used by the interpreter. The virtual
+/// machine has access to its internal state and some external memory. In
+/// addition to that, it has access to the environment of the Keccak interpreter
+/// that is used to verify the preimage requested during the execution.
+pub struct Env<Fp> {
+    pub instruction_counter: u64,
+    pub memory: Vec<(u32, Vec<u8>)>,
+    pub last_memory_accesses: [usize; 3],
+    pub memory_write_index: Vec<(u32, Vec<u64>)>,
+    pub last_memory_write_index_accesses: [usize; 3],
+    pub registers: Registers<u32>,
+    pub registers_write_index: Registers<u64>,
+    pub scratch_state_idx: usize,
+    pub scratch_state: [Fp; SCRATCH_SIZE],
+    pub halt: bool,
+    pub selector: usize,
+}
+
+fn fresh_scratch_state<Fp: Field, const N: usize>() -> [Fp; N] {
+    array::from_fn(|_| Fp::zero())
+}
+
+impl<Fp: Field> InterpreterEnv for Env<Fp> {
+    type Position = Column;
+
+    fn alloc_scratch(&mut self) -> Self::Position {
+        let scratch_idx = self.scratch_state_idx;
+        self.scratch_state_idx += 1;
+        Column::ScratchState(scratch_idx)
+    }
+
+    type Variable = u64;
+
+    fn variable(&self, _column: Self::Position) -> Self::Variable {
+        todo!()
+    }
+
+    fn add_constraint(&mut self, _assert_equals_zero: Self::Variable) {
+        // No-op for witness
+        // Do not assert that _assert_equals_zero is zero here!
+        // Some variables may have placeholders that do not faithfully
+        // represent the underlying values.
+    }
+
+    fn activate_selector(&mut self, instruction: Instruction) {
+        self.selector = instruction.into();
+    }
+
+    fn check_is_zero(assert_equals_zero: &Self::Variable) {
+        assert_eq!(*assert_equals_zero, 0);
+    }
+
+    fn check_equal(x: &Self::Variable, y: &Self::Variable) {
+        assert_eq!(*x, *y);
+    }
+
+    fn check_boolean(x: &Self::Variable) {
+        if !(*x == 0 || *x == 1) {
+            panic!("The value {} is not a boolean", *x);
+        }
+    }
+
+    fn add_lookup(&mut self, _lookup: Lookup<Self::Variable>) {
+        // No-op, constraints only
+        // TODO: keep track of multiplicities of fixed tables here as in Keccak?
+    }
+
+    fn instruction_counter(&self) -> Self::Variable {
+        self.instruction_counter
+    }
+
+    fn increase_instruction_counter(&mut self) {
+        self.instruction_counter += 1;
+    }
+
+    unsafe fn fetch_register(
+        &mut self,
+        idx: &Self::Variable,
+        output: Self::Position,
+    ) -> Self::Variable {
+        let res = self.registers[*idx as usize] as u64;
+        self.write_column(output, res);
+        res
+    }
+
+    unsafe fn push_register_if(
+        &mut self,
+        idx: &Self::Variable,
+        value: Self::Variable,
+        if_is_true: &Self::Variable,
+    ) {
+        let value: u32 = value.try_into().unwrap();
+        if *if_is_true == 1 {
+            self.registers[*idx as usize] = value
+        } else if *if_is_true == 0 {
+            // No-op
+        } else {
+            panic!("Bad value for flag in push_register_if: {}", *if_is_true);
+        }
+    }
+
+    unsafe fn fetch_register_access(
+        &mut self,
+        idx: &Self::Variable,
+        output: Self::Position,
+    ) -> Self::Variable {
+        let res = self.registers_write_index[*idx as usize];
+        self.write_column(output, res);
+        res
+    }
+
+    unsafe fn push_register_access_if(
+        &mut self,
+        idx: &Self::Variable,
+        value: Self::Variable,
+        if_is_true: &Self::Variable,
+    ) {
+        if *if_is_true == 1 {
+            self.registers_write_index[*idx as usize] = value
+        } else if *if_is_true == 0 {
+            // No-op
+        } else {
+            panic!("Bad value for flag in push_register_access_if: {}", *if_is_true);
+        }
+    }
+
+    unsafe fn fetch_memory(
+        &mut self,
+        addr: &Self::Variable,
+        output: Self::Position,
+    ) -> Self::Variable {
+        let addr: u32 = (*addr).try_into().unwrap();
+        let page = addr >> PAGE_ADDRESS_SIZE;
+        let page_address = (addr & PAGE_ADDRESS_MASK) as usize;
+        let memory_page_idx = self.get_memory_page_index(page);
+        let value = self.memory[memory_page_idx].1[page_address];
+        self.write_column(output, value.into());
+        value.into()
+    }
+
+    unsafe fn push_memory(&mut self, addr: &Self::Variable, value: Self::Variable) {
+        let addr: u32 = (*addr).try_into().unwrap();
+        let page = addr
>> PAGE_ADDRESS_SIZE; + let page_address = (addr & PAGE_ADDRESS_MASK) as usize; + let memory_page_idx = self.get_memory_page_index(page); + self.memory[memory_page_idx].1[page_address] = + value.try_into().expect("push_memory values fit in a u8"); + } + + unsafe fn fetch_memory_access( + &mut self, + addr: &Self::Variable, + output: Self::Position, + ) -> Self::Variable { + let addr: u32 = (*addr).try_into().unwrap(); + let page = addr >> PAGE_ADDRESS_SIZE; + let page_address = (addr & PAGE_ADDRESS_MASK) as usize; + let memory_write_index_page_idx = self.get_memory_access_page_index(page); + let value = self.memory_write_index[memory_write_index_page_idx].1[page_address]; + self.write_column(output, value); + value + } + + unsafe fn push_memory_access(&mut self, addr: &Self::Variable, value: Self::Variable) { + let addr = *addr as u32; + let page = addr >> PAGE_ADDRESS_SIZE; + let page_address = (addr & PAGE_ADDRESS_MASK) as usize; + let memory_write_index_page_idx = self.get_memory_access_page_index(page); + self.memory_write_index[memory_write_index_page_idx].1[page_address] = value; + } + + fn constant(x: u32) -> Self::Variable { + x as u64 + } + + unsafe fn bitmask( + &mut self, + x: &Self::Variable, + highest_bit: u32, + lowest_bit: u32, + position: Self::Position, + ) -> Self::Variable { + let x: u32 = (*x).try_into().unwrap(); + let res = (x >> lowest_bit) & ((1 << (highest_bit - lowest_bit)) - 1); + let res = res as u64; + self.write_column(position, res); + res + } + + unsafe fn shift_left( + &mut self, + x: &Self::Variable, + by: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + let x: u32 = (*x).try_into().unwrap(); + let by: u32 = (*by).try_into().unwrap(); + let res = x << by; + let res = res as u64; + self.write_column(position, res); + res + } + + unsafe fn shift_right( + &mut self, + x: &Self::Variable, + by: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + let x: u32 = (*x).try_into().unwrap(); + let by: u32 = (*by).try_into().unwrap(); + let res = x >> by; + let res = res as u64; + self.write_column(position, res); + res + } + + unsafe fn shift_right_arithmetic( + &mut self, + x: &Self::Variable, + by: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + let x: u32 = (*x).try_into().unwrap(); + let by: u32 = (*by).try_into().unwrap(); + let res = ((x as i32) >> by) as u32; + let res = res as u64; + self.write_column(position, res); + res + } + + unsafe fn test_zero(&mut self, x: &Self::Variable, position: Self::Position) -> Self::Variable { + let res = if *x == 0 { 1 } else { 0 }; + self.write_column(position, res); + res + } + + unsafe fn inverse_or_zero( + &mut self, + x: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + if *x == 0 { + self.write_column(position, 0); + 0 + } else { + self.write_field_column(position, Fp::from(*x).inverse().unwrap()); + 1 // Placeholder value + } + } + + fn is_zero(&mut self, x: &Self::Variable) -> Self::Variable { + // write the result + let pos = self.alloc_scratch(); + let res = if *x == 0 { 1 } else { 0 }; + self.write_column(pos, res); + // write the non deterministic advice inv_or_zero + let pos = self.alloc_scratch(); + let inv_or_zero = if *x == 0 { + Fp::zero() + } else { + Fp::inverse(&Fp::from(*x)).unwrap() + }; + self.write_field_column(pos, inv_or_zero); + // return the result + res + } + + fn equal(&mut self, x: &Self::Variable, y: &Self::Variable) -> Self::Variable { + // To avoid subtraction overflow in the witness interpreter for u32 + if x 
> y { + self.is_zero(&(*x - *y)) + } else { + self.is_zero(&(*y - *x)) + } + } + + unsafe fn test_less_than( + &mut self, + x: &Self::Variable, + y: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + let x: u32 = (*x).try_into().unwrap(); + let y: u32 = (*y).try_into().unwrap(); + let res = if x < y { 1 } else { 0 }; + let res = res as u64; + self.write_column(position, res); + res + } + + unsafe fn test_less_than_signed( + &mut self, + x: &Self::Variable, + y: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + let x: u32 = (*x).try_into().unwrap(); + let y: u32 = (*y).try_into().unwrap(); + let res = if (x as i32) < (y as i32) { 1 } else { 0 }; + let res = res as u64; + self.write_column(position, res); + res + } + + unsafe fn and_witness( + &mut self, + x: &Self::Variable, + y: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + let x: u32 = (*x).try_into().unwrap(); + let y: u32 = (*y).try_into().unwrap(); + let res = x & y; + let res = res as u64; + self.write_column(position, res); + res + } + + unsafe fn nor_witness( + &mut self, + x: &Self::Variable, + y: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + let x: u32 = (*x).try_into().unwrap(); + let y: u32 = (*y).try_into().unwrap(); + let res = !(x | y); + let res = res as u64; + self.write_column(position, res); + res + } + + unsafe fn or_witness( + &mut self, + x: &Self::Variable, + y: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + let x: u32 = (*x).try_into().unwrap(); + let y: u32 = (*y).try_into().unwrap(); + let res = x | y; + let res = res as u64; + self.write_column(position, res); + res + } + + unsafe fn xor_witness( + &mut self, + x: &Self::Variable, + y: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + let x: u32 = (*x).try_into().unwrap(); + let y: u32 = (*y).try_into().unwrap(); + let res = x ^ y; + let res = res as u64; + self.write_column(position, res); + res + } + + unsafe fn add_witness( + &mut self, + x: &Self::Variable, + y: &Self::Variable, + out_position: Self::Position, + overflow_position: Self::Position, + ) -> (Self::Variable, Self::Variable) { + let x: u32 = (*x).try_into().unwrap(); + let y: u32 = (*y).try_into().unwrap(); + // https://doc.rust-lang.org/std/primitive.u32.html#method.overflowing_add + let res = x.overflowing_add(y); + let (res_, overflow) = (res.0 as u64, res.1 as u64); + self.write_column(out_position, res_); + self.write_column(overflow_position, overflow); + (res_, overflow) + } + + unsafe fn sub_witness( + &mut self, + x: &Self::Variable, + y: &Self::Variable, + out_position: Self::Position, + underflow_position: Self::Position, + ) -> (Self::Variable, Self::Variable) { + let x: u32 = (*x).try_into().unwrap(); + let y: u32 = (*y).try_into().unwrap(); + // https://doc.rust-lang.org/std/primitive.u32.html#method.overflowing_sub + let res = x.overflowing_sub(y); + let (res_, underflow) = (res.0 as u64, res.1 as u64); + self.write_column(out_position, res_); + self.write_column(underflow_position, underflow); + (res_, underflow) + } + + unsafe fn mul_signed_witness( + &mut self, + x: &Self::Variable, + y: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + let x: u32 = (*x).try_into().unwrap(); + let y: u32 = (*y).try_into().unwrap(); + let res = ((x as i32) * (y as i32)) as u32; + let res = res as u64; + self.write_column(position, res); + res + } + + unsafe fn mul_hi_lo_signed( + &mut self, + x: &Self::Variable, + y: &Self::Variable, + 
position_hi: Self::Position, + position_lo: Self::Position, + ) -> (Self::Variable, Self::Variable) { + let x: u32 = (*x).try_into().unwrap(); + let y: u32 = (*y).try_into().unwrap(); + let mul = (((x as i32) as i64) * ((y as i32) as i64)) as u64; + let hi = (mul >> 32) as u32; + let lo = (mul & ((1 << 32) - 1)) as u32; + let hi = hi as u64; + let lo = lo as u64; + self.write_column(position_hi, hi); + self.write_column(position_lo, lo); + (hi, lo) + } + + unsafe fn mul_hi_lo( + &mut self, + x: &Self::Variable, + y: &Self::Variable, + position_hi: Self::Position, + position_lo: Self::Position, + ) -> (Self::Variable, Self::Variable) { + let x: u32 = (*x).try_into().unwrap(); + let y: u32 = (*y).try_into().unwrap(); + let mul = (x as u64) * (y as u64); + let hi = (mul >> 32) as u32; + let lo = (mul & ((1 << 32) - 1)) as u32; + let hi = hi as u64; + let lo = lo as u64; + self.write_column(position_hi, hi); + self.write_column(position_lo, lo); + (hi, lo) + } + + unsafe fn divmod_signed( + &mut self, + x: &Self::Variable, + y: &Self::Variable, + position_quotient: Self::Position, + position_remainder: Self::Position, + ) -> (Self::Variable, Self::Variable) { + let x: u32 = (*x).try_into().unwrap(); + let y: u32 = (*y).try_into().unwrap(); + let q = ((x as i32) / (y as i32)) as u32; + let r = ((x as i32) % (y as i32)) as u32; + let q = q as u64; + let r = r as u64; + self.write_column(position_quotient, q); + self.write_column(position_remainder, r); + (q, r) + } + + unsafe fn divmod( + &mut self, + x: &Self::Variable, + y: &Self::Variable, + position_quotient: Self::Position, + position_remainder: Self::Position, + ) -> (Self::Variable, Self::Variable) { + let x: u32 = (*x).try_into().unwrap(); + let y: u32 = (*y).try_into().unwrap(); + let q = x / y; + let r = x % y; + let q = q as u64; + let r = r as u64; + self.write_column(position_quotient, q); + self.write_column(position_remainder, r); + (q, r) + } + + unsafe fn count_leading_zeros( + &mut self, + x: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + let x: u32 = (*x).try_into().unwrap(); + let res = x.leading_zeros(); + let res = res as u64; + self.write_column(position, res); + res + } + + unsafe fn count_leading_ones( + &mut self, + x: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + let x: u32 = (*x).try_into().unwrap(); + let res = x.leading_ones(); + let res = res as u64; + self.write_column(position, res); + res + } + + fn copy(&mut self, x: &Self::Variable, position: Self::Position) -> Self::Variable { + self.write_column(position, *x); + *x + } + + fn set_halted(&mut self, flag: Self::Variable) { + if flag == 0 { + self.halt = false + } else if flag == 1 { + self.halt = true + } else { + panic!("Bad value for flag in set_halted: {}", flag); + } + } + + fn report_exit(&mut self, exit_code: &Self::Variable) { + println!( + "Exited with code {} at step {}", + *exit_code, + self.normalized_instruction_counter() + ); + } + + fn reset(&mut self) { + self.scratch_state_idx = 0; + self.scratch_state = fresh_scratch_state(); + self.selector = INSTRUCTION_SET_SIZE; + } +} + +impl Env { + pub fn reset_scratch_state(&mut self) { + self.scratch_state_idx = 0; + self.scratch_state = fresh_scratch_state(); + self.selector = INSTRUCTION_SET_SIZE; + } + + pub fn write_column(&mut self, column: Column, value: u64) { + self.write_field_column(column, value.into()) + } + + pub fn write_field_column(&mut self, column: Column, value: Fp) { + match column { + Column::ScratchState(idx) => 
self.scratch_state[idx] = value, + Column::InstructionCounter => panic!("Cannot overwrite the column {:?}", column), + Column::Selector(s) => self.selector = s, + } + } + + pub fn update_last_memory_access(&mut self, i: usize) { + let [i_0, i_1, _] = self.last_memory_accesses; + self.last_memory_accesses = [i, i_0, i_1] + } + + pub fn get_memory_page_index(&mut self, page: u32) -> usize { + for &i in self.last_memory_accesses.iter() { + if self.memory_write_index[i].0 == page { + return i; + } + } + for (i, (page_index, _memory)) in self.memory.iter_mut().enumerate() { + if *page_index == page { + self.update_last_memory_access(i); + return i; + } + } + + // Memory not found; dynamically allocate + let memory = vec![0u8; PAGE_SIZE as usize]; + self.memory.push((page, memory)); + let i = self.memory.len() - 1; + self.update_last_memory_access(i); + i + } + + pub fn update_last_memory_write_index_access(&mut self, i: usize) { + let [i_0, i_1, _] = self.last_memory_write_index_accesses; + self.last_memory_write_index_accesses = [i, i_0, i_1] + } + + pub fn get_memory_access_page_index(&mut self, page: u32) -> usize { + for &i in self.last_memory_write_index_accesses.iter() { + if self.memory_write_index[i].0 == page { + return i; + } + } + for (i, (page_index, _memory_write_index)) in self.memory_write_index.iter_mut().enumerate() + { + if *page_index == page { + self.update_last_memory_write_index_access(i); + return i; + } + } + + // Memory not found; dynamically allocate + let memory_write_index = vec![0u64; PAGE_SIZE as usize]; + self.memory_write_index.push((page, memory_write_index)); + let i = self.memory_write_index.len() - 1; + self.update_last_memory_write_index_access(i); + i + } + + pub fn get_memory_direct(&mut self, addr: u32) -> u8 { + let page = addr >> PAGE_ADDRESS_SIZE; + let page_address = (addr & PAGE_ADDRESS_MASK) as usize; + let memory_idx = self.get_memory_page_index(page); + self.memory[memory_idx].1[page_address] + } + + /// The actual number of instructions executed results from dividing the + /// instruction counter by MAX_ACC (floor). + /// + /// NOTE: actually, in practice it will be less than that, as there is no + /// single instruction that performs all of them. 
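+    ///
+    /// For example, an instruction counter of 190 corresponds to
+    /// `190 / MAX_ACC = 190 / 19 = 10` fully-counted instructions.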
+    pub fn normalized_instruction_counter(&self) -> u64 {
+        self.instruction_counter / MAX_ACC
+    }
+}

From 692acab050ab6499386f7868a6c100829c70568d Mon Sep 17 00:00:00 2001
From: Danny Willems
Date: Mon, 4 Nov 2024 16:16:44 +0100
Subject: [PATCH 3/3] o1vm/riscv32i: implement constraints environment

---
 o1vm/src/interpreters/riscv32i/constraints.rs | 436 ++++++++++++++++++
 o1vm/src/interpreters/riscv32i/mod.rs         |   2 +
 2 files changed, 438 insertions(+)
 create mode 100644 o1vm/src/interpreters/riscv32i/constraints.rs

diff --git a/o1vm/src/interpreters/riscv32i/constraints.rs b/o1vm/src/interpreters/riscv32i/constraints.rs
new file mode 100644
index 0000000000..300af50897
--- /dev/null
+++ b/o1vm/src/interpreters/riscv32i/constraints.rs
@@ -0,0 +1,436 @@
+use super::{
+    column::{Column, E},
+    interpreter::{Instruction, InterpreterEnv},
+    INSTRUCTION_SET_SIZE,
+};
+use crate::{
+    interpreters::riscv32i::{constraints::ConstantTerm::Literal, SCRATCH_SIZE},
+    lookups::Lookup,
+};
+use ark_ff::{Field, One};
+use kimchi::circuits::{
+    expr::{ConstantTerm, Expr, ExprInner, Operations, Variable},
+    gate::CurrOrNext,
+};
+
+pub struct Env<Fp: Field> {
+    pub scratch_state_idx: usize,
+    pub lookups: Vec<Lookup<E<Fp>>>,
+    pub constraints: Vec<E<Fp>>,
+    pub selector: Option<E<Fp>>,
+}
+
+impl<Fp: Field> Default for Env<Fp> {
+    fn default() -> Self {
+        Self {
+            scratch_state_idx: 0,
+            constraints: Vec::new(),
+            lookups: Vec::new(),
+            selector: None,
+        }
+    }
+}
+
+impl<Fp: Field> InterpreterEnv for Env<Fp> {
+    /// In the concrete implementation for the constraints, the interpreter will
+    /// work over columns. The position in this case can be seen as a new
+    /// variable/input of our circuit.
+    type Position = Column;
+
+    // Use one of the available columns. It won't create a new column every time
+    // this function is called. The number of columns is defined upfront by
+    // [crate::interpreters::riscv32i::SCRATCH_SIZE].
+    fn alloc_scratch(&mut self) -> Self::Position {
+        // All columns are implemented using a simple index, and a name is given
+        // to the index. See [crate::interpreters::riscv32i::SCRATCH_SIZE] for
+        // the maximum number of columns the circuit can use.
+        let scratch_idx = self.scratch_state_idx;
+        self.scratch_state_idx += 1;
+        Column::ScratchState(scratch_idx)
+    }
+
+    type Variable = E<Fp>;
+
+    fn variable(&self, column: Self::Position) -> Self::Variable {
+        Expr::Atom(ExprInner::Cell(Variable {
+            col: column,
+            row: CurrOrNext::Curr,
+        }))
+    }
+
+    fn activate_selector(&mut self, selector: Instruction) {
+        // Sanity check: we only want to activate once per instruction
+        assert!(self.selector.is_none(), "A selector has already been activated.
You might need to reset the environment if you want to start a new instruction."); + let n = usize::from(selector) - SCRATCH_SIZE - 1; + self.selector = Some(self.variable(Column::Selector(n))) + } + + fn add_constraint(&mut self, assert_equals_zero: Self::Variable) { + self.constraints.push(assert_equals_zero) + } + + fn check_is_zero(_assert_equals_zero: &Self::Variable) { + // No-op, witness only + } + + fn check_equal(_x: &Self::Variable, _y: &Self::Variable) { + // No-op, witness only + } + + fn check_boolean(_x: &Self::Variable) { + // No-op, witness only + } + + fn add_lookup(&mut self, lookup: Lookup) { + self.lookups.push(lookup); + } + + fn instruction_counter(&self) -> Self::Variable { + self.variable(Column::InstructionCounter) + } + + fn increase_instruction_counter(&mut self) { + // No-op, witness only + } + + unsafe fn fetch_register( + &mut self, + _idx: &Self::Variable, + output: Self::Position, + ) -> Self::Variable { + self.variable(output) + } + + unsafe fn push_register_if( + &mut self, + _idx: &Self::Variable, + _value: Self::Variable, + _if_is_true: &Self::Variable, + ) { + // No-op, witness only + } + + unsafe fn fetch_register_access( + &mut self, + _idx: &Self::Variable, + output: Self::Position, + ) -> Self::Variable { + self.variable(output) + } + + unsafe fn push_register_access_if( + &mut self, + _idx: &Self::Variable, + _value: Self::Variable, + _if_is_true: &Self::Variable, + ) { + // No-op, witness only + } + + unsafe fn fetch_memory( + &mut self, + _addr: &Self::Variable, + output: Self::Position, + ) -> Self::Variable { + self.variable(output) + } + + unsafe fn push_memory(&mut self, _addr: &Self::Variable, _value: Self::Variable) { + // No-op, witness only + } + + unsafe fn fetch_memory_access( + &mut self, + _addr: &Self::Variable, + output: Self::Position, + ) -> Self::Variable { + self.variable(output) + } + + unsafe fn push_memory_access(&mut self, _addr: &Self::Variable, _value: Self::Variable) { + // No-op, witness only + } + + fn constant(x: u32) -> Self::Variable { + Self::Variable::constant(Operations::from(Literal(Fp::from(x)))) + } + + unsafe fn bitmask( + &mut self, + _x: &Self::Variable, + _highest_bit: u32, + _lowest_bit: u32, + position: Self::Position, + ) -> Self::Variable { + self.variable(position) + } + + unsafe fn shift_left( + &mut self, + _x: &Self::Variable, + _by: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + self.variable(position) + } + + unsafe fn shift_right( + &mut self, + _x: &Self::Variable, + _by: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + self.variable(position) + } + + unsafe fn shift_right_arithmetic( + &mut self, + _x: &Self::Variable, + _by: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + self.variable(position) + } + + unsafe fn test_zero( + &mut self, + _x: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + self.variable(position) + } + + fn is_zero(&mut self, x: &Self::Variable) -> Self::Variable { + let res = { + let pos = self.alloc_scratch(); + unsafe { self.test_zero(x, pos) } + }; + let x_inv_or_zero = { + let pos = self.alloc_scratch(); + unsafe { self.inverse_or_zero(x, pos) } + }; + // If x = 0, then res = 1 and x_inv_or_zero = 0 + // If x <> 0, then res = 0 and x_inv_or_zero = x^(-1) + self.add_constraint(x.clone() * x_inv_or_zero.clone() + res.clone() - Self::constant(1)); + self.add_constraint(x.clone() * res.clone()); + res + } + + unsafe fn inverse_or_zero( + &mut self, + _x: &Self::Variable, + position: 
Self::Position, + ) -> Self::Variable { + self.variable(position) + } + + fn equal(&mut self, x: &Self::Variable, y: &Self::Variable) -> Self::Variable { + self.is_zero(&(x.clone() - y.clone())) + } + + unsafe fn test_less_than( + &mut self, + _x: &Self::Variable, + _y: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + self.variable(position) + } + + unsafe fn test_less_than_signed( + &mut self, + _x: &Self::Variable, + _y: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + self.variable(position) + } + + unsafe fn and_witness( + &mut self, + _x: &Self::Variable, + _y: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + self.variable(position) + } + + unsafe fn nor_witness( + &mut self, + _x: &Self::Variable, + _y: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + self.variable(position) + } + + unsafe fn or_witness( + &mut self, + _x: &Self::Variable, + _y: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + self.variable(position) + } + + unsafe fn xor_witness( + &mut self, + _x: &Self::Variable, + _y: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + self.variable(position) + } + + unsafe fn add_witness( + &mut self, + _y: &Self::Variable, + _x: &Self::Variable, + out_position: Self::Position, + overflow_position: Self::Position, + ) -> (Self::Variable, Self::Variable) { + ( + self.variable(out_position), + self.variable(overflow_position), + ) + } + + unsafe fn sub_witness( + &mut self, + _y: &Self::Variable, + _x: &Self::Variable, + out_position: Self::Position, + underflow_position: Self::Position, + ) -> (Self::Variable, Self::Variable) { + ( + self.variable(out_position), + self.variable(underflow_position), + ) + } + + unsafe fn mul_signed_witness( + &mut self, + _x: &Self::Variable, + _y: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + self.variable(position) + } + + unsafe fn mul_hi_lo_signed( + &mut self, + _x: &Self::Variable, + _y: &Self::Variable, + position_hi: Self::Position, + position_lo: Self::Position, + ) -> (Self::Variable, Self::Variable) { + (self.variable(position_hi), self.variable(position_lo)) + } + + unsafe fn mul_hi_lo( + &mut self, + _x: &Self::Variable, + _y: &Self::Variable, + position_hi: Self::Position, + position_lo: Self::Position, + ) -> (Self::Variable, Self::Variable) { + (self.variable(position_hi), self.variable(position_lo)) + } + + unsafe fn divmod_signed( + &mut self, + _x: &Self::Variable, + _y: &Self::Variable, + position_quotient: Self::Position, + position_remainder: Self::Position, + ) -> (Self::Variable, Self::Variable) { + ( + self.variable(position_quotient), + self.variable(position_remainder), + ) + } + + unsafe fn divmod( + &mut self, + _x: &Self::Variable, + _y: &Self::Variable, + position_quotient: Self::Position, + position_remainder: Self::Position, + ) -> (Self::Variable, Self::Variable) { + ( + self.variable(position_quotient), + self.variable(position_remainder), + ) + } + + unsafe fn count_leading_zeros( + &mut self, + _x: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + self.variable(position) + } + + unsafe fn count_leading_ones( + &mut self, + _x: &Self::Variable, + position: Self::Position, + ) -> Self::Variable { + self.variable(position) + } + + fn copy(&mut self, x: &Self::Variable, position: Self::Position) -> Self::Variable { + let res = self.variable(position); + self.constraints.push(x.clone() - res.clone()); + res + } + + fn set_halted(&mut self, 
_flag: Self::Variable) {
+        // TODO
+    }
+
+    fn report_exit(&mut self, _exit_code: &Self::Variable) {}
+
+    fn reset(&mut self) {
+        self.scratch_state_idx = 0;
+        self.constraints.clear();
+        self.lookups.clear();
+        self.selector = None;
+    }
+}
+
+impl<Fp: Field> Env<Fp> {
+    /// Return the constraints for the selector columns. Each selector must be
+    /// boolean, and exactly one selector must be activated per row.
+    pub fn get_selector_constraints(&self) -> Vec<E<Fp>> {
+        let one = <Self as InterpreterEnv>::Variable::one();
+        let mut enforce_bool: Vec<E<Fp>> = (0..INSTRUCTION_SET_SIZE)
+            .map(|i| {
+                let var = self.variable(Column::Selector(i));
+                (var.clone() - one.clone()) * var.clone()
+            })
+            .collect();
+        let enforce_one_activation = (0..INSTRUCTION_SET_SIZE).fold(E::<Fp>::one(), |res, i| {
+            let var = self.variable(Column::Selector(i));
+            res - var.clone()
+        });
+
+        enforce_bool.push(enforce_one_activation);
+        enforce_bool
+    }
+
+    pub fn get_selector(&self) -> E<Fp> {
+        self.selector
+            .clone()
+            .unwrap_or_else(|| panic!("Selector is not set"))
+    }
+
+    /// Return the constraints for the current instruction, without the selector
+    pub fn get_constraints(&self) -> Vec<E<Fp>> {
+        self.constraints.clone()
+    }
+
+    pub fn get_lookups(&self) -> Vec<Lookup<E<Fp>>> {
+        self.lookups.clone()
+    }
+}
diff --git a/o1vm/src/interpreters/riscv32i/mod.rs b/o1vm/src/interpreters/riscv32i/mod.rs
index a23ce8e256..ee62e20250 100644
--- a/o1vm/src/interpreters/riscv32i/mod.rs
+++ b/o1vm/src/interpreters/riscv32i/mod.rs
@@ -11,6 +11,8 @@ pub const INSTRUCTION_SET_SIZE: usize = 47;
 /// List all columns used by the interpreter
 pub mod column;
+pub mod constraints;
+
 pub mod interpreter;
 
 /// All the registers used by the ISA