Skip to content

Commit

Permalink
Merge branch 'okx:main' into main
Browse files Browse the repository at this point in the history
  • Loading branch information
jiangjinyuan authored Aug 29, 2023
2 parents f639550 + 4137797 commit 8fd394d
Show file tree
Hide file tree
Showing 11 changed files with 143 additions and 92 deletions.
Binary file modified zk_STARK/.DS_Store
Binary file not shown.
6 changes: 4 additions & 2 deletions zk_STARK/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,12 +8,14 @@
USER_NUM_INIT = 2**10 - 1
MAX_USER_NUM_FOR_ONE_BATCH = 2**12
MAX_USER_VALUE = 4**30//1000
MAIN_COINS_NUM = 21
# MAIN_COINS_NUM = 31

COINS = ["BTC", "ETH", "USDT", "USDC", "XRP", "DOGE", "SOL", "OKB", "APT", "DASH", "DOT", "ELF", "EOS", "ETC", "FIL", "LINK", "OKT", "PEOPLE", "TON", "TRX", "UNI", "1INCH", "AAVE", "ADA", "AGLD", "AKITA", "ALGO", "ALPHA", "ANT", "APE", "API3", "AR", "ARB", "ATOM", "AVAX", "AXS", "BABYDOGE", "BADGER", "BAL", "BAND", "BAT", "BCH", "BETH", "BICO", "BLUR", "BNB", "BNT", "BSV", "BTM", "BZZ", "CEL", "CELO", "CELR", "CFX", "CHZ", "CLV", "COMP", "CONV", "CORE", "CQT", "CRO", "CRV", "CSPR", "CVC", "DOME", "DORA", "DYDX", "EFI", "EGLD", "ENJ", "ENS", "ETHW", "FITFI", "FLM", "FLOKI", "FLOW", "FTM", "GALA", "GFT", "GLMR", "GMT", "GMX", "GODS", "GRT", "HBAR", "ICP", "IMX", "IOST", "IOTA", "JST", "KISHU", "KLAY", "KNC", "KSM", "LAT", "LDO", "LON", "LOOKS", "LPT", "LRC", "LTC", "LUNA", "LUNC", "MAGIC", "MANA", "MASK", "MATIC", "MINA", "MKR", "NEAR", "NEO", "NFT", "OMG", "ONT", "OP", "PERP", "QTUM", "REN", "RSR", "RSS3", "RVN", "SAND", "SHIB", "SKL", "SLP", "SNT", "SNX", "STARL", "STORJ", "STX", "SUSHI", "SWEAT", "SWRV", "THETA", "TRB", "TUSD", "UMA", "USTC", "WAVES", "WOO", "XCH", "XLM", "XMR", "XTZ", "YFI", "YFII", "YGG", "ZEC", "ZEN", "ZIL", "ZRX"]
# COINS = ["BTC", "ETH", "USDT", "XXC", "HFB", "HPR", "YPS", "QCZ", "YJE", "OUS", "TKR", "AWM", "ATD", "NFI", "ZMV", "IRZ", "YIX", "KVB", "YJA", "PZH", "BRY", "ECF", "HZW", "EGN", "BVR", "IYY", "DKA", "LEH", "RJW", "YEM", "PTO", "JOU", "DUU", "YGH", "EYP", "AJV", "FJX", "SLY", "UXQ", "WSQ", "YSN", "UCK", "DHU", "WIF", "UQY", "WWK", "CAO", "VBA", "YYM", "DIZ", "JLM", "GXT", "ORF", "KRJ", "PEQ", "AJR", "JDX", "ELU", "WTS", "UNR", "VLS", "AUF", "ZZC", "MZZ", "AVF", "EZV", "XQV", "BUK", "OQB", "KNA", "CPJ", "ASI", "AZG", "QXJ", "UUH", "QJP", "VWB", "VEY", "TQK", "GVH", "JRX", "KDE", "VNH", "OKP", "BXE", "OUI", "KQV", "DWF", "WXX", "PQE", "VVI", "WEF", "MLM", "MGE", "ZWV", "VZF", "OMS", "UTJ", "OAK", "FDJ"]
# # COINS = ["BTC", "ETH", "USDT"]
UTS16 = 16
UTS32 = 32
UTS_FOR_TRUNK = 32
MAX_UTS = 64
USER_DATA_PATH = "./user_data/"
CONFIG_PATH = "./sum_proof_data/config.json"

91 changes: 55 additions & 36 deletions zk_STARK/mk_and_verify_proofs.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,28 +13,34 @@

f = PrimeField(MODULUS)

def init_user_data(batch_size, batch_index):
def init_user_data(batch_size, batch_index, config_path):
with open(config_path, "r") as ff:
config_json = json.load(ff)
coins = config_json["coins"]
data = []
coins_len = len(COINS)
coins_len = len(coins)
for i in range(batch_size):
items = {"id" : str(keccak_256(i.to_bytes(32,'big')).hex())}
for coin in COINS:
for coin in coins:
items[coin] = str(random.randrange(4**(UTS16 -2))//coins_len)
data.append(items)
with open(USER_DATA_PATH + "batch" + str(batch_index) + ".json", "w") as f:
json.dump(data, f)
return

# data_path: json path of user data; config_path: json path of the coin config
def read_user_data(path):
with open(path, "r") as f:
def read_user_data(data_path, config_path):
with open(data_path, "r") as f:
data = json.load(f)
with open(config_path, "r") as ff:
config_json = json.load(ff)
coins = config_json["coins"]
ids = []
balances = [[]]*(len(COINS))
balances = [[]]*(len(coins))
for item in data:
ids.append(int(item["id"],16))
j = 0
for coin_name in COINS:
for coin_name in coins:
if coin_name in item.keys():
balances[j] = balances[j] + [int(item[coin_name],10)]
else:
Expand All @@ -44,14 +50,20 @@ def read_user_data(path):

# input_path: json path of user data, xxx/xxx.json
# output_path: batch path of proof data, xxx/xxx/a1/
def mk_batch_proof(uts, input_path, output_path):
ids, coins = read_user_data(input_path)
def mk_batch_proof(uts, input_path, output_path, config_path):
ids, balances = read_user_data(input_path, config_path)
assert len(ids) < MAX_USER_NUM_FOR_ONE_BATCH, "too much users in one batch"
mk_por_proof(ids, coins, uts, output_path)
with open(config_path, "r") as ff:
config_json = json.load(ff)
main_coins_num = int(config_json["main_coins_num"])
coins = config_json["coins"]
mk_por_proof(ids, balances, uts, output_path, main_coins_num, coins)
return

# batch path of proof data, xxx/xxx/
def verify_batch_proof(input_path):
def verify_batch_proof(input_path, config):
main_coins_num = int(config["main_coins_num"])
coins = config["coins"]
with open(input_path + "sum_proof.json", "r") as ff:
sum_proof_json = json.load(ff)
sum_proof = [sum_proof_json["steps"],
Expand All @@ -66,17 +78,22 @@ def verify_batch_proof(input_path):
with open(input_path + "sum_values.json", "r") as ff:
sum_values_json = json.load(ff)
sum_values = []
for coin in COINS:
for coin in coins:
sum_values.append(sum_values_json[coin])
sum_values.append(sum_values_json["total_value"])
assert verify_por_proof(sum_values, sum_proof), "invalid batch proof"

assert verify_por_proof(sum_values, sum_proof, main_coins_num), "invalid batch proof"
return sum_values

# input_path: basic batch path, xxx/batches/
# output_path: trunk path, xxx/xxx/
def mk_trunk_proof(input_path, output_path):
def mk_trunk_proof(input_path, output_path, config_path):
with open(config_path, "r") as ff:
config_json = json.load(ff)
main_coins_num = int(config_json["main_coins_num"])
coins = config_json["coins"]
ids = []
coins = [[]]*(len(COINS))
values = [[]]*(len(coins))

a_count = 0
b_count = 0
Expand All @@ -93,8 +110,8 @@ def mk_trunk_proof(input_path, output_path):
with open(input_path + "a" + str(i) + "/sum_values.json", "r") as ff:
sum_values = json.load(ff)
j = 0
for coin in COINS:
coins[j] = coins[j] + [sum_values[coin]]
for coin in coins:
values[j] = values[j] + [sum_values[coin]]
j += 1

for i in range(b_count):
Expand All @@ -103,15 +120,17 @@ def mk_trunk_proof(input_path, output_path):
with open(input_path + "b" + str(i) + "/sum_values.json", "r") as ff:
sum_values = json.load(ff)
j = 0
for coin in COINS:
coins[j] = coins[j] + [sum_values[coin]]
for coin in coins:
values[j] = values[j] + [sum_values[coin]]
j += 1

mk_por_proof(ids, coins, UTS_FOR_TRUNK, output_path)
mk_por_proof(ids, values, UTS_FOR_TRUNK, output_path, main_coins_num, coins)
return

# input_path: trunk path, xxx/xxx/
def verify_trunk_proof(input_path):
def verify_trunk_proof(input_path, config):
coins = config["coins"]
main_coins_num = int(config["main_coins_num"])
with open(input_path + "sum_proof.json", "r") as ff:
sum_proof_json = json.load(ff)
sum_proof = [sum_proof_json["steps"],
Expand All @@ -127,19 +146,22 @@ def verify_trunk_proof(input_path):
with open(input_path + "sum_values.json", "r") as ff:
sum_values_json = json.load(ff)
sum_values = []
for coin in COINS:
for coin in coins:
sum_values.append(sum_values_json[coin])
sum_values.append(sum_values_json["total_value"])
assert verify_por_proof(sum_values, sum_proof), "invalid trunk proof"
assert verify_por_proof(sum_values, sum_proof, main_coins_num), "invalid trunk proof"
return sum_values

# batch_index: batch index in trunk
# input_batch_path: batch path, xxx/batches/a1/
# input_trunk_path: trunk path, xxx/trunk/
# output_path: path for saving inclusion data, xxx/inclusion_proof_data/a1/
def mk_inclusion_proof(batch_index, uts, input_batch_path, input_trunk_path, output_path):
def mk_inclusion_proof(batch_index, uts, input_batch_path, input_trunk_path, output_path, config_path):
start_time = time.time()
coin_num = len(COINS)
with open(config_path, "r") as ff:
config_json = json.load(ff)
coins = config_json["coins"]
coin_num = len(coins)

with open(input_trunk_path + "mtree.json", "r") as ff:
trunk_mtree = json.load(ff)["mtree"]
Expand All @@ -155,7 +177,7 @@ def mk_inclusion_proof(batch_index, uts, input_batch_path, input_trunk_path, out
trunk_inclusion_proof["batch_id"] = str(batch_entry_data[(coin_num+1)*32:(coin_num+2)*32].hex())
trunk_inclusion_proof["total_value"] = str(int.from_bytes(batch_entry_data[:32], 'big'))
j = 0
for coin in COINS:
for coin in coins:
value = int.from_bytes(batch_entry_data[(j+1)*32:(j+2)*32], 'big')
if value > MAX_USER_VALUE:
value = value - MODULUS
Expand All @@ -179,7 +201,7 @@ def mk_inclusion_proof(batch_index, uts, input_batch_path, input_trunk_path, out
batch_inclusion_proof["user_id"] = str(user_entry_data[(coin_num+1)*32:(coin_num+2)*32].hex())
batch_inclusion_proof["total_value"] = str(int.from_bytes(user_entry_data[:32], 'big'))
j = 0
for coin in COINS:
for coin in coins:
value = int.from_bytes(user_entry_data[(j+1)*32:(j+2)*32], 'big')
if value > MAX_USER_VALUE:
value = value - MODULUS
Expand All @@ -195,7 +217,8 @@ def mk_inclusion_proof(batch_index, uts, input_batch_path, input_trunk_path, out

inclusion_proof = {
"batch_inclusion_proof":batch_inclusion_proof,
"trunk_inclusion_proof":trunk_inclusion_proof
"trunk_inclusion_proof":trunk_inclusion_proof,
"config":config_json
}
with open(output_path + "user_%d_inclusion_proof.json"%i, "w") as ff:
json.dump(inclusion_proof, ff)
Expand All @@ -207,20 +230,17 @@ def mk_inclusion_proof(batch_index, uts, input_batch_path, input_trunk_path, out
# batch_index: batch index in trunk
# input_path: inclusion proof path, xxx/inclusion_proof_data/a1/
def verify_inclusion_proof(input_path):
start_time = time.time()
coin_num = len(COINS)
file_count = 0

for root, dirs, files in os.walk(input_path):
for proof_file in files:
if re.search("inclusion_proof.json", proof_file):
verify_single_inclusion_proof(input_path + proof_file)
# print("verify inclusion proof in %.4f sec" %(time.time() - start_time))
return

def verify_single_inclusion_proof(proof_file):
with open(proof_file, "r") as ff:
inclusion_proof = json.load(ff)

coins = inclusion_proof["config"]["coins"]

batch_inclusion_proof = inclusion_proof["batch_inclusion_proof"]
batch_index = batch_inclusion_proof["batch_index"]
Expand All @@ -230,7 +250,7 @@ def verify_single_inclusion_proof(proof_file):
user_entry = int(batch_inclusion_proof["total_value"]).to_bytes(32, 'big')
j = 0
temp = b''
for coin in COINS:
for coin in coins:
value = int(batch_inclusion_proof[coin]) % MODULUS
temp = temp + value.to_bytes(32, 'big')
j += 1
Expand All @@ -242,7 +262,7 @@ def verify_single_inclusion_proof(proof_file):
batch_entry = int(trunk_inclusion_proof["total_value"]).to_bytes(32, 'big')
j = 0
temp = b''
for coin in COINS:
for coin in coins:
value = int(trunk_inclusion_proof[coin]) % MODULUS
temp = temp + value.to_bytes(32, 'big')
j += 1
Expand All @@ -252,4 +272,3 @@ def verify_single_inclusion_proof(proof_file):
assert trunk_inclusion_proof["batch_id"] == batch_inclusion_proof["batch_mtree_root"]

return

37 changes: 22 additions & 15 deletions zk_STARK/por_stark.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,10 @@
# See https://vitalik.ca/general/2022/11/19/proof_of_solvency.html
# It provides users with proofs that constrain the sum of all assets and the non-negativity of their net asset value.
# This is a basic version of the solution.
# Most of the modules used here including fft and fri etc. came from "../mimc_stark".

# THIS IS EDUCATIONAL CODE, NOT PRODUCTION! HIRE A SECURITY AUDITOR
# WHEN BUILDING SOMETHING FOR PRODUCTION USE.

from permuted_tree import merkelize,keccak_256, mk_multi_branch, verify_multi_branch, mk_branch, verify_branch
from poly_utils import PrimeField
Expand All @@ -15,24 +19,24 @@
f = PrimeField(MODULUS)

# ids: array of user ids
# coins: array of user values of all coins
# values: array of user values of all coins
# uts: user trace size, number of data rows for each user, for non-negative proof
# data_path: user data path
def mk_por_proof(ids, coins, uts, data_path):
def mk_por_proof(ids, values, uts, data_path, main_coins_num, coins):
start_time = time.time()

# tranform data into the field, -x => MODULUS -x
transform_into_field(coins, MODULUS)
# transform data into the field, -1 => MODULUS - 1
transform_into_field(values, MODULUS)
assert is_a_power_of_2(uts) and uts <= MAX_UTS, "invalid uts"

if not is_a_power_of_2(len(ids)+1):
ids, coins = pad(ids, coins, MAX_USER_NUM_FOR_ONE_BATCH)
ids, values = pad(ids, values, MAX_USER_NUM_FOR_ONE_BATCH)
user_num = len(ids)+1
steps = uts * user_num
precision = steps * EXTENSION_FACTOR

ids, coins = extend_user_data(ids, coins, uts)
sum_trace, sum_values, coins = get_sum_trace(coins, uts, MODULUS)
ids, values = extend_user_data(ids, values, uts)
sum_trace, sum_values, values = get_sum_trace(values, uts, MODULUS)

# get generators
G2 = f.exp(NONRESIDUE, (MODULUS - 1) // precision)
Expand All @@ -49,8 +53,8 @@ def mk_por_proof(ids, coins, uts, data_path):

# get poly and eval for values of each coin
b_eval = []
for i in range(len(coins)):
b_poly = fft(coins[i], MODULUS, G1, inv=True)
for i in range(len(values)):
b_poly = fft(values[i], MODULUS, G1, inv=True)
b_eval.append(fft(b_poly, MODULUS, G2))
del b_poly
gc.collect()
Expand Down Expand Up @@ -97,9 +101,9 @@ def mk_por_proof(ids, coins, uts, data_path):
z_eval = f.multi_inv([f.eval_poly_at(z_poly, x) for x in xs])
tc_eval.append([f.mul(f.sub(t, i), z) % MODULUS for t, i, z in zip(t_eval, i_eval, z_eval)])

# coins constraints eval
# values constraints eval
cc_eval = []
for i in range(MAIN_COINS_NUM):
for i in range(main_coins_num):
# cc_constraint 1: Accumulation of each coin
# b_eval[i][j + uts*EXTENSION_FACTOR] = b_eval[i][j + (uts-1)*EXTENSION_FACTOR] + b_eval[i][j], j mod uts*EXTENSION_FACTOR == (uts-1)*EXTENSION_FACTOR,and j != last_step_position,
# z(x) = (x^user_num - G2^((uts-1) * EXTENSION_FACTOR * user_num))/(x - last_step_position)
Expand Down Expand Up @@ -178,12 +182,13 @@ def mk_por_proof(ids, coins, uts, data_path):
mk_multi_branch(l_mtree, positions),
prove_low_degree(l_eval, G2, 4*steps, MODULUS, exclude_multiples_of=EXTENSION_FACTOR)]

save_data(data_path, sum_proof, mtree, sum_values, COINS)
save_data(data_path, sum_proof, mtree, sum_values, coins)
# print("mk por proof in %.4f sec: " % (time.time() - start_time))
return

# sum_values: The sum value of each coin and the total value that the prover claimed
# proof: The proof for the sum amounts
def verify_por_proof(sum_values, proof):
def verify_por_proof(sum_values, proof, main_coins_num):
start_time = time.time()
check_sum_values(sum_values, MODULUS)
coins_num = len(sum_values) - 1
Expand All @@ -209,7 +214,7 @@ def verify_por_proof(sum_values, proof):
last_step_position = f.exp(G2, (steps - 1) * skips)

main_branch_leaves = verify_multi_branch(m_root, aug_positions, main_branches)
check_entry_hash(main_branch_leaves, mtree_entries_data, MAIN_COINS_NUM, MODULUS)
check_entry_hash(main_branch_leaves, mtree_entries_data, main_coins_num, MODULUS)

linear_comb_branch_leaves = verify_multi_branch(l_root, positions, linear_comb_branches)

Expand Down Expand Up @@ -266,7 +271,7 @@ def verify_por_proof(sum_values, proof):


# check coins constraint:
for i in range(MAIN_COINS_NUM):
for i in range(main_coins_num):
# check cc_constraint 1: b_eval[i][j + uts*EXTENSION_FACTOR] = b_eval[i][j + (uts-1)*EXTENSION_FACTOR] + b_eval[i][j]
# j mod uts*EXTENSION_FACTOR == (uts-1)*EXTENSION_FACTOR,and j != last_step_position
assert f.sub(f.sub(b_of_uts_skips_x[i], b_of_uts_sub_1_skips_x[i]), b_of_x[i]) == f.mul(cc_of_x[2*i], z3)
Expand All @@ -279,5 +284,7 @@ def verify_por_proof(sum_values, proof):
# check correctness of the linear combination
assert verify_l(k, x_to_the_steps, l_of_x, [t_of_x, b_of_x, id_of_x, tc_of_x[0], tc_of_x[2:], cc_of_x], tc_of_x[1], MODULUS)

# print('Verified %d consistency checks' % SPOT_CHECK_SECURITY_FACTOR)
# print('Verified sum proof in %.4f sec' % (time.time() - start_time))
return True

15 changes: 9 additions & 6 deletions zk_STARK/por_verify_tool.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,20 +6,23 @@
import time
import re

def verify_sum_proofs():
def verify_sum_proofs(config_path):
with open(config_path, "r") as ff:
config_json = json.load(ff)
coins = config_json["coins"]
abs_dir = os.path.dirname(os.path.realpath(sys.argv[0]))
start_time = time.time()
batches_proof_path = abs_dir + "/sum_proof_data/batches/"
sum_values = [0] * (len(COINS) + 1)
sum_values = [0] * (len(coins) + 1)
for root, dirs, files in os.walk(batches_proof_path):
for dir in dirs:
result = verify_batch_proof(batches_proof_path + dir + "/")
result = verify_batch_proof(batches_proof_path + dir + "/", config_json)
for i in range(len(sum_values)):
sum_values[i] = (sum_values[i] + result[i]) % MODULUS
print("Sum Proof of Batch %s Verified" %dir)

trunk_proof_path = abs_dir + "/sum_proof_data/trunk/"
result = verify_trunk_proof(trunk_proof_path)
result = verify_trunk_proof(trunk_proof_path, config_json)
for i in range(len(sum_values)):
assert sum_values[i] == (result[i] % MODULUS)
print("Sum Proof of Trunk Verified")
Expand All @@ -37,5 +40,5 @@ def verify_all_inclusion_proof():
print("All Proofs Verified in %.4f secs!" %(time.time() - start_time))

if __name__ == '__main__':
verify_sum_proofs()
verify_all_inclusion_proof()
verify_sum_proofs(CONFIG_PATH)
verify_all_inclusion_proof()
4 changes: 4 additions & 0 deletions zk_STARK/sum_proof_data/config.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
{
"main_coins_num": 22,
"coins": ["BTC","ETH","USDT","USDC","XRP","DOGE","SOL","OKB","APT","DASH","DOT","ELF","EOS","ETC","FIL","LINK","LTC","OKT","PEOPLE","TON","TRX","UNI","1INCH","AAVE","ADA","AGLD","AIDOGE","AKITA","ALGO","ALPHA","ANT","APE","API3","AR","ARB","ATOM","AVAX","AXS","BABYDOGE","BADGER","BAL","BAND","BAT","BCH","BETH","BICO","BLUR","BNB","BNT","BSV","BTM","BZZ","CEL","CELO","CELR","CETUS","CFX","CHZ","CLV","COMP","CONV","CORE","CQT","CRO","CRV","CSPR","CVC","DOME","DORA","DYDX","EFI","EGLD","ENJ","ENS","ETHW","FITFI","FLM","FLOKI","FLOW","FTM","GALA","GFT","GLMR","GMT","GMX","GODS","GRT","HBAR","ICP","IMX","IOST","IOTA","JST","KISHU","KLAY","KNC","KSM","LAT","LDO","LON","LOOKS","LPT","LRC","LUNA","LUNC","MAGIC","MANA","MASK","MATIC","MINA","MKR","NEAR","NEO","NFT","OMG","ONT","OP","PEPE","PERP","QTUM","RDNT","REN","RSR","RSS3","RVN","SAND","SHIB","SKL","SLP","SNT","SNX","STARL","STORJ","STX","SUI","SUSHI","SWEAT","SWRV","THETA","TRB","TUSD","UMA","USTC","WAVES","WOO","XCH","XLM","XMR","XTZ","YFI","YFII","YGG","ZEC","ZEN","ZIL","ZRX"]
}
Loading

0 comments on commit 8fd394d

Please sign in to comment.