initialise

gozzy 2022-09-02 17:56:58 +00:00
commit 8b77b73323
31 changed files with 11277 additions and 0 deletions

22
LICENSE Normal file

@@ -0,0 +1,22 @@
The MIT License (MIT)
Copyright (c) 2018 Truffle
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

33
README.md Normal file

@@ -0,0 +1,33 @@
# Tornado.cash trees [![Build Status](https://github.com/tornadocash/tornado-trees/workflows/build/badge.svg)](https://github.com/tornadocash/tornado-trees/actions)
This repo implements a more optimized version of the [TornadoTrees](https://github.com/tornadocash/tornado-anonymity-mining/blob/080d0f83665fa686d7fe42dd57fb5975d0f1ca58/contracts/TornadoTrees.sol) mechanism.
## Dependencies
1. node 12
2. yarn
3. zkutil (`brew install rust && cargo install zkutil`)
## Start
```bash
$ yarn
$ yarn circuit
$ yarn test
```
## Mainnet testing
```bash
$ yarn circuit
$ npx hardhat node --fork https://eth-mainnet.alchemyapi.io/v2/API_KEY --fork-block-number 11827889
$ npx hardhat test
```
## Build large circuits
Make sure you have enough RAM
```bash
docker build . -t tornadocash/tornado-trees
```
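## Usage
A sketch of the off-chain update flow using this package's helpers (assumes the circuit artifacts were built with `yarn circuit`, `tornadoTrees` is an ethers contract instance, and `events` is a chunk of `2^CHUNK_TREE_HEIGHT` objects of shape `{ instance, hash, block }`):
```js
const MerkleTree = require('fixed-merkle-tree')
const { poseidonHash2 } = require('tornado-trees/src/utils')
const { batchTreeUpdate, prove } = require('tornado-trees')

// the first chunk starts from an empty Poseidon tree of depth 20;
// for later chunks the tree must already contain all previously inserted leaves
const tree = new MerkleTree(20, [], { hashFunction: poseidonHash2 })

async function submitChunk(tornadoTrees, tree, events) {
  // batchTreeUpdate bulk-inserts the chunk into `tree` and prepares the circuit inputs
  const { input, args } = batchTreeUpdate(tree, events)
  const proof = await prove(input, './artifacts/circuits/BatchTreeUpdate')
  return tornadoTrees.updateDepositTree(proof, ...args)
}
```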

91
circuits/BatchTreeUpdate.circom Normal file

@@ -0,0 +1,91 @@
include "../node_modules/circomlib/circuits/poseidon.circom";
include "../node_modules/circomlib/circuits/bitify.circom";
include "./MerkleTreeUpdater.circom";
include "./Utils.circom";
template TreeLayer(height) {
signal input ins[1 << (height + 1)];
signal output outs[1 << height];
component hash[1 << height];
for(var i = 0; i < (1 << height); i++) {
hash[i] = HashLeftRight();
hash[i].left <== ins[i * 2];
hash[i].right <== ins[i * 2 + 1];
hash[i].hash ==> outs[i];
}
}
// Inserts a leaf batch into a tree
// Checks that tree previously contained zero leaves in the same position
template BatchTreeUpdate(levels, batchLevels, zeroBatchLeaf) {
var height = levels - batchLevels;
var nLeaves = 1 << batchLevels;
signal input argsHash;
signal private input oldRoot;
signal private input newRoot;
signal private input pathIndices;
signal private input pathElements[height];
signal private input hashes[nLeaves];
signal private input instances[nLeaves];
signal private input blocks[nLeaves];
// Check that hash of arguments is correct
// We compress arguments into a single hash to considerably reduce gas usage on chain
component argsHasher = TreeUpdateArgsHasher(nLeaves);
argsHasher.oldRoot <== oldRoot;
argsHasher.newRoot <== newRoot;
argsHasher.pathIndices <== pathIndices;
for(var i = 0; i < nLeaves; i++) {
argsHasher.hashes[i] <== hashes[i];
argsHasher.instances[i] <== instances[i];
argsHasher.blocks[i] <== blocks[i];
}
argsHash === argsHasher.out;
// Compute hashes of all leaves
component leaves[nLeaves];
for(var i = 0; i < nLeaves; i++) {
leaves[i] = Poseidon(3);
leaves[i].inputs[0] <== instances[i];
leaves[i].inputs[1] <== hashes[i];
leaves[i].inputs[2] <== blocks[i];
}
// Compute batch subtree merkle root
component layers[batchLevels];
for(var level = batchLevels - 1; level >= 0; level--) {
layers[level] = TreeLayer(level);
for(var i = 0; i < (1 << (level + 1)); i++) {
layers[level].ins[i] <== level == batchLevels - 1 ? leaves[i].out : layers[level + 1].outs[i];
}
}
// Verify that batch subtree was inserted correctly
component treeUpdater = MerkleTreeUpdater(height, zeroBatchLeaf);
treeUpdater.oldRoot <== oldRoot;
treeUpdater.newRoot <== newRoot;
treeUpdater.leaf <== layers[0].outs[0];
treeUpdater.pathIndices <== pathIndices;
for(var i = 0; i < height; i++) {
treeUpdater.pathElements[i] <== pathElements[i];
}
}
// zeroLeaf = keccak256("tornado") % FIELD_SIZE
// zeroBatchLeaf is poseidon(zeroLeaf, zeroLeaf) iterated batchLevels times,
// i.e. the root of a zero-filled subtree of height batchLevels
function nthZero(n) {
if (n == 0) return 21663839004416932945382355908790599225266501822907911457504978515578255421292;
if (n == 1) return 11850551329423159860688778991827824730037759162201783566284850822760196767874;
if (n == 2) return 21572503925325825116380792768937986743990254033176521064707045559165336555197;
if (n == 3) return 11224495635916644180335675565949106569141882748352237685396337327907709534945;
if (n == 4) return 2399242030534463392142674970266584742013168677609861039634639961298697064915;
if (n == 5) return 13182067204896548373877843501261957052850428877096289097123906067079378150834;
if (n == 6) return 7106632500398372645836762576259242192202230138343760620842346283595225511823;
if (n == 7) return 17857585024203959071818533000506593455576509792639288560876436361491747801924;
if (n == 8) return 17278668323652664881420209773995988768195998574629614593395162463145689805534;
if (n == 9) return 209436188287252095316293336871467217491997565239632454977424802439169726471;
}
var CHUNK_TREE_HEIGHT = 8;
component main = BatchTreeUpdate(20, CHUNK_TREE_HEIGHT, nthZero(CHUNK_TREE_HEIGHT));
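For reference, the zero values above can be re-derived off-chain; a sketch using `ethers` and `circomlib` (both already dependencies of this repo):
```js
const ethers = require('ethers')
const { poseidon } = require('circomlib')

const FIELD_SIZE = BigInt('21888242871839275222246405745257275088548364400416034343698204186575808495617')
// zeroLeaf = keccak256("tornado") % FIELD_SIZE
const zeroLeaf = BigInt(ethers.utils.keccak256(ethers.utils.toUtf8Bytes('tornado'))) % FIELD_SIZE

const zeros = [zeroLeaf]
for (let i = 1; i <= 8; i++) {
  // each level's zero is the previous level's zero hashed with itself
  zeros.push(poseidon([zeros[i - 1], zeros[i - 1]]))
}
console.log(zeros.map(String)) // should match nthZero(0) ... nthZero(8)
```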

71
circuits/MerkleTree.circom Normal file

@@ -0,0 +1,71 @@
include "../node_modules/circomlib/circuits/poseidon.circom";
include "../node_modules/circomlib/circuits/bitify.circom";
// Computes Poseidon([left, right])
template HashLeftRight() {
signal input left;
signal input right;
signal output hash;
component hasher = Poseidon(2);
hasher.inputs[0] <== left;
hasher.inputs[1] <== right;
hash <== hasher.out;
}
// if s == 0 returns [in[0], in[1]]
// if s == 1 returns [in[1], in[0]]
template DualMux() {
signal input in[2];
signal input s;
signal output out[2];
s * (1 - s) === 0;
out[0] <== (in[1] - in[0])*s + in[0];
out[1] <== (in[0] - in[1])*s + in[1];
}
// Verifies that merkle proof is correct for given merkle root and a leaf
// pathIndices input is an array of 0/1 selectors telling whether given pathElement is on the left or right side of merkle path
template RawMerkleTree(levels) {
signal input leaf;
signal input pathElements[levels];
signal input pathIndices[levels];
signal output root;
component selectors[levels];
component hashers[levels];
for (var i = 0; i < levels; i++) {
selectors[i] = DualMux();
selectors[i].in[0] <== i == 0 ? leaf : hashers[i - 1].hash;
selectors[i].in[1] <== pathElements[i];
selectors[i].s <== pathIndices[i];
hashers[i] = HashLeftRight();
hashers[i].left <== selectors[i].out[0];
hashers[i].right <== selectors[i].out[1];
}
root <== hashers[levels - 1].hash;
}
template MerkleTree(levels) {
signal input leaf;
signal input pathElements[levels];
signal input pathIndices;
signal output root;
component indexBits = Num2Bits(levels);
indexBits.in <== pathIndices;
component tree = RawMerkleTree(levels);
tree.leaf <== leaf;
for (var i = 0; i < levels; i++) {
tree.pathIndices[i] <== indexBits.out[i];
tree.pathElements[i] <== pathElements[i];
}
root <== tree.root;
}
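For reference, the same computation in JS (a sketch mirroring DualMux + HashLeftRight; `poseidonHash2` is the helper from this repo's `src/utils.js`):
```js
const { poseidonHash2 } = require('./src/utils')

// Recompute the root from a leaf and its merkle path, exactly as RawMerkleTree does
function computeRoot(leaf, pathElements, pathIndices) {
  let hash = leaf
  for (let i = 0; i < pathElements.length; i++) {
    // pathIndices[i] === 0: current hash is the left input, sibling the right; 1: swapped
    hash = pathIndices[i] === 0 ? poseidonHash2(hash, pathElements[i]) : poseidonHash2(pathElements[i], hash)
  }
  return hash
}
```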

33
circuits/MerkleTreeUpdater.circom Normal file

@@ -0,0 +1,33 @@
include "./MerkleTree.circom";
// inserts a leaf into a tree
// checks that tree previously contained zero in the same position
template MerkleTreeUpdater(levels, zeroLeaf) {
signal input oldRoot;
signal input newRoot;
signal input leaf;
signal input pathIndices;
signal private input pathElements[levels];
// Compute indexBits once for both trees
// Since Num2Bits is non-deterministic, two duplicate calls to it cannot be
// optimized out by the circom compiler
component indexBits = Num2Bits(levels);
indexBits.in <== pathIndices;
component treeBefore = RawMerkleTree(levels);
for(var i = 0; i < levels; i++) {
treeBefore.pathIndices[i] <== indexBits.out[i];
treeBefore.pathElements[i] <== pathElements[i];
}
treeBefore.leaf <== zeroLeaf;
treeBefore.root === oldRoot;
component treeAfter = RawMerkleTree(levels);
for(var i = 0; i < levels; i++) {
treeAfter.pathIndices[i] <== indexBits.out[i];
treeAfter.pathElements[i] <== pathElements[i];
}
treeAfter.leaf <== leaf;
treeAfter.root === newRoot;
}

67
circuits/Utils.circom Normal file

@@ -0,0 +1,67 @@
include "../node_modules/circomlib/circuits/bitify.circom";
include "../node_modules/circomlib/circuits/sha256/sha256.circom";
template TreeUpdateArgsHasher(nLeaves) {
signal private input oldRoot;
signal private input newRoot;
signal private input pathIndices;
signal private input instances[nLeaves];
signal private input hashes[nLeaves];
signal private input blocks[nLeaves];
signal output out;
var header = 256 + 256 + 32;
var bitsPerLeaf = 160 + 256 + 32;
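// sha256 input layout (big-endian), matching the BYTES_SIZE packing in TornadoTrees.sol:
// header: [ 00 | oldRoot (254 bits) | 00 | newRoot (254 bits) | pathIndices (32 bits) ]
// each leaf: [ 00 | hash (254 bits) | instance (160 bits) | block (32 bits) ]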
component hasher = Sha256(header + nLeaves * bitsPerLeaf);
// the range check on old root is optional, it's enforced by the smart contract anyway
component bitsOldRoot = Num2Bits_strict();
component bitsNewRoot = Num2Bits_strict();
component bitsPathIndices = Num2Bits(32);
component bitsInstance[nLeaves];
component bitsHash[nLeaves];
component bitsBlock[nLeaves];
bitsOldRoot.in <== oldRoot;
bitsNewRoot.in <== newRoot;
bitsPathIndices.in <== pathIndices;
hasher.in[0] <== 0;
hasher.in[1] <== 0;
for(var i = 0; i < 254; i++) {
hasher.in[i + 2] <== bitsOldRoot.out[253 - i];
}
hasher.in[256] <== 0;
hasher.in[257] <== 0;
for(var i = 0; i < 254; i++) {
hasher.in[i + 258] <== bitsNewRoot.out[253 - i];
}
for(var i = 0; i < 32; i++) {
hasher.in[i + 512] <== bitsPathIndices.out[31 - i];
}
for(var leaf = 0; leaf < nLeaves; leaf++) {
// the range check on hash is optional, it's enforced by the smart contract anyway
bitsHash[leaf] = Num2Bits_strict();
bitsInstance[leaf] = Num2Bits(160);
bitsBlock[leaf] = Num2Bits(32);
bitsHash[leaf].in <== hashes[leaf];
bitsInstance[leaf].in <== instances[leaf];
bitsBlock[leaf].in <== blocks[leaf];
hasher.in[header + leaf * bitsPerLeaf + 0] <== 0;
hasher.in[header + leaf * bitsPerLeaf + 1] <== 0;
for(var i = 0; i < 254; i++) {
hasher.in[header + leaf * bitsPerLeaf + i + 2] <== bitsHash[leaf].out[253 - i];
}
for(var i = 0; i < 160; i++) {
hasher.in[header + leaf * bitsPerLeaf + i + 256] <== bitsInstance[leaf].out[159 - i];
}
for(var i = 0; i < 32; i++) {
hasher.in[header + leaf * bitsPerLeaf + i + 416] <== bitsBlock[leaf].out[31 - i];
}
}
component b2n = Bits2Num(256);
for (var i = 0; i < 256; i++) {
b2n.in[i] <== hasher.out[255 - i];
}
out <== b2n.out;
}

19
contracts/AdminUpgradeableProxy.sol Normal file

@@ -0,0 +1,19 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.6.0;
import "@openzeppelin/contracts/proxy/TransparentUpgradeableProxy.sol";
/**
* @dev TransparentUpgradeableProxy where admin is allowed to call implementation methods.
*/
contract AdminUpgradeableProxy is TransparentUpgradeableProxy {
/**
* @dev Initializes an upgradeable proxy backed by the implementation at `_logic`.
*/
constructor(address _logic, bytes memory _data) public payable TransparentUpgradeableProxy(_logic, msg.sender, _data) {}
/**
* @dev Override to allow the admin to access the fallback function.
*/
function _beforeFallback() internal override {}
}

281
contracts/TornadoTrees.sol Normal file

@@ -0,0 +1,281 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.6.0;
pragma experimental ABIEncoderV2;
import "./interfaces/ITornadoTreesV1.sol";
import "./interfaces/IBatchTreeUpdateVerifier.sol";
import "@openzeppelin/upgrades-core/contracts/Initializable.sol";
/// @dev This contract holds a merkle tree of all tornado cash deposit and withdrawal events
contract TornadoTrees is Initializable {
address public immutable governance;
bytes32 public depositRoot;
bytes32 public previousDepositRoot;
bytes32 public withdrawalRoot;
bytes32 public previousWithdrawalRoot;
address public tornadoProxy;
IBatchTreeUpdateVerifier public treeUpdateVerifier;
ITornadoTreesV1 public immutable tornadoTreesV1;
uint256 public constant CHUNK_TREE_HEIGHT = 8;
uint256 public constant CHUNK_SIZE = 2**CHUNK_TREE_HEIGHT;
uint256 public constant ITEM_SIZE = 32 + 20 + 4;
uint256 public constant BYTES_SIZE = 32 + 32 + 4 + CHUNK_SIZE * ITEM_SIZE;
uint256 public constant SNARK_FIELD = 21888242871839275222246405745257275088548364400416034343698204186575808495617;
mapping(uint256 => bytes32) public deposits;
uint256 public depositsLength;
uint256 public lastProcessedDepositLeaf;
uint256 public immutable depositsV1Length;
mapping(uint256 => bytes32) public withdrawals;
uint256 public withdrawalsLength;
uint256 public lastProcessedWithdrawalLeaf;
uint256 public immutable withdrawalsV1Length;
event DepositData(address instance, bytes32 indexed hash, uint256 block, uint256 index);
event WithdrawalData(address instance, bytes32 indexed hash, uint256 block, uint256 index);
struct TreeLeaf {
bytes32 hash;
address instance;
uint32 block;
}
modifier onlyTornadoProxy {
require(msg.sender == tornadoProxy, "Not authorized");
_;
}
modifier onlyGovernance() {
require(msg.sender == governance, "Only governance can perform this action");
_;
}
struct SearchParams {
uint256 depositsFrom;
uint256 depositsStep;
uint256 withdrawalsFrom;
uint256 withdrawalsStep;
}
constructor(
address _governance,
ITornadoTreesV1 _tornadoTreesV1,
SearchParams memory _searchParams
) public {
governance = _governance;
tornadoTreesV1 = _tornadoTreesV1;
depositsV1Length = findArrayLength(
_tornadoTreesV1,
"deposits(uint256)",
_searchParams.depositsFrom,
_searchParams.depositsStep
);
withdrawalsV1Length = findArrayLength(
_tornadoTreesV1,
"withdrawals(uint256)",
_searchParams.withdrawalsFrom,
_searchParams.withdrawalsStep
);
}
function initialize(address _tornadoProxy, IBatchTreeUpdateVerifier _treeUpdateVerifier) public initializer onlyGovernance {
tornadoProxy = _tornadoProxy;
treeUpdateVerifier = _treeUpdateVerifier;
depositRoot = tornadoTreesV1.depositRoot();
uint256 lastDepositLeaf = tornadoTreesV1.lastProcessedDepositLeaf();
require(lastDepositLeaf % CHUNK_SIZE == 0, "Incorrect TornadoTrees state");
lastProcessedDepositLeaf = lastDepositLeaf;
depositsLength = depositsV1Length;
withdrawalRoot = tornadoTreesV1.withdrawalRoot();
uint256 lastWithdrawalLeaf = tornadoTreesV1.lastProcessedWithdrawalLeaf();
require(lastWithdrawalLeaf % CHUNK_SIZE == 0, "Incorrect TornadoTrees state");
lastProcessedWithdrawalLeaf = lastWithdrawalLeaf;
withdrawalsLength = withdrawalsV1Length;
}
function registerDeposit(address _instance, bytes32 _commitment) public onlyTornadoProxy {
uint256 _depositsLength = depositsLength;
deposits[_depositsLength] = keccak256(abi.encode(_instance, _commitment, blockNumber()));
emit DepositData(_instance, _commitment, blockNumber(), _depositsLength);
depositsLength = _depositsLength + 1;
}
function registerWithdrawal(address _instance, bytes32 _nullifierHash) public onlyTornadoProxy {
uint256 _withdrawalsLength = withdrawalsLength;
withdrawals[_withdrawalsLength] = keccak256(abi.encode(_instance, _nullifierHash, blockNumber()));
emit WithdrawalData(_instance, _nullifierHash, blockNumber(), _withdrawalsLength);
withdrawalsLength = _withdrawalsLength + 1;
}
function updateDepositTree(
bytes calldata _proof,
bytes32 _argsHash,
bytes32 _currentRoot,
bytes32 _newRoot,
uint32 _pathIndices,
TreeLeaf[CHUNK_SIZE] calldata _events
) public {
uint256 offset = lastProcessedDepositLeaf;
require(_newRoot != previousDepositRoot, "Outdated deposit root");
require(_currentRoot == depositRoot, "Proposed deposit root is invalid");
require(_pathIndices == offset >> CHUNK_TREE_HEIGHT, "Incorrect deposit insert index");
bytes memory data = new bytes(BYTES_SIZE);
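// Pack [currentRoot (32 bytes) | newRoot (32) | pathIndices (4)] into the head of `data`.
// The mstores run from the highest offset down, so each later full-word write
// only overwrites the zero padding of the narrower value written before it.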
assembly {
mstore(add(data, 0x44), _pathIndices)
mstore(add(data, 0x40), _newRoot)
mstore(add(data, 0x20), _currentRoot)
}
for (uint256 i = 0; i < CHUNK_SIZE; i++) {
(bytes32 hash, address instance, uint32 blockNumber) = (_events[i].hash, _events[i].instance, _events[i].block);
bytes32 leafHash = keccak256(abi.encode(instance, hash, blockNumber));
bytes32 deposit = offset + i >= depositsV1Length ? deposits[offset + i] : tornadoTreesV1.deposits(offset + i);
require(leafHash == deposit, "Incorrect deposit");
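// Append [hash (32 bytes) | instance (20) | block (4)] for this leaf,
// using the same reverse-order mstore trick to keep the narrow fields intact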
assembly {
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x7c), blockNumber)
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x78), instance)
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x64), hash)
}
if (offset + i >= depositsV1Length) {
delete deposits[offset + i];
} else {
emit DepositData(instance, hash, blockNumber, offset + i);
}
}
uint256 argsHash = uint256(sha256(data)) % SNARK_FIELD;
require(argsHash == uint256(_argsHash), "Invalid args hash");
require(treeUpdateVerifier.verifyProof(_proof, [argsHash]), "Invalid deposit tree update proof");
previousDepositRoot = _currentRoot;
depositRoot = _newRoot;
lastProcessedDepositLeaf = offset + CHUNK_SIZE;
}
function updateWithdrawalTree(
bytes calldata _proof,
bytes32 _argsHash,
bytes32 _currentRoot,
bytes32 _newRoot,
uint32 _pathIndices,
TreeLeaf[CHUNK_SIZE] calldata _events
) public {
uint256 offset = lastProcessedWithdrawalLeaf;
require(_newRoot != previousWithdrawalRoot, "Outdated withdrawal root");
require(_currentRoot == withdrawalRoot, "Proposed withdrawal root is invalid");
require(_pathIndices == offset >> CHUNK_TREE_HEIGHT, "Incorrect withdrawal insert index");
bytes memory data = new bytes(BYTES_SIZE);
assembly {
mstore(add(data, 0x44), _pathIndices)
mstore(add(data, 0x40), _newRoot)
mstore(add(data, 0x20), _currentRoot)
}
for (uint256 i = 0; i < CHUNK_SIZE; i++) {
(bytes32 hash, address instance, uint32 blockNumber) = (_events[i].hash, _events[i].instance, _events[i].block);
bytes32 leafHash = keccak256(abi.encode(instance, hash, blockNumber));
bytes32 withdrawal = offset + i >= withdrawalsV1Length ? withdrawals[offset + i] : tornadoTreesV1.withdrawals(offset + i);
require(leafHash == withdrawal, "Incorrect withdrawal");
assembly {
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x7c), blockNumber)
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x78), instance)
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x64), hash)
}
if (offset + i >= withdrawalsV1Length) {
delete withdrawals[offset + i];
} else {
emit WithdrawalData(instance, hash, blockNumber, offset + i);
}
}
uint256 argsHash = uint256(sha256(data)) % SNARK_FIELD;
require(argsHash == uint256(_argsHash), "Invalid args hash");
require(treeUpdateVerifier.verifyProof(_proof, [argsHash]), "Invalid withdrawal tree update proof");
previousWithdrawalRoot = _currentRoot;
withdrawalRoot = _newRoot;
lastProcessedWithdrawalLeaf = offset + CHUNK_SIZE;
}
function validateRoots(bytes32 _depositRoot, bytes32 _withdrawalRoot) public view {
require(_depositRoot == depositRoot || _depositRoot == previousDepositRoot, "Incorrect deposit tree root");
require(_withdrawalRoot == withdrawalRoot || _withdrawalRoot == previousWithdrawalRoot, "Incorrect withdrawal tree root");
}
/// @dev There is no array length getter for the deposit and withdrawal arrays
/// in the previous contract, so we have to find their length manually.
/// Used only during deployment
function findArrayLength(
ITornadoTreesV1 _tornadoTreesV1,
string memory _type,
uint256 _from, // most likely array length after the proposal has passed
uint256 _step // optimal step size to find first match, approximately equals dispersion
) internal view returns (uint256) {
if (_from == 0 && _step == 0) {
return 0; // for tests
}
// Find the segment with correct array length
bool direction = elementExists(_tornadoTreesV1, _type, _from);
do {
_from = direction ? _from + _step : _from - _step;
} while (direction == elementExists(_tornadoTreesV1, _type, _from));
uint256 high = direction ? _from : _from + _step;
uint256 low = direction ? _from - _step : _from;
uint256 mid = (high + low) / 2;
// Perform a binary search in this segment
while (low < mid) {
if (elementExists(_tornadoTreesV1, _type, mid)) {
low = mid;
} else {
high = mid;
}
mid = (low + high) / 2;
}
return mid + 1;
}
function elementExists(
ITornadoTreesV1 _tornadoTreesV1,
string memory _type,
uint256 index
) public view returns (bool success) {
// Try to get the element. If the call succeeds, the array length is higher; if it reverts, the length is equal or lower
(success, ) = address(_tornadoTreesV1).staticcall{ gas: 2500 }(abi.encodeWithSignature(_type, index));
}
function getRegisteredDeposits() external view returns (bytes32[] memory _deposits) {
uint256 count = depositsLength - lastProcessedDepositLeaf;
_deposits = new bytes32[](count);
for (uint256 i = 0; i < count; i++) {
_deposits[i] = deposits[lastProcessedDepositLeaf + i];
}
}
function getRegisteredWithdrawals() external view returns (bytes32[] memory _withdrawals) {
uint256 count = withdrawalsLength - lastProcessedWithdrawalLeaf;
_withdrawals = new bytes32[](count);
for (uint256 i = 0; i < count; i++) {
_withdrawals[i] = withdrawals[lastProcessedWithdrawalLeaf + i];
}
}
function setTornadoProxyContract(address _tornadoProxy) external onlyGovernance {
tornadoProxy = _tornadoProxy;
}
function setVerifierContract(IBatchTreeUpdateVerifier _treeUpdateVerifier) external onlyGovernance {
treeUpdateVerifier = _treeUpdateVerifier;
}
function blockNumber() public view virtual returns (uint256) {
return block.number;
}
}
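For intuition, a minimal off-chain simulation of `findArrayLength` (a sketch; `exists` stands in for the `elementExists` staticcall probe):
```js
function findArrayLength(exists, from, step) {
  if (from === 0 && step === 0) return 0 // for tests
  // gallop in step-sized jumps until the probe flips
  const direction = exists(from)
  do {
    from = direction ? from + step : from - step
  } while (direction === exists(from))
  let high = direction ? from : from + step
  let low = direction ? from - step : from
  let mid = Math.floor((high + low) / 2)
  // binary search: `low` always exists, `high` never does
  while (low < mid) {
    if (exists(mid)) low = mid
    else high = mid
    mid = Math.floor((low + high) / 2)
  }
  return mid + 1
}

console.log(findArrayLength((i) => i < 9, 4, 2)) // 9, the length of a 9-element array
```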

7
contracts/interfaces/IBatchTreeUpdateVerifier.sol Normal file

@@ -0,0 +1,7 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.6.0;
interface IBatchTreeUpdateVerifier {
function verifyProof(bytes calldata proof, uint256[1] calldata input) external view returns (bool);
}

21
contracts/interfaces/ITornadoTreesV1.sol Normal file

@@ -0,0 +1,21 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.6.0;
interface ITornadoTreesV1 {
function lastProcessedDepositLeaf() external view returns (uint256);
function lastProcessedWithdrawalLeaf() external view returns (uint256);
function depositRoot() external view returns (bytes32);
function withdrawalRoot() external view returns (bytes32);
function deposits(uint256 i) external view returns (bytes32);
function withdrawals(uint256 i) external view returns (bytes32);
function registerDeposit(address instance, bytes32 commitment) external;
function registerWithdrawal(address instance, bytes32 nullifier) external;
}

74
contracts/mocks/Pack.sol Normal file

@@ -0,0 +1,74 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.6.0;
contract Pack {
uint256 public constant CHUNK_TREE_HEIGHT = 8;
uint256 public constant CHUNK_SIZE = 2**CHUNK_TREE_HEIGHT;
uint256 public constant ITEM_SIZE = 32 + 20 + 4;
uint256 public constant BYTES_SIZE = CHUNK_SIZE * ITEM_SIZE;
uint256 public gas1;
uint256 public gas2;
uint256 public gas3;
uint256 public gas4;
bytes32 public hash;
event DepositData(address instance, bytes32 indexed hash, uint256 block, uint256 index);
function pack2(
bytes32[CHUNK_SIZE] memory hashes,
address[CHUNK_SIZE] memory instances,
uint32[CHUNK_SIZE] memory blocks
) public {
uint256 gasBefore = gasleft();
bytes memory data = new bytes(BYTES_SIZE);
for (uint256 i = 0; i < CHUNK_SIZE; i++) {
(bytes32 _hash, address _instance, uint32 _block) = (hashes[i], instances[i], blocks[i]);
assembly {
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x38), _block)
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x34), _instance)
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x20), _hash)
}
}
uint256 gasHash = gasleft();
bytes32 hash1 = sha256(data);
uint256 gasEvents = gasleft();
for (uint256 i = 0; i < CHUNK_SIZE; i++) {
emit DepositData(instances[i], hashes[i], blocks[i], i);
}
gas1 = gasEvents - gasleft();
gas2 = gasHash - gasEvents;
gas3 = gasBefore - gasHash;
gas4 = gasBefore;
hash = hash1;
}
function pack3(
bytes32[CHUNK_SIZE] memory hashes,
address[CHUNK_SIZE] memory instances,
uint32[CHUNK_SIZE] memory blocks
)
public
view
returns (
uint256,
uint256,
bytes32
)
{
uint256 gasBefore = gasleft();
bytes memory data = new bytes(BYTES_SIZE);
for (uint256 i = 0; i < CHUNK_SIZE; i++) {
(bytes32 _hash, address _instance, uint32 _block) = (hashes[i], instances[i], blocks[i]);
assembly {
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x38), _block)
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x34), _instance)
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x20), _hash)
}
}
uint256 gasHash = gasleft();
bytes32 hash1 = sha256(data);
return (gasleft() - gasHash, gasHash - gasBefore, hash1);
}
}

28
contracts/mocks/PublicArray.sol Normal file

@@ -0,0 +1,28 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.6.0;
contract PublicArray {
uint256[] public deposits;
uint256[] public withdrawals;
function lastProcessedDepositLeaf() external view returns (uint256) {}
function lastProcessedWithdrawalLeaf() external view returns (uint256) {}
function depositRoot() external view returns (bytes32) {}
function withdrawalRoot() external view returns (bytes32) {}
function setDeposits(uint256[] memory _deposits) public {
for (uint256 i = 0; i < _deposits.length; i++) {
deposits.push(_deposits[i]);
}
}
function setWithdrawals(uint256[] memory _withdrawals) public {
for (uint256 i = 0; i < _withdrawals.length; i++) {
withdrawals.push(_withdrawals[i]);
}
}
}

100
contracts/mocks/TornadoTreesMock.sol Normal file

@@ -0,0 +1,100 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.6.0;
pragma experimental ABIEncoderV2;
import "../TornadoTrees.sol";
import "../interfaces/ITornadoTreesV1.sol";
import "../interfaces/IBatchTreeUpdateVerifier.sol";
contract TornadoTreesMock is TornadoTrees {
uint256 public currentBlock;
constructor(
address _governance,
ITornadoTreesV1 _tornadoTreesV1,
SearchParams memory _searchParams
) public TornadoTrees(_governance, _tornadoTreesV1, _searchParams) {}
function setBlockNumber(uint256 _blockNumber) public {
currentBlock = _blockNumber;
}
function blockNumber() public view override returns (uint256) {
return currentBlock == 0 ? block.number : currentBlock;
}
function findArrayLengthMock(
ITornadoTreesV1 _tornadoTreesV1,
string memory _type,
uint256 _from,
uint256 _step
) public view returns (uint256) {
return findArrayLength(_tornadoTreesV1, _type, _from, _step);
}
function register(
address _instance,
bytes32 _commitment,
bytes32 _nullifier,
uint256 _depositBlockNumber,
uint256 _withdrawBlockNumber
) public {
setBlockNumber(_depositBlockNumber);
registerDeposit(_instance, _commitment);
setBlockNumber(_withdrawBlockNumber);
registerWithdrawal(_instance, _nullifier);
}
function updateRoots(bytes32 _depositRoot, bytes32 _withdrawalRoot) public {
depositRoot = _depositRoot;
withdrawalRoot = _withdrawalRoot;
}
function updateDepositTreeMock(
bytes32 _oldRoot,
bytes32 _newRoot,
uint32 _pathIndices,
TreeLeaf[] calldata _events
) public pure returns (uint256) {
bytes memory data = new bytes(BYTES_SIZE);
assembly {
mstore(add(data, 0x44), _pathIndices)
mstore(add(data, 0x40), _newRoot)
mstore(add(data, 0x20), _oldRoot)
}
for (uint256 i = 0; i < CHUNK_SIZE; i++) {
(bytes32 hash, address instance, uint32 depositBlock) = (_events[i].hash, _events[i].instance, _events[i].block);
assembly {
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x7c), depositBlock)
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x78), instance)
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x64), hash)
}
}
return uint256(sha256(data)) % SNARK_FIELD;
}
function updateDepositTreeMock2(
bytes32 _oldRoot,
bytes32 _newRoot,
uint32 _pathIndices,
TreeLeaf[] calldata _events
) public pure returns (bytes memory) {
bytes memory data = new bytes(BYTES_SIZE);
assembly {
mstore(add(data, 0x44), _pathIndices)
mstore(add(data, 0x40), _newRoot)
mstore(add(data, 0x20), _oldRoot)
}
for (uint256 i = 0; i < CHUNK_SIZE; i++) {
(bytes32 hash, address instance, uint32 depositBlock) = (_events[i].hash, _events[i].instance, _events[i].block);
assembly {
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x7c), depositBlock)
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x78), instance)
mstore(add(add(data, mul(ITEM_SIZE, i)), 0x64), hash)
}
}
return data;
}
}

79
contracts/mocks/TornadoTreesV1Mock.sol Normal file

@@ -0,0 +1,79 @@
// SPDX-License-Identifier: MIT
pragma solidity ^0.6.0;
pragma experimental ABIEncoderV2;
contract TornadoTreesV1Mock {
uint256 public timestamp;
uint256 public currentBlock;
bytes32[] public deposits;
uint256 public lastProcessedDepositLeaf;
bytes32[] public withdrawals;
uint256 public lastProcessedWithdrawalLeaf;
bytes32 public depositRoot;
bytes32 public withdrawalRoot;
constructor(
uint256 _lastProcessedDepositLeaf,
uint256 _lastProcessedWithdrawalLeaf,
bytes32 _depositRoot,
bytes32 _withdrawalRoot
) public {
lastProcessedDepositLeaf = _lastProcessedDepositLeaf;
lastProcessedWithdrawalLeaf = _lastProcessedWithdrawalLeaf;
depositRoot = _depositRoot;
withdrawalRoot = _withdrawalRoot;
}
function register(
address _instance,
bytes32 _commitment,
bytes32 _nullifier,
uint256 _depositBlockNumber,
uint256 _withdrawBlockNumber
) public {
setBlockNumber(_depositBlockNumber);
deposits.push(keccak256(abi.encode(_instance, _commitment, blockNumber())));
setBlockNumber(_withdrawBlockNumber);
withdrawals.push(keccak256(abi.encode(_instance, _nullifier, blockNumber())));
}
function getRegisteredDeposits() external view returns (bytes32[] memory _deposits) {
uint256 count = deposits.length - lastProcessedDepositLeaf;
_deposits = new bytes32[](count);
for (uint256 i = 0; i < count; i++) {
_deposits[i] = deposits[lastProcessedDepositLeaf + i];
}
}
function getRegisteredWithdrawals() external view returns (bytes32[] memory _withdrawals) {
uint256 count = withdrawals.length - lastProcessedWithdrawalLeaf;
_withdrawals = new bytes32[](count);
for (uint256 i = 0; i < count; i++) {
_withdrawals[i] = withdrawals[lastProcessedWithdrawalLeaf + i];
}
}
function setLastProcessedDepositLeaf(uint256 _lastProcessedDepositLeaf) public {
lastProcessedDepositLeaf = _lastProcessedDepositLeaf;
}
function setLastProcessedWithdrawalLeaf(uint256 _lastProcessedWithdrawalLeaf) public {
lastProcessedWithdrawalLeaf = _lastProcessedWithdrawalLeaf;
}
function resolve(bytes32 _addr) public pure returns (address) {
return address(uint160(uint256(_addr) >> (12 * 8)));
}
function setBlockNumber(uint256 _blockNumber) public {
currentBlock = _blockNumber;
}
function blockNumber() public view returns (uint256) {
return currentBlock == 0 ? block.number : currentBlock;
}
}

1
contracts/verifiers/BatchTreeUpdateVerifier.sol Symbolic link

@@ -0,0 +1 @@
../../artifacts/circuits/BatchTreeUpdateVerifier.sol

46
hardhat.config.js Normal file

@@ -0,0 +1,46 @@
/* global task, ethers */
require('@nomiclabs/hardhat-waffle')
require('dotenv').config()
// This is a sample Hardhat task. To learn how to create your own go to
// https://hardhat.org/guides/create-task.html
task('accounts', 'Prints the list of accounts', async () => {
const accounts = await ethers.getSigners()
for (const account of accounts) {
console.log(account.address)
}
})
// You need to export an object to set up your config
// Go to https://hardhat.org/config/ to learn more
/**
* @type import('hardhat/config').HardhatUserConfig
*/
const config = {
solidity: {
version: '0.6.12',
settings: {
optimizer: {
enabled: true,
runs: 200,
},
},
},
networks: {
hardhat: {
blockGasLimit: 9500000,
},
},
mocha: {
timeout: 600000,
},
}
if (process.env.NETWORK) {
config.networks[process.env.NETWORK] = {
url: `https://${process.env.NETWORK}.infura.io/v3/${process.env.INFURA_TOKEN}`,
accounts: [process.env.PRIVATE_KEY],
}
}
module.exports = config

47
optimize/Dockerfile Normal file

@@ -0,0 +1,47 @@
FROM ubuntu
RUN apt-get update && \
apt-get install -y python3 python3-distutils g++ make curl git && \
rm -rf /var/lib/apt/lists/*
# Install nvm with node and npm
RUN rm /bin/sh && ln -s /bin/bash /bin/sh
ENV NVM_DIR /usr/local/nvm
ENV NODE_VERSION 14.8.0
RUN curl https://raw.githubusercontent.com/creationix/nvm/v0.30.1/install.sh | bash \
&& source $NVM_DIR/nvm.sh \
&& nvm install $NODE_VERSION \
&& nvm alias default $NODE_VERSION \
&& nvm use default
ENV NODE_PATH $NVM_DIR/v$NODE_VERSION/lib/node_modules
ENV PATH $NVM_DIR/versions/node/v$NODE_VERSION/bin:$PATH
RUN node --version
WORKDIR /root
RUN git clone https://github.com/nodejs/node.git
RUN git clone https://github.com/iden3/circom.git
COPY node.sh /tmp
RUN apt-get update && apt-get install -y ninja-build
RUN /tmp/node.sh
RUN cd circom && \
git checkout v0.5.35 && \
npm install
RUN git clone https://github.com/iden3/r1csoptimize
RUN cd r1csoptimize && \
git checkout 8bc528b06c0f98818d1b5224e2078397f0bb7faf && \
npm install
RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
RUN source $HOME/.cargo/env && cargo install zkutil
RUN npm install -g circom snarkjs
WORKDIR /root/test
RUN npm init -y && npm install circomlib
RUN apt-get update && apt-get install -y ne
RUN mkdir circuits
COPY sha/circuit.circom sha/input.js test.sh ./circuits/

34
optimize/node.sh Executable file

@@ -0,0 +1,34 @@
#!/bin/bash -e
cd node
git checkout 8beef5eeb82425b13d447b50beafb04ece7f91b1
patch -p1 <<EOL
index 0097683120..d35fd6e68d 100644
--- a/deps/v8/src/api/api.cc
+++ b/deps/v8/src/api/api.cc
@@ -7986,7 +7986,7 @@ void BigInt::ToWordsArray(int* sign_bit, int* word_count,
void Isolate::ReportExternalAllocationLimitReached() {
i::Heap* heap = reinterpret_cast<i::Isolate*>(this)->heap();
if (heap->gc_state() != i::Heap::NOT_IN_GC) return;
- heap->ReportExternalMemoryPressure();
+ // heap->ReportExternalMemoryPressure();
}
HeapProfiler* Isolate::GetHeapProfiler() {
diff --git a/deps/v8/src/objects/backing-store.cc b/deps/v8/src/objects/backing-store.cc
index bd9f39b7d3..c7d7e58ef3 100644
--- a/deps/v8/src/objects/backing-store.cc
+++ b/deps/v8/src/objects/backing-store.cc
@@ -34,7 +34,7 @@ constexpr bool kUseGuardRegions = false;
// address space limits needs to be smaller.
constexpr size_t kAddressSpaceLimit = 0x8000000000L; // 512 GiB
#elif V8_TARGET_ARCH_64_BIT
-constexpr size_t kAddressSpaceLimit = 0x10100000000L; // 1 TiB + 4 GiB
+constexpr size_t kAddressSpaceLimit = 0x40100000000L; // 4 TiB + 4 GiB
#else
constexpr size_t kAddressSpaceLimit = 0xC0000000; // 3 GiB
#endif
EOL
# ./configure --ninja
# JOBS=24 make
./configure
make -j12

56
package.json Normal file

@@ -0,0 +1,56 @@
{
"name": "tornado-trees",
"version": "0.0.9",
"main": "src/index.js",
"repository": "https://github.com/tornadocash/tornado-trees.git",
"author": "Tornadocash team <hello@tornado.cash>",
"license": "MIT",
"files": [
"src/*",
"contracts/*",
"scripts/*"
],
"scripts": {
"compile": "npx hardhat compile",
"test": "npx hardhat test",
"eslint": "eslint --ext .js --ignore-path .gitignore .",
"prettier:check": "prettier --check . --config .prettierrc",
"prettier:fix": "prettier --write . --config .prettierrc",
"lint": "yarn eslint && yarn prettier:check",
"changeTreeHeight": "scripts/changeTreeHeight.sh",
"circuit:batchTreeUpdate": "scripts/buildCircuit.sh BatchTreeUpdate",
"circuit:batchTreeUpdateLarge": "scripts/buildCircuit.sh BatchTreeUpdate large",
"circuit:batchTreeUpdateWitness": "scripts/buildWitness.sh BatchTreeUpdate",
"circuit": "yarn circuit:batchTreeUpdate"
},
"devDependencies": {
"@nomiclabs/hardhat-ethers": "^2.0.1",
"@nomiclabs/hardhat-waffle": "^2.0.1",
"babel-eslint": "^10.1.0",
"chai": "^4.2.0",
"eslint": "^7.19.0",
"eslint-config-prettier": "^7.2.0",
"eslint-plugin-prettier": "^3.3.1",
"ethereum-waffle": "^3.2.2",
"ethers": "^5.0.26",
"hardhat": "^2.0.8",
"prettier": "^2.2.1",
"prettier-plugin-solidity": "^1.0.0-beta.3",
"solhint-plugin-prettier": "^0.0.5",
"torn-token": "^1.0.0"
},
"dependencies": {
"@openzeppelin/contracts": "^3.4.0",
"@openzeppelin/upgrades-core": "^1.5.1",
"circom": "^0.5.38",
"circom_runtime": "^0.1.12",
"circomlib": "git+https://github.com/tornadocash/circomlib.git#d20d53411d1bef61f38c99a8b36d5d0cc4836aa1",
"dotenv": "^8.2.0",
"ffiasm": "^0.1.1",
"ffjavascript": "^0.2.35",
"fixed-merkle-tree": "^0.5.0",
"jssha": "^3.2.0",
"snarkjs": "^0.3.57",
"tmp-promise": "^3.0.2"
}
}

11
scripts/buildCircuit.sh Executable file

@@ -0,0 +1,11 @@
#!/bin/bash -e
mkdir -p artifacts/circuits
if [ "$2" = "large" ]; then
npx circom -v -f -r artifacts/circuits/$1.r1cs -c artifacts/circuits/$1.cpp -s artifacts/circuits/$1.sym circuits/$1.circom
else
npx circom -v -r artifacts/circuits/$1.r1cs -w artifacts/circuits/$1.wasm -s artifacts/circuits/$1.sym circuits/$1.circom
fi
zkutil setup -c artifacts/circuits/$1.r1cs -p artifacts/circuits/$1.params
zkutil generate-verifier -p artifacts/circuits/$1.params -v artifacts/circuits/${1}Verifier.sol
sed -i.bak "s/contract Verifier/contract ${1}Verifier/g" artifacts/circuits/${1}Verifier.sol
npx snarkjs info -r artifacts/circuits/$1.r1cs

8
scripts/buildWitness.sh Executable file

@@ -0,0 +1,8 @@
#!/bin/bash -e
# required dependencies: libgmp-dev nlohmann-json3-dev nasm g++
cd artifacts/circuits
node ../../node_modules/ffiasm/src/buildzqfield.js -q 21888242871839275222246405745257275088548364400416034343698204186575808495617 -n Fr
nasm -felf64 fr.asm
cp ../../node_modules/circom_runtime/c/*.cpp ./
cp ../../node_modules/circom_runtime/c/*.hpp ./
g++ -pthread main.cpp calcwit.cpp utils.cpp fr.cpp fr.o ${1}.cpp -o ${1} -lgmp -std=c++11 -O3 -fopenmp -DSANITY_CHECK

7
scripts/changeTreeHeight.sh Executable file

@@ -0,0 +1,7 @@
#!/bin/bash
case $(sed --help 2>&1) in
*GNU*) sed_i () { xargs sed -i "$@"; };;
*) sed_i () { xargs sed -i '' "$@"; };;
esac
grep -l --exclude-dir={.git,node_modules,artifacts} -r "CHUNK_TREE_HEIGHT = [0-9]" . | sed_i "s/CHUNK_TREE_HEIGHT = [0-9]/CHUNK_TREE_HEIGHT = ${1}/g"

114
scripts/deploy.js Normal file

@@ -0,0 +1,114 @@
// We require the Hardhat Runtime Environment explicitly here. This is optional
// but useful for running the script in a standalone fashion through `node <script>`.
//
// When running the script with `hardhat run <script>` you'll find the Hardhat
// Runtime Environment's members available in the global scope.
const hre = require('hardhat')
const { toFixedHex, poseidonHash2 } = require('../src/utils')
const MerkleTree = require('fixed-merkle-tree')
const abi = new hre.ethers.utils.AbiCoder()
const instances = [
'0x1111000000000000000000000000000000001111',
'0x2222000000000000000000000000000000002222',
'0x3333000000000000000000000000000000003333',
'0x4444000000000000000000000000000000004444',
]
const blocks = ['0xaaaaaaaa', '0xbbbbbbbb', '0xcccccccc', '0xdddddddd']
const CHUNK_TREE_HEIGHT = 8
const levels = 20
const nonRandomBN = (nonce = 0) =>
hre.ethers.BigNumber.from('0x004d51bffaafdb3eed0661c1cfd76c8cd6ec1456b80b24bbb855f3a141ebf0be').sub(nonce)
async function main() {
const governance = '0x5efda50f22d34F262c29268506C5Fa42cB56A1Ce'
const [tornadoProxy] = await hre.ethers.getSigners()
console.log('deployer/tornadoProxy acc: ', tornadoProxy.address)
const tree = new MerkleTree(levels, [], { hashFunction: poseidonHash2 })
const TornadoTreesV1Mock = await hre.ethers.getContractFactory('TornadoTreesV1Mock')
const tornadoTreesV1Mock = await TornadoTreesV1Mock.deploy(0, 0, tree.root(), tree.root())
await tornadoTreesV1Mock.deployed()
console.log('tornadoTreesV1Mock deployed to:', tornadoTreesV1Mock.address)
const notes = []
const depositEvents = {}
const withdrawalEvents = {}
for (let i = 0; i < 2 ** CHUNK_TREE_HEIGHT; i++) {
const note = {
instance: instances[i % instances.length],
depositBlock: blocks[i % blocks.length],
withdrawalBlock: 2 + i + i * 4 * 60 * 24,
commitment: nonRandomBN(i),
nullifierHash: nonRandomBN(i + instances.length),
}
await tornadoTreesV1Mock.register(
note.instance,
toFixedHex(note.commitment),
toFixedHex(note.nullifierHash),
note.depositBlock,
note.withdrawalBlock,
{ gasLimit: 200000 },
)
const encodedData = abi.encode(
['address', 'bytes32', 'uint256'],
[note.instance, toFixedHex(note.commitment), note.depositBlock],
)
const leaf = hre.ethers.utils.keccak256(encodedData)
depositEvents[leaf] = {
hash: toFixedHex(note.commitment),
instance: toFixedHex(note.instance, 20),
block: toFixedHex(note.depositBlock, 4),
}
const encodedDataW = abi.encode(
['address', 'bytes32', 'uint256'],
[note.instance, toFixedHex(note.nullifierHash), note.withdrawalBlock],
)
const leafW = hre.ethers.utils.keccak256(encodedDataW)
withdrawalEvents[leafW] = {
hash: toFixedHex(note.nullifierHash),
instance: toFixedHex(note.instance, 20),
block: toFixedHex(note.withdrawalBlock, 4),
}
notes[i] = note
}
console.log(`Registered ${notes.length} new deposits and withdrawals in tornadoTreesV1Mock`)
console.log(JSON.stringify(depositEvents, null, 2))
console.log(JSON.stringify(withdrawalEvents, null, 2))
const BatchTreeUpdateVerifier = await hre.ethers.getContractFactory('BatchTreeUpdateVerifier')
const verifier = await BatchTreeUpdateVerifier.deploy()
await verifier.deployed()
console.log('Verifier deployed to:', verifier.address)
const TornadoTrees = await hre.ethers.getContractFactory('TornadoTrees')
const tornadoTrees = await TornadoTrees.deploy(governance, tornadoTreesV1Mock.address, {
depositsFrom: 1, // approximate value; there are actually 4, but the contract will figure that out
depositsStep: 2, // step size used when searching for the actual array length
withdrawalsFrom: 1,
withdrawalsStep: 2,
})
// note: governance still has to call initialize(tornadoProxy, verifier) before the trees can be updated
await tornadoTrees.deployed()
console.log('tornadoTrees deployed to:', tornadoTrees.address)
console.log('You can use the same private key to register new deposits in the tornadoTrees')
console.log(`\nTORNADO_TREES_V1=${tornadoTreesV1Mock.address}`)
console.log(`TORNADO_TREES=${tornadoTrees.address}`)
}
// We recommend this pattern to be able to use async/await everywhere
// and properly handle errors.
main()
.then(() => process.exit(0))
.catch((error) => {
console.error(error)
process.exit(1)
})

115
scripts/deployEmptyV1.js Normal file

@@ -0,0 +1,115 @@
// We require the Hardhat Runtime Environment explicitly here. This is optional
// but useful for running the script in a standalone fashion through `node <script>`.
//
// When running the script with `hardhat run <script>` you'll find the Hardhat
// Runtime Environment's members available in the global scope.
const hre = require('hardhat')
const { toFixedHex, poseidonHash2 } = require('../src/utils')
const MerkleTree = require('fixed-merkle-tree')
const abi = new hre.ethers.utils.AbiCoder()
const instances = [
'0x1111000000000000000000000000000000001111',
'0x2222000000000000000000000000000000002222',
'0x3333000000000000000000000000000000003333',
'0x4444000000000000000000000000000000004444',
]
const blocks = ['0xaaaaaaaa', '0xbbbbbbbb', '0xcccccccc', '0xdddddddd']
const CHUNK_TREE_HEIGHT = 8
const levels = 20
const nonRandomBN = (nonce = 0) =>
hre.ethers.BigNumber.from('0x004d51bffaafdb3eed0661c1cfd76c8cd6ec1456b80b24bbb855f3a141ebf0be').sub(nonce)
async function main() {
const governance = '0x5efda50f22d34F262c29268506C5Fa42cB56A1Ce'
const [tornadoProxy] = await hre.ethers.getSigners()
console.log('deployer/tornadoProxy acc: ', tornadoProxy.address)
const tree = new MerkleTree(levels, [], { hashFunction: poseidonHash2 })
const TornadoTreesV1Mock = await hre.ethers.getContractFactory('TornadoTreesV1Mock')
const tornadoTreesV1Mock = await TornadoTreesV1Mock.deploy(0, 0, tree.root(), tree.root())
await tornadoTreesV1Mock.deployed()
console.log('tornadoTreesV1Mock deployed to:', tornadoTreesV1Mock.address)
const BatchTreeUpdateVerifier = await hre.ethers.getContractFactory('BatchTreeUpdateVerifier')
const verifier = await BatchTreeUpdateVerifier.deploy()
await verifier.deployed()
console.log('Verifier deployed to:', verifier.address)
const TornadoTrees = await hre.ethers.getContractFactory('TornadoTreesMock')
const tornadoTrees = await TornadoTrees.deploy(governance, tornadoTreesV1Mock.address, {
depositsFrom: 0,
depositsStep: 0,
withdrawalsFrom: 0,
withdrawalsStep: 0,
})
await tornadoTrees.deployed()
const notes = []
const depositEvents = {}
const withdrawalEvents = {}
for (let i = 0; i < 2 ** CHUNK_TREE_HEIGHT; i++) {
const note = {
instance: instances[i % instances.length],
depositBlock: blocks[i % blocks.length],
withdrawalBlock: 2 + i + i * 4 * 60 * 24,
commitment: nonRandomBN(i),
nullifierHash: nonRandomBN(i + instances.length),
}
await tornadoTrees.register(
note.instance,
toFixedHex(note.commitment),
toFixedHex(note.nullifierHash),
note.depositBlock,
note.withdrawalBlock,
{ gasLimit: 200000 },
)
const encodedData = abi.encode(
['address', 'bytes32', 'uint256'],
[note.instance, toFixedHex(note.commitment), note.depositBlock],
)
const leaf = hre.ethers.utils.keccak256(encodedData)
depositEvents[leaf] = {
hash: toFixedHex(note.commitment),
instance: toFixedHex(note.instance, 20),
block: toFixedHex(note.depositBlock, 4),
}
const encodedDataW = abi.encode(
['address', 'bytes32', 'uint256'],
[note.instance, toFixedHex(note.nullifierHash), note.withdrawalBlock],
)
const leafW = hre.ethers.utils.keccak256(encodedDataW)
withdrawalEvents[leafW] = {
hash: toFixedHex(note.nullifierHash),
instance: toFixedHex(note.instance, 20),
block: toFixedHex(note.withdrawalBlock, 4),
}
notes[i] = note
}
console.log(`Registered ${notes.length} new deposits and withdrawals in tornadoTreesV1Mock`)
console.log(JSON.stringify(depositEvents, null, 2))
console.log(JSON.stringify(withdrawalEvents, null, 2))
console.log('tornadoTrees deployed to:', tornadoTrees.address)
console.log('You can use the same private key to register new deposits in the tornadoTrees')
console.log(`\nTORNADO_TREES_V1=${tornadoTreesV1Mock.address}`)
console.log(`TORNADO_TREES=${tornadoTrees.address}`)
}
// We recommend this pattern to be able to use async/await everywhere
// and properly handle errors.
main()
.then(() => process.exit(0))
.catch((error) => {
console.error(error)
process.exit(1)
})

123
src/index.js Normal file

@@ -0,0 +1,123 @@
const ethers = require('ethers')
const BigNumber = ethers.BigNumber
const { wtns } = require('snarkjs')
const { utils } = require('ffjavascript')
const { bitsToNumber, toBuffer, toFixedHex, poseidonHash } = require('./utils')
const jsSHA = require('jssha')
const fs = require('fs')
const tmp = require('tmp-promise')
const util = require('util')
const exec = util.promisify(require('child_process').exec)
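// Mirrors TreeUpdateArgsHasher in circuits/Utils.circom: sha256 over the packed
// arguments, reduced into the snark field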
function hashInputs(input) {
const sha = new jsSHA('SHA-256', 'ARRAYBUFFER')
sha.update(toBuffer(input.oldRoot, 32))
sha.update(toBuffer(input.newRoot, 32))
sha.update(toBuffer(input.pathIndices, 4))
for (let i = 0; i < input.instances.length; i++) {
sha.update(toBuffer(input.hashes[i], 32))
sha.update(toBuffer(input.instances[i], 20))
sha.update(toBuffer(input.blocks[i], 4))
}
const hash = '0x' + sha.getHash('HEX')
const result = BigNumber.from(hash)
.mod(BigNumber.from('21888242871839275222246405745257275088548364400416034343698204186575808495617'))
.toString()
return result
}
function prove(input, keyBasePath) {
return tmp.dir().then(async (dir) => {
dir = dir.path
let out
try {
if (fs.existsSync(`${keyBasePath}`)) {
// native witness calc
fs.writeFileSync(`${dir}/input.json`, JSON.stringify(input, null, 2))
out = await exec(`${keyBasePath} ${dir}/input.json ${dir}/witness.json`)
} else {
await wtns.calculate(utils.unstringifyBigInts(input), `${keyBasePath}.wasm`, `${dir}/witness.wtns`)
const witness = utils.stringifyBigInts(await wtns.exportJson(`${dir}/witness.wtns`))
fs.writeFileSync(`${dir}/witness.json`, JSON.stringify(witness, null, 2))
}
out = await exec(
`zkutil prove -c ${keyBasePath}.r1cs -p ${keyBasePath}.params -w ${dir}/witness.json -r ${dir}/proof.json -o ${dir}/public.json`,
)
} catch (e) {
console.log(out, e)
throw e
}
return '0x' + JSON.parse(fs.readFileSync(`${dir}/proof.json`)).proof
})
}
function batchTreeUpdate(tree, events) {
const batchHeight = Math.log2(events.length)
if (!Number.isInteger(batchHeight)) {
throw new Error('events length has to be power of 2')
}
const oldRoot = tree.root().toString()
const leaves = events.map((e) => poseidonHash([e.instance, e.hash, e.block]))
tree.bulkInsert(leaves)
const newRoot = tree.root().toString()
let { pathElements, pathIndices } = tree.path(tree.elements().length - 1)
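// The circuit inserts the whole 2^batchHeight subtree at once, so the lowest
// batchHeight levels are dropped from the path and the rest packed into a number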
pathElements = pathElements.slice(batchHeight).map((a) => BigNumber.from(a).toString())
pathIndices = bitsToNumber(pathIndices.slice(batchHeight)).toString()
const input = {
oldRoot,
newRoot,
pathIndices,
pathElements,
instances: events.map((e) => BigNumber.from(e.instance).toString()),
hashes: events.map((e) => BigNumber.from(e.hash).toString()),
blocks: events.map((e) => BigNumber.from(e.block).toString()),
}
input.argsHash = hashInputs(input)
const args = [
toFixedHex(input.argsHash),
toFixedHex(input.oldRoot),
toFixedHex(input.newRoot),
toFixedHex(input.pathIndices, 4),
events.map((e) => ({
hash: toFixedHex(e.hash),
instance: toFixedHex(e.instance, 20),
block: toFixedHex(e.block, 4),
})),
]
return { input, args }
// const proofData = await websnarkUtils.genWitnessAndProve(
// this.groth16,
// input,
// this.provingKeys.batchTreeUpdateCircuit,
// this.provingKeys.batchTreeUpdateProvingKey,
// )
// const { proof } = websnarkUtils.toSolidityInput(proofData)
// const args = [
// toFixedHex(input.oldRoot),
// toFixedHex(input.newRoot),
// toFixedHex(input.pathIndices),
// events.map((e) => ({
// instance: toFixedHex(e.instance, 20),
// hash: toFixedHex(e.hash),
// block: toFixedHex(e.block),
// })),
// ]
// return {
// proof,
// args,
// }
}
module.exports = { batchTreeUpdate, prove }

45
src/utils.js Normal file

@@ -0,0 +1,45 @@
const crypto = require('crypto')
const ethers = require('ethers')
const BigNumber = ethers.BigNumber
const { poseidon } = require('circomlib')
const poseidonHash = (items) => BigNumber.from(poseidon(items).toString())
const poseidonHash2 = (a, b) => poseidonHash([a, b])
/** Generate random number of specified byte length */
const randomBN = (nbytes = 31) => BigNumber.from(crypto.randomBytes(nbytes))
/** BigNumber to hex string of specified length */
const toFixedHex = (number, length = 32) =>
'0x' +
(number instanceof Buffer
? number.toString('hex')
: BigNumber.from(number).toHexString().slice(2)
).padStart(length * 2, '0')
const toBuffer = (value, length) =>
Buffer.from(
BigNumber.from(value)
.toHexString()
.slice(2)
.padStart(length * 2, '0'),
'hex',
)
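/** Convert an LSB-first array of 0/1 bits to a number (JS bitwise ops cap this at 31 bits, enough for the path indices used here) */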
function bitsToNumber(bits) {
let result = 0
for (const item of bits.slice().reverse()) {
result = (result << 1) + item
}
return result
}
module.exports = {
randomBN,
bitsToNumber,
toFixedHex,
toBuffer,
poseidonHash,
poseidonHash2,
}

147
test/binarySearch.test.js Normal file

@@ -0,0 +1,147 @@
/* global ethers */
const { expect } = require('chai')
const depositsEven = [10, 11, 12, 13, 14, 15, 16, 17, 18]
const depositsOdd = [10, 11, 12, 13, 14, 15, 16, 17]
describe('findArrayLength', () => {
let publicArray
let tornadoTrees
let PublicArray
beforeEach(async function () {
const [operator, tornadoProxy] = await ethers.getSigners()
PublicArray = await ethers.getContractFactory('PublicArray')
publicArray = await PublicArray.deploy()
await publicArray.setDeposits(depositsEven)
await publicArray.setWithdrawals(depositsEven)
const TornadoTrees = await ethers.getContractFactory('TornadoTreesMock')
tornadoTrees = await TornadoTrees.deploy(operator.address, publicArray.address, {
depositsFrom: 3,
depositsStep: 3,
withdrawalsFrom: 2,
withdrawalsStep: 2,
})
await tornadoTrees.initialize(tornadoProxy.address, publicArray.address)
})
it('should work for even array', async () => {
const depositsLength = await tornadoTrees.findArrayLengthMock(
publicArray.address,
'deposits(uint256)',
4,
2,
)
expect(depositsLength).to.be.equal(depositsEven.length)
})
it('should work for empty array', async () => {
publicArray = await PublicArray.deploy()
// will throw out of gas if you pass non-zero params
const depositsLength = await tornadoTrees.findArrayLengthMock(
publicArray.address,
'deposits(uint256)',
0,
0,
)
expect(depositsLength).to.be.equal(0)
})
it('should work for odd array', async () => {
publicArray = await PublicArray.deploy()
await publicArray.setDeposits(depositsOdd)
const depositsLength = await tornadoTrees.findArrayLengthMock(
publicArray.address,
'deposits(uint256)',
4,
2,
)
expect(depositsLength).to.be.equal(depositsOdd.length)
})
it('should work for even array and odd step', async () => {
const depositsLength = await tornadoTrees.findArrayLengthMock(
publicArray.address,
'deposits(uint256)',
4,
3,
)
expect(depositsLength).to.be.equal(depositsEven.length)
})
it('should work for odd array and odd step', async () => {
publicArray = await PublicArray.deploy()
await publicArray.setDeposits(depositsOdd)
const depositsLength = await tornadoTrees.findArrayLengthMock(
publicArray.address,
'deposits(uint256)',
4,
3,
)
expect(depositsLength).to.be.equal(depositsOdd.length)
})
it('should work for odd array and step 1', async () => {
publicArray = await PublicArray.deploy()
await publicArray.setDeposits(depositsOdd)
const depositsLength = await tornadoTrees.findArrayLengthMock(
publicArray.address,
'deposits(uint256)',
4,
1,
)
expect(depositsLength).to.be.equal(depositsOdd.length)
})
it('should work for big array and big step', async () => {
const deposits = Array.from(Array(100).keys())
publicArray = await PublicArray.deploy()
await publicArray.setDeposits(deposits)
const depositsLength = await tornadoTrees.findArrayLengthMock(
publicArray.address,
'deposits(uint256)',
67,
10,
)
expect(depositsLength).to.be.equal(deposits.length)
})
it('should work for an array and big big step', async () => {
const deposits = Array.from(Array(30).keys())
publicArray = await PublicArray.deploy()
await publicArray.setDeposits(deposits)
const depositsLength = await tornadoTrees.findArrayLengthMock(
publicArray.address,
'deposits(uint256)',
1,
50,
)
expect(depositsLength).to.be.equal(deposits.length)
})
it('should pass stress test', async () => {
const iterations = 30
const days = 10
const depositsPerDay = 10
const dispersion = 5
for (let i = 0; i < iterations; i++) {
let len = 0
for (let j = 0; j < days; j++) {
len += depositsPerDay + Math.round((Math.random() - 0.5) * 2 * dispersion)
}
const deposits = Array.from(Array(len).keys())
publicArray = await PublicArray.deploy()
await publicArray.setDeposits(deposits)
const depositsLength = await tornadoTrees.findArrayLengthMock(
publicArray.address,
'deposits(uint256)',
days * depositsPerDay,
dispersion * 2,
)
expect(depositsLength).to.be.equal(deposits.length)
}
})
})

44
test/pack.test.js Normal file

@@ -0,0 +1,44 @@
/* global ethers */
const instances = [
'0xc6325fa78E0764993Bf2997116A3771bCbcb3fa9',
'0xb70738422D0f9d1225300eE0Fc67e7392095567d',
'0xA675B536203a123B0214cdf1EBb1298F440dA19A',
'0xFA1835cf197C3281Dc993a63bb160026dAC98bF3',
]
const hashes = [
'0x6f44cd7458bf24f65851fa8097712e3a8d9a6f3e387c501b285338308a74b8f3',
'0xafd3103939b7b0cd7a0ad1ddac57dd13af7f2825a21b47ae995b5bb0f767a106',
'0x57f7b90a3cb4ea6860e6dd5fa44ac4f53ebe6ae3948af577a01ef51738313246',
]
const CHUNK_TREE_HEIGHT = 8
describe.skip('Pack', () => {
it('should work', async () => {
const Pack = await ethers.getContractFactory('Pack')
const pack = await Pack.deploy()
const notes = []
for (let i = 0; i < 2 ** CHUNK_TREE_HEIGHT; i++) {
notes[i] = {
instance: instances[i % instances.length],
hash: hashes[i % hashes.length],
block: 1 + i,
}
}
const receipt = await pack.pack2(
notes.map((a) => a.hash),
notes.map((a) => a.instance),
notes.map((a) => a.block),
)
const receipt2 = await receipt.wait()
console.log(`total ${receipt2.gasUsed}`)
console.log(`batch size ${notes.length}`)
console.log(`events ${await pack.gas1()}`)
console.log(`hash ${await pack.gas2()}`)
console.log(`bytes ${await pack.gas3()}`)
console.log(`calldata ${receipt.gasLimit.sub(await pack.gas4())}`)
})
})

24
test/snark.test.js Normal file

@@ -0,0 +1,24 @@
const { expect } = require('chai')
const MerkleTree = require('fixed-merkle-tree')
const { poseidonHash2, randomBN } = require('../src/utils')
const { batchTreeUpdate, prove } = require('../src/index')
const levels = 20
const CHUNK_TREE_HEIGHT = 8
describe('Snark', () => {
it('should work', async () => {
const tree = new MerkleTree(levels, [], { hashFunction: poseidonHash2 })
const events = []
for (let i = 0; i < 2 ** CHUNK_TREE_HEIGHT; i++) {
events.push({
hash: randomBN(31).toString(),
instance: randomBN(20).toString(),
block: randomBN(4).toString(),
})
}
const { input } = batchTreeUpdate(tree, events)
const proof = await prove(input, './artifacts/circuits/BatchTreeUpdate')
expect(proof.length).to.be.gt(0)
})
})

299
test/tornadoTrees.test.js Normal file

@@ -0,0 +1,299 @@
/* global ethers */
const { expect } = require('chai')
const { toFixedHex, poseidonHash2, randomBN } = require('../src/utils')
const MerkleTree = require('fixed-merkle-tree')
const controller = require('../src/index')
async function register(note, tornadoTrees, from) {
await tornadoTrees
.connect(from)
.register(
note.instance,
toFixedHex(note.commitment),
toFixedHex(note.nullifierHash),
note.depositBlock,
note.withdrawalBlock,
)
}
const levels = 20
const CHUNK_TREE_HEIGHT = 8
const instances = [
'0x1111000000000000000000000000000000001111',
'0x2222000000000000000000000000000000002222',
'0x3333000000000000000000000000000000003333',
'0x4444000000000000000000000000000000004444',
]
const blocks = ['0xaaaaaaaa', '0xbbbbbbbb', '0xcccccccc', '0xdddddddd']
describe('TornadoTrees', function () {
let tree
let operator
let tornadoProxy
let verifier
let tornadoTrees
let tornadoTreesV1
let notes
let depositDataEventFilter
const depositEvents = []
const withdrawalEvents = []
beforeEach(async function () {
tree = new MerkleTree(levels, [], { hashFunction: poseidonHash2 })
;[operator, tornadoProxy] = await ethers.getSigners()
const BatchTreeUpdateVerifier = await ethers.getContractFactory('BatchTreeUpdateVerifier')
verifier = await BatchTreeUpdateVerifier.deploy()
const TornadoTreesV1 = await ethers.getContractFactory('TornadoTreesV1Mock')
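// the V1 mock starts out empty: both initial roots are the empty Merkle tree root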
tornadoTreesV1 = await TornadoTreesV1.deploy(0, 0, tree.root(), tree.root())
notes = []
for (let i = 0; i < 2 ** CHUNK_TREE_HEIGHT; i++) {
notes[i] = {
instance: instances[i % instances.length],
depositBlock: blocks[i % blocks.length],
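// spread withdrawals roughly a day apart (4 * 60 * 24 = 5760 blocks per day, assuming 15s blocks)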
withdrawalBlock: 2 + i + i * 4 * 60 * 24,
commitment: randomBN(),
nullifierHash: randomBN(),
}
await register(notes[i], tornadoTreesV1, tornadoProxy)
depositEvents[i] = {
hash: toFixedHex(notes[i].commitment),
instance: toFixedHex(notes[i].instance, 20),
block: toFixedHex(notes[i].depositBlock, 4),
}
withdrawalEvents[i] = {
hash: toFixedHex(notes[i].nullifierHash),
instance: toFixedHex(notes[i].instance, 20),
block: toFixedHex(notes[i].withdrawalBlock, 4),
}
}
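// {from, step} presumably hint where the new contract should search for the end of the V1 arrays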
const TornadoTrees = await ethers.getContractFactory('TornadoTreesMock')
tornadoTrees = await TornadoTrees.deploy(operator.address, tornadoTreesV1.address, {
depositsFrom: 1,
depositsStep: 1,
withdrawalsFrom: 2,
withdrawalsStep: 2,
})
await tornadoTrees.initialize(tornadoProxy.address, verifier.address)
depositDataEventFilter = tornadoTrees.filters.DepositData()
})
describe('#updateDepositTree', () => {
it('should check hash', async () => {
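// the mock recomputes the compressed args hash on-chain; it must match args[0] computed off-chain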
const { args } = controller.batchTreeUpdate(tree, depositEvents)
const solHash = await tornadoTrees.updateDepositTreeMock(...args.slice(1))
expect(solHash).to.be.equal(args[0])
})
it('should prove snark', async () => {
const { input, args } = controller.batchTreeUpdate(tree, depositEvents)
const proof = await controller.prove(input, './artifacts/circuits/BatchTreeUpdate')
await tornadoTrees.updateDepositTree(proof, ...args)
const updatedRoot = await tornadoTrees.depositRoot()
expect(updatedRoot).to.be.equal(tree.root())
})
it('should work for non-empty tree', async () => {
let { input, args } = controller.batchTreeUpdate(tree, depositEvents)
let proof = await controller.prove(input, './artifacts/circuits/BatchTreeUpdate')
await tornadoTrees.updateDepositTree(proof, ...args)
let updatedRoot = await tornadoTrees.depositRoot()
expect(updatedRoot).to.be.equal(tree.root())
// insert the same events as a second batch into the now non-empty tree
for (let i = 0; i < notes.length; i++) {
await register(notes[i], tornadoTrees, tornadoProxy)
}
;({ input, args } = controller.batchTreeUpdate(tree, depositEvents))
proof = await controller.prove(input, './artifacts/circuits/BatchTreeUpdate')
await tornadoTrees.updateDepositTree(proof, ...args)
updatedRoot = await tornadoTrees.depositRoot()
expect(updatedRoot).to.be.equal(tree.root())
})
it('should work with events from contracts', async () => {
let { input, args } = controller.batchTreeUpdate(tree, depositEvents)
let proof = await controller.prove(input, './artifacts/circuits/BatchTreeUpdate')
await tornadoTrees.updateDepositTree(proof, ...args)
let updatedRoot = await tornadoTrees.depositRoot()
expect(updatedRoot).to.be.equal(tree.root())
const migratedEvents = await tornadoTrees.queryFilter(depositDataEventFilter)
migratedEvents.forEach((e, i) => {
expect(e.args.index).to.be.equal(i)
})
// register the same notes directly on the new contract and rebuild the events from its logs
for (let i = 0; i < notes.length; i++) {
await register(notes[i], tornadoTrees, tornadoProxy)
}
let registeredEvents = await tornadoTrees.queryFilter(depositDataEventFilter)
registeredEvents = registeredEvents.map((e) => ({
hash: toFixedHex(e.args.hash),
instance: toFixedHex(e.args.instance, 20),
block: toFixedHex(e.args.block, 4),
}))
;({ input, args } = controller.batchTreeUpdate(tree, registeredEvents.slice(0, notes.length)))
proof = await controller.prove(input, './artifacts/circuits/BatchTreeUpdate')
await tornadoTrees.updateDepositTree(proof, ...args)
updatedRoot = await tornadoTrees.depositRoot()
expect(updatedRoot).to.be.equal(tree.root())
})
it('should work for batch+N filled v1 tree', async () => {
const batchSize = 2 ** CHUNK_TREE_HEIGHT
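// overfill the v1 contract with two extra notes beyond a full batch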
for (let i = batchSize; i < batchSize + 2; i++) {
notes.push({
instance: instances[i % instances.length],
depositBlock: blocks[i % blocks.length],
withdrawalBlock: 2 + i + i * 4 * 60 * 24,
commitment: randomBN(),
nullifierHash: randomBN(),
})
await register(notes[i], tornadoTreesV1, tornadoProxy)
}
const TornadoTrees = await ethers.getContractFactory('TornadoTreesMock')
const newTornadoTrees = await TornadoTrees.deploy(operator.address, tornadoTreesV1.address, {
depositsFrom: 1,
depositsStep: 1,
withdrawalsFrom: 2,
withdrawalsStep: 2,
})
await newTornadoTrees.initialize(tornadoProxy.address, verifier.address)
// load first batchSize deposits
let { input, args } = controller.batchTreeUpdate(tree, depositEvents)
let proof = await controller.prove(input, './artifacts/circuits/BatchTreeUpdate')
await newTornadoTrees.updateDepositTree(proof, ...args)
let updatedRoot = await newTornadoTrees.depositRoot()
expect(updatedRoot).to.be.equal(tree.root())
// register 2 * `notes.length` new deposits on the new trees
for (let i = 0; i < notes.length; i++) {
await register(notes[i], newTornadoTrees, tornadoProxy)
}
for (let i = 0; i < notes.length; i++) {
await register(notes[i], newTornadoTrees, tornadoProxy)
}
// get the 2 extra events from the v1 trees
let events = notes.slice(batchSize).map((note) => ({
hash: toFixedHex(note.commitment),
instance: toFixedHex(note.instance, 20),
block: toFixedHex(note.depositBlock, 4),
}))
let registeredEvents = await newTornadoTrees.queryFilter(depositDataEventFilter)
registeredEvents = registeredEvents.slice(batchSize) // cut processed deposits from v1
events = events.concat(
registeredEvents.slice(0, batchSize - 2).map((e) => ({
hash: toFixedHex(e.args.hash),
instance: toFixedHex(e.args.instance, 20),
block: toFixedHex(e.args.block, 4),
})),
)
// insert the two leftover v1 events together with the first new deposits
;({ input, args } = controller.batchTreeUpdate(tree, events))
proof = await controller.prove(input, './artifacts/circuits/BatchTreeUpdate')
await newTornadoTrees.updateDepositTree(proof, ...args)
updatedRoot = await newTornadoTrees.depositRoot()
expect(updatedRoot).to.be.equal(tree.root())
events = registeredEvents.slice(batchSize - 2, 2 * batchSize - 2).map((e) => ({
hash: toFixedHex(e.args.hash),
instance: toFixedHex(e.args.instance, 20),
block: toFixedHex(e.args.block, 4),
}))
;({ input, args } = controller.batchTreeUpdate(tree, events))
proof = await controller.prove(input, './artifacts/circuits/BatchTreeUpdate')
await newTornadoTrees.updateDepositTree(proof, ...args)
updatedRoot = await newTornadoTrees.depositRoot()
expect(updatedRoot).to.be.equal(tree.root())
})
it('should reject for partially filled tree')
it('should reject for outdated deposit root')
it('should reject for incorrect insert index')
it('should reject for overflows of newRoot')
it('should reject for invalid sha256 args')
})
describe('#getRegisteredDeposits', () => {
it('should work', async () => {
for (let i = 0; i < 2 ** CHUNK_TREE_HEIGHT; i++) {
notes[i] = {
instance: instances[i % instances.length],
depositBlock: blocks[i % blocks.length],
withdrawalBlock: 2 + i + i * 4 * 60 * 24,
commitment: randomBN(),
nullifierHash: randomBN(),
}
await register(notes[i], tornadoTrees, tornadoProxy)
}
const abi = new ethers.utils.AbiCoder()
const count = await tornadoTrees.depositsLength()
const _deposits = await tornadoTrees.getRegisteredDeposits()
expect(count).to.be.equal(notes.length * 2)
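// deposits registered before this contract read as zero; new ones hold keccak256(abi.encode(instance, commitment, block))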
_deposits.forEach((hash, i) => {
if (i < notes.length) {
expect(hash).to.be.equal('0x0000000000000000000000000000000000000000000000000000000000000000')
} else {
const index = i - notes.length
const encodedData = abi.encode(
['address', 'bytes32', 'uint256'],
[notes[index].instance, toFixedHex(notes[index].commitment), notes[index].depositBlock],
)
const leaf = ethers.utils.keccak256(encodedData)
expect(leaf).to.be.equal(hash)
}
})
})
})
describe('#getRegisteredWithdrawals', () => {
it('should work', async () => {
for (let i = 0; i < 2 ** CHUNK_TREE_HEIGHT; i++) {
notes[i] = {
instance: instances[i % instances.length],
depositBlock: blocks[i % blocks.length],
withdrawalBlock: 2 + i + i * 4 * 60 * 24,
commitment: randomBN(),
nullifierHash: randomBN(),
}
await register(notes[i], tornadoTrees, tornadoProxy)
}
const abi = new ethers.utils.AbiCoder()
const count = await tornadoTrees.withdrawalsLength()
const _withdrawals = await tornadoTrees.getRegisteredWithdrawals()
expect(count).to.be.equal(notes.length * 2)
_withdrawals.forEach((hash, i) => {
if (i < notes.length) {
expect(hash).to.be.equal('0x0000000000000000000000000000000000000000000000000000000000000000')
} else {
const index = i - notes.length
const encodedData = abi.encode(
['address', 'bytes32', 'uint256'],
[notes[index].instance, toFixedHex(notes[index].nullifierHash), notes[index].withdrawalBlock],
)
const leaf = ethers.utils.keccak256(encodedData)
expect(leaf).to.be.equal(hash)
}
})
})
})
})

9230
yarn.lock Normal file

File diff suppressed because it is too large