2023.04.23: Check HISTORY.md for more info

Signed-off-by: T-Hax <>
T-Hax 2023-04-23 22:01:45 +00:00
parent 4b661dd3e6
commit a27fff1974
12 changed files with 559 additions and 363 deletions


@@ -1,6 +1,11 @@
# All of these are used for tests
# If someone is using the SDK, there is no reason to use .env
## Test behaviour
# Debug (Whether to log debug events)
DEBUG=
# Tor
# Torify tests (still needs to be made possible for each test individually)
TORIFY=
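For reference, the new test file in this commit reads these flags as plain string comparisons, so anything other than the exact string 'true' counts as off; a minimal sketch of that pattern:

// Mirrors how src/test/core.test.ts interprets the variables in this commit.
const torify = process.env.TORIFY === 'true'
const debug = process.env.DEBUG === 'true'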

1
.gitignore vendored

@@ -5,6 +5,7 @@ sand\ box
debug.log
yarn-error.log
cache
vanilla_cache
.env
scripts
reference


@@ -1,5 +1,18 @@
# History
### 2023.04.23 (2023-04-23)
Did:
* Single withdrawals are working; this means multiple withdrawals should also work, but they still need testing.
* Event emitting: core is an event emitter, so implementors will have an easier time using it (see the sketch after this list).
* The relayer also works, although it sometimes refuses the connection.
Next:
* Clean up / create / add scripts for inspecting and synchronizing the cache, since doing it through tests is painful.
* A few more tests are needed, and then we can finally move on to the monorepo!
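A minimal sketch of how an implementor might hook into these events, assuming only a constructed Core instance and the 'debug' / 'sync' event names that appear in the lib/core diff below:

import { providers } from 'ethers'
import { Core } from 'lib/core'

// Any ethers provider will do; the RPC URL here is a placeholder.
const core = new Core(new providers.JsonRpcProvider('http://localhost:8545'))

// 'debug' carries free-form diagnostic strings emitted during proof building and syncing.
core.on('debug', (message: string) => console.debug(message))

// 'sync' reports sync lifecycle states such as 'synced'.
core.on('sync', (state: string) => console.log(`sync state: ${state}`))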
### 2023.04.20 (2023-04-20)
Did:


@@ -1,6 +1,6 @@
{
"name": "tornado-sdk",
"description": "SDK to integrate your protocol with Tornado Cash.",
"name": "tornado-developer-toolkit",
"description": "A developer toolkit to integrate your protocol with Tornado Cash.",
"repository": "https://development.tornadocash.community/T-Hax/tornado-sdk",
"homepage": "https://tornadocash.community",
"author": "T-Hax",
@@ -10,7 +10,7 @@
"zk"
],
"private": false,
"version": "2023.04.20",
"version": "2023.04.23",
"engines": {
"node": "^18"
},


@@ -110,7 +110,8 @@ export class Core extends EventEmitter {
// Extract commitments and nullifier hashes
const hexCommitments: string[] = []
const hexNullifierHashes: string[] = []
const purchaseAmounts = options?.ethPurchaseAmounts ?? new Array(zkDepositsData.length)
const purchaseAmounts =
options?.ethPurchaseAmounts ?? new Array(zkDepositsData.length).fill(BigNumber.from(0))
if (zkDepositsData.length !== recipientAddresses.length)
throw ErrorUtils.getError(
@@ -135,20 +136,29 @@ export class Core extends EventEmitter {
const [leaves, leafIndices] = await this._findLeavesAndIndices(name, hexCommitments)
const invalidCommitments: string[] = []
this.emit(
'debug',
`\nFound leaves and indices, num leaves: ${leaves.length}, indices: [${leafIndices.join(', ')}]`
)
// Determine whether we will be checking if the notes are spent
const checkSpent = options?.checkNotesSpent !== false
const spentNotes: string[] = []
this.emit('debug', `\nCheck spent notes? => ${checkSpent}`)
// If yes, immediately check it with the supplied Tornado Instance
const checkSpentArray = checkSpent ? await instance.isSpentArray(hexNullifierHashes) : undefined
if (checkSpent) this.emit('debug', `\nSpent array: [${checkSpentArray?.join(', ')}]`)
// Check whether a commitment was not found among all deposits, meaning that it is invalid
// Also add the invalid commitments. We can use leafIndices[i] because the matched ones are concatenated
// at the start
for (let i = 0, len = zkDepositsData.length; i < len; i++) {
if (!leafIndices[i]) invalidCommitments.push(hexCommitments[i])
if (checkSpent && !checkSpentArray![i]) spentNotes.push(hexNullifierHashes[i])
if (checkSpent && checkSpentArray![i]) spentNotes.push(hexNullifierHashes[i])
}
// If something is wrong, throw
@@ -174,9 +184,11 @@ export class Core extends EventEmitter {
leaves: leaves
})
const root: string = merkleTree.root()
const root: string = BigNumber.from(merkleTree.root()).toHexString()
const checkKnownRoot: boolean = options?.checkKnownRoot ?? true
this.emit('debug', `\nMerkle root: ${root}, check known? => ${checkKnownRoot}`)
// Check whether the root is valid
if (checkKnownRoot && !(await instance.isKnownRoot(root)))
throw ErrorUtils.getError(
@@ -190,21 +202,34 @@ export class Core extends EventEmitter {
// In reality, if a manual withdrawal is made, we don't differentiate it from a relayer withdrawal
// Since it is just serviceFee 0 AND no token price, the function will not buy more tokens
const serviceFee = relayerProperties.serviceFeePercent ?? 0
const serviceFeePercent = relayerProperties.serviceFeePercent ?? 0
const tokenPrice = relayerProperties.prices?.get(lookupKeys.token)
const decimals =
// @ts-expect-error
bigInt(10).pow(
options?.tokenDecimals ?? (await Onchain.getTokenDecimals(lookupKeys.network, lookupKeys.token))
)
const toWithdraw = BigNumber.from(lookupKeys.denomination).mul(decimals)
const decimals = BigNumber.from(10).pow(
options?.tokenDecimals ?? (await Onchain.getTokenDecimals(lookupKeys.network, lookupKeys.token))
)
const toWithdraw = BigNumber.from(+lookupKeys.denomination * 10 ** lookupKeys.denomination.length)
.mul(decimals)
.div(10 ** lookupKeys.denomination.length)
const native = lookupKeys.token !== (await this.chain.getChainSymbol())
// TODO: Decide if necessary
if (!tokenPrice && lookupKeys.token !== (await this.chain.getChainSymbol()))
if (!tokenPrice && native)
throw ErrorUtils.getError(
'Core.buildDepositProofs: a token price MUST be supplied if the token withdrawn is not native.'
)
this.emit(
'debug',
`\nProof building, invariant data: [${[
gasPrice.toString(),
gasPriceCushion.toString(),
serviceFeePercent,
tokenPrice,
decimals.toString(),
toWithdraw.toString()
].join(', ')}]\n`
)
// Compute proofs
for (let i = 0, len = zkDepositsData.length; i < len; i++) {
inputsForProofs.push({
@@ -220,12 +245,13 @@ export class Core extends EventEmitter {
decimals,
gasPrice,
gasPriceCushion,
serviceFee,
serviceFeePercent,
purchaseAmounts[i],
tokenPrice
// This is our flag for whether it's a token or not
native ? undefined : tokenPrice
),
// @ts-expect-error
refund: bigInt(purchaseAmounts[i].toString()) ?? bigInt(0)
refund: purchaseAmounts[i] ? bigInt(purchaseAmounts[i].toString()) : bigInt(0)
},
private: {
nullifier: zkDepositsData[i].nullifier,
@@ -243,13 +269,13 @@ export class Core extends EventEmitter {
gasPrice: BigNumber,
gasPriceCushion: BigNumber,
relayerServiceFee: number,
ethBought?: BigNumber,
ethBought: BigNumber,
tokenPriceInEth?: BigNumber
): typeof bigInt {
const factor = BigNumber.from(10).pow(String(relayerServiceFee).length)
const baseRelayerFee = toWithdraw.mul(BigNumber.from(relayerServiceFee).mul(factor)).div(factor)
const txCost = gasPrice.add(gasPriceCushion).mul(5e5)
if (ethBought && tokenPriceInEth) {
if (tokenPriceInEth) {
// @ts-expect-error
return bigInt(txCost.add(ethBought).mul(decimals).div(tokenPriceInEth).add(baseRelayerFee).toString())
}
@@ -268,8 +294,8 @@ export class Core extends EventEmitter {
if (indexes)
for (let i = 0, len = rows.length; i < len; i++) {
const id = parseIndexableString(rows[i].id)[0]
if (0 < indexes.findIndex(id)) docs.push(rows[i].doc)
const [index, , ,] = parseIndexableString(rows[i].id)[0]
if (0 < indexes.findIndex(index)) docs.push(rows[i].doc)
}
else docs = rows.map((row) => row.doc)
@@ -393,10 +419,14 @@ export class Core extends EventEmitter {
}
}
if (backupNotes) await this._backupDepositData(new Cache.Base<Docs.Note>('DepositNotes'), notesToBackup)
if (backupNotes)
await this._backupDepositData(this.loadCache<Cache.Base<Docs.Note>>('DepositNotes'), notesToBackup)
if (backupInvoices)
await this._backupDepositData(new Cache.Base<Docs.Invoice>('DepositInvoices'), invoicesToBackup)
await this._backupDepositData(
this.loadCache<Cache.Base<Docs.Invoice>>('DepositInvoices'),
invoicesToBackup
)
return txs
}
@@ -420,9 +450,10 @@ export class Core extends EventEmitter {
throw ErrorUtils.ensureError(err)
})
await cache.close().catch((err) => {
throw ErrorUtils.ensureError(err)
})
// TODO: Decide whether to close caches by default or not
//await cache.close().catch((err) => {
// throw ErrorUtils.ensureError(err)
//})
}
loadWithdrawalCache(name: string, options?: Options.Core.Cache): Cache.Withdrawal {
@@ -439,9 +470,9 @@ export class Core extends EventEmitter {
return this.caches.get(name) as Cache.Deposit
}
loadCache<T extends Docs.Base, C extends Cache.Base<T>>(name: string, options?: Options.Cache.Database): C {
loadCache<C extends Cache.Base<Docs.Base>>(name: string, options?: Options.Cache.Database): C {
if (!this.caches.has(name)) {
this.caches.set(name, new Cache.Base<T>(name, options))
this.caches.set(name, new Cache.Base(name, options))
}
return this.caches.get(name) as C
}
@@ -557,10 +588,11 @@ export class Core extends EventEmitter {
this.emit('sync', 'synced')
// Immediately start listening if we're doing this
if (syncOptions.cache.sync.listen)
if (syncOptions.cache.sync.listen) {
instance = instance.on(filter, (...eventArgs) => {
cache.db.put(cache.buildDoc(eventArgs[eventArgs.length - 1]))
this.emit(cache.name, 'received', cache.db.put(cache.buildDoc(eventArgs[eventArgs.length - 1])))
})
}
// Then wait for all pooler requests to resolve
let results = await cache.sync.pooler!.all()
@@ -646,13 +678,7 @@ export class Core extends EventEmitter {
const indices = new Array<number>(commitments.length).fill(0)
const leaves: Array<string> = []
// Either load all deposit events from memory or from cache
let cache: Cache.Base<Docs.Deposit>
if (!this.caches.has(instanceName)) {
cache = new Cache.Base<Docs.Deposit>(instanceName)
} else cache = this.caches.get(instanceName) as Cache.Base<Docs.Deposit>
const cache = this.loadCache<Cache.Base<Docs.Deposit>>(instanceName)
const docs = await cache.db.allDocs()
// If no docs in cache throw and stop


@@ -6,14 +6,14 @@ import circomlib from 'circomlib'
import { bigInt } from 'snarkjs'
import { Groth16 } from 'websnark/src/groth16'
import { buildGroth16 } from 'websnark'
import { MerkleTree } from 'fixed-merkle-tree'
import MerkleTreeDefault, { MerkleTree } from 'fixed-merkle-tree'
import { genWitnessAndProve, toSolidityInput } from 'websnark/src/utils'
// Some utils to work with hex numbers
import { HexUtils, NumberUtils } from 'lib/utils'
import { ErrorUtils, HexUtils, NumberUtils } from 'lib/utils'
// Parse some files
import { Files } from 'lib/data'
import { Files, Json } from 'lib/data'
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ SETUP ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
/**
@@ -25,8 +25,8 @@ export namespace Setup {
return (await Files.loadRaw('circuits/tornadoProvingKey.bin')).buffer
}
export async function getTornadoCircuit(): Promise<Buffer> {
return await Files.loadRaw('circuits/tornado.json')
export async function getTornadoCircuit(): Promise<any> {
return Json.load('circuits/tornado.json')
}
/**
@@ -51,21 +51,21 @@ export namespace Primitives {
export function parseNote(hexNote: string): Types.ZKDepositData {
const _hexNote = hexNote.split('_')[1] ?? hexNote
const buffer = Buffer.from(_hexNote, 'hex')
const buffer = Buffer.from(_hexNote.slice(2), 'hex')
return createDeposit({
// @ts-expect-error
nullifier: bigInt.leBuff2int(buffer.subarray(0, 31)),
// @ts-expect-error
secret: bigInt.leBuff2int(buffer.subarray(32, 62))
secret: bigInt.leBuff2int(buffer.subarray(31, 62))
})
}
export function createDeposit(input?: Types.InputFor.CreateDeposit): Types.ZKDepositData {
if (!input?.nullifier || !input?.secret)
input = {
nullifier: NumberUtils.randomBigInteger(31),
secret: NumberUtils.randomBigInteger(31)
}
export function createDeposit(
input: Types.InputFor.CreateDeposit = {
nullifier: NumberUtils.randomBigInteger(31),
secret: NumberUtils.randomBigInteger(31)
}
): Types.ZKDepositData {
// @ts-expect-error
let preimage = Buffer.concat([input.nullifier.leInt2Buff(31), input.secret.leInt2Buff(31)])
let commitment = calcPedersenHash({ msg: preimage })
@@ -85,7 +85,8 @@ export namespace Primitives {
}
export function buildMerkleTree(inputs: Types.InputFor.BuildMerkleTree): MerkleTree {
return new MerkleTree(inputs.height, inputs.leaves)
// @ts-expect-error
return new MerkleTreeDefault(inputs.height, inputs.leaves)
}
export async function calcDepositProofs(
@@ -99,36 +100,47 @@
for (let i = 0, len = inputs.length; i < len; i++) {
const input = inputs[i]
// TODO: remove try and const again after fixing
let proofData
// Compute Merkle Proof
const { pathElements, pathIndex } = input.public.tree.path(input.public.leafIndex)
// The TS return type is declared as `pathIndex`, but accessing it gives undefined because the real property is `pathIndices`
// TODO: Bug that needs to be fixed (above)
// @ts-expect-error
const { pathElements, pathIndices } = input.public.tree.path(input.public.leafIndex)
proofs.push([])
const proofData = await genWitnessAndProve(
groth16,
{
// Public inputs
root: input.public.root,
// @ts-expect-error
nullifierHash: bigInt(input.public.hexNullifierHash),
// @ts-expect-error
recipient: bigInt(input.public.recipientAddress),
// @ts-expect-error
relayer: bigInt(input.public.relayerAddress),
// @ts-expect-error
fee: bigInt(input.public.fee),
//
refund: input.public.refund,
try {
proofData = await genWitnessAndProve(
groth16,
{
// Public inputs
root: input.public.root,
// @ts-expect-error
nullifierHash: bigInt(input.public.hexNullifierHash),
// @ts-expect-error
recipient: bigInt(input.public.recipientAddress),
// @ts-expect-error
relayer: bigInt(input.public.relayerAddress),
// @ts-expect-error
fee: bigInt(input.public.fee),
//
refund: input.public.refund,
// Private inputs
nullifier: input.private.nullifier,
secret: input.private.secret,
pathElements: pathElements,
pathIndices: pathIndex
},
circuit,
provingKey
)
// Private inputs
nullifier: input.private.nullifier,
secret: input.private.secret,
pathElements: pathElements,
pathIndices: pathIndices
},
circuit,
provingKey
)
} catch (err) {
groth16.terminate()
throw ErrorUtils.ensureError(err)
}
proofs[i].push(toSolidityInput(proofData).proof)


@@ -172,7 +172,7 @@ export namespace Onchain {
const obj = await Json.load(filepath)
return await Promise.all(
paths.map((path) =>
Json.getValue(obj, [`${path.network ?? '\0'}${path.token ?? '\0'}${path.denomination ?? '\0'}`])
Json.getValue(obj, [`${path.network ?? ''}${path.token ?? ''}${path.denomination ?? ''}`])
)
)
}


@@ -193,6 +193,18 @@ export class Relayer {
this._propertiesFetched('prices')
return this._prices!
}
get properties(): RelayerProperties {
this._propertiesFetched('properties')
return {
address: this._address!,
version: this._version!,
chainId: this._chainId!,
serviceFeePercent: this._serviceFee!,
miningFeePercent: this._miningFee!,
status: this._status!,
prices: this._prices!
}
}
async getETHPurchasePrice(token: string): Promise<BigNumber> {
return BigNumber.from(

406
src/test/core.test.ts Normal file

@@ -0,0 +1,406 @@
import chai from 'chai'
import * as ganache from 'ganache'
// External
import { solidity } from 'ethereum-waffle'
import { providers } from 'ethers'
import { parseUnits } from 'ethers/lib/utils'
// @ts-expect-error
import { parseIndexableString } from 'pouchdb-collate'
// Local
import { RelayerProperties } from 'types/sdk/data'
import { ERC20, TornadoInstance } from 'types/deth'
import { Docs, Files, Onchain, Cache } from 'lib/data'
import { Chain, Contracts } from 'lib/chain'
import { Primitives } from 'lib/crypto'
import { ErrorUtils } from 'lib/utils'
import { TorProvider, Relayer, RegularHttpClient } from 'lib/web'
import { Core } from 'lib/core'
// Data
import compareDeposits from './resources/deposits_eth_0.1.json'
chai.use(solidity)
const expect = chai.expect
describe('Core', () => {
const torify = process.env.TORIFY === 'true'
const debug = process.env.DEBUG === 'true'
if (!process.env.ETH_MAINNET_TEST_RPC) throw ErrorUtils.getError('need a mainnet rpc endpoint.')
console.log('\nNote that these tests are time intensive. ⏳. ⏳.. ⏳...\n')
console.log(
'Also, we are using ganache because we just need a forked blockchain and not an entire environment. 🐧'
)
let daiAddress: string
const daiWhale = '0x5777d92f208679db4b9778590fa3cab3ac9e2168' // Uniswap V3 Something/Dai Pool
const mainnetProvider: providers.Provider = torify
? new TorProvider(process.env.ETH_MAINNET_TEST_RPC!, { port: +process.env.TOR_PORT! })
: new providers.JsonRpcProvider(process.env.ETH_MAINNET_TEST_RPC)
const _ganacheProvider = ganache.provider({
chain: { chainId: 1 },
// @ts-ignore
fork: { url: process.env.ETH_MAINNET_TEST_RPC },
logging: { quiet: true },
wallet: {
totalAccounts: 20,
unlockedAccounts: [daiWhale]
}
})
// @ts-expect-error
const ganacheProvider = new providers.Web3Provider(_ganacheProvider)
const chain = new Chain(ganacheProvider)
async function initializeRelayer(): Promise<Relayer> {
const httpClient = new RegularHttpClient()
const relayer = new Relayer({
url: 'https://thornadope.xyz',
httpClient: httpClient
})
await relayer.fetchProperties()
return relayer
}
after(async function () {
this.timeout(0)
await Files.wipeCache()
})
describe('namespace Contracts', () => {
it('getClassicInstance: should be able to get a tornado instance', async () => {
let instance = await Contracts.getInstance(String(1), 'eth', String(1), mainnetProvider)
expect(instance.address).to.equal('0x47CE0C6eD5B0Ce3d3A51fdb1C52DC66a7c3c2936')
await expect(instance.getLastRoot()).to.not.be.reverted
}).timeout(0)
})
context('Unforked', () => {
describe('class Classic', () => {
const core = new Core(mainnetProvider)
let smallestEth: TornadoInstance
let dai100K: TornadoInstance
let logListener = function (...args: any[]) {
if (args.length === 3) {
console.debug(`\nSync will be started with SB: ${args[0]}, TB: ${args[1]}, BD: ${args[2]}\n`)
} else if (args.length == 2) {
console.debug(`Syncing from block ${args[0]} to ${args[1]}`)
}
}
before(async function () {
this.timeout(0)
smallestEth = await core.getInstance('eth', 0.1)
dai100K = await core.getInstance('dai', 100000)
if (debug) core.on('debug', logListener)
})
after(async function () {
this.timeout(0)
if (debug) core.off('debug', logListener)
})
const smallEthDenomName = '1ETH0.1'
it(`sync: Should be able to fetch deposit events for ${smallEthDenomName}`, async function () {
// This is going to try syncing the entire range
await core.sync(smallestEth, {
blocks: {
deltaDivisor: 50
},
cache: {
sync: {
concurrencyLimit: 20
}
}
})
const cache = core.caches.get('Deposits' + smallEthDenomName)
const rows = (await cache!.db.allDocs()).rows
const valid = Object.values(compareDeposits)
expect(rows.length).to.be.gte(valid.length)
for (let i = 0, len = valid.length; i < len; i++) {
const id = rows[i].id
const [bn, leafIndex, commitment] = parseIndexableString(id)
const validDoc = valid[i]
expect(bn).to.equal(validDoc['blockNumber'])
expect(leafIndex).to.equal(validDoc['leafIndex'])
expect(commitment).to.equal(validDoc['commitment'])
}
}).timeout(0)
const bigDaiDenomName = '1DAI100000'
it(`sync: Should be able to fetch deposit events for ${bigDaiDenomName}`, async function () {
// This is going to try syncing the entire range
await core.sync(dai100K, {
blocks: {
deltaDivisor: 50
},
cache: {
sync: {
concurrencyLimit: 20
}
}
})
}).timeout(0)
})
})
describe('Forked (Ganache)', async () => {
describe('class Classic', async () => {
// Init sync objects
const core = new Core(ganacheProvider)
const needsMoney = ganacheProvider.getSigner()
const daiWhaleSigner = ganacheProvider.getSigner(daiWhale)
const debugListener = (message: string) => console.debug(message)
let snapshotId: any
let needsMoneyAddress: string
let dai: ERC20
let smallestEth: TornadoInstance
let dai100K: TornadoInstance
before(async function () {
this.timeout(0)
// Get snapshot just in case
snapshotId = await ganacheProvider.send('evm_snapshot', [])
// Prep whale eth balance
await ganacheProvider.send('evm_setAccountBalance', [daiWhale, parseUnits('10').toHexString()])
// Init async objects
needsMoneyAddress = await needsMoney.getAddress()
daiAddress = await Onchain.getTokenAddress('1', 'dai')
dai = chain.getTokenContract(daiAddress).connect(daiWhaleSigner)
smallestEth = await core.getInstance('eth', 0.1)
dai100K = await core.getInstance('dai', 100000)
// Set debug
if (debug) core.on('debug', debugListener)
})
after(async function () {
this.timeout(0)
await ganacheProvider.send('evm_revert', [snapshotId])
core.off('debug', debugListener)
})
afterEach(() => {
dai = dai.connect(daiWhaleSigner)
})
it.only('buildDepositTransaction: build a single eth deposit tx and succeed', async () => {
const initBal = await needsMoney.getBalance()
// Build tx and load cache for this test
const tx = await core.buildDepositTransaction(smallestEth)
const cache = core.loadDepositCache('Deposits1ETH0.1')
// Prep promise to only try withdrawing after cache has been updated
const putPromise = new Promise((resolve) => {
smallestEth.on(
smallestEth.filters.Deposit(null, null, null),
function (commitment, leafIndex, timestamp, event) {
resolve(cache.db.put(cache.buildDoc(event)))
}
)
})
// Deposit and await cache updated
const response = await needsMoney.sendTransaction(tx.request)
await response.wait()
const endBal = await needsMoney.getBalance()
// Passing resolve as callback into put didn't work
await putPromise
// Check deposit predicates
expect(initBal).to.equal(parseUnits('1000'))
expect(endBal).to.be.lte(parseUnits('999.9'))
}).timeout(0)
it.only('buildDepositProof: it should be able to build an eth proof', async () => {
// Get withdrawer, load cache, prep note for this test
const withdrawer = ganacheProvider.getSigner(2)
const cache = core.loadDepositCache('Deposits1ETH0.1')
// We need this to clean the cache, since we want a clean state
const doc = (await cache.db.allDocs({ include_docs: true, descending: true, limit: 1 })).rows[0].doc
// We are not transforming because we want to test this out
const notes = await core.loadNotes()
// Build proof
let proof
try {
proof = await core.buildDepositProof(
smallestEth,
{
address: await withdrawer.getAddress()
},
await needsMoney.getAddress(),
notes[0],
{
// On by default but stating for visibility
checkNotesSpent: true,
checkKnownRoot: true
}
)
} finally {
await cache.db.remove(doc?._id!, doc?._rev!)
}
// Subtract the calculated fee from the received amount
const ethDelta = parseUnits('0.1').sub(proof[5])
// Withdrawal time, let's see if it works
// The balance diff will be exact because withdrawer is paying for gas as relayer
await expect(
await smallestEth
.connect(withdrawer)
.withdraw(proof[0], proof[1], proof[2], proof[3], proof[4], proof[5], proof[6])
).to.changeEtherBalance(needsMoney, ethDelta)
}).timeout(0)
it.only('buildDepositTransaction: build a single token deposit tx and succeed', async () => {
// Prep deposit amount, proxy for approval, cache, bal for comp
const depositAmount = parseUnits('100000')
const proxy = await core.getProxy()
const cache = core.loadDepositCache('Deposits1DAI100000')
const daiBalBef = await dai.balanceOf(dai100K.address)
// Prep promise to only try withdrawing after cache has been updated
const putPromise = new Promise((resolve) => {
dai100K.on(
dai100K.filters.Deposit(null, null, null),
function (commitment, leafIndex, timestamp, event) {
resolve(cache.db.put(cache.buildDoc(event)))
}
)
})
// Prep for deposit
await dai.transfer(needsMoneyAddress, depositAmount)
dai = dai.connect(needsMoney)
const tx = await core.buildDepositTransaction(dai100K)
// Approve dai for the proxy first (transferFrom)
await dai.approve(proxy.address, depositAmount)
// Deposit
const response = await needsMoney.sendTransaction(tx.request)
await response.wait()
// Prep for check
const daiBalPost = await dai.balanceOf(dai100K.address)
// Passing resolve as callback into put didn't work
await putPromise
// Checks
expect(daiBalBef).to.equal(daiBalPost.sub(depositAmount))
expect(await dai.balanceOf(needsMoneyAddress)).to.equal(0)
}).timeout(0)
it.only('buildDepositProof: it should be able to build a token proof', async () => {
// Get withdrawer, load cache, prep note for this test
const withdrawer = ganacheProvider.getSigner(2)
const cache = core.loadDepositCache('Deposits1DAI100000')
// We need this to clean the cache, since we want a clean state
const doc = (await cache.db.allDocs({ include_docs: true, descending: true, limit: 1 })).rows[0].doc
// We are not transforming because we want to test this out
const notes = await core.loadNotes()
// We need to select last
const note = notes[notes.length - 1]
// Init properties via some relayer to make our life easier
const relayer = await initializeRelayer()
let properties = relayer.properties
// Just set another address
properties.address = await withdrawer.getAddress()
// Build proof with relayer properties this time
let proof
try {
proof = await core.buildDepositProof(dai100K, properties, await needsMoney.getAddress(), note, {
// On by default but stating for visibility
checkNotesSpent: true,
checkKnownRoot: true
})
} finally {
await cache.db.remove(doc?._id!, doc?._rev!)
}
// Calc balance diff again... it will be expressed in dai
const daiDelta = parseUnits('100000').sub(proof[5])
await expect(
await dai100K
.connect(withdrawer)
.withdraw(proof[0], proof[1], proof[2], proof[3], proof[4], proof[5], proof[6])
).to.changeTokenBalance(dai, needsMoney, daiDelta)
}).timeout(0)
it('buildDepositTransactions: multiple eth deposits', async () => {
const instances = await core.getInstances(
[0.1, 1, 10, 100].map((el) => {
return { token: 'eth', denomination: el }
})
)
const txs = await core.buildDepositTransactions(instances, {
depositsPerInstance: [1, 2, 3, 4]
})
for (let i = 0, len = txs.length; i < len; i++) {
await expect(() => needsMoney.sendTransaction(txs[i].request)).to.not.be.reverted
}
expect(await dai.balanceOf(needsMoneyAddress)).to.equal(0)
}).timeout(0)
it('buildDepositTransactions: multiple token deposits', async () => {
const instances = await core.getInstances(
[100, 1000, 10000, 100000].map((el) => {
return { token: 'dai', denomination: el }
})
)
const proxy = await core.getProxy()
const depositAmount = parseUnits('432100')
await dai.transfer(needsMoneyAddress, parseUnits('432100'))
dai = dai.connect(needsMoney)
const txs = await core.buildDepositTransactions(instances, {
depositsPerInstance: [1, 2, 3, 4]
})
await dai.approve(proxy.address, depositAmount)
for (let i = 0, len = txs.length; i < len; i++) {
await expect(() => needsMoney.sendTransaction(txs[i].request)).to.not.be.reverted
}
expect(await dai.balanceOf(needsMoneyAddress)).to.equal(0)
}).timeout(0)
it('createInvoice: should be able to create an invoice', async () => {
const instance = await core.getInstance('dai', '1000')
const invoice = await core.createInvoice(instance)
console.log(invoice)
}).timeout(0)
})
})
})


@@ -28,6 +28,13 @@ describe('crypto', () => {
// From the whitepaper, the randomness r E B^248
expect(BigNumber.from(deposit.secret.toString())).to.be.lte(limit)
})
it('parseNote', () => {
const deposit = Primitives.createDeposit()
const note = Primitives.createNote(deposit.preimage)
const parsed = Primitives.parseNote(note)
expect(parsed.hexCommitment).to.equal(deposit.hexCommitment)
})
})
})
})


@@ -1,285 +0,0 @@
import chai from 'chai'
import * as ganache from 'ganache'
import { solidity } from 'ethereum-waffle'
import { providers } from 'ethers'
import { parseUnits } from 'ethers/lib/utils'
import { ERC20, TornadoInstance } from 'types/deth'
import { Json } from 'types/sdk/data'
import { Core } from 'lib/core'
import { Chain, Contracts } from 'lib/chain'
import { Docs, Files, Onchain, Cache } from 'lib/data'
import { ErrorUtils } from 'lib/utils'
import { TorProvider } from 'lib/web'
// Data
// @ts-expect-error
import { parseIndexableString } from 'pouchdb-collate'
import compareDeposits from './resources/deposits_eth_0.1.json'
import { Primitives } from 'lib/crypto'
import { ZKDepositData } from 'types/sdk/crypto'
chai.use(solidity)
const expect = chai.expect
describe('main', () => {
const torify = process.env.TORIFY === 'true'
if (!process.env.ETH_MAINNET_TEST_RPC) throw ErrorUtils.getError('need a mainnet rpc endpoint.')
console.log('\nNote that these tests are time intensive. ⏳. ⏳.. ⏳...\n')
console.log(
'Also, we are using ganache because we just need a forked blockchain and not an entire environment. 🐧'
)
let daiAddress: string
const daiWhale = '0x5777d92f208679db4b9778590fa3cab3ac9e2168' // Uniswap V3 Something/Dai Pool
const mainnetProvider: providers.Provider = torify
? new TorProvider(process.env.ETH_MAINNET_TEST_RPC!, { port: +process.env.TOR_PORT! })
: new providers.JsonRpcProvider(process.env.ETH_MAINNET_TEST_RPC)
const _ganacheProvider = ganache.provider({
chain: { chainId: 1 },
// @ts-ignore
fork: { url: process.env.ETH_MAINNET_TEST_RPC },
logging: { quiet: true },
wallet: {
totalAccounts: 20,
unlockedAccounts: [daiWhale]
}
})
// @ts-expect-error
const ganacheProvider = new providers.Web3Provider(_ganacheProvider)
const chain = new Chain(ganacheProvider)
let snapshotId: any
after(async function () {
this.timeout(0)
await Files.wipeCache()
})
describe('namespace Tornado', () => {
describe('namespace Contracts', () => {
it('getClassicInstance: should be able to get a tornado instance', async () => {
let instance = await Contracts.getInstance(String(1), 'eth', String(1), mainnetProvider)
expect(instance.address).to.equal('0x47CE0C6eD5B0Ce3d3A51fdb1C52DC66a7c3c2936')
await expect(instance.getLastRoot()).to.not.be.reverted
}).timeout(0)
})
describe('class Classic', () => {
it.only('sync: Should be able to fetch deposit events', async function () {
const core = new Core(mainnetProvider)
const instance = await Contracts.getInstance(String(1), 'eth', String(0.1), mainnetProvider)
// For safety
expect(torify).to.be.true
core.on('debug', function (...args) {
if (args.length === 3) {
console.debug(`\nSync will be started with SB: ${args[0]}, TB: ${args[1]}, BD: ${args[2]}\n`)
} else if (args.length == 2) {
console.debug(`Syncing from block ${args[0]} to ${args[1]}`)
}
})
// This is going to try syncing the entire range
await core.sync(instance, {
blocks: {
deltaDivisor: 50
},
cache: {
sync: {
concurrencyLimit: 20
}
}
})
const cache = core.caches.get('Deposits1ETH0.1')
const rows = (await cache!.db.allDocs()).rows
const valid = Object.values(compareDeposits)
expect(rows.length).to.be.gte(valid.length)
for (let i = 0, len = valid.length; i < len; i++) {
const id = rows[i].id
const [bn, leafIndex, commitment] = parseIndexableString(id)
const validDoc = valid[i]
expect(bn).to.equal(validDoc['blockNumber'])
expect(leafIndex).to.equal(validDoc['leafIndex'])
expect(commitment).to.equal(validDoc['commitment'])
}
}).timeout(0)
describe('ganache fork', async () => {
const core = new Core(ganacheProvider)
const needsMoney = ganacheProvider.getSigner()
const whale = ganacheProvider.getSigner(daiWhale)
let needsMoneyAddress: string
let dai: ERC20
let smallestEth: TornadoInstance
let note: ZKDepositData, noteObj: any
before(async function () {
this.timeout(0)
snapshotId = await ganacheProvider.send('evm_snapshot', [])
await ganacheProvider.send('evm_setAccountBalance', [daiWhale, parseUnits('10').toHexString()])
needsMoneyAddress = await needsMoney.getAddress()
daiAddress = await Onchain.getTokenAddress('1', 'dai')
dai = chain.getTokenContract(daiAddress).connect(whale)
smallestEth = await core.getInstance('eth', 0.1)
})
after(async function () {
this.timeout(0)
await ganacheProvider.send('evm_revert', [snapshotId])
})
afterEach(() => {
dai = dai.connect(whale)
})
it('buildDepositTransaction: build a single eth deposit tx and succeed', async () => {
const signer = ganacheProvider.getSigner()
const initBal = await signer.getBalance()
const tx = await core.buildDepositTransaction(smallestEth)
const response = await signer.sendTransaction(tx.request)
const receipt = await response.wait()
noteObj = {
blockNumber: receipt.blockNumber,
transactionHash: receipt.transactionHash,
args: {
commitment: '',
leafIndex: 0,
timestamp: response.timestamp
}
}
console.log(receipt, '\n')
note = Primitives.parseNote(tx.note!)
const endBal = await signer.getBalance()
expect(initBal).to.equal(parseUnits('1000'))
expect(endBal).to.be.lte(parseUnits('999.9'))
}).timeout(0)
it('buildDepositProofs: it should be able to build', async () => {
try {
const instance = await core.getInstance('eth', 0.1)
const signer = ganacheProvider.getSigner()
const withdrawer = ganacheProvider.getSigner(2)
const cache = core.loadCache('Deposits1ETH0.1') as Cache.Base<Docs.Deposit>
noteObj['args'] = {
commitment: note.hexCommitment,
leafIndex:
(await cache!.db.allDocs({ descending: true, limit: 1, include_docs: true }))?.rows[0].doc
?.leafIndex! + 1,
timestamp: noteObj['args']['timestamp']
}
console.log(noteObj, '\n')
await cache!.db.put(new Docs.Deposit(noteObj))
const proof = await core.buildDepositProof(
instance,
{
address: await withdrawer.getAddress()
},
await signer.getAddress(),
note,
{
checkNotesSpent: false
}
)
console.log(proof)
} catch (err) {
console.log(ErrorUtils.ensureError(err).message)
throw err
}
}).timeout(0)
it('buildDepositTransaction: build a single token deposit tx and succeed', async () => {
const dai100K = await core.getInstance('dai', 100000)
const proxy = await core.getProxy()
const depositAmount = parseUnits('100000')
await dai.transfer(needsMoneyAddress, depositAmount)
dai = dai.connect(needsMoney)
const tx = await core.buildDepositTransaction(dai100K)
await dai.approve(proxy.address, depositAmount)
await needsMoney.sendTransaction(tx.request)
expect(await dai.balanceOf(needsMoneyAddress)).to.equal(0)
}).timeout(0)
it('buildDepositTransactions: multiple eth deposits', async () => {
const instances = await core.getInstances(
[0.1, 1, 10, 100].map((el) => {
return { token: 'eth', denomination: el }
})
)
const txs = await core.buildDepositTransactions(instances, {
depositsPerInstance: [1, 2, 3, 4]
})
for (let i = 0, len = txs.length; i < len; i++) {
await expect(() => needsMoney.sendTransaction(txs[i].request)).to.not.be.reverted
}
expect(await dai.balanceOf(needsMoneyAddress)).to.equal(0)
}).timeout(0)
it('buildDepositTransactions: multiple token deposits', async () => {
const instances = await core.getInstances(
[100, 1000, 10000, 100000].map((el) => {
return { token: 'dai', denomination: el }
})
)
const proxy = await core.getProxy()
const depositAmount = parseUnits('432100')
await dai.transfer(needsMoneyAddress, parseUnits('432100'))
dai = dai.connect(needsMoney)
const txs = await core.buildDepositTransactions(instances, {
depositsPerInstance: [1, 2, 3, 4]
})
await dai.approve(proxy.address, depositAmount)
for (let i = 0, len = txs.length; i < len; i++) {
await expect(() => needsMoney.sendTransaction(txs[i].request)).to.not.be.reverted
}
expect(await dai.balanceOf(needsMoneyAddress)).to.equal(0)
}).timeout(0)
it('createInvoice: should be able to create an invoice', async () => {
const instance = await core.getInstance('dai', '1000')
const invoice = await core.createInvoice(instance)
console.log(invoice)
}).timeout(0)
})
})
})
})


@@ -3,7 +3,6 @@ import { AxiosInstance } from 'axios'
export namespace Relayer {
export interface Options {
url: string
address?: string
httpClient: AxiosInstance
}
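For context, the new core test builds its relayer with exactly this trimmed-down options shape; a minimal sketch, assuming the same RegularHttpClient exported from lib/web:

import { Relayer, RegularHttpClient } from 'lib/web'

async function initializeRelayer(): Promise<Relayer> {
  // The URL matches the one used in the tests; an address is no longer part of the options.
  const relayer = new Relayer({
    url: 'https://thornadope.xyz',
    httpClient: new RegularHttpClient()
  })
  // Populates the fields exposed through the new `properties` getter.
  await relayer.fetchProperties()
  return relayer
}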