sdk-monorepo/src/lib/core.ts

// ts-essentials
import { DeepRequired, MarkOptional, MarkRequired } from 'ts-essentials'
// Local types
import { RelayerProperties as RelayerDataProperties } from 'types/sdk/data'
import { Options, Transactions } from 'types/sdk/core'
import { ZKDepositData, InputFor } from 'types/sdk/crypto'
import { TornadoInstance, TornadoProxy } from 'types/deth'
// External imports
import { BigNumber, EventFilter, providers } from 'ethers'
import { parseUnits } from 'ethers/lib/utils'
import { bigInt } from 'snarkjs'
// @ts-ignore
import { parseIndexableString } from 'pouchdb-collate'
// Local imports
import { Docs, Cache, Types as DataTypes, Json, Constants, OnchainData } from 'lib/data'
import { Primitives } from 'lib/crypto'
import { Contracts, Chain } from 'lib/chain'
import { ErrorUtils, ObjectUtils } from 'lib/utils'
type Provider = providers.Provider
type BackupDepositDoc = {
network: string
denomination: string
token: string
invoice?: string
note?: string
}
type RelayerProperties = MarkOptional<
Pick<RelayerDataProperties, 'address' | 'serviceFeePercent' | 'prices'>,
'serviceFeePercent' | 'prices'
>
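/**
 * High-level entry point of the SDK: wraps a Chain (provider) handle, per-instance event caches
 * and typed Tornado contract bindings, and exposes helpers for building deposit transactions,
 * syncing events and constructing withdrawal proofs.
 *
 * Illustrative usage (a minimal sketch; the RPC URL and instance parameters are placeholders):
 *
 * ```ts
 * import { providers } from 'ethers'
 *
 * const core = new Core(new providers.JsonRpcProvider('http://localhost:8545'))
 * const instance = await core.getInstance('eth', 0.1)
 * const [tx] = await core.buildDepositTxs([instance])
 * // tx.request can then be sent with any connected ethers Signer, and tx.note must be stored safely
 * ```
 */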
export class Core {
chain: Chain
caches: Map<string, Cache.Base<Docs.Base>>
instances: Map<string, TornadoInstance>
constructor(provider: providers.Provider) {
this.chain = new Chain(provider)
this.caches = new Map<string, Cache.Syncable<Docs.Base>>()
this.instances = new Map<string, TornadoInstance>()
}
connect(provider: Provider): void {
this.chain.provider = provider
}
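  /**
   * Resolves typed contract bindings for multiple Tornado instances on the currently connected chain.
   * @param keys Token / denomination pairs, e.g. `{ token: 'eth', denomination: 0.1 }` (values are illustrative).
   * @returns The instances in the same order as the supplied keys.
   */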
async getInstances(
keys: Array<{ token: string; denomination: number | string }>
): Promise<Array<TornadoInstance>> {
const chainId = await this.chain.getChainId()
return Promise.all(
keys.map((key) =>
Contracts.getInstance(String(chainId), key.token, String(key.denomination), this.chain.provider)
)
)
}
async getInstance(token: string, denomination: number | string): Promise<TornadoInstance> {
const chainId = await this.chain.getChainId()
return Contracts.getInstance(String(chainId), token, String(denomination), this.chain.provider)
}
async getProxy(): Promise<TornadoProxy> {
const chainId = await this.chain.getChainId()
return Contracts.getProxy(String(chainId), this.chain.provider)
}
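  /**
   * Convenience wrapper around `buildDepositProofs` for a single withdrawal.
   * See `buildDepositProofs` below for the meaning of each parameter.
   */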
async buildDepositProof(
instance: TornadoInstance,
relayerProperties: RelayerProperties,
recipientAddress: string,
zkDepositsData: ZKDepositData,
options?: Options.Core.BuildDepositProof
): Promise<Array<string>> {
return (
await this.buildDepositProofs(
instance,
relayerProperties,
[recipientAddress],
[zkDepositsData],
options
)
)[0]
}
/**
   * @param instance The Tornado instance which will be withdrawn from.
   * @param relayerProperties The properties of the relayer that will be used for the withdrawals. If the service fee is 0, it is assumed that no relayer is involved and a manual wallet withdrawal is being made. These properties are included in the ZK proof.
   * @param recipientAddresses The recipient addresses which should receive the withdrawals, in order.
   * @param zkDepositsData The public and private values, reconstructed from the deposit note and generated during the building of deposit transactions, used to build the proof-of-knowledge statement for each withdrawal (in this context).
   * @param options Options which, most importantly, allow the caller to specify whether ETH is being purchased, whether to check proof data validity, and how to modulate the gas prices used to calculate the fees paid to the relayer.
   * @returns The proofs, for which the caller should then decide whether to use a relayer (recommended, but choose it carefully) or their own wallet (if needed).
*/
async buildDepositProofs(
instance: TornadoInstance,
relayerProperties: RelayerProperties,
recipientAddresses: Array<string>,
zkDepositsData: Array<ZKDepositData>,
options?: Options.Core.BuildDepositProof
): Promise<Array<Array<string>>> {
// Extract commitments and nullifier hashes
const hexCommitments: string[] = []
const hexNullifierHashes: string[] = []
const purchaseAmounts = options?.ethPurchaseAmounts ?? new Array(zkDepositsData.length)
console.log('\nChecking inputs.\n')
if (zkDepositsData.length !== recipientAddresses.length)
throw ErrorUtils.getError(
'Core.buildDepositProofs: the number of recipients must equal the length of zkDepositsData.'
)
    if (zkDepositsData.length !== purchaseAmounts.length)
      throw ErrorUtils.getError(
        'Core.buildDepositProofs: if purchase amounts are specified, their number must equal the length of zkDepositsData.'
      )
zkDepositsData.forEach((deposit) => {
hexCommitments.push(deposit.hexCommitment)
hexNullifierHashes.push(deposit.hexNullifierHash)
})
console.log('\nGetting lookup keys.\n')
// Determine cache name
const lookupKeys = await this.getInstanceLookupKeys(instance.address)
const name = 'Deposits' + (lookupKeys.network + lookupKeys.token + lookupKeys.denomination).toUpperCase()
console.log('\nLeaves and indices.\n')
// Find all leaves & indices by reading from cache
const [leaves, leafIndices] = await this._findLeavesAndIndices(name, hexCommitments)
const invalidCommitments: string[] = []
// Determine whether we will be checking whether notes are spent
const checkSpent = options?.checkNotesSpent !== false
const spentNotes: string[] = []
console.log('\nNote checking.\n')
    // If yes, immediately check with the supplied Tornado instance (isSpentArray returns true for spent nullifier hashes)
    const checkSpentArray = checkSpent ? await instance.isSpentArray(hexNullifierHashes) : undefined
    // Check whether a commitment has not been found among the synced deposits, meaning that it is invalid.
    // leafIndices[i] corresponds to hexCommitments[i]; unmatched commitments are left at index 0.
    for (let i = 0, len = zkDepositsData.length; i < len; i++) {
      if (!leafIndices[i]) invalidCommitments.push(hexCommitments[i])
      if (checkSpent && checkSpentArray![i]) spentNotes.push(hexNullifierHashes[i])
    }
// If something is wrong, throw
const commitmentsAreInvalid = invalidCommitments.length !== 0
const notesAreSpent = spentNotes.length !== 0
console.log('\nErrors.\n')
if (commitmentsAreInvalid || notesAreSpent)
throw ErrorUtils.getError(
`Core.buildDepositProofs: ` +
(commitmentsAreInvalid
? `following commitments are invalid:\n\n${invalidCommitments.join('\n')}\n\n`
: '') +
(notesAreSpent
? `${
commitmentsAreInvalid ? 'and ' : ''
}following notes are already spent or invalid:\n\n${spentNotes.join('\n')}\n\n`
: '')
)
console.log('\nMerkle tree.\n')
// Otherwise, build the merkle tree from the leaves
const merkleTree = Primitives.buildMerkleTree({
height: options?.merkleTreeHeight ?? Constants.MERKLE_TREE_HEIGHT,
leaves: leaves
})
const root: string = merkleTree.root()
const checkKnownRoot: boolean = options?.checkKnownRoot ?? true
// Check whether the root is valid
if (checkKnownRoot && !(await instance.isKnownRoot(root)))
throw ErrorUtils.getError(
'Core.buildDepositProofs: the merkle tree created is not valid, something went wrong with syncing.'
)
console.log('\nProof data invariant.\n')
// Rest of note invariant arguments
const inputsForProofs: InputFor.ZKProof[] = []
const gasPrice = options?.gasPrice ?? (await this.chain.getGasPrice())
    // 10% cushion on top of the gas price used in the relayer fee estimate
    const gasPriceCushion = gasPrice.mul(10).div(100)
// In reality, if a manual withdraw is made, we don't differentiate it from a relayer withdraw
// Since it is only serviceFee 0 AND without a token price, the function will not buy more tokens
const serviceFee = relayerProperties.serviceFeePercent ?? 0
const tokenPrice = relayerProperties.prices?.get(lookupKeys.token)
const decimals =
// @ts-expect-error
bigInt(10).pow(
options?.tokenDecimals ?? (await OnchainData.getTokenDecimals(lookupKeys.network, lookupKeys.token))
)
const toWithdraw = BigNumber.from(lookupKeys.denomination).mul(decimals)
// TODO: Decide if necessary
if (!tokenPrice && lookupKeys.token !== (await this.chain.getChainSymbol()))
throw ErrorUtils.getError(
'Core.buildDepositProofs: a token price MUST be supplied if the token withdrawn is not native.'
)
console.log('\nConstruct.\n')
// Compute proofs
for (let i = 0, len = zkDepositsData.length; i < len; i++) {
inputsForProofs.push({
public: {
root: root,
tree: merkleTree,
leafIndex: leafIndices[i],
hexNullifierHash: zkDepositsData[i].hexNullifierHash,
recipientAddress: recipientAddresses[i],
relayerAddress: relayerProperties.address,
fee: this._calcWithdrawalFee(
toWithdraw,
decimals,
gasPrice,
gasPriceCushion,
serviceFee,
purchaseAmounts[i],
tokenPrice
),
          // @ts-expect-error
          refund: purchaseAmounts[i] ? bigInt(purchaseAmounts[i].toString()) : bigInt(0)
},
private: {
nullifier: zkDepositsData[i].nullifier,
secret: zkDepositsData[i].secret
}
})
}
console.log('\nCalc and return.\n')
return await Primitives.calcDepositProofs(inputsForProofs)
}
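  /**
   * Computes the fee (denominated in the withdrawn token) that is encoded into the proof.
   * Intended formula (a sketch of the usual relayer fee calculation):
   * fee = toWithdraw * relayerServiceFee / 100 + txCost (+ the ETH purchase converted via tokenPriceInEth),
   * where txCost assumes a 5e5 gas limit and a cushion on top of the gas price.
   */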
private _calcWithdrawalFee(
toWithdraw: BigNumber,
decimals: BigNumber,
gasPrice: BigNumber,
gasPriceCushion: BigNumber,
relayerServiceFee: number,
ethBought?: BigNumber,
tokenPriceInEth?: BigNumber
): typeof bigInt {
    // Express the (possibly fractional) fee percentage as an integer ratio so BigNumber math stays exact
    const factor = 10 ** ((String(relayerServiceFee).split('.')[1] ?? '').length)
    const baseRelayerFee = toWithdraw.mul(Math.round(relayerServiceFee * factor)).div(factor * 100)
const txCost = gasPrice.add(gasPriceCushion).mul(5e5)
if (ethBought && tokenPriceInEth) {
// @ts-expect-error
return bigInt(txCost.add(ethBought).mul(decimals).div(tokenPriceInEth).add(baseRelayerFee).toString())
}
// @ts-expect-error
else return bigInt(txCost.add(baseRelayerFee).toString())
}
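  /**
   * Loads deposit notes previously backed up to the `DepositNotes` cache and parses them.
   * @param indexes If supplied, only notes whose document id is contained in this list are loaded.
   * @param keys If supplied, notes are additionally filtered by network, token and/or denomination.
   */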
async loadNotes(
indexes?: Array<number>,
keys?: Partial<DataTypes.Keys.InstanceLookup>
): Promise<Array<ZKDepositData>> {
const rows = await Cache.loadContents<Docs.Note>('DepositNotes')
let docs: Array<Docs.Note | undefined> = []
let notes: Array<string> = []
    if (indexes)
      for (let i = 0, len = rows.length; i < len; i++) {
        const id = parseIndexableString(rows[i].id)[0]
        if (indexes.includes(id)) docs.push(rows[i].doc)
      }
    else docs = rows.map((row) => row.doc)
if (keys)
docs.forEach((doc) => {
const idNetworkMatches = doc && keys.network ? keys.network === doc?.network : true
const andTokenSymbolMatches = idNetworkMatches && (keys.token ? keys.token === doc?.token : true)
const lastlyDenominationMatches =
andTokenSymbolMatches && (keys.denomination ? keys.denomination === doc?.denomination : true)
if (lastlyDenominationMatches && doc?.note) notes.push(doc.note)
})
else notes = docs.filter((doc) => ObjectUtils.exists(doc?.note)).map((doc) => doc!.note)
return this.parseNotes(notes)
}
parseNotes(notes: Array<string>): Array<ZKDepositData> {
return notes.map((note) => Primitives.parseNote(note))
}
parseNote(note: string): ZKDepositData {
return this.parseNotes([note])[0]
}
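  /**
   * Builds a single deposit invoice (the hex commitment only, without a populated transaction request),
   * backing up both the generated note and the invoice by default.
   */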
async createInvoice(
instance: TornadoInstance,
options?: Omit<Options.Core.Invoice, 'depositsPerInstance'>
): Promise<Transactions.Invoice> {
    let opts: Options.Core.Invoice = options ?? {}
    opts.depositsPerInstance = [1]
    return (await this.createInvoices([instance], opts))[0]
}
async createInvoices(
instances: Array<TornadoInstance>,
options?: Options.Core.Invoice
): Promise<Array<Transactions.Invoice>> {
if (!options) options = {}
if (!options.backup) options.backup = {}
options.backup.invoices = options.backup.invoices ?? true
options.backup.notes = options.backup.notes ?? true
options.doNotPopulate = options.doNotPopulate ?? true
return this.buildDepositTxs(instances, options)
}
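  /**
   * Convenience wrapper around `buildDepositTxs` for a single instance with a single deposit.
   *
   * Illustrative usage (a sketch; assumes `core` is a connected Core and `signer` an ethers Signer):
   *
   * ```ts
   * const instance = await core.getInstance('eth', 0.1)
   * const tx = await core.buildDepositTx(instance)
   * await signer.sendTransaction(tx.request)
   * // tx.note must be stored safely, it is required to withdraw later
   * ```
   */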
async buildDepositTx(
instance: TornadoInstance,
options?: Options.Core.Deposit
): Promise<Transactions.Deposit> {
let opts: Options.Core.Deposit = options ?? {}
opts.depositsPerInstance = [1]
return (await this.buildDepositTxs([instance], opts))[0]
}
async buildDepositTxs(
instances: Array<TornadoInstance>,
options?: Options.Core.Deposit
): Promise<Array<Transactions.Deposit>> {
const depositsPerInstance = options?.depositsPerInstance ?? new Array<number>(instances.length).fill(1)
const doNotPopulate = options?.doNotPopulate ?? false
const backupNotes = options?.backup?.notes ?? true
const backupInvoices = options?.backup?.invoices ?? false
    if (depositsPerInstance.length !== instances.length)
      throw ErrorUtils.getError(
        'Core.buildDepositTxs: the number of depositsPerInstance elements must equal the number of instances!'
      )
const notesToBackup: Array<BackupDepositDoc> = []
const invoicesToBackup: Array<BackupDepositDoc> = []
const txs: Array<Transactions.Deposit> = []
const chainId = await this.chain.getChainId()
const proxy: TornadoProxy = await Contracts.getProxy(String(chainId), this.chain.provider)
for (let i = 0, nInstances = instances.length; i < nInstances; i++) {
const lookupKeys = await this.getInstanceLookupKeys(instances[i].address)
const pathstring = lookupKeys.network + lookupKeys.token + lookupKeys.denomination
for (let d = 0, nDeposits = depositsPerInstance[i]; d < nDeposits; d++) {
const deposit = Primitives.createDeposit()
const note = Primitives.createNote(deposit.preimage)
if (backupNotes)
notesToBackup.push({
network: lookupKeys.network,
denomination: lookupKeys.denomination,
token: lookupKeys.token,
note: note
})
if (backupInvoices)
invoicesToBackup.push({
network: lookupKeys.network,
denomination: lookupKeys.denomination,
token: lookupKeys.token,
invoice: deposit.hexCommitment
})
if (!doNotPopulate) {
txs.push({
request: {
to: proxy.address,
data: proxy.interface.encodeFunctionData('deposit', [
instances[i].address,
deposit.hexCommitment,
[]
]),
value: lookupKeys.token == 'eth' ? parseUnits(lookupKeys.denomination) : BigNumber.from(0)
},
note: pathstring + '_' + note,
invoice: pathstring + '_' + deposit.hexCommitment
})
} else
txs.push({
request: {},
note: pathstring + '_' + note,
invoice: pathstring + '_' + deposit.hexCommitment
})
}
}
if (backupNotes) await this._backupDepositData(new Cache.Base<Docs.Note>('DepositNotes'), notesToBackup)
if (backupInvoices)
await this._backupDepositData(new Cache.Base<Docs.Invoice>('DepositInvoices'), invoicesToBackup)
return txs
}
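  /**
   * Persists generated notes or invoices into the given cache, continuing from the cache's
   * current `update_seq` so that document ids stay sequential.
   */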
private async _backupDepositData<T extends Docs.Note | Docs.Invoice>(
cache: Cache.Base<T>,
backupData: Array<BackupDepositDoc>
): Promise<void> {
let id = +(await cache.db.info()).update_seq
await cache.db
.bulkDocs(
backupData.map((entry) => {
if (entry.note)
return new Docs.Note(++id, entry.network, entry.token, entry.denomination, entry.note)
else if (entry.invoice)
return new Docs.Invoice(++id, entry.network, entry.token, entry.denomination, entry.invoice)
}) as Array<T>
)
.catch((err) => {
throw ErrorUtils.ensureError(err)
})
await cache.close().catch((err) => {
throw ErrorUtils.ensureError(err)
})
}
loadCache<T extends Docs.Base, C extends Cache.Base<T>>(cacheName: string): C {
if (!this.caches.has(cacheName)) {
this.caches.set(cacheName, new Cache.Base<T>(cacheName))
}
return this.caches.get(cacheName) as C
}
async syncMultiple(instances: Array<TornadoInstance>, syncOptions?: Options.Core.Sync): Promise<void> {
for (const instance of instances) {
await this.sync(instance, syncOptions)
}
}
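  /**
   * Synchronizes the selected event caches (deposits by default) for a single instance, then keeps
   * the instance and its cache around for later proof building.
   *
   * Illustrative usage (a sketch; the block options shown are placeholders):
   *
   * ```ts
   * await core.sync(instance, { deposit: true, blocks: { blockDelta: 5000 } })
   * ```
   */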
async sync(instance: TornadoInstance, syncOptions?: Options.Core.Sync): Promise<void> {
// Get some data
const lookupKeys = await this.getInstanceLookupKeys(instance.address)
const populatedSyncOpts = await this._populateSyncOpts(lookupKeys, syncOptions)
const actions = Object.entries(populatedSyncOpts).filter((el) => el[1] === true) as [string, boolean][]
// Synchronize
for (let i = 0, bound = actions.length; i < bound; i++) {
let action = actions[i][0].charAt(0).toUpperCase() + actions[i][0].slice(1)
await this._sync(action, lookupKeys, instance, populatedSyncOpts)
}
}
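  /**
   * Core synchronization routine for a single event type ('Deposit' or 'Withdrawal'): resumes from
   * the last cached block, then walks the block range in `blockDelta`-sized windows through the
   * cache's request pooler (bounded by `concurrencyLimit`), bulk-writing fetched events as docs.
   */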
private async _sync(
action: string,
lookupKeys: DataTypes.Keys.InstanceLookup,
instance: TornadoInstance,
syncOptions: DeepRequired<Options.Core.Sync>
): Promise<void> {
const name = `${action + 's'}${lookupKeys.network}${lookupKeys.token.toUpperCase()}${
lookupKeys.denomination
}`,
pathstring = name.substring(action.length).toLowerCase()
let cache: Cache.Syncable<Docs.Base>,
toDoc: (_: any) => Docs.Base,
filter: EventFilter,
numEntries: number
if (action == 'Deposit') {
toDoc = (resp: any) => new Docs.Deposit(resp)
cache = this.caches.has(name)
? (this.caches.get(name)! as Cache.Deposit)
: new Cache.Deposit(name, syncOptions.cache)
filter = instance.filters.Deposit(null, null, null)
} else {
toDoc = (resp: any) => new Docs.Withdrawal(resp)
cache = this.caches.has(name)
? (this.caches.get(name)! as Cache.Withdrawal)
: new Cache.Withdrawal(name, syncOptions.cache)
filter = instance.filters.Withdrawal(null, null, null, null)
}
// Assign pooler
cache.sync.pooler = await cache.sync.initializePooler(cache.getCallbacks(instance))
// Decide whether we have a latest block
numEntries = (await cache.db.info()).doc_count
// Check for synced blocks
if (0 < numEntries) {
const [lastSyncedBlock, ,] = parseIndexableString(
(await cache.db.allDocs({ descending: true, limit: 1 })).rows[0].id
)
syncOptions.blocks.startBlock =
lastSyncedBlock < syncOptions.blocks.startBlock ? syncOptions.blocks.startBlock : lastSyncedBlock
syncOptions.blocks.blockDelta = this._getBlockDelta(syncOptions)
}
// Start synchronizing
let dbPromises = []
for (
let currentBlock = syncOptions.blocks.startBlock,
blockDelta = syncOptions.blocks.blockDelta,
targetBlock = syncOptions.blocks.targetBlock,
concurrencyLimit = syncOptions.cache.sync.concurrencyLimit;
currentBlock < targetBlock;
currentBlock += blockDelta
) {
if (cache.sync.pooler.pending < concurrencyLimit) {
const sum = currentBlock + blockDelta
if (currentBlock + blockDelta < targetBlock) {
await cache.sync.pooler.pool(currentBlock, sum)
        } else {
          // Clamp the final window to the target block
          await cache.sync.pooler.pool(currentBlock, targetBlock)
        }
} else {
let res: Array<any> = await cache.sync.pooler.race()
if (res.length != 0)
dbPromises.push(
cache.db.bulkDocs(res.map((el) => toDoc(el))).catch((err) => {
throw ErrorUtils.ensureError(err)
})
)
currentBlock -= blockDelta
}
}
// Immediately start listening if we're doing this
if (syncOptions.cache.sync.listen)
instance = instance.on(filter, (...eventArgs) => {
cache.db.put(toDoc(eventArgs[eventArgs.length - 1]))
})
// Then wait for all pooler requests to resolve
let results = await cache.sync.pooler.all()
    // Then transform them; we know the shape in advance
results = results.reduce((res: any[], response: any[]) => {
if (response[0]) response.forEach((el: any) => res.push(toDoc(el)))
return res
}, [])
// Then wait for old dbPromises to resolve
await Promise.all(dbPromises)
// Add the last docs
await cache.db.bulkDocs(results).catch((err) => {
throw ErrorUtils.ensureError(err)
})
// Finally, store the objects
if (!this.instances.has(pathstring)) this.instances.set(pathstring, instance)
if (!this.caches.has(name)) this.caches.set(name, cache)
}
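  /**
   * Fills in defaults for any unspecified sync options: deposits only, from the instance's deploy
   * block up to the latest block, a block delta of 1/20th of the range, a persistent leveldb cache
   * and a concurrency limit of 8.
   */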
private async _populateSyncOpts(
lookupKeys: DataTypes.Keys.InstanceLookup,
syncOptions?: Options.Core.Sync
): Promise<DeepRequired<Options.Core.Sync>> {
// Assign nonexistent
if (!syncOptions) syncOptions = {}
if (!syncOptions.blocks) syncOptions.blocks = {}
if (!syncOptions.cache) syncOptions.cache = { db: {}, sync: {} }
if (!syncOptions.cache.sync) syncOptions.cache.sync = {}
if (!syncOptions.cache.db) syncOptions.cache.db = {}
// Prepare options
// deposit & withdraw
const both = syncOptions.deposit === undefined && syncOptions.withdrawal === undefined
syncOptions.deposit = syncOptions.deposit ?? both
syncOptions.withdrawal = syncOptions.withdrawal ?? false
// blocks
syncOptions.blocks.startBlock =
syncOptions.blocks.startBlock ??
(await OnchainData.getInstanceDeployBlockNum(
lookupKeys.network,
lookupKeys.token,
lookupKeys.denomination
))
syncOptions.blocks.targetBlock = syncOptions.blocks.targetBlock ?? (await this.chain.latestBlockNum())
syncOptions.blocks.blockDelta = this._getBlockDelta(syncOptions)
// cache
// db
syncOptions.cache.db.persistent = syncOptions.cache.db.persistent ?? true
syncOptions.cache.db.adapter = syncOptions.cache.db.adapter ?? 'leveldb'
// sync
syncOptions.cache.sync.concurrencyLimit = syncOptions.cache.sync.concurrencyLimit ?? 8
syncOptions.cache.sync.listen = syncOptions.cache.sync.listen ?? false
return syncOptions as DeepRequired<Options.Core.Sync>
}
private _getBlockDelta(syncOptions?: Options.Core.Sync): number {
return Math.floor(
syncOptions?.blocks?.blockDelta ??
(syncOptions!.blocks!.targetBlock! - syncOptions!.blocks!.startBlock!) / 20
)
}
/**
   * @param instanceName The name of the instance as created in the `_sync` function.
   * @param commitments The commitments whose leaf indices should be noted down separately.
   * @returns A tuple of all leaves (as decimal strings, in insertion order) followed by the array of leaf indices matched to the provided commitments, position by position. Commitments which could not be matched, and are therefore probably invalid, keep the value `0`.
*/
private async _findLeavesAndIndices(
instanceName: string,
commitments: Array<string>
): Promise<[Array<string>, Array<number>]> {
const indices = new Array<number>(commitments.length).fill(0)
const leaves: Array<string> = []
// Either load all deposit events from memory or from cache
let cache: Cache.Base<Docs.Deposit>
if (!this.caches.has(instanceName)) {
cache = new Cache.Base<Docs.Deposit>(instanceName)
} else cache = this.caches.get(instanceName) as Cache.Base<Docs.Deposit>
const docs = await cache.db.allDocs()
// If no docs in cache throw and stop
if (docs.total_rows === 0) {
await cache.clear()
throw ErrorUtils.getError(
`Core.buildMerkleTree: events for instance ${instanceName} have not been synchronized.`
)
}
    // Otherwise start looking for commitment leaf indices and also pick up all other leaves on the way
    for (const row of docs.rows) {
      const [, leafIndex, loadedCommitment] = parseIndexableString(row.id)
      const index = commitments.findIndex((commitment) => commitment === loadedCommitment)
      // If a searched-for commitment is found, record its leaf index at the matching position
      // (the commitments array is not mutated, so positions keep lining up with the indices array)
      if (index !== -1) indices[index] = leafIndex
      // In any case push every leaf
      leaves.push(BigNumber.from(loadedCommitment).toString())
    }
    // Return all leaves plus the matched leaf indices
return [leaves, indices]
}
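  /**
   * Reverse-resolves an instance address into its { network, token, denomination } lookup keys
   * using the bundled onchain/quickLookup.json mapping.
   */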
async getInstanceLookupKeys(instanceAddress: string): Promise<DataTypes.Keys.InstanceLookup> {
// lookup some stuff first
const lookupObj: { [key: string]: string } = Json.getValue(await Json.load('onchain/quickLookup.json'), [
'instanceAddresses'
])
const pathstring: string = Object.entries(lookupObj).find((el) => el[1] === instanceAddress)![0]
const network = pathstring.match('[0-9]+')![0],
token = pathstring.substring(network.length).match('[a-z]+')![0],
denomination = pathstring.substring(network.length + token.length)
return {
network: network,
token: token,
denomination: denomination
}
}
}
export { Transactions, Options }