2023-04-08 19:17:52 +00:00
|
|
|
// ts-essentials
|
2023-04-19 17:01:40 +00:00
|
|
|
import { DeepRequired, MarkOptional } from 'ts-essentials'
|
2023-04-08 19:17:52 +00:00
|
|
|
|
|
|
|
// Local types
|
2023-04-17 21:56:57 +00:00
|
|
|
import { RelayerProperties as RelayerDataProperties } from 'types/sdk/data'
|
|
|
|
import { Options, Transactions } from 'types/sdk/core'
|
2023-04-11 19:36:32 +00:00
|
|
|
import { ZKDepositData, InputFor } from 'types/sdk/crypto'
|
2023-04-08 19:17:52 +00:00
|
|
|
import { TornadoInstance, TornadoProxy } from 'types/deth'
|
|
|
|
|
2023-04-11 19:36:32 +00:00
|
|
|
// External imports
|
2023-04-19 17:01:40 +00:00
|
|
|
import { EventEmitter } from 'stream'
|
2023-04-08 19:17:52 +00:00
|
|
|
import { BigNumber, EventFilter, providers } from 'ethers'
|
2023-04-17 21:56:57 +00:00
|
|
|
import { parseUnits } from 'ethers/lib/utils'
|
|
|
|
import { bigInt } from 'snarkjs'
|
2023-04-08 19:17:52 +00:00
|
|
|
|
2023-04-11 19:36:32 +00:00
|
|
|
// @ts-ignore
|
|
|
|
import { parseIndexableString } from 'pouchdb-collate'
|
|
|
|
|
2023-04-17 21:56:57 +00:00
|
|
|
// Local imports
|
2023-04-19 17:01:40 +00:00
|
|
|
import { Docs, Cache, Types as DataTypes, Json, Constants, Onchain } from 'lib/data'
|
2023-04-08 19:17:52 +00:00
|
|
|
import { Primitives } from 'lib/crypto'
|
2023-04-17 21:56:57 +00:00
|
|
|
import { Contracts, Chain } from 'lib/chain'
|
2023-04-19 17:01:40 +00:00
|
|
|
import { ErrorUtils, ObjectUtils, AsyncUtils } from 'lib/utils'
|
2023-04-08 19:17:52 +00:00
|
|
|
|
2023-04-11 19:36:32 +00:00
|
|
|
// Shorthand for ethers' abstract provider type.
type Provider = providers.Provider

// Shape of note/invoice backup entries written to the deposit caches.
// Exactly one of `invoice` or `note` is expected to be set per entry
// (see `_backupDepositData`, which dispatches on whichever is present).
type BackupDepositDoc = {
  network: string
  denomination: string
  token: string
  invoice?: string
  note?: string
}

// Relayer data needed for withdrawal-fee computation during proof building.
// `serviceFeePercent` and `prices` are optional: a fee of 0 with no token
// price is treated as a manual (non-relayer) withdrawal in `buildDepositProofs`.
type RelayerProperties = MarkOptional<
  Pick<RelayerDataProperties, 'address' | 'serviceFeePercent' | 'prices'>,
  'serviceFeePercent' | 'prices'
>
|
|
|
|
|
2023-04-19 17:01:40 +00:00
|
|
|
/**
 * Core is the main entry point of the SDK: it builds deposit transactions,
 * withdrawal proofs and invoices, and synchronizes event caches for Tornado
 * instances. It emits 'debug' and 'sync' events while working.
 */
export class Core extends EventEmitter {
  // Chain/provider abstraction used for all onchain reads.
  chain: Chain
  // Open caches keyed by cache name (e.g. 'Deposits' + pathstring uppercased).
  caches: Map<string, Cache.Base<Docs.Base>>
  // Instance contracts keyed by pathstring (network + token + denomination).
  instances: Map<string, TornadoInstance>

  constructor(provider: providers.Provider) {
    super()
    this.chain = new Chain(provider)
    // NOTE(review): the field is declared Map<string, Cache.Base<...>> but the
    // map is constructed for Cache.Syncable values — confirm which is intended.
    this.caches = new Map<string, Cache.Syncable<Docs.Base>>()
    this.instances = new Map<string, TornadoInstance>()
  }
|
|
|
|
|
2023-04-11 19:36:32 +00:00
|
|
|
/**
 * Swaps the provider backing all subsequent onchain calls.
 * @param provider The new ethers provider to use.
 */
connect(provider: Provider): void {
  this.chain.provider = provider
}
|
|
|
|
|
|
|
|
async getInstances(
|
|
|
|
keys: Array<{ token: string; denomination: number | string }>
|
|
|
|
): Promise<Array<TornadoInstance>> {
|
|
|
|
const chainId = await this.chain.getChainId()
|
2023-04-11 19:36:32 +00:00
|
|
|
return Promise.all(
|
2023-04-08 19:17:52 +00:00
|
|
|
keys.map((key) =>
|
2023-04-11 19:36:32 +00:00
|
|
|
Contracts.getInstance(String(chainId), key.token, String(key.denomination), this.chain.provider)
|
2023-04-08 19:17:52 +00:00
|
|
|
)
|
|
|
|
)
|
|
|
|
}
|
|
|
|
|
|
|
|
async getInstance(token: string, denomination: number | string): Promise<TornadoInstance> {
|
|
|
|
const chainId = await this.chain.getChainId()
|
2023-04-11 19:36:32 +00:00
|
|
|
return Contracts.getInstance(String(chainId), token, String(denomination), this.chain.provider)
|
2023-04-08 19:17:52 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
async getProxy(): Promise<TornadoProxy> {
|
|
|
|
const chainId = await this.chain.getChainId()
|
2023-04-17 21:56:57 +00:00
|
|
|
return Contracts.getProxy(String(chainId), this.chain.provider)
|
2023-04-08 19:17:52 +00:00
|
|
|
}
|
|
|
|
|
2023-04-11 19:36:32 +00:00
|
|
|
async buildDepositProof(
|
2023-04-08 19:17:52 +00:00
|
|
|
instance: TornadoInstance,
|
2023-04-11 19:36:32 +00:00
|
|
|
relayerProperties: RelayerProperties,
|
|
|
|
recipientAddress: string,
|
|
|
|
zkDepositsData: ZKDepositData,
|
|
|
|
options?: Options.Core.BuildDepositProof
|
2023-04-17 21:56:57 +00:00
|
|
|
): Promise<Array<string>> {
|
2023-04-11 19:36:32 +00:00
|
|
|
return (
|
|
|
|
await this.buildDepositProofs(
|
|
|
|
instance,
|
|
|
|
relayerProperties,
|
|
|
|
[recipientAddress],
|
|
|
|
[zkDepositsData],
|
|
|
|
options
|
|
|
|
)
|
|
|
|
)[0]
|
2023-04-08 19:17:52 +00:00
|
|
|
}
|
|
|
|
|
2023-04-11 19:36:32 +00:00
|
|
|
/**
 * Builds zero-knowledge withdrawal proofs for a batch of deposits.
 * @param instance This is the Tornado Instance which will be withdrawn from.
 * @param relayerProperties The properties of the relayer that is going to be used for the withdrawals. If the service fee is 0, it is assumed that there is no relayer, but that a manual wallet withdraw is being made. These properties are included in the ZK proof.
 * @param recipientAddresses The recipient addresses which should receive the withdrawals, in order.
 * @param zkDepositsData These represent the public and private values, reconstructed from the deposit note, generated during the building of deposit transactions, used for building the proof of knowledge statement for withdrawal, for each withdrawal (in this context).
 * @param options Numerous options which most importantly allow a user to specify whether he is buying ETH, whether to check proof data validity and finally to modulate the gas prices which will be used to calculate the gas fees paid to the relayer.
 * @returns The proofs for which the user should then decide whether to use a relayer (recommended, but decide carefully which one) or use his own wallet (if needed).
 * @throws If array lengths disagree, if any commitment is invalid or note already
 *         spent, if the reconstructed merkle root is unknown to the instance, or
 *         if a non-native token is withdrawn without a token price.
 */
async buildDepositProofs(
  instance: TornadoInstance,
  relayerProperties: RelayerProperties,
  recipientAddresses: Array<string>,
  zkDepositsData: Array<ZKDepositData>,
  options?: Options.Core.BuildDepositProof
): Promise<Array<Array<string>>> {
  // Extract commitments and nullifier hashes
  const hexCommitments: string[] = []
  const hexNullifierHashes: string[] = []
  // Default: no ETH is being purchased with any withdrawal.
  const purchaseAmounts =
    options?.ethPurchaseAmounts ?? new Array(zkDepositsData.length).fill(BigNumber.from(0))

  if (zkDepositsData.length !== recipientAddresses.length)
    throw ErrorUtils.getError(
      'Core.buildDepositProofs: the number of recipients must equal the length of zkDepositsData.'
    )

  if (zkDepositsData.length !== purchaseAmounts.length)
    throw ErrorUtils.getError(
      'Core.buildDepositProofs: if purchase amounts is specified, it must equal the length of zkDepositsData.'
    )

  zkDepositsData.forEach((deposit) => {
    hexCommitments.push(deposit.hexCommitment)
    hexNullifierHashes.push(deposit.hexNullifierHash)
  })

  // Determine cache name (must match the name `sync` builds for deposit events)
  const lookupKeys = await Onchain.getInstanceLookupKeys(instance.address)
  const name = 'Deposits' + (lookupKeys.network + lookupKeys.token + lookupKeys.denomination).toUpperCase()

  // Find all leaves & indices by reading from cache
  const [leaves, leafIndices] = await this._findLeavesAndIndices(name, hexCommitments)
  const invalidCommitments: string[] = []

  this.emit(
    'debug',
    `\nFound leaves and indices, num leaves: ${leaves.length}, indices: [${leafIndices.join(', ')}]`
  )

  // Determine whether we will be checking whether notes are spent
  const checkSpent = options?.checkNotesSpent !== false
  const spentNotes: string[] = []

  this.emit('debug', `\nCheck spent notes? => ${checkSpent}`)

  // If yes, immediately check it with the supplied Tornado Instance
  const checkSpentArray = checkSpent ? await instance.isSpentArray(hexNullifierHashes) : undefined

  if (checkSpent) this.emit('debug', `\nSpent array: [${checkSpentArray?.join(', ')}]`)

  // Check whether a commitment has not been found in all deposits, meaning that it is invalid
  // (an unmatched commitment leaves leafIndices[i] at its 0 placeholder).
  // Also collect spent nullifier hashes.

  for (let i = 0, len = zkDepositsData.length; i < len; i++) {
    if (!leafIndices[i]) invalidCommitments.push(hexCommitments[i])
    if (checkSpent && checkSpentArray![i]) spentNotes.push(hexNullifierHashes[i])
  }

  // If something is wrong, throw
  const commitmentsAreInvalid = invalidCommitments.length !== 0
  const notesAreSpent = spentNotes.length !== 0

  if (commitmentsAreInvalid || notesAreSpent)
    throw ErrorUtils.getError(
      `Core.buildDepositProofs: ` +
        (commitmentsAreInvalid
          ? `following commitments are invalid:\n\n${invalidCommitments.join('\n')}\n\n`
          : '') +
        (notesAreSpent
          ? `${
              commitmentsAreInvalid ? 'and ' : ''
            }following notes are already spent or invalid:\n\n${spentNotes.join('\n')}\n\n`
          : '')
    )

  // Otherwise, build the merkle tree from the leaves
  const merkleTree = Primitives.buildMerkleTree({
    height: options?.merkleTreeHeight ?? Constants.MERKLE_TREE_HEIGHT,
    leaves: leaves
  })

  const root: string = BigNumber.from(merkleTree.root()).toHexString()
  const checkKnownRoot: boolean = options?.checkKnownRoot ?? true

  this.emit('debug', `\nMerkle root: ${root}, check known? => ${checkKnownRoot}`)

  // Check whether the root is valid (i.e. the cache was fully synced)
  if (checkKnownRoot && !(await instance.isKnownRoot(root)))
    throw ErrorUtils.getError(
      'Core.buildDepositProofs: the merkle tree created is not valid, something went wrong with syncing.'
    )

  // Rest of note invariant arguments
  const inputsForProofs: InputFor.ZKProof[] = []
  const gasPrice = options?.gasPrice ?? (await this.chain.getGasPrice())
  // NOTE(review): the fallback reads options?.gasPrice again (same option as the
  // line above), so a user-supplied gasPrice also becomes the cushion — a
  // dedicated cushion option was probably intended. Confirm.
  const gasPriceCushion = options?.gasPrice ?? gasPrice.mul(10).div(100)

  // In reality, if a manual withdraw is made, we don't differentiate it from a relayer withdraw
  // Since it is only serviceFee 0 AND without a token price, the function will not buy more tokens
  const serviceFeePercent = relayerProperties.serviceFeePercent ?? 0
  const tokenPrice = relayerProperties.prices?.get(lookupKeys.token)
  const decimals = BigNumber.from(10).pow(
    options?.tokenDecimals ?? (await Onchain.getTokenDecimals(lookupKeys.network, lookupKeys.token))
  )
  // Scale a possibly fractional denomination string into integer BigNumber math:
  // multiply out the decimal digits first, then divide them back out after
  // applying the token's decimals.
  const toWithdraw = BigNumber.from(+lookupKeys.denomination * 10 ** lookupKeys.denomination.length)
    .mul(decimals)
    .div(10 ** lookupKeys.denomination.length)
  // NOTE(review): `native` is TRUE when the token differs from the chain symbol,
  // i.e. when the token is NOT native — the name is inverted. Confirm intent.
  const native = lookupKeys.token !== (await this.chain.getChainSymbol())

  // TODO: Decide if necessary
  if (!tokenPrice && native)
    throw ErrorUtils.getError(
      'Core.buildDepositProofs: a token price MUST be supplied if the token withdrawn is not native.'
    )

  this.emit(
    'debug',
    `\nProof building, invariant data: [${[
      gasPrice.toString(),
      gasPriceCushion.toString(),
      serviceFeePercent,
      tokenPrice,
      decimals.toString(),
      toWithdraw.toString()
    ].join(', ')}]\n`
  )

  // Compute proofs
  for (let i = 0, len = zkDepositsData.length; i < len; i++) {
    inputsForProofs.push({
      public: {
        root: root,
        tree: merkleTree,
        leafIndex: leafIndices[i],
        hexNullifierHash: zkDepositsData[i].hexNullifierHash,
        recipientAddress: recipientAddresses[i],
        relayerAddress: relayerProperties.address,
        fee: this._calcWithdrawalFee(
          toWithdraw,
          decimals,
          gasPrice,
          gasPriceCushion,
          serviceFeePercent,
          purchaseAmounts[i],
          // This is our flag whether it's a token or not
          // NOTE(review): given `native` is true for NON-native tokens, this
          // passes tokenPrice only for the native asset and undefined for
          // tokens — which looks inverted relative to _calcWithdrawalFee's
          // tokenPriceInEth conversion branch. Confirm.
          native ? undefined : tokenPrice
        ),
        // @ts-expect-error
        refund: purchaseAmounts[i] ? bigInt(purchaseAmounts[i].toString()) : bigInt(0)
      },
      private: {
        nullifier: zkDepositsData[i].nullifier,
        secret: zkDepositsData[i].secret
      }
    })
  }

  return await Primitives.calcDepositProofs(inputsForProofs)
}
|
|
|
|
|
2023-04-17 21:56:57 +00:00
|
|
|
/**
 * Computes the fee (as a snarkjs bigInt) charged on a withdrawal.
 *
 * Fix: the original computed `toWithdraw.mul(BigNumber.from(fee).mul(factor)).div(factor)`
 * — the `mul(factor)/div(factor)` pair cancels out, the percentage was never
 * divided by 100, and `BigNumber.from` throws for fractional service fees.
 * The fee is now scaled to an integer first and divided by `factor * 100`.
 *
 * @param toWithdraw Withdrawn amount, in token base units.
 * @param decimals 10^tokenDecimals, used to convert ETH cost into token units.
 * @param gasPrice Current gas price.
 * @param gasPriceCushion Extra gas-price margin added on top of `gasPrice`.
 * @param relayerServiceFee Relayer fee as a percentage (may be fractional).
 * @param ethBought ETH amount being purchased with the withdrawal.
 * @param tokenPriceInEth Token price in ETH; when present the tx cost is
 *        converted into token units, otherwise it is charged directly.
 */
private _calcWithdrawalFee(
  toWithdraw: BigNumber,
  decimals: BigNumber,
  gasPrice: BigNumber,
  gasPriceCushion: BigNumber,
  relayerServiceFee: number,
  ethBought: BigNumber,
  tokenPriceInEth?: BigNumber
): typeof bigInt {
  // Scale the (possibly fractional) percentage into a safe integer before
  // entering BigNumber arithmetic.
  const factor = 10 ** String(relayerServiceFee).length
  const scaledFeePercent = Math.round(relayerServiceFee * factor)
  // baseRelayerFee = toWithdraw * relayerServiceFee / 100
  const baseRelayerFee = toWithdraw.mul(scaledFeePercent).div(factor * 100)
  // Worst-case gas cost for a withdrawal, assuming 5e5 gas.
  const txCost = gasPrice.add(gasPriceCushion).mul(5e5)
  if (tokenPriceInEth) {
    // Token withdrawal: convert the ETH-denominated cost into token units.
    // @ts-expect-error
    return bigInt(txCost.add(ethBought).mul(decimals).div(tokenPriceInEth).add(baseRelayerFee).toString())
  }
  // @ts-expect-error
  else return bigInt(txCost.add(baseRelayerFee).toString())
}
|
|
|
|
|
|
|
|
async loadNotes(
|
|
|
|
indexes?: Array<number>,
|
|
|
|
keys?: Partial<DataTypes.Keys.InstanceLookup>
|
|
|
|
): Promise<Array<ZKDepositData>> {
|
|
|
|
const rows = await Cache.loadContents<Docs.Note>('DepositNotes')
|
|
|
|
|
|
|
|
let docs: Array<Docs.Note | undefined> = []
|
|
|
|
let notes: Array<string> = []
|
|
|
|
|
|
|
|
if (indexes)
|
|
|
|
for (let i = 0, len = rows.length; i < len; i++) {
|
2023-04-23 22:01:45 +00:00
|
|
|
const [index, , ,] = parseIndexableString(rows[i].id)[0]
|
|
|
|
if (0 < indexes.findIndex(index)) docs.push(rows[i].doc)
|
2023-04-17 21:56:57 +00:00
|
|
|
}
|
|
|
|
else docs = rows.map((row) => row.doc)
|
|
|
|
|
|
|
|
if (keys)
|
|
|
|
docs.forEach((doc) => {
|
2023-04-19 17:01:40 +00:00
|
|
|
const idNetworkMatches = doc && (keys.network ? keys.network === doc?.network : true)
|
2023-04-17 21:56:57 +00:00
|
|
|
const andTokenSymbolMatches = idNetworkMatches && (keys.token ? keys.token === doc?.token : true)
|
|
|
|
const lastlyDenominationMatches =
|
|
|
|
andTokenSymbolMatches && (keys.denomination ? keys.denomination === doc?.denomination : true)
|
|
|
|
if (lastlyDenominationMatches && doc?.note) notes.push(doc.note)
|
|
|
|
})
|
|
|
|
else notes = docs.filter((doc) => ObjectUtils.exists(doc?.note)).map((doc) => doc!.note)
|
|
|
|
|
|
|
|
return this.parseNotes(notes)
|
|
|
|
}
|
|
|
|
|
|
|
|
parseNotes(notes: Array<string>): Array<ZKDepositData> {
|
|
|
|
return notes.map((note) => Primitives.parseNote(note))
|
|
|
|
}
|
|
|
|
|
|
|
|
/** Parses a single deposit note string into its ZK deposit data. */
parseNote(note: string): ZKDepositData {
  return Primitives.parseNote(note)
}
|
|
|
|
|
2023-04-08 19:17:52 +00:00
|
|
|
async createInvoice(
|
|
|
|
instance: TornadoInstance,
|
2023-04-11 19:36:32 +00:00
|
|
|
options?: Omit<Options.Core.Invoice, 'depositsPerInstance'>
|
2023-04-08 19:17:52 +00:00
|
|
|
): Promise<Transactions.Invoice> {
|
2023-04-11 19:36:32 +00:00
|
|
|
let opts: Options.Core.Invoice = options ?? {}
|
2023-04-08 19:17:52 +00:00
|
|
|
opts.depositsPerInstance = [1]
|
2023-04-11 19:36:32 +00:00
|
|
|
return (await this.createInvoices([instance], options))[0]
|
2023-04-08 19:17:52 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
async createInvoices(
|
|
|
|
instances: Array<TornadoInstance>,
|
2023-04-11 19:36:32 +00:00
|
|
|
options?: Options.Core.Invoice
|
2023-04-08 19:17:52 +00:00
|
|
|
): Promise<Array<Transactions.Invoice>> {
|
2023-04-11 19:36:32 +00:00
|
|
|
if (!options) options = {}
|
|
|
|
if (!options.backup) options.backup = {}
|
|
|
|
options.backup.invoices = options.backup.invoices ?? true
|
|
|
|
options.backup.notes = options.backup.notes ?? true
|
|
|
|
options.doNotPopulate = options.doNotPopulate ?? true
|
2023-04-19 17:01:40 +00:00
|
|
|
return this.buildDepositTransactions(instances, options)
|
2023-04-08 19:17:52 +00:00
|
|
|
}
|
|
|
|
|
2023-04-19 17:01:40 +00:00
|
|
|
async buildDepositTransaction(
|
2023-04-08 19:17:52 +00:00
|
|
|
instance: TornadoInstance,
|
2023-04-11 19:36:32 +00:00
|
|
|
options?: Options.Core.Deposit
|
2023-04-08 19:17:52 +00:00
|
|
|
): Promise<Transactions.Deposit> {
|
2023-04-11 19:36:32 +00:00
|
|
|
let opts: Options.Core.Deposit = options ?? {}
|
2023-04-08 19:17:52 +00:00
|
|
|
opts.depositsPerInstance = [1]
|
2023-04-19 17:01:40 +00:00
|
|
|
return (await this.buildDepositTransactions([instance], opts))[0]
|
2023-04-08 19:17:52 +00:00
|
|
|
}
|
|
|
|
|
2023-04-19 17:01:40 +00:00
|
|
|
/**
 * Builds deposit transactions (and/or note/invoice backups) for a batch of
 * instances, creating fresh deposit secrets per transaction.
 * @param instances The Tornado instances to deposit into.
 * @param options Controls deposit counts per instance, whether to populate the
 *        transaction request, and whether to back up notes/invoices.
 * @returns One `Transactions.Deposit` per requested deposit, in order.
 * @throws If `depositsPerInstance` length does not match `instances` length.
 */
async buildDepositTransactions(
  instances: Array<TornadoInstance>,
  options?: Options.Core.Deposit
): Promise<Array<Transactions.Deposit>> {
  // Default: one deposit per instance.
  const depositsPerInstance = options?.depositsPerInstance ?? new Array<number>(instances.length).fill(1)

  const doNotPopulate = options?.doNotPopulate ?? false
  const backupNotes = options?.backup?.notes ?? true
  const backupInvoices = options?.backup?.invoices ?? false

  if (depositsPerInstance.length != instances.length)
    throw ErrorUtils.getError(
      'Core.buildDepositTx: number of deposit amount elements must equal the number of instances!'
    )

  const notesToBackup: Array<BackupDepositDoc> = []
  const invoicesToBackup: Array<BackupDepositDoc> = []
  const txs: Array<Transactions.Deposit> = []
  const chainId = await this.chain.getChainId()

  // All deposits are routed through the proxy contract.
  const proxy: TornadoProxy = await Contracts.getProxy(String(chainId), this.chain.provider)

  for (let i = 0, nInstances = instances.length; i < nInstances; i++) {
    const lookupKeys = await Onchain.getInstanceLookupKeys(instances[i].address)
    const pathstring = lookupKeys.network + lookupKeys.token + lookupKeys.denomination

    for (let d = 0, nDeposits = depositsPerInstance[i]; d < nDeposits; d++) {
      // Fresh secret/nullifier pair and its note encoding for each deposit.
      const deposit = Primitives.createDeposit()
      const note = Primitives.createNote(deposit.preimage)

      if (backupNotes)
        notesToBackup.push({
          network: lookupKeys.network,
          denomination: lookupKeys.denomination,
          token: lookupKeys.token,
          note: note
        })

      if (backupInvoices)
        invoicesToBackup.push({
          network: lookupKeys.network,
          denomination: lookupKeys.denomination,
          token: lookupKeys.token,
          invoice: deposit.hexCommitment
        })

      if (!doNotPopulate) {
        txs.push({
          request: {
            to: proxy.address,
            data: proxy.interface.encodeFunctionData('deposit', [
              instances[i].address,
              deposit.hexCommitment,
              []
            ]),
            // Only ETH instances carry value; token instances transfer via approval.
            value: lookupKeys.token == 'eth' ? parseUnits(lookupKeys.denomination) : BigNumber.from(0)
          },
          note: pathstring + '_' + note,
          invoice: pathstring + '_' + deposit.hexCommitment
        })
      } else
        txs.push({
          request: {},
          note: pathstring + '_' + note,
          invoice: pathstring + '_' + deposit.hexCommitment
        })
    }
  }

  if (backupNotes)
    await this._backupDepositData(this.loadCache<Cache.Base<Docs.Note>>('DepositNotes'), notesToBackup)

  if (backupInvoices)
    await this._backupDepositData(
      this.loadCache<Cache.Base<Docs.Invoice>>('DepositInvoices'),
      invoicesToBackup
    )

  return txs
}
|
|
|
|
|
|
|
|
/**
 * Persists note/invoice backup entries into the given cache, assigning each
 * a fresh sequential id continuing from the database's update_seq.
 * @param cache The note or invoice cache to write into.
 * @param backupData Entries to persist; each should carry either `note` or
 *        `invoice`.
 */
private async _backupDepositData<T extends Docs.Note | Docs.Invoice>(
  cache: Cache.Base<T>,
  backupData: Array<BackupDepositDoc>
): Promise<void> {
  // Seed ids from the db's current update sequence so new docs sort after
  // existing ones.
  let id = +(await cache.db.info()).update_seq

  await cache.db
    .bulkDocs(
      backupData.map((entry) => {
        if (entry.note)
          return new Docs.Note(++id, entry.network, entry.token, entry.denomination, entry.note)
        else if (entry.invoice)
          return new Docs.Invoice(++id, entry.network, entry.token, entry.denomination, entry.invoice)
        // NOTE(review): an entry with neither `note` nor `invoice` maps to
        // undefined, hidden by the `as Array<T>` cast — confirm callers never
        // produce such entries (current callers set exactly one field).
      }) as Array<T>
    )
    .catch((err) => {
      throw ErrorUtils.ensureError(err)
    })

  // TODO: Decide whether to close caches by default or not
  //await cache.close().catch((err) => {
  //  throw ErrorUtils.ensureError(err)
  //})
}
|
|
|
|
|
2023-04-19 17:01:40 +00:00
|
|
|
/**
 * Returns the withdrawal-event cache with this name, creating and
 * registering it on first use.
 */
loadWithdrawalCache(name: string, options?: Options.Core.Cache): Cache.Withdrawal {
  const existing = this.caches.get(name)
  if (existing) return existing as Cache.Withdrawal
  const created = new Cache.Withdrawal(name, options)
  this.caches.set(name, created)
  return created
}
|
|
|
|
|
|
|
|
/**
 * Returns the deposit-event cache with this name, creating and registering
 * it on first use.
 */
loadDepositCache(name: string, options?: Options.Core.Cache): Cache.Deposit {
  const existing = this.caches.get(name)
  if (existing) return existing as Cache.Deposit
  const created = new Cache.Deposit(name, options)
  this.caches.set(name, created)
  return created
}
|
|
|
|
|
2023-04-23 22:01:45 +00:00
|
|
|
/**
 * Returns the cache registered under `name`, creating a plain `Cache.Base`
 * and registering it on first use. The caller picks the cache subtype via
 * the type parameter.
 */
loadCache<C extends Cache.Base<Docs.Base>>(name: string, options?: Options.Cache.Database): C {
  const existing = this.caches.get(name)
  if (existing) return existing as C
  const created = new Cache.Base(name, options)
  this.caches.set(name, created)
  return created as C
}
|
|
|
|
|
2023-04-08 19:17:52 +00:00
|
|
|
async syncMultiple(instances: Array<TornadoInstance>, syncOptions?: Options.Core.Sync): Promise<void> {
|
|
|
|
for (const instance of instances) {
|
|
|
|
await this.sync(instance, syncOptions)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
 * Synchronizes deposit and/or withdrawal event caches for one instance.
 * Which event kinds are synced is driven by the populated sync options
 * (`deposit` / `withdrawal` booleans).
 * @param instance The Tornado instance whose events are synced.
 * @param syncOptions Partial sync options; missing fields are populated with
 *        defaults by `_populateSyncOpts`.
 */
async sync(instance: TornadoInstance, syncOptions?: Options.Core.Sync): Promise<void> {
  // Get some data
  const lookupKeys = await Onchain.getInstanceLookupKeys(instance.address)

  const populatedSyncOpts = await this._populateSyncOpts(lookupKeys, syncOptions)

  // Keep only top-level options set to boolean true — i.e. 'deposit' and/or
  // 'withdrawal'.
  const actions = Object.entries(populatedSyncOpts).filter((el) => el[1] === true) as [string, boolean][]

  // Synchronize
  for (let i = 0, bound = actions.length; i < bound; i++) {
    // 'deposit' -> 'Deposit', then cache name 'Deposits' + PATHSTRING,
    // matching the name buildDepositProofs reads from.
    const action = actions[i][0].charAt(0).toUpperCase() + actions[i][0].slice(1)
    const pathstring = lookupKeys.network + lookupKeys.token + lookupKeys.denomination
    const name = action + 's' + pathstring.toUpperCase()

    if (action == 'Deposit')
      await this._sync(
        pathstring,
        this.loadDepositCache(name, syncOptions?.cache),
        instance.filters.Deposit(null, null, null),
        instance,
        populatedSyncOpts
      )
    else if (action == 'Withdrawal')
      await this._sync(
        pathstring,
        this.loadWithdrawalCache(name, syncOptions?.cache),
        instance.filters.Withdrawal(null, null, null, null),
        instance,
        populatedSyncOpts
      )
  }
}
|
|
|
|
|
|
|
|
/**
 * Core synchronization loop: pulls event ranges through the cache's pooler,
 * writes the resulting docs into the cache db, and optionally subscribes to
 * live events afterwards. Emits 'debug' progress and 'sync' state events.
 * @param pathstring network + token + denomination key for the instance map.
 * @param cache The syncable cache receiving event documents.
 * @param filter Event filter used only for the optional live listener.
 * @param instance The instance contract events are read from.
 * @param syncOptions Fully populated sync options (mutated in place when a
 *        previously synced block is found).
 */
private async _sync(
  pathstring: string,
  cache: Cache.Syncable<Docs.Base>,
  filter: EventFilter,
  instance: TornadoInstance,
  syncOptions: DeepRequired<Options.Core.Sync>
): Promise<void> {
  // Assign pooler
  cache.sync.initializePooler(cache.getCallbacks(instance), cache.getErrorHandlers())

  // Decide whether we have a latest block
  const numEntries = (await cache.db.info()).doc_count

  // Check for synced blocks: resume from the newest stored doc's block if it
  // is past the requested start block.
  if (0 < numEntries) {
    const [lastSyncedBlock, ,] = parseIndexableString(
      (await cache.db.allDocs({ descending: true, limit: 1 })).rows[0].id
    )
    syncOptions.blocks.startBlock =
      lastSyncedBlock < syncOptions.blocks.startBlock ? syncOptions.blocks.startBlock : lastSyncedBlock
    syncOptions.blocks.blockDelta = this._getBlockDelta(syncOptions)
  }

  // Start synchronizing
  let dbPromises = []

  this.emit(
    'debug',
    syncOptions.blocks.startBlock,
    syncOptions.blocks.targetBlock,
    syncOptions.blocks.blockDelta
  )

  this.emit('sync', 'syncing')

  for (
    let currentBlock = syncOptions.blocks.startBlock,
      blockDelta = syncOptions.blocks.blockDelta,
      targetBlock = syncOptions.blocks.targetBlock,
      concurrencyLimit = syncOptions.cache.sync.concurrencyLimit;
    currentBlock < targetBlock;
    currentBlock += blockDelta
  ) {
    if (cache.sync.pooler!.pending < concurrencyLimit) {
      const sum = currentBlock + blockDelta

      // Throttle request rate.
      await AsyncUtils.timeout(syncOptions.msTimeout)

      if (currentBlock + blockDelta < targetBlock) {
        await cache.sync.pooler!.pool(currentBlock, sum)
      } else {
        // Final range: clamp the end.
        // NOTE(review): `sum - (sum % targetBlock)` equals targetBlock only
        // when sum < 2 * targetBlock — confirm this is the intended clamp
        // (vs. Math.min(sum, targetBlock)).
        await cache.sync.pooler!.pool(currentBlock, sum - (sum % targetBlock))
      }

      // NOTE(review): `currentBlock++` mutates the loop variable inside a
      // debug emit, skewing ranges by 1 per iteration — confirm this is
      // intentional and not a leftover.
      this.emit('debug', currentBlock++, sum)
    } else {
      // Pooler saturated: wait for one request to finish and persist its docs.
      let res: Array<any> = await cache.sync.pooler!.race()

      if (res.length != 0)
        dbPromises.push(
          cache.db.bulkDocs(res.map((el) => cache.buildDoc(el))).catch((err) => {
            throw ErrorUtils.ensureError(err)
          })
        )

      // Counteract the loop increment: no new range was pooled this pass.
      currentBlock -= blockDelta
    }
  }

  this.emit('sync', 'synced')

  // Immediately start listening if we're doing this
  if (syncOptions.cache.sync.listen) {
    instance = instance.on(filter, (...eventArgs) => {
      this.emit(cache.name, 'received', cache.db.put(cache.buildDoc(eventArgs[eventArgs.length - 1])))
    })
  }

  // Then wait for all pooler requests to resolve
  let results = await cache.sync.pooler!.all()

  // Then transform them, we know the shape in forward
  results = results.reduce((res: any[], response: any[]) => {
    if (response[0]) response.forEach((el: any) => res.push(cache.buildDoc(el)))
    return res
  }, [])

  // Then wait for old dbPromises to resolve
  await Promise.all(dbPromises)

  // Add the last docs
  await cache.db.bulkDocs(results).catch((err) => {
    throw ErrorUtils.ensureError(err)
  })

  // Finally, store the objects
  if (!this.instances.has(pathstring)) this.instances.set(pathstring, instance)
  if (!this.caches.has(cache.name)) this.caches.set(cache.name, cache)
}
|
|
|
|
|
|
|
|
/**
 * Fills every missing sync option with its default so downstream code can
 * treat the options as fully populated.
 * @param lookupKeys Instance lookup keys used to derive block defaults.
 * @param syncOptions Possibly partial options; a new object is created when
 *        omitted, otherwise the given object is mutated and returned.
 */
private async _populateSyncOpts(
  lookupKeys: DataTypes.Keys.InstanceLookup,
  syncOptions?: Options.Core.Sync
): Promise<DeepRequired<Options.Core.Sync>> {
  // Assign nonexistent
  if (!syncOptions) syncOptions = {}
  if (!syncOptions.blocks) syncOptions.blocks = {}
  if (!syncOptions.cache) syncOptions.cache = { db: {}, sync: {} }
  if (!syncOptions.cache.sync) syncOptions.cache.sync = {}
  if (!syncOptions.cache.db) syncOptions.cache.db = {}

  // Prepare options

  // deposit & withdraw
  // NOTE(review): when neither flag is given, deposit defaults to true but
  // withdrawal defaults to false (not `both`) — confirm the asymmetry is
  // intentional.
  const both = syncOptions.deposit === undefined && syncOptions.withdrawal === undefined
  syncOptions.deposit = syncOptions.deposit ?? both
  syncOptions.withdrawal = syncOptions.withdrawal ?? false

  // blocks: start from the instance deploy block unless told otherwise
  syncOptions.blocks.startBlock =
    syncOptions.blocks.startBlock ??
    (await Onchain.getInstanceDeployBlockNum(lookupKeys.network, lookupKeys.token, lookupKeys.denomination))

  syncOptions.blocks.targetBlock = syncOptions.blocks.targetBlock ?? (await this.chain.latestBlockNum())

  syncOptions.blocks.deltaDivisor = syncOptions.blocks.deltaDivisor ?? 100

  // NOTE(review): this overwrites any caller-supplied blockDelta — confirm
  // that blockDelta is meant to be derived-only.
  syncOptions.blocks.blockDelta = this._getBlockDelta(syncOptions)

  syncOptions.msTimeout = syncOptions.msTimeout ?? 200 // 5 requests per second

  // cache
  // db
  syncOptions.cache.db.persistent = syncOptions.cache.db.persistent ?? true
  syncOptions.cache.db.adapter = syncOptions.cache.db.adapter ?? 'leveldb'

  // sync
  syncOptions.cache.sync.concurrencyLimit = syncOptions.cache.sync.concurrencyLimit ?? 8
  syncOptions.cache.sync.listen = syncOptions.cache.sync.listen ?? false

  return syncOptions as DeepRequired<Options.Core.Sync>
}
|
|
|
|
|
2023-04-17 21:56:57 +00:00
|
|
|
/**
 * Computes the per-request block range: the start→target span divided by
 * the configured divisor, floored.
 */
private _getBlockDelta(syncOptions?: Options.Core.Sync): number {
  const blocks = syncOptions!.blocks!
  const span = blocks.targetBlock! - blocks.startBlock!
  return Math.floor(span / blocks.deltaDivisor!)
}
|
|
|
|
|
2023-04-11 19:36:32 +00:00
|
|
|
/**
|
|
|
|
* @param instanceName The name of the instance as created in `_sync` function.
|
|
|
|
* @param commitments The commitments for which the leaf index values are to be noted down extra.
|
|
|
|
* @returns The result of concatenating the array of leaf indices found by matching them with the provided commitment values, followed by the array of all leaf indices, including all of the formerly mentioned values given that they are valid. Values which have not been matched, meaning probably invalid values, will be `0`.
|
|
|
|
*/
|
2023-04-17 21:56:57 +00:00
|
|
|
private async _findLeavesAndIndices(
|
|
|
|
instanceName: string,
|
|
|
|
commitments: Array<string>
|
|
|
|
): Promise<[Array<string>, Array<number>]> {
|
|
|
|
const indices = new Array<number>(commitments.length).fill(0)
|
|
|
|
const leaves: Array<string> = []
|
2023-04-11 19:36:32 +00:00
|
|
|
|
2023-04-23 22:01:45 +00:00
|
|
|
const cache = this.loadCache<Cache.Base<Docs.Deposit>>(instanceName)
|
2023-04-11 19:36:32 +00:00
|
|
|
const docs = await cache.db.allDocs()
|
|
|
|
|
|
|
|
// If no docs in cache throw and stop
|
|
|
|
if (docs.total_rows === 0) {
|
|
|
|
await cache.clear()
|
|
|
|
throw ErrorUtils.getError(
|
|
|
|
`Core.buildMerkleTree: events for instance ${instanceName} have not been synchronized.`
|
|
|
|
)
|
|
|
|
}
|
|
|
|
|
2023-04-17 21:56:57 +00:00
|
|
|
// Otherwise start looking for commitment leaf indices and also pick up all other leafs on the way
|
2023-04-11 19:36:32 +00:00
|
|
|
for (const row of docs.rows) {
|
|
|
|
const [, leafIndex, loadedCommitment] = parseIndexableString(row.id)
|
|
|
|
const index = commitments.findIndex((commitment) => commitment === loadedCommitment)
|
|
|
|
|
|
|
|
// If some commitment is found then add the leaf index and remove that commitment
|
|
|
|
if (index !== -1) {
|
2023-04-17 21:56:57 +00:00
|
|
|
indices[index] = leafIndex
|
2023-04-11 19:36:32 +00:00
|
|
|
commitments.splice(index, 1)
|
|
|
|
}
|
|
|
|
|
|
|
|
// In any case push every leaf
|
2023-04-17 21:56:57 +00:00
|
|
|
leaves.push(BigNumber.from(loadedCommitment).toString())
|
2023-04-11 19:36:32 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Concat matched and all leaf indices
|
2023-04-17 21:56:57 +00:00
|
|
|
return [leaves, indices]
|
2023-04-11 19:36:32 +00:00
|
|
|
}
|
2023-04-08 19:17:52 +00:00
|
|
|
}
|
|
|
|
|
2023-04-11 19:36:32 +00:00
|
|
|
export { Transactions, Options }
|