2023-04-08 19:17:52 +00:00
|
|
|
// ts-essentials
|
2023-04-17 21:56:57 +00:00
|
|
|
import { DeepRequired, MarkOptional, MarkRequired } from 'ts-essentials'
|
2023-04-08 19:17:52 +00:00
|
|
|
|
|
|
|
// Local types
|
2023-04-17 21:56:57 +00:00
|
|
|
import { RelayerProperties as RelayerDataProperties } from 'types/sdk/data'
|
|
|
|
import { Options, Transactions } from 'types/sdk/core'
|
2023-04-11 19:36:32 +00:00
|
|
|
import { ZKDepositData, InputFor } from 'types/sdk/crypto'
|
2023-04-08 19:17:52 +00:00
|
|
|
import { TornadoInstance, TornadoProxy } from 'types/deth'
|
|
|
|
|
2023-04-11 19:36:32 +00:00
|
|
|
// External imports
|
2023-04-08 19:17:52 +00:00
|
|
|
import { BigNumber, EventFilter, providers } from 'ethers'
|
2023-04-17 21:56:57 +00:00
|
|
|
import { parseUnits } from 'ethers/lib/utils'
|
|
|
|
import { bigInt } from 'snarkjs'
|
2023-04-08 19:17:52 +00:00
|
|
|
|
2023-04-11 19:36:32 +00:00
|
|
|
// @ts-ignore
|
|
|
|
import { parseIndexableString } from 'pouchdb-collate'
|
|
|
|
|
2023-04-17 21:56:57 +00:00
|
|
|
// Local imports
|
|
|
|
import { Docs, Cache, Types as DataTypes, Json, Constants, OnchainData } from 'lib/data'
|
2023-04-08 19:17:52 +00:00
|
|
|
import { Primitives } from 'lib/crypto'
|
2023-04-17 21:56:57 +00:00
|
|
|
import { Contracts, Chain } from 'lib/chain'
|
|
|
|
import { ErrorUtils, ObjectUtils } from 'lib/utils'
|
2023-04-08 19:17:52 +00:00
|
|
|
|
2023-04-11 19:36:32 +00:00
|
|
|
// Shorthand for the ethers provider interface used throughout this module.
type Provider = providers.Provider

// Shape of a deposit backup entry persisted via `_backupDepositData`.
// Exactly one of `invoice` / `note` is expected to be set per entry.
type BackupDepositDoc = {
  network: string
  denomination: string
  token: string
  invoice?: string
  note?: string
}

// Relayer data relevant to withdrawal-fee calculation. `serviceFeePercent`
// and `prices` are optional because a manual (non-relayer) withdrawal
// supplies neither.
type RelayerProperties = MarkOptional<
  Pick<RelayerDataProperties, 'address' | 'serviceFeePercent' | 'prices'>,
  'serviceFeePercent' | 'prices'
>
|
|
|
|
|
2023-04-08 19:17:52 +00:00
|
|
|
export class Core {
|
|
|
|
chain: Chain
|
|
|
|
caches: Map<string, Cache.Base<Docs.Base>>
|
|
|
|
instances: Map<string, TornadoInstance>
|
|
|
|
|
2023-04-11 19:36:32 +00:00
|
|
|
constructor(provider: providers.Provider) {
|
|
|
|
this.chain = new Chain(provider)
|
2023-04-17 21:56:57 +00:00
|
|
|
this.caches = new Map<string, Cache.Syncable<Docs.Base>>()
|
2023-04-08 19:17:52 +00:00
|
|
|
this.instances = new Map<string, TornadoInstance>()
|
|
|
|
}
|
|
|
|
|
2023-04-11 19:36:32 +00:00
|
|
|
/**
 * Point this Core (via its underlying Chain) at a new provider. Existing
 * caches and instance handles are kept as-is.
 * @param provider The ethers provider to use from now on.
 */
connect(provider: Provider): void {
  this.chain.provider = provider
}
|
|
|
|
|
|
|
|
async getInstances(
|
|
|
|
keys: Array<{ token: string; denomination: number | string }>
|
|
|
|
): Promise<Array<TornadoInstance>> {
|
|
|
|
const chainId = await this.chain.getChainId()
|
2023-04-11 19:36:32 +00:00
|
|
|
return Promise.all(
|
2023-04-08 19:17:52 +00:00
|
|
|
keys.map((key) =>
|
2023-04-11 19:36:32 +00:00
|
|
|
Contracts.getInstance(String(chainId), key.token, String(key.denomination), this.chain.provider)
|
2023-04-08 19:17:52 +00:00
|
|
|
)
|
|
|
|
)
|
|
|
|
}
|
|
|
|
|
|
|
|
async getInstance(token: string, denomination: number | string): Promise<TornadoInstance> {
|
|
|
|
const chainId = await this.chain.getChainId()
|
2023-04-11 19:36:32 +00:00
|
|
|
return Contracts.getInstance(String(chainId), token, String(denomination), this.chain.provider)
|
2023-04-08 19:17:52 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
async getProxy(): Promise<TornadoProxy> {
|
|
|
|
const chainId = await this.chain.getChainId()
|
2023-04-17 21:56:57 +00:00
|
|
|
return Contracts.getProxy(String(chainId), this.chain.provider)
|
2023-04-08 19:17:52 +00:00
|
|
|
}
|
|
|
|
|
2023-04-11 19:36:32 +00:00
|
|
|
async buildDepositProof(
|
2023-04-08 19:17:52 +00:00
|
|
|
instance: TornadoInstance,
|
2023-04-11 19:36:32 +00:00
|
|
|
relayerProperties: RelayerProperties,
|
|
|
|
recipientAddress: string,
|
|
|
|
zkDepositsData: ZKDepositData,
|
|
|
|
options?: Options.Core.BuildDepositProof
|
2023-04-17 21:56:57 +00:00
|
|
|
): Promise<Array<string>> {
|
2023-04-11 19:36:32 +00:00
|
|
|
return (
|
|
|
|
await this.buildDepositProofs(
|
|
|
|
instance,
|
|
|
|
relayerProperties,
|
|
|
|
[recipientAddress],
|
|
|
|
[zkDepositsData],
|
|
|
|
options
|
|
|
|
)
|
|
|
|
)[0]
|
2023-04-08 19:17:52 +00:00
|
|
|
}
|
|
|
|
|
2023-04-11 19:36:32 +00:00
|
|
|
/**
 * @param instance This is the Tornado Instance which will be withdrawn from.
 * @param relayerProperties The properties of the relayer that is going to be used for the withdrawals. If the service fee is 0, it is assumed that there is no relayer, but that a manual wallet withdraw is being made. These properties are included in the ZK proof.
 * @param recipientAddresses The recipient addresses which should receive the withdrawals, in order.
 * @param zkDepositsData These represent the public and private values, reconstructed from the deposit note, generated during the building of deposit transactions, used for building the proof of knowledge statement for withdrawal, for each withdrawal (in this context).
 * @param options Numerous options which most importantly allow a user to specify whether he is buying ETH, whether to check proof data validity and finally to modulate the gas prices which will be used to calculate the gas fees paid to the relayer.
 * @returns The proofs for which the user should then decide whether to use a relayer (recommended, but decide carefully which one) or use his own wallet (if needed).
 */
async buildDepositProofs(
  instance: TornadoInstance,
  relayerProperties: RelayerProperties,
  recipientAddresses: Array<string>,
  zkDepositsData: Array<ZKDepositData>,
  options?: Options.Core.BuildDepositProof
): Promise<Array<Array<string>>> {
  // Extract commitments and nullifier hashes
  const hexCommitments: string[] = []
  const hexNullifierHashes: string[] = []
  // NOTE(review): when `ethPurchaseAmounts` is omitted this is a sparse array
  // of `undefined` holes; see the `refund` note below. TODO confirm intent.
  const purchaseAmounts = options?.ethPurchaseAmounts ?? new Array(zkDepositsData.length)

  console.log('\nChecking inputs.\n')

  // One recipient per deposit is required.
  if (zkDepositsData.length !== recipientAddresses.length)
    throw ErrorUtils.getError(
      'Core.buildDepositProofs: the number of recipients must equal the length of zkDepositsData.'
    )

  if (zkDepositsData.length !== purchaseAmounts.length)
    throw ErrorUtils.getError(
      'Core.buildDepositProofs: if purchase amounts is specified, it must equal the length of zkDepositsData.'
    )

  zkDepositsData.forEach((deposit) => {
    hexCommitments.push(deposit.hexCommitment)
    hexNullifierHashes.push(deposit.hexNullifierHash)
  })

  console.log('\nGetting lookup keys.\n')

  // Determine cache name, e.g. "Deposits" + "1ETH0.1"
  const lookupKeys = await this.getInstanceLookupKeys(instance.address)
  const name = 'Deposits' + (lookupKeys.network + lookupKeys.token + lookupKeys.denomination).toUpperCase()

  console.log('\nLeaves and indices.\n')

  // Find all leaves & indices by reading from cache. Unmatched commitments
  // come back with a 0 leaf index.
  const [leaves, leafIndices] = await this._findLeavesAndIndices(name, hexCommitments)
  const invalidCommitments: string[] = []

  // Determine whether we will be checking whether notes are spent
  const checkSpent = options?.checkNotesSpent !== false
  const spentNotes: string[] = []

  console.log('\nNote checking.\n')

  // If yes, immediately check it with the supplied Tornado Instance
  const checkSpentArray = checkSpent ? await instance.isSpentArray(hexNullifierHashes) : undefined

  // Check whether a commitment has not been found in all deposits, meaning that it is invalid
  // Also add the invalid commitments. We can do leafIndices[i] because the matched one are concatenated
  // at the start
  for (let i = 0, len = zkDepositsData.length; i < len; i++) {
    if (!leafIndices[i]) invalidCommitments.push(hexCommitments[i])
    // NOTE(review): this treats a `false` entry from `isSpentArray` as
    // spent/invalid. If the contract's `isSpent` returns true FOR spent
    // nullifiers (the usual Tornado semantics), this condition is inverted —
    // confirm against the TornadoInstance ABI.
    if (checkSpent && !checkSpentArray![i]) spentNotes.push(hexNullifierHashes[i])
  }

  // If something is wrong, throw
  const commitmentsAreInvalid = invalidCommitments.length !== 0
  const notesAreSpent = spentNotes.length !== 0

  console.log('\nErrors.\n')

  if (commitmentsAreInvalid || notesAreSpent)
    throw ErrorUtils.getError(
      `Core.buildDepositProofs: ` +
        (commitmentsAreInvalid
          ? `following commitments are invalid:\n\n${invalidCommitments.join('\n')}\n\n`
          : '') +
        (notesAreSpent
          ? `${
              commitmentsAreInvalid ? 'and ' : ''
            }following notes are already spent or invalid:\n\n${spentNotes.join('\n')}\n\n`
          : '')
    )

  console.log('\nMerkle tree.\n')

  // Otherwise, build the merkle tree from the leaves
  const merkleTree = Primitives.buildMerkleTree({
    height: options?.merkleTreeHeight ?? Constants.MERKLE_TREE_HEIGHT,
    leaves: leaves
  })

  const root: string = merkleTree.root()
  const checkKnownRoot: boolean = options?.checkKnownRoot ?? true

  // Check whether the root is valid on-chain; a bad root means the local
  // event cache is out of sync with the contract.
  if (checkKnownRoot && !(await instance.isKnownRoot(root)))
    throw ErrorUtils.getError(
      'Core.buildDepositProofs: the merkle tree created is not valid, something went wrong with syncing.'
    )

  console.log('\nProof data invariant.\n')

  // Rest of note invariant arguments
  const inputsForProofs: InputFor.ZKProof[] = []
  const gasPrice = options?.gasPrice ?? (await this.chain.getGasPrice())
  // NOTE(review): this reads `options?.gasPrice` again — almost certainly a
  // copy-paste slip for a dedicated cushion option; as written, supplying a
  // gasPrice option doubles it (price + same value as cushion). Confirm
  // against Options.Core.BuildDepositProof.
  const gasPriceCushion = options?.gasPrice ?? gasPrice.mul(10).div(100)

  // In reality, if a manual withdraw is made, we don't differentiate it from a relayer withdraw
  // Since it is only serviceFee 0 AND without a token price, the function will not buy more tokens
  const serviceFee = relayerProperties.serviceFeePercent ?? 0
  const tokenPrice = relayerProperties.prices?.get(lookupKeys.token)
  const decimals =
    // @ts-expect-error
    bigInt(10).pow(
      options?.tokenDecimals ?? (await OnchainData.getTokenDecimals(lookupKeys.network, lookupKeys.token))
    )
  // NOTE(review): BigNumber.from throws on fractional strings — assumes
  // `denomination` is a whole-number string ("1", "100"); "0.1" would throw.
  const toWithdraw = BigNumber.from(lookupKeys.denomination).mul(decimals)

  // TODO: Decide if necessary
  if (!tokenPrice && lookupKeys.token !== (await this.chain.getChainSymbol()))
    throw ErrorUtils.getError(
      'Core.buildDepositProofs: a token price MUST be supplied if the token withdrawn is not native.'
    )

  console.log('\nConstruct.\n')

  // Compute proofs
  for (let i = 0, len = zkDepositsData.length; i < len; i++) {
    inputsForProofs.push({
      public: {
        root: root,
        tree: merkleTree,
        leafIndex: leafIndices[i],
        hexNullifierHash: zkDepositsData[i].hexNullifierHash,
        recipientAddress: recipientAddresses[i],
        relayerAddress: relayerProperties.address,
        fee: this._calcWithdrawalFee(
          toWithdraw,
          decimals,
          gasPrice,
          gasPriceCushion,
          serviceFee,
          purchaseAmounts[i],
          tokenPrice
        ),
        // NOTE(review): when `ethPurchaseAmounts` was omitted, purchaseAmounts[i]
        // is undefined and `.toString()` throws before the `?? bigInt(0)`
        // fallback can apply — the fallback is unreachable. TODO confirm & fix.
        // @ts-expect-error
        refund: bigInt(purchaseAmounts[i].toString()) ?? bigInt(0)
      },
      private: {
        nullifier: zkDepositsData[i].nullifier,
        secret: zkDepositsData[i].secret
      }
    })
  }

  console.log('\nCalc and return.\n')

  return await Primitives.calcDepositProofs(inputsForProofs)
}
|
|
|
|
|
2023-04-17 21:56:57 +00:00
|
|
|
/**
 * Computes the relayer fee for one withdrawal, denominated in the withdrawn
 * token's smallest unit.
 *
 * fee = toWithdraw * relayerServiceFee% + gas cost (converted into the token
 * when a token price is supplied, i.e. when ETH is also being purchased).
 *
 * @param toWithdraw Withdrawn amount in the token's smallest unit.
 * @param decimals 10^tokenDecimals, used to convert ETH cost into token units.
 * @param gasPrice Base gas price; `gasPriceCushion` is added on top.
 * @param relayerServiceFee Relayer fee as a percent (may be fractional, e.g. 0.05).
 * @param ethBought Optional ETH purchase amount refunded to the recipient.
 * @param tokenPriceInEth Optional token price; required to convert gas cost.
 * @returns snarkjs bigInt fee value (return annotation kept for interface
 *          compatibility; NOTE(review): `typeof bigInt` describes the factory
 *          function, not its result — consider a dedicated type).
 */
private _calcWithdrawalFee(
  toWithdraw: BigNumber,
  decimals: BigNumber,
  gasPrice: BigNumber,
  gasPriceCushion: BigNumber,
  relayerServiceFee: number,
  ethBought?: BigNumber,
  tokenPriceInEth?: BigNumber
): typeof bigInt {
  // Fix: the previous code did BigNumber.from(relayerServiceFee) — which
  // throws for fractional percents like 0.05 — and multiplied/divided by the
  // same factor, yielding toWithdraw * fee with no percent conversion at all.
  // Scale the (possibly fractional) percent into an integer first, then divide
  // the scale back out together with the percent -> fraction conversion (100).
  const scale = 10 ** String(relayerServiceFee).length
  const scaledFee = BigNumber.from(Math.round(relayerServiceFee * scale))
  const baseRelayerFee = toWithdraw.mul(scaledFee).div(scale).div(100)

  // Flat 500k-gas budget priced at gasPrice + cushion.
  const txCost = gasPrice.add(gasPriceCushion).mul(5e5)

  if (ethBought && tokenPriceInEth) {
    // Convert the ETH-denominated cost (gas + purchased ETH) into token units.
    // @ts-expect-error
    return bigInt(txCost.add(ethBought).mul(decimals).div(tokenPriceInEth).add(baseRelayerFee).toString())
  }
  // @ts-expect-error
  else return bigInt(txCost.add(baseRelayerFee).toString())
}
|
|
|
|
|
|
|
|
async loadNotes(
|
|
|
|
indexes?: Array<number>,
|
|
|
|
keys?: Partial<DataTypes.Keys.InstanceLookup>
|
|
|
|
): Promise<Array<ZKDepositData>> {
|
|
|
|
const rows = await Cache.loadContents<Docs.Note>('DepositNotes')
|
|
|
|
|
|
|
|
let docs: Array<Docs.Note | undefined> = []
|
|
|
|
let notes: Array<string> = []
|
|
|
|
|
|
|
|
if (indexes)
|
|
|
|
for (let i = 0, len = rows.length; i < len; i++) {
|
|
|
|
const id = parseIndexableString(rows[i].id)[0]
|
|
|
|
if (id === indexes[i]) docs.push(rows[i].doc)
|
|
|
|
}
|
|
|
|
else docs = rows.map((row) => row.doc)
|
|
|
|
|
|
|
|
if (keys)
|
|
|
|
docs.forEach((doc) => {
|
|
|
|
const idNetworkMatches = doc && keys.network ? keys.network === doc?.network : true
|
|
|
|
const andTokenSymbolMatches = idNetworkMatches && (keys.token ? keys.token === doc?.token : true)
|
|
|
|
const lastlyDenominationMatches =
|
|
|
|
andTokenSymbolMatches && (keys.denomination ? keys.denomination === doc?.denomination : true)
|
|
|
|
if (lastlyDenominationMatches && doc?.note) notes.push(doc.note)
|
|
|
|
})
|
|
|
|
else notes = docs.filter((doc) => ObjectUtils.exists(doc?.note)).map((doc) => doc!.note)
|
|
|
|
|
|
|
|
return this.parseNotes(notes)
|
|
|
|
}
|
|
|
|
|
|
|
|
parseNotes(notes: Array<string>): Array<ZKDepositData> {
|
|
|
|
return notes.map((note) => Primitives.parseNote(note))
|
|
|
|
}
|
|
|
|
|
|
|
|
/** Parses a single deposit-note string into its ZK deposit data. */
parseNote(note: string): ZKDepositData {
  const [onlyNote] = this.parseNotes([note])
  return onlyNote
}
|
|
|
|
|
2023-04-08 19:17:52 +00:00
|
|
|
async createInvoice(
|
|
|
|
instance: TornadoInstance,
|
2023-04-11 19:36:32 +00:00
|
|
|
options?: Omit<Options.Core.Invoice, 'depositsPerInstance'>
|
2023-04-08 19:17:52 +00:00
|
|
|
): Promise<Transactions.Invoice> {
|
2023-04-11 19:36:32 +00:00
|
|
|
let opts: Options.Core.Invoice = options ?? {}
|
2023-04-08 19:17:52 +00:00
|
|
|
opts.depositsPerInstance = [1]
|
2023-04-11 19:36:32 +00:00
|
|
|
return (await this.createInvoices([instance], options))[0]
|
2023-04-08 19:17:52 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
async createInvoices(
|
|
|
|
instances: Array<TornadoInstance>,
|
2023-04-11 19:36:32 +00:00
|
|
|
options?: Options.Core.Invoice
|
2023-04-08 19:17:52 +00:00
|
|
|
): Promise<Array<Transactions.Invoice>> {
|
2023-04-11 19:36:32 +00:00
|
|
|
if (!options) options = {}
|
|
|
|
if (!options.backup) options.backup = {}
|
|
|
|
options.backup.invoices = options.backup.invoices ?? true
|
|
|
|
options.backup.notes = options.backup.notes ?? true
|
|
|
|
options.doNotPopulate = options.doNotPopulate ?? true
|
|
|
|
return this.buildDepositTxs(instances, options)
|
2023-04-08 19:17:52 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
async buildDepositTx(
|
|
|
|
instance: TornadoInstance,
|
2023-04-11 19:36:32 +00:00
|
|
|
options?: Options.Core.Deposit
|
2023-04-08 19:17:52 +00:00
|
|
|
): Promise<Transactions.Deposit> {
|
2023-04-11 19:36:32 +00:00
|
|
|
let opts: Options.Core.Deposit = options ?? {}
|
2023-04-08 19:17:52 +00:00
|
|
|
opts.depositsPerInstance = [1]
|
|
|
|
return (await this.buildDepositTxs([instance], opts))[0]
|
|
|
|
}
|
|
|
|
|
|
|
|
async buildDepositTxs(
|
|
|
|
instances: Array<TornadoInstance>,
|
2023-04-11 19:36:32 +00:00
|
|
|
options?: Options.Core.Deposit
|
2023-04-08 19:17:52 +00:00
|
|
|
): Promise<Array<Transactions.Deposit>> {
|
2023-04-11 19:36:32 +00:00
|
|
|
const depositsPerInstance = options?.depositsPerInstance ?? new Array<number>(instances.length).fill(1)
|
2023-04-08 19:17:52 +00:00
|
|
|
|
2023-04-11 19:36:32 +00:00
|
|
|
const doNotPopulate = options?.doNotPopulate ?? false
|
|
|
|
const backupNotes = options?.backup?.notes ?? true
|
|
|
|
const backupInvoices = options?.backup?.invoices ?? false
|
2023-04-08 19:17:52 +00:00
|
|
|
|
|
|
|
if (depositsPerInstance.length != instances.length)
|
|
|
|
throw ErrorUtils.getError(
|
|
|
|
'Core.buildDepositTx: number of deposit amount elements must equal the number of instances!'
|
|
|
|
)
|
|
|
|
|
|
|
|
const notesToBackup: Array<BackupDepositDoc> = []
|
|
|
|
const invoicesToBackup: Array<BackupDepositDoc> = []
|
|
|
|
const txs: Array<Transactions.Deposit> = []
|
|
|
|
const chainId = await this.chain.getChainId()
|
|
|
|
|
|
|
|
const proxy: TornadoProxy = await Contracts.getProxy(String(chainId), this.chain.provider)
|
|
|
|
|
|
|
|
for (let i = 0, nInstances = instances.length; i < nInstances; i++) {
|
|
|
|
const lookupKeys = await this.getInstanceLookupKeys(instances[i].address)
|
|
|
|
const pathstring = lookupKeys.network + lookupKeys.token + lookupKeys.denomination
|
|
|
|
|
|
|
|
for (let d = 0, nDeposits = depositsPerInstance[i]; d < nDeposits; d++) {
|
|
|
|
const deposit = Primitives.createDeposit()
|
|
|
|
const note = Primitives.createNote(deposit.preimage)
|
|
|
|
|
2023-04-17 21:56:57 +00:00
|
|
|
if (backupNotes)
|
|
|
|
notesToBackup.push({
|
|
|
|
network: lookupKeys.network,
|
|
|
|
denomination: lookupKeys.denomination,
|
|
|
|
token: lookupKeys.token,
|
|
|
|
note: note
|
|
|
|
})
|
2023-04-08 19:17:52 +00:00
|
|
|
|
2023-04-17 21:56:57 +00:00
|
|
|
if (backupInvoices)
|
|
|
|
invoicesToBackup.push({
|
|
|
|
network: lookupKeys.network,
|
|
|
|
denomination: lookupKeys.denomination,
|
|
|
|
token: lookupKeys.token,
|
|
|
|
invoice: deposit.hexCommitment
|
|
|
|
})
|
2023-04-08 19:17:52 +00:00
|
|
|
|
|
|
|
if (!doNotPopulate) {
|
|
|
|
txs.push({
|
|
|
|
request: {
|
|
|
|
to: proxy.address,
|
|
|
|
data: proxy.interface.encodeFunctionData('deposit', [
|
|
|
|
instances[i].address,
|
2023-04-11 19:36:32 +00:00
|
|
|
deposit.hexCommitment,
|
2023-04-08 19:17:52 +00:00
|
|
|
[]
|
|
|
|
]),
|
|
|
|
value: lookupKeys.token == 'eth' ? parseUnits(lookupKeys.denomination) : BigNumber.from(0)
|
|
|
|
},
|
|
|
|
note: pathstring + '_' + note,
|
2023-04-11 19:36:32 +00:00
|
|
|
invoice: pathstring + '_' + deposit.hexCommitment
|
2023-04-08 19:17:52 +00:00
|
|
|
})
|
|
|
|
} else
|
|
|
|
txs.push({
|
|
|
|
request: {},
|
|
|
|
note: pathstring + '_' + note,
|
2023-04-11 19:36:32 +00:00
|
|
|
invoice: pathstring + '_' + deposit.hexCommitment
|
2023-04-08 19:17:52 +00:00
|
|
|
})
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
if (backupNotes) await this._backupDepositData(new Cache.Base<Docs.Note>('DepositNotes'), notesToBackup)
|
|
|
|
|
|
|
|
if (backupInvoices)
|
|
|
|
await this._backupDepositData(new Cache.Base<Docs.Invoice>('DepositInvoices'), invoicesToBackup)
|
|
|
|
|
|
|
|
return txs
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
 * Persists generated note or invoice backup entries into the given cache,
 * numbering new docs after the cache's current update sequence, then closes
 * the cache.
 * @param cache Target cache ("DepositNotes" or "DepositInvoices").
 * @param backupData Entries carrying either `note` or `invoice`.
 */
private async _backupDepositData<T extends Docs.Note | Docs.Invoice>(
  cache: Cache.Base<T>,
  backupData: Array<BackupDepositDoc>
): Promise<void> {
  // Continue doc numbering after the last recorded write (`+` coerces the
  // update_seq, which PouchDB may report as a string, into a number).
  let id = +(await cache.db.info()).update_seq

  await cache.db
    .bulkDocs(
      backupData.map((entry) => {
        if (entry.note)
          return new Docs.Note(++id, entry.network, entry.token, entry.denomination, entry.note)
        else if (entry.invoice)
          return new Docs.Invoice(++id, entry.network, entry.token, entry.denomination, entry.invoice)
        // NOTE(review): an entry with neither `note` nor `invoice` maps to
        // undefined, which the `as Array<T>` cast below hides from the type
        // checker — confirm every caller always sets exactly one of the two.
      }) as Array<T>
    )
    .catch((err) => {
      throw ErrorUtils.ensureError(err)
    })

  await cache.close().catch((err) => {
    throw ErrorUtils.ensureError(err)
  })
}
|
|
|
|
|
2023-04-17 21:56:57 +00:00
|
|
|
/**
 * Returns the cache registered under `cacheName`, creating and registering a
 * fresh Cache.Base if none exists yet.
 */
loadCache<T extends Docs.Base, C extends Cache.Base<T>>(cacheName: string): C {
  const existing = this.caches.get(cacheName)
  if (existing) return existing as C
  const fresh = new Cache.Base<T>(cacheName)
  this.caches.set(cacheName, fresh)
  return fresh as C
}
|
|
|
|
|
2023-04-08 19:17:52 +00:00
|
|
|
async syncMultiple(instances: Array<TornadoInstance>, syncOptions?: Options.Core.Sync): Promise<void> {
|
|
|
|
for (const instance of instances) {
|
|
|
|
await this.sync(instance, syncOptions)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
async sync(instance: TornadoInstance, syncOptions?: Options.Core.Sync): Promise<void> {
|
|
|
|
// Get some data
|
|
|
|
const lookupKeys = await this.getInstanceLookupKeys(instance.address)
|
|
|
|
|
|
|
|
const populatedSyncOpts = await this._populateSyncOpts(lookupKeys, syncOptions)
|
|
|
|
|
|
|
|
const actions = Object.entries(populatedSyncOpts).filter((el) => el[1] === true) as [string, boolean][]
|
|
|
|
|
|
|
|
// Synchronize
|
|
|
|
for (let i = 0, bound = actions.length; i < bound; i++) {
|
|
|
|
let action = actions[i][0].charAt(0).toUpperCase() + actions[i][0].slice(1)
|
|
|
|
await this._sync(action, lookupKeys, instance, populatedSyncOpts)
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
/**
 * Synchronizes one event type ("Deposit" or "Withdrawal") for one instance
 * into its PouchDB cache by fetching block windows through a request pooler,
 * optionally subscribing to live events afterwards.
 * @param action "Deposit" or "Withdrawal".
 * @param lookupKeys network/token/denomination of the instance.
 * @param instance The instance contract to read events from.
 * @param syncOptions Fully-populated sync options (see `_populateSyncOpts`).
 */
private async _sync(
  action: string,
  lookupKeys: DataTypes.Keys.InstanceLookup,
  instance: TornadoInstance,
  syncOptions: DeepRequired<Options.Core.Sync>
): Promise<void> {
  // Cache name, e.g. "Deposits1ETH0.1".
  // NOTE(review): `pathstring` strips only `action.length` chars, so the
  // plural "s" survives ("s1eth0.1"), whereas `buildDepositTxs` builds
  // pathstrings WITHOUT that "s" ("1eth0.1") — the `this.instances` keys set
  // below therefore never match that convention. Looks like it should be
  // `substring(action.length + 1)` — confirm before changing.
  const name = `${action + 's'}${lookupKeys.network}${lookupKeys.token.toUpperCase()}${
    lookupKeys.denomination
  }`,
    pathstring = name.substring(action.length).toLowerCase()

  let cache: Cache.Syncable<Docs.Base>,
    toDoc: (_: any) => Docs.Base,
    filter: EventFilter,
    numEntries: number

  // Select cache type, doc constructor and event filter per action, reusing
  // an already-registered cache when present.
  if (action == 'Deposit') {
    toDoc = (resp: any) => new Docs.Deposit(resp)
    cache = this.caches.has(name)
      ? (this.caches.get(name)! as Cache.Deposit)
      : new Cache.Deposit(name, syncOptions.cache)
    filter = instance.filters.Deposit(null, null, null)
  } else {
    toDoc = (resp: any) => new Docs.Withdrawal(resp)
    cache = this.caches.has(name)
      ? (this.caches.get(name)! as Cache.Withdrawal)
      : new Cache.Withdrawal(name, syncOptions.cache)
    filter = instance.filters.Withdrawal(null, null, null, null)
  }

  // Assign pooler
  cache.sync.pooler = await cache.sync.initializePooler(cache.getCallbacks(instance))

  // Decide whether we have a latest block
  numEntries = (await cache.db.info()).doc_count

  // Check for synced blocks: resume from the newest cached doc's block when
  // it is past the requested start block.
  if (0 < numEntries) {
    const [lastSyncedBlock, ,] = parseIndexableString(
      (await cache.db.allDocs({ descending: true, limit: 1 })).rows[0].id
    )
    syncOptions.blocks.startBlock =
      lastSyncedBlock < syncOptions.blocks.startBlock ? syncOptions.blocks.startBlock : lastSyncedBlock
    syncOptions.blocks.blockDelta = this._getBlockDelta(syncOptions)
  }

  // Start synchronizing
  let dbPromises = []

  for (
    let currentBlock = syncOptions.blocks.startBlock,
      blockDelta = syncOptions.blocks.blockDelta,
      targetBlock = syncOptions.blocks.targetBlock,
      concurrencyLimit = syncOptions.cache.sync.concurrencyLimit;
    currentBlock < targetBlock;
    currentBlock += blockDelta
  ) {
    if (cache.sync.pooler.pending < concurrencyLimit) {
      const sum = currentBlock + blockDelta
      if (currentBlock + blockDelta < targetBlock) {
        await cache.sync.pooler.pool(currentBlock, sum)
      } else {
        // Final window: clamp the end of the range to targetBlock.
        // NOTE(review): `sum - (sum % targetBlock)` equals targetBlock only
        // while sum < 2 * targetBlock (always true here since
        // currentBlock < targetBlock) — a plain `targetBlock` would be clearer.
        await cache.sync.pooler.pool(currentBlock, sum - (sum % targetBlock))
      }
    } else {
      // Pooler saturated: wait for one request, persist its result, and
      // retry the same window (the loop increment is undone below).
      let res: Array<any> = await cache.sync.pooler.race()
      if (res.length != 0)
        dbPromises.push(
          cache.db.bulkDocs(res.map((el) => toDoc(el))).catch((err) => {
            throw ErrorUtils.ensureError(err)
          })
        )
      currentBlock -= blockDelta
    }
  }

  // Immediately start listening if we're doing this
  if (syncOptions.cache.sync.listen)
    instance = instance.on(filter, (...eventArgs) => {
      // The last callback argument is the full event object.
      cache.db.put(toDoc(eventArgs[eventArgs.length - 1]))
    })

  // Then wait for all pooler requests to resolve
  let results = await cache.sync.pooler.all()

  // Then transform them, we know the shape in forward
  results = results.reduce((res: any[], response: any[]) => {
    if (response[0]) response.forEach((el: any) => res.push(toDoc(el)))
    return res
  }, [])

  // Then wait for old dbPromises to resolve
  await Promise.all(dbPromises)

  // Add the last docs
  await cache.db.bulkDocs(results).catch((err) => {
    throw ErrorUtils.ensureError(err)
  })

  // Finally, store the objects
  if (!this.instances.has(pathstring)) this.instances.set(pathstring, instance)
  if (!this.caches.has(name)) this.caches.set(name, cache)
}
|
|
|
|
|
|
|
|
/**
 * Fills every missing sync option with its default, mutating and returning
 * the caller's options object (callers rely on in-place population).
 * @param lookupKeys Used to derive the default start block (the instance's
 *                   deployment block).
 * @param syncOptions Possibly sparse options; a fresh object when omitted.
 * @returns The same object, now fully populated.
 */
private async _populateSyncOpts(
  lookupKeys: DataTypes.Keys.InstanceLookup,
  syncOptions?: Options.Core.Sync
): Promise<DeepRequired<Options.Core.Sync>> {
  // Assign nonexistent
  if (!syncOptions) syncOptions = {}
  if (!syncOptions.blocks) syncOptions.blocks = {}
  if (!syncOptions.cache) syncOptions.cache = { db: {}, sync: {} }
  if (!syncOptions.cache.sync) syncOptions.cache.sync = {}
  if (!syncOptions.cache.db) syncOptions.cache.db = {}

  // Prepare options

  // deposit & withdraw
  // When neither flag is given, deposits default on and withdrawals stay off.
  // NOTE(review): the asymmetry (withdrawal ?? false, not ?? both) looks
  // deliberate — deposit events alone suffice for proof building — but
  // confirm against the documented Options.Core.Sync contract.
  const both = syncOptions.deposit === undefined && syncOptions.withdrawal === undefined
  syncOptions.deposit = syncOptions.deposit ?? both
  syncOptions.withdrawal = syncOptions.withdrawal ?? false

  // blocks
  // Default start: the block the instance contract was deployed at.
  syncOptions.blocks.startBlock =
    syncOptions.blocks.startBlock ??
    (await OnchainData.getInstanceDeployBlockNum(
      lookupKeys.network,
      lookupKeys.token,
      lookupKeys.denomination
    ))

  syncOptions.blocks.targetBlock = syncOptions.blocks.targetBlock ?? (await this.chain.latestBlockNum())

  // Must run after start/target are set — the delta is derived from them.
  syncOptions.blocks.blockDelta = this._getBlockDelta(syncOptions)

  // cache
  // db
  syncOptions.cache.db.persistent = syncOptions.cache.db.persistent ?? true
  syncOptions.cache.db.adapter = syncOptions.cache.db.adapter ?? 'leveldb'

  // sync
  syncOptions.cache.sync.concurrencyLimit = syncOptions.cache.sync.concurrencyLimit ?? 8
  syncOptions.cache.sync.listen = syncOptions.cache.sync.listen ?? false

  return syncOptions as DeepRequired<Options.Core.Sync>
}
|
|
|
|
|
2023-04-17 21:56:57 +00:00
|
|
|
/**
 * Returns the explicitly requested block window size, or 1/20th of the sync
 * range, floored to a whole number of blocks. Callers must have populated
 * `blocks.startBlock` / `blocks.targetBlock` when no delta is given.
 */
private _getBlockDelta(syncOptions?: Options.Core.Sync): number {
  const requested = syncOptions?.blocks?.blockDelta
  const delta =
    requested ?? (syncOptions!.blocks!.targetBlock! - syncOptions!.blocks!.startBlock!) / 20
  return Math.floor(delta)
}
|
|
|
|
|
2023-04-11 19:36:32 +00:00
|
|
|
/**
|
|
|
|
* @param instanceName The name of the instance as created in `_sync` function.
|
|
|
|
* @param commitments The commitments for which the leaf index values are to be noted down extra.
|
|
|
|
* @returns The result of concatenating the array of leaf indices found by matching them with the provided commitment values, followed by the array of all leaf indices, including all of the formerly mentioned values given that they are valid. Values which have not been matched, meaning probably invalid values, will be `0`.
|
|
|
|
*/
|
2023-04-17 21:56:57 +00:00
|
|
|
private async _findLeavesAndIndices(
|
|
|
|
instanceName: string,
|
|
|
|
commitments: Array<string>
|
|
|
|
): Promise<[Array<string>, Array<number>]> {
|
|
|
|
const indices = new Array<number>(commitments.length).fill(0)
|
|
|
|
const leaves: Array<string> = []
|
2023-04-11 19:36:32 +00:00
|
|
|
|
|
|
|
// Either load all deposit events from memory or from cache
|
|
|
|
let cache: Cache.Base<Docs.Deposit>
|
|
|
|
|
|
|
|
if (!this.caches.has(instanceName)) {
|
|
|
|
cache = new Cache.Base<Docs.Deposit>(instanceName)
|
|
|
|
} else cache = this.caches.get(instanceName) as Cache.Base<Docs.Deposit>
|
|
|
|
|
|
|
|
const docs = await cache.db.allDocs()
|
|
|
|
|
|
|
|
// If no docs in cache throw and stop
|
|
|
|
if (docs.total_rows === 0) {
|
|
|
|
await cache.clear()
|
|
|
|
throw ErrorUtils.getError(
|
|
|
|
`Core.buildMerkleTree: events for instance ${instanceName} have not been synchronized.`
|
|
|
|
)
|
|
|
|
}
|
|
|
|
|
2023-04-17 21:56:57 +00:00
|
|
|
// Otherwise start looking for commitment leaf indices and also pick up all other leafs on the way
|
2023-04-11 19:36:32 +00:00
|
|
|
for (const row of docs.rows) {
|
|
|
|
const [, leafIndex, loadedCommitment] = parseIndexableString(row.id)
|
|
|
|
const index = commitments.findIndex((commitment) => commitment === loadedCommitment)
|
|
|
|
|
|
|
|
// If some commitment is found then add the leaf index and remove that commitment
|
|
|
|
if (index !== -1) {
|
2023-04-17 21:56:57 +00:00
|
|
|
indices[index] = leafIndex
|
2023-04-11 19:36:32 +00:00
|
|
|
commitments.splice(index, 1)
|
|
|
|
}
|
|
|
|
|
|
|
|
// In any case push every leaf
|
2023-04-17 21:56:57 +00:00
|
|
|
leaves.push(BigNumber.from(loadedCommitment).toString())
|
2023-04-11 19:36:32 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// Concat matched and all leaf indices
|
2023-04-17 21:56:57 +00:00
|
|
|
return [leaves, indices]
|
2023-04-11 19:36:32 +00:00
|
|
|
}
|
|
|
|
|
2023-04-08 19:17:52 +00:00
|
|
|
async getInstanceLookupKeys(instanceAddress: string): Promise<DataTypes.Keys.InstanceLookup> {
|
|
|
|
// lookup some stuff first
|
|
|
|
const lookupObj: { [key: string]: string } = Json.getValue(await Json.load('onchain/quickLookup.json'), [
|
|
|
|
'instanceAddresses'
|
|
|
|
])
|
|
|
|
|
|
|
|
const pathstring: string = Object.entries(lookupObj).find((el) => el[1] === instanceAddress)![0]
|
|
|
|
|
|
|
|
const network = pathstring.match('[0-9]+')![0],
|
|
|
|
token = pathstring.substring(network.length).match('[a-z]+')![0],
|
|
|
|
denomination = pathstring.substring(network.length + token.length)
|
|
|
|
|
|
|
|
return {
|
|
|
|
network: network,
|
|
|
|
token: token,
|
|
|
|
denomination: denomination
|
|
|
|
}
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2023-04-11 19:36:32 +00:00
|
|
|
// Re-export the SDK transaction and option types so consumers can import
// them directly from this module.
export { Transactions, Options }
|