// Big modules
import { BigNumber } from 'ethers'
import { existsSync, mkdirSync, readFileSync } from 'fs'
import { opendir, readFile, rm } from 'fs/promises'
import { createInterface } from 'readline'

// Local logic
import { AsyncUtils, NumberUtils, ErrorUtils } from '@tornado/sdk-utils'

// PouchDB
import PouchDB from 'pouchdb'
import * as PouchDBAdapterMemory from 'pouchdb-adapter-memory'
// @ts-ignore
import { toIndexableString } from 'pouchdb-collate'

// Register plugins
PouchDB.plugin(PouchDBAdapterMemory)

// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ DECLARATIONS ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

export interface TornadoInstance {
  network: number
  symbol: string
  decimals: number
  denomination: number
  deployBlock: number
  address: string
}

export interface ClassicInstance extends TornadoInstance {
  anonymityMiningEnabled: boolean
}

export interface TokenData {
  network: number
  decimals: number
  address: string
}

export namespace Keys {
  export interface InstanceLookup {
    network: string
    token: string
    denomination: string
  }
}

export interface RelayerProperties {
  address: string
  version: string
  serviceFeePercent: number
  miningFeePercent: number
  status: string
  chainId: number
  prices: Map<string, BigNumber>
}

export namespace Options {
  export interface Cache {
    adapter?: string
    cachePath?: string
    persistent?: boolean
  }
}

// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ REST ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

export namespace Files {
  export type PathGetter = (relative: string) => string

  export const getModulesPath = (relative: string, prefix?: string): string =>
    (prefix ?? __dirname + '/../../node_modules/') + relative

  export const getResourcePath = (relative: string, prefix?: string): string =>
    (prefix ?? __dirname + '/../resources/') + relative

  export const getCachePath = (relative: string, prefix?: string): string =>
    (prefix ?? process.cwd() + '/cache/') + relative

  export const cacheDirExists = (prefix?: string): boolean => existsSync(getCachePath('', prefix))

  export const makeCacheDir = (prefix?: string): void => mkdirSync(getCachePath('', prefix))

  export const loadRaw = (relative: string): Promise<Buffer> => readFile(getResourcePath(relative))

  export const loadRawSync = (relative: string): Buffer => readFileSync(getResourcePath(relative))
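
  // Illustrative usage sketch (comments only, not part of the original module). The helpers above
  // resolve paths against default prefixes unless one is passed explicitly; the cache name
  // 'Deposits1ETH1' below is a hypothetical example.
  //
  //   Files.getCachePath('Deposits1ETH1')              // `${process.cwd()}/cache/Deposits1ETH1`
  //   Files.getResourcePath('onchain/instances.json')  // resolved under `${__dirname}/../resources/`
  //   if (!Files.cacheDirExists()) Files.makeCacheDir()
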
  export async function wipeCache(prompt: boolean = true): Promise<void> {
    const dir = await opendir(getCachePath(''))
    const toRemove: string[] = []
    const userInput = createInterface({ input: process.stdin, output: process.stdout })

    for await (const entry of dir) {
      if (entry.name.match('(Deposit.*)|(Withdrawal.*)|(Note.*)|(Invoice.*)'))
        toRemove.push(getCachePath(entry.name))
    }

    if (toRemove.length != 0) {
      if (prompt) {
        const promptString = `\nCache wipe requested, following would be wiped:\n\n${toRemove.join(
          '\n'
        )}\n\nContinue? \n(y/n): `

        function wipeCachePrompt(prompt: string, resolve: any): void {
          userInput.question(prompt, (answer) => {
            if (answer == 'y') {
              userInput.close()
              resolve(true)
            } else if (answer == 'n') {
              userInput.close()
              resolve(false)
            } else wipeCachePrompt('', resolve)
          })
        }

        const answer = await new Promise((resolve) => wipeCachePrompt(promptString, resolve))

        if (answer)
          await Promise.all(toRemove.map((entry) => rm(entry, { recursive: true, force: true }))).catch(
            (err) => {
              throw ErrorUtils.ensureError(err)
            }
          )
      } else {
        await Promise.all(toRemove.map((entry) => rm(entry, { recursive: true, force: true }))).catch(
          (err) => {
            throw ErrorUtils.ensureError(err)
          }
        )
      }
    }
  }
}

export namespace Json {
  const cachedJsonData = new Map<string, any>()

  export async function load(
    relativePath: string,
    encoding: BufferEncoding = 'utf8',
    pathGetter: Files.PathGetter = Files.getResourcePath
  ): Promise<any> {
    if (cachedJsonData.has(relativePath)) return cachedJsonData.get(relativePath)
    else {
      const obj = JSON.parse(await readFile(pathGetter(relativePath), encoding))
      cachedJsonData.set(relativePath, obj)
      return obj
    }
  }

  export function loadSync(
    relativePath: string,
    encoding: BufferEncoding = 'utf8',
    pathGetter: Files.PathGetter = Files.getResourcePath
  ): any {
    if (cachedJsonData.has(relativePath)) return cachedJsonData.get(relativePath)
    else {
      const obj = JSON.parse(readFileSync(pathGetter(relativePath), encoding))
      cachedJsonData.set(relativePath, obj)
      return obj
    }
  }

  export function toMap(jsonData: any): Map<string, any> {
    return new Map(Object.entries(jsonData))
  }

  export async function loadMap(
    relativePath: string,
    encoding: BufferEncoding = 'utf8'
  ): Promise<Map<string, any>> {
    return toMap(await load(relativePath, encoding))
  }

  export function getError(...values: any[]): Error {
    return ErrorUtils.getError(`there is no such entry for the key-value path [${values.join('][')}]`)
  }

  export function throwError(...values: any[]): void {
    throw getError(...values)
  }

  export function getValue(jsonObj: any, keys: any[]): any {
    for (let i = 0; i < keys.length; i++) {
      jsonObj = jsonObj[keys[i]] ?? throwError(...keys.slice(0, i + 1))
    }
    return jsonObj
  }
}
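
// Illustrative usage sketch (comments only; assumes a bundled resource like
// 'onchain/instances.json' shaped as { "1": { "eth": { "decimals": 18, ... } } }, which is what
// the Onchain helpers below expect). Json.load memoizes parsed files per relative path, and
// Json.getValue throws a descriptive error as soon as a key along the path is missing:
//
//   const instances = await Json.load('onchain/instances.json')
//   const decimals = Json.getValue(instances, ['1', 'eth', 'decimals'])
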
// TODO: Decide whether to also cache the data instead of just loading it for the function call
export namespace Onchain {
  export async function getClassicInstanceData(
    network: string,
    token: string,
    denomination: string
  ): Promise<ClassicInstance> {
    const instanceData = Json.getValue(await Json.load('onchain/instances.json'), [network, token])
    return {
      network: +network,
      symbol: token.toUpperCase(),
      decimals: Json.getValue(instanceData, ['decimals']),
      denomination: +denomination,
      deployBlock: Json.getValue(instanceData, ['deployedBlockNumber', denomination]),
      address: Json.getValue(instanceData, ['instanceAddress', denomination]),
      anonymityMiningEnabled: Json.getValue(instanceData, ['miningEnabled'])
    }
  }

  export function getClassicInstanceDataSync(
    network: string,
    token: string,
    denomination: string
  ): ClassicInstance {
    const instanceData = Json.getValue(Json.loadSync('onchain/instances.json'), [network, token])
    return {
      network: +network,
      symbol: token.toUpperCase(),
      decimals: Json.getValue(instanceData, ['decimals']),
      denomination: +denomination,
      deployBlock: Json.getValue(instanceData, ['deployedBlockNumber', denomination]),
      address: Json.getValue(instanceData, ['instanceAddress', denomination]),
      anonymityMiningEnabled: Json.getValue(instanceData, ['miningEnabled'])
    }
  }

  export async function getInstanceLookupKeys(instanceAddress: string): Promise<Keys.InstanceLookup> {
    // lookup some stuff first
    const lookupObj: { [key: string]: string } = await Json.load('onchain/instanceAddresses.json')

    const pathstring: string = Object.entries(lookupObj).find((el) => el[1] === instanceAddress)![0]

    const network = pathstring.match('[0-9]+')![0],
      token = pathstring.substring(network.length).match('[a-z]+')![0],
      denomination = pathstring.substring(network.length + token.length)

    return {
      network: network,
      token: token,
      denomination: denomination
    }
  }

  export function getInstanceLookupKeysSync(instanceAddress: string): Keys.InstanceLookup {
    // lookup some stuff first
    const lookupObj: { [key: string]: string } = Json.loadSync('onchain/instanceAddresses.json')

    const pathstring: string = Object.entries(lookupObj).find((el) => el[1] === instanceAddress)![0]

    const network = pathstring.match('[0-9]+')![0],
      token = pathstring.substring(network.length).match('[a-z]+')![0],
      denomination = pathstring.substring(network.length + token.length)

    return {
      network: network,
      token: token,
      denomination: denomination
    }
  }

  export async function getPathstringBasedContent(
    filepath: string,
    paths: Array<{
      network?: string
      token?: string
      denomination?: string
    }>
  ): Promise<Array<any>> {
    const obj = await Json.load(filepath)
    return await Promise.all(
      paths.map((path) =>
        Json.getValue(obj, [`${path.network ?? ''}${path.token ?? ''}${path.denomination ?? ''}`])
      )
    )
  }

  export function getPathstringBasedContentSync(
    filepath: string,
    paths: Array<{
      network?: string
      token?: string
      denomination?: string
    }>
  ): Array<any> {
    return paths.map((path) =>
      Json.getValue(Json.loadSync(filepath), [
        `${path.network ?? ''}${path.token ?? ''}${path.denomination ?? ''}`
      ])
    )
  }
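
  // Illustrative sketch of the pathstring convention used by the wrappers below (comments only;
  // assumption: files such as 'onchain/instanceAddresses.json' are keyed by the concatenation
  // `${network}${token}${denomination}`, e.g. '1eth1'):
  //
  //   const [address] = await getPathstringBasedContent('onchain/instanceAddresses.json', [
  //     { network: '1', token: 'eth', denomination: '1' }
  //   ])
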
  export async function getNetworkSymbol(networkId: string): Promise<string> {
    return (await getPathstringBasedContent('onchain/networkSymbols.json', [{ network: networkId }]))[0]
  }

  export function getNetworkSymbolSync(networkId: string): string {
    return getPathstringBasedContentSync('onchain/networkSymbols.json', [{ network: networkId }])[0]
  }

  export function getInstanceAddresses(
    paths: Array<{
      network: string
      token: string
      denomination: string
    }>
  ): Promise<Array<string>> {
    return getPathstringBasedContent('onchain/instanceAddresses.json', paths)
  }

  export function getInstanceAddressesSync(
    paths: Array<{
      network: string
      token: string
      denomination: string
    }>
  ): Array<string> {
    return getPathstringBasedContentSync('onchain/instanceAddresses.json', paths)
  }

  export async function getInstanceAddress(
    network: string,
    token: string,
    denomination: string
  ): Promise<string> {
    return (await getInstanceAddresses([{ network: network, token: token, denomination: denomination }]))[0]
  }

  export function getInstanceAddressSync(network: string, token: string, denomination: string): string {
    return getInstanceAddressesSync([{ network: network, token: token, denomination: denomination }])[0]
  }

  export function getInstanceDeployBlockNums(
    paths: Array<{
      network: string
      token: string
      denomination: string
    }>
  ): Promise<Array<number>> {
    return getPathstringBasedContent('onchain/deployedBlockNumbers.json', paths)
  }

  export function getInstanceDeployBlockNumsSync(
    paths: Array<{
      network: string
      token: string
      denomination: string
    }>
  ): Array<number> {
    return getPathstringBasedContentSync('onchain/deployedBlockNumbers.json', paths)
  }

  export async function getInstanceDeployBlockNum(
    network: string,
    token: string,
    denomination: string
  ): Promise<number> {
    return (
      await getInstanceDeployBlockNums([{ network: network, token: token, denomination: denomination }])
    )[0]
  }

  export function getInstanceDeployBlockNumSync(
    network: string,
    token: string,
    denomination: string
  ): number {
    return getInstanceDeployBlockNumsSync([{ network: network, token: token, denomination: denomination }])[0]
  }

  export async function getProxyAddress(network: string): Promise<string> {
    return Json.getValue(await Json.load('onchain/infrastructure.json'), [network, 'proxy'])
  }

  export async function getMulticallAddress(network: string): Promise<string> {
    return Json.getValue(await Json.load('onchain/infrastructure.json'), [network, 'multicall'])
  }

  export async function getMulticall3Address(network: string): Promise<string> {
    return Json.getValue(await Json.load('onchain/infrastructure.json'), [network, 'multicall3'])
  }

  export function getProxyAddressSync(network: string): string {
    return Json.getValue(Json.loadSync('onchain/infrastructure.json'), [network, 'proxy'])
  }

  export function getMulticallAddressSync(network: string): string {
    return Json.getValue(Json.loadSync('onchain/infrastructure.json'), [network, 'multicall'])
  }

  export function getMulticall3AddressSync(network: string): string {
    return Json.getValue(Json.loadSync('onchain/infrastructure.json'), [network, 'multicall3'])
  }

  export async function getTokenData(network: string, token: string): Promise<TokenData> {
    const data = Json.getValue(await Json.load('onchain/tokens.json'), [network, token])
    return {
      network: +network,
      decimals: +data['decimals'],
      address: data['address']
    }
  }

  export function getTokenDataSync(network: string, token: string): TokenData {
    const data = Json.getValue(Json.loadSync('onchain/tokens.json'), [network, token])
    return {
      network: +network,
      decimals: +data['decimals'],
      address: data['address']
    }
  }
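
  // Illustrative sketch (comments only; '1', 'eth', 'dai' and the 1 ETH denomination are
  // hypothetical example arguments): each wrapper above narrows the generic pathstring lookup
  // to a single typed result.
  //
  //   const address = await getInstanceAddress('1', 'eth', '1')
  //   const deployBlock = await getInstanceDeployBlockNum('1', 'eth', '1')
  //   const { decimals } = await getTokenData('1', 'dai')
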
  export async function getTokenAddress(network: string, token: string): Promise<string> {
    return (
      await getPathstringBasedContent('onchain/tokenAddresses.json', [{ network: network, token: token }])
    )[0]
  }

  export function getTokenAddressSync(network: string, token: string): string {
    return getPathstringBasedContentSync('onchain/tokenAddresses.json', [
      { network: network, token: token }
    ])[0]
  }

  export async function getTokenDecimals(network: string, token: string): Promise<number> {
    return (
      await getPathstringBasedContent('onchain/decimals.json', [{ network: network, token: token }])
    )[0]
  }

  export function getTokenDecimalsSync(network: string, token: string): number {
    return getPathstringBasedContentSync('onchain/decimals.json', [
      { network: network, token: token }
    ])[0]
  }
}

export namespace Offchain {
  export async function getUncensoredRpcURL(network: string, name: string = ''): Promise<string> {
    const rpcs = Json.toMap(
      Json.getValue(await Json.load('offchain/infrastructure.json'), ['jrpc-uncensored', network])
    )

    if (name.length !== 0) {
      return rpcs.get(name)!
    }

    let keys = rpcs.keys()
    let randCount = NumberUtils.getRandomFromRange(0, rpcs.size - 1)

    for (let i = 0; i < randCount; i++) keys.next()

    return rpcs.get(keys.next().value)!
  }

  export function getUncensoredRpcURLSync(network: string, name: string = ''): string {
    const rpcs = Json.toMap(
      Json.getValue(Json.loadSync('offchain/infrastructure.json'), ['jrpc-uncensored', network])
    )

    if (name.length !== 0) {
      return rpcs.get(name)!
    }

    let keys = rpcs.keys()
    let randCount = NumberUtils.getRandomFromRange(0, rpcs.size - 1)

    for (let i = 0; i < randCount; i++) keys.next()

    return rpcs.get(keys.next().value)!
  }

  export async function getClassicSubgraphURL(network: string): Promise<string> {
    return Json.getValue(await Json.load('offchain/infrastructure.json'), ['subgraph', network])
  }

  export function getClassicSubgraphURLSync(network: string): string {
    return Json.getValue(Json.loadSync('offchain/infrastructure.json'), ['subgraph', network])
  }
}

export namespace Constants {
  export const MERKLE_TREE_HEIGHT = 20
}
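
// Illustrative usage sketch (comments only; assumes 'offchain/infrastructure.json' maps
// 'jrpc-uncensored' -> network id -> { providerName: url }). Omitting `name` picks a random
// provider for the network, passing it selects a specific one:
//
//   const rpcUrl = await Offchain.getUncensoredRpcURL('1')
//   const subgraphUrl = Offchain.getClassicSubgraphURLSync('1')
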
export namespace Docs {
  // TODO: Probably find some easier way to lookup below docs for the end user...
  export class Base {
    _id: string
    _rev?: string

    constructor(id: string) {
      this._id = id
    }
  }

  export class Deposit extends Base {
    blockNumber: number
    leafIndex: number
    commitment: string
    transactionHash: string
    timestamp: string

    constructor(obj: any) {
      const blockNumber = obj['blockNumber']
      const transactionHash = obj['transactionHash']
      const commitment = obj['args']['commitment']
      const leafIndex = obj['args']['leafIndex']
      const timestamp = obj['args']['timestamp']

      // To preserve order because we will need it later
      super(toIndexableString([blockNumber, leafIndex, commitment]))

      this.commitment = commitment
      this.blockNumber = blockNumber
      this.leafIndex = leafIndex
      this.transactionHash = transactionHash
      this.timestamp = timestamp
    }
  }

  export class Withdrawal extends Base {
    blockNumber: number
    to: string
    nullifierHash: string
    transactionHash: string
    fee: string

    constructor(obj: any) {
      const blockNumber = obj['blockNumber']
      const transactionHash = obj['transactionHash']
      const to = obj['args']['to']
      const nullifierHash = obj['args']['nullifierHash']
      const fee = (obj['args']['fee'] as BigNumber).toString()

      super(toIndexableString([blockNumber, to, nullifierHash]))

      this.blockNumber = blockNumber
      this.to = to
      this.nullifierHash = nullifierHash
      this.transactionHash = transactionHash
      this.fee = fee
    }
  }

  export class Note extends Base {
    network: string
    token: string
    denomination: string
    note: string

    constructor(index: number, network: string, token: string, denomination: string, note: string) {
      super(toIndexableString([index, network, denomination, token]))
      this.network = network
      this.token = token
      this.denomination = denomination
      this.note = note
    }
  }

  export class Invoice extends Base {
    network: string
    token: string
    denomination: string
    invoice: string

    constructor(index: number, network: string, token: string, denomination: string, invoice: string) {
      super(toIndexableString([index, network, denomination, token]))
      this.network = network
      this.token = token
      this.denomination = denomination
      this.invoice = invoice
    }
  }

  export class Relayer extends Base {
    address: string
    version: string
    serviceFeePercent: number
    miningFeePercent: number
    status: string
    chainId: number
    prices: Map<string, BigNumber>

    constructor(url: string, properties: RelayerProperties) {
      super(toIndexableString([url]))
      this.address = properties.address
      this.version = properties.version
      this.serviceFeePercent = properties.serviceFeePercent
      this.miningFeePercent = properties.miningFeePercent
      this.status = properties.status
      this.chainId = properties.chainId
      this.prices = properties.prices
    }
  }
}
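
// Illustrative sketch (comments only; assumes `event` is a decoded ethers Deposit event log
// carrying `blockNumber`, `transactionHash` and `args.commitment` / `args.leafIndex` /
// `args.timestamp`, which is what the constructor above reads). The `_id` is a collation-ordered
// key, so bulk reads from the cache come back sorted by block number and leaf index:
//
//   const doc = new Docs.Deposit(event)
//   // doc._id === toIndexableString([event.blockNumber, event.args.leafIndex, event.args.commitment])
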
export namespace Cache {
  export class Base {
    name: string
    db: PouchDB.Database

    constructor(name: string, options?: Options.Cache) {
      this.name = name

      if (options?.persistent === false && options?.adapter !== 'memory' && options?.adapter !== null)
        throw ErrorUtils.getError('Cache.new: if not persistent, cache must use memory adapter.')

      if (options?.adapter === 'memory' && options?.persistent === true)
        throw ErrorUtils.getError("Cache.new: can't specify memory adapter if persistent.")

      const dbAdapter = options?.adapter ?? (options?.persistent === false ? 'memory' : 'leveldb')

      if (options?.cachePath)
        if (options.cachePath.charAt(options.cachePath.length - 1) != '/') options.cachePath += '/'

      // Create the cache dir under the same prefix the database will be opened under
      if (!Files.cacheDirExists(options?.cachePath)) Files.makeCacheDir(options?.cachePath)

      this.db = new PouchDB(Files.getCachePath(name, options?.cachePath), { adapter: dbAdapter })
    }

    async get(keys: Array<any>): Promise<any> {
      return await this.db.get(toIndexableString(keys)).catch((err) => {
        throw ErrorUtils.ensureError(err)
      })
    }

    async close(): Promise<void> {
      await this.db.close()
    }

    async clear(): Promise<void> {
      await this.db.destroy()
    }
  }

  export abstract class Syncable extends Base {
    pooler?: AsyncUtils.PromisePooler

    constructor(name: string, options?: Options.Cache) {
      super(name, options)
    }

    abstract buildDoc(response: any): Docs.Base

    abstract getCallbacks(...args: Array<any>): Array<Function>

    abstract getErrorHandlers(...args: Array<any>): Array<Function>

    initializePooler(
      callbacks: Array<Function>,
      errorHandlers: Array<Function>,
      concurrencyLimit: number
    ): void {
      if (this.pooler) this.pooler.reset()
      this.pooler = new AsyncUtils.PromisePooler(callbacks, errorHandlers, concurrencyLimit)
    }

    async close(): Promise<void> {
      if (this.pooler && this.pooler.pending)
        throw ErrorUtils.getError("Syncable.close: can't clear while pooler still has pending promises.")
      await super.close()
    }

    async clear(): Promise<void> {
      if (this.pooler && this.pooler.pending)
        throw ErrorUtils.getError("Syncable.clear: can't clear while pooler still has pending promises.")
      await super.clear()
    }
  }

  type DocsArray<T> = Array<{
    doc?: T
    id: string
    key: string
    value: {
      rev: string
      deleted?: boolean
    }
  }>

  export async function loadContents<T>(
    nameOfContent: string,
    full: boolean = true,
    emptyError: Error = ErrorUtils.getError(
      `Core.loadCacheContents: there is no cache entry for ${nameOfContent}`
    )
  ): Promise<DocsArray<T>> {
    const cache = new Cache.Base(nameOfContent)

    const docs = await cache.db.allDocs({ include_docs: full }).catch((err) => {
      throw ErrorUtils.ensureError(err)
    })

    if (docs.total_rows === 0) {
      await cache.clear()
      throw emptyError
    }

    return docs.rows as DocsArray<T>
  }
}
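
// Illustrative usage sketch (comments only; the cache name 'Deposits1ETH1' and the key values are
// hypothetical). Adapter selection follows the constructor above: in-memory when `persistent` is
// false, LevelDB on disk otherwise.
//
//   const cache = new Cache.Base('Deposits1ETH1')
//   const doc = await cache.get([blockNumber, leafIndex, commitment])  // keys collate like Docs.Deposit._id
//   await cache.close()
//
//   // or load everything at once:
//   const rows = await Cache.loadContents<Docs.Deposit>('Deposits1ETH1')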