Merge pull request '#PR1: classic-ui: Prefix static caches with network id and code quality' (#25) from AlienTornadosaurusHex/classic-ui:master into master
Reviewed-on: https://git.tornado.ws/tornadocash/classic-ui/pulls/25
Commit: 6130c46d90
@@ -25,7 +25,7 @@ export async function _encryptFormatTx({ dispatch, getters, rootGetters }, { eve
       if (!instance) {
         return acc
       }
-      const name = `${instance.amount}${instance.currency}`
+      const name = `${netId}${instance.amount}${instance.currency}`
       if (!acc[name]) {
         const service = eventsInterface.getService({ netId, ...instance })
         acc[name] = { ...instance, service }
@@ -49,7 +49,7 @@ export async function _encryptFormatTx({ dispatch, getters, rootGetters }, { eve
     if (!instance) {
       return
     }
-    const { service } = instances[`${instance.amount}${instance.currency}`]
+    const { service } = instances[`${netId}${instance.amount}${instance.currency}`]
     return getDeposit({ event, netId, service, instance })
   })
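The two hunks above prefix the in-memory instance lookup key with the network id, so a service built for one chain can no longer be picked up when formatting notes for another. A minimal standalone sketch of that keying; the function name and the getService argument are illustrative, not code from the PR:

// Group instances into an accumulator keyed by network id + denomination + currency.
function groupInstancesByNetwork(instances, netId, getService) {
  return instances.reduce((acc, instance) => {
    if (!instance) {
      return acc
    }
    // e.g. netId 1, amount 0.1, currency 'eth' -> key '10.1eth'
    const name = `${netId}${instance.amount}${instance.currency}`
    if (!acc[name]) {
      acc[name] = { ...instance, service: getService({ netId, ...instance }) }
    }
    return acc
  }, {})
}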
@@ -1,3 +1,5 @@
+export const enabledChains = ['1', '10', '56', '100', '137', '42161']
+export const chainsWithEncryptedNotes = ['1', '5', '56', '100', '137']
 export default {
   netId1: {
     rpcCallRetryAttempt: 15,
@@ -26,9 +28,14 @@ export default {
       mevblockerRPC: {
         name: 'MevblockerRPC',
         url: 'https://rpc.mevblocker.io'
+      },
+      llamaRPC: {
+        name: 'llamarpc',
+        url: 'https://eth.llamarpc.com'
       }
     },
     multicall: '0xeefba1e63905ef1d7acba5a8513c70307c1ce441',
+    routerContract: '0xd90e2f925DA726b50C4Ed8D0Fb90Ad053324F31b',
     registryContract: '0x58E8dCC13BE9780fC42E8723D8EaD4CF46943dF2',
     echoContractAccount: '0x9B27DD5Bb15d42DC224FCD0B7caEbBe16161Df42',
     aggregatorContract: '0xE8F47A78A6D52D317D0D2FFFac56739fE14D1b49',
@@ -297,6 +304,10 @@ export default {
         name: 'Tornado RPC',
         url: 'https://arbitrum-one.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607'
       },
+      oneRpc: {
+        name: '1rpc',
+        url: 'https://1rpc.io/arb'
+      },
       Arbitrum: {
         name: 'Arbitrum RPC',
         url: 'https://arb1.arbitrum.io/rpc'
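The chain lists are now named exports of networkConfig, so the scripts below import them instead of each keeping its own hard-coded array. A small consumer sketch, assuming only the config shape visible in this diff (the import path matches the scripts in this PR):

import networkConfig, { enabledChains, chainsWithEncryptedNotes } from '../networkConfig'

for (const netId of enabledChains) {
  const { nativeCurrency } = networkConfig[`netId${netId}`]
  const hasEncryptedNotes = chainsWithEncryptedNotes.includes(netId)
  console.log(netId, nativeCurrency, hasEncryptedNotes ? 'has encrypted notes' : 'no encrypted notes')
}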
@@ -264,24 +264,24 @@ export default async (ctx, inject) => {
       Object.keys(tokens[token].instanceAddress).forEach((amount) => {
         if (nativeCurrency === token && netId === 1) {
           stores.push({
-            name: `stringify_bloom_${token}_${amount}`,
+            name: `stringify_bloom_${netId}_${token}_${amount}`,
             keyPath: 'hashBloom'
           })
         }

         stores.push(
           {
-            name: `deposits_${token}_${amount}`,
+            name: `deposits_${netId}_${token}_${amount}`,
             keyPath: 'leafIndex', // the key by which it refers to the object must be in all instances of the storage
             indexes: DEPOSIT_INDEXES
           },
           {
-            name: `withdrawals_${token}_${amount}`,
+            name: `withdrawals_${netId}_${token}_${amount}`,
             keyPath: 'blockNumber',
             indexes: WITHDRAWAL_INDEXES
           },
           {
-            name: `stringify_tree_${token}_${amount}`,
+            name: `stringify_tree_${netId}_${token}_${amount}`,
             keyPath: 'hashTree'
           }
         )
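With this change every IndexedDB object store name carries the network id, so caches for the same token and denomination on different chains live in separate stores. A hypothetical helper that states the naming convention; the PR itself inlines the template strings rather than using a function like this:

// Hypothetical naming helper, not part of the PR.
function storeName(kind, netId, token, amount) {
  // e.g. storeName('deposits', 1, 'eth', '0.1') -> 'deposits_1_eth_0.1'
  return `${kind}_${netId}_${token}_${amount}`
}

const DEPOSIT_INDEXES = [] // placeholder; the real indexes are defined in the plugin
const stores = [
  { name: storeName('deposits', 1, 'eth', '0.1'), keyPath: 'leafIndex', indexes: DEPOSIT_INDEXES },
  { name: storeName('withdrawals', 1, 'eth', '0.1'), keyPath: 'blockNumber' },
  { name: storeName('stringify_tree', 1, 'eth', '0.1'), keyPath: 'hashTree' }
]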
@@ -1,24 +1,21 @@
-import networkConfig from '../networkConfig'
-import ABI from '../abis/Instance.abi.json'
-import { loadCachedEvents, getPastEvents } from './helpers'
+import networkConfig, { enabledChains } from '../networkConfig'
+import { loadCachedEvents } from './helpers'

 const EVENTS_PATH = './static/events/'
-const enabledChains = ['1', '56', '100', '137' ]

-async function main() {
-  for (let network in enabledChains) {
-    const netId = enabledChains[network]
+function main() {
+  for (const netId of enabledChains) {
     const config = networkConfig[`netId${netId}`]
     const { constants, tokens, nativeCurrency, deployedBlock } = config
     const CONTRACTS = tokens[nativeCurrency].instanceAddress

     console.log(`\n ::: ${netId} [${nativeCurrency.toUpperCase()}] :::`)

-    for (const [instance, _contract] of Object.entries(CONTRACTS)) {
+    for (const [instance] of Object.entries(CONTRACTS)) {
       console.log(`\n instanceDenomation - ${instance}`)

-      const withdrawalCachedEvents = await loadCachedEvents({
-        name: `withdrawals_${nativeCurrency}_${instance}.json`,
+      const withdrawalCachedEvents = loadCachedEvents({
+        name: `withdrawals_${netId}_${nativeCurrency}_${instance}.json`,
         directory: EVENTS_PATH,
         deployedBlock
       })
@@ -27,8 +24,8 @@ async function main() {
       console.log('cachedEvents count - ', withdrawalCachedEvents.events.length)
       console.log('lastBlock - ', withdrawalCachedEvents.lastBlock)

-      const depositCachedEvents = await loadCachedEvents({
-        name: `withdrawals_${nativeCurrency}_${instance}.json`,
+      const depositCachedEvents = loadCachedEvents({
+        name: `deposits_${netId}_${nativeCurrency}_${instance}.json`,
         directory: EVENTS_PATH,
         deployedBlock
       })
@@ -37,7 +34,7 @@ async function main() {
       console.log('cachedEvents count - ', depositCachedEvents.events.length)
      console.log('lastBlock - ', depositCachedEvents.lastBlock)

-      const notesCachedEvents = await loadCachedEvents({
+      const notesCachedEvents = loadCachedEvents({
         name: `encrypted_notes_${netId}.json`,
         directory: EVENTS_PATH,
         deployedBlock: constants.ENCRYPTED_NOTES_BLOCK
@@ -46,7 +43,6 @@ async function main() {
       console.log('- Notes')
       console.log('cachedEvents count - ', notesCachedEvents.events.length)
       console.log('lastBlock - ', notesCachedEvents.lastBlock)
-
     }
   }
 }
@@ -4,25 +4,25 @@ import Web3 from 'web3'

 import networkConfig from '../../networkConfig'

-export async function download({ name, directory, contentType }) {
+export function download({ name, directory }) {
   const path = `${directory}${name}.gz`.toLowerCase()

-  const data = fs.readFileSync(path)
+  const data = fs.readFileSync(path, { flag: 'as+' })
   const content = zlib.inflateSync(data)

   return content
 }

-export async function loadCachedEvents({ name, directory, deployedBlock }) {
+export function loadCachedEvents({ name, directory, deployedBlock }) {
   try {
-    const module = await download({ contentType: 'string', directory, name })
+    const module = download({ contentType: 'string', directory, name })

     if (module) {
       const events = JSON.parse(module)

       return {
         events,
         lastBlock: events[events.length - 1].blockNumber
       }
     }
   } catch (err) {
@@ -67,6 +67,7 @@ export async function getPastEvents({ type, fromBlock, netId, events, contractAt
   }
+
   console.log(`Fetching ${type}, chainId - ${netId}`, `chunksCount - ${chunksCount}`)

   for (let i = 0; i < chunksCount; i++)
     try {
       await new Promise((resolve) => setTimeout(resolve, 200))
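download and loadCachedEvents become plain synchronous functions built on readFileSync and inflateSync. A self-contained sketch of the loader after this change; the catch-block fallback is an assumption, since its body lies outside the hunk:

import fs from 'fs'
import zlib from 'zlib'

export function download({ name, directory }) {
  const path = `${directory}${name}.gz`.toLowerCase()
  // 'as+' opens the file synchronously for reading and appending,
  // creating it if it does not exist yet.
  const data = fs.readFileSync(path, { flag: 'as+' })
  return zlib.inflateSync(data)
}

export function loadCachedEvents({ name, directory, deployedBlock }) {
  try {
    const content = download({ directory, name })
    const events = JSON.parse(content)
    return { events, lastBlock: events[events.length - 1].blockNumber }
  } catch (err) {
    // Assumed fallback: start with an empty cache at the deployment block.
    return { events: [], lastBlock: deployedBlock }
  }
}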
@@ -1,14 +1,11 @@
 import fs from 'fs'
 import zlib from 'zlib'

-export async function save(filePath) {
-  const directories = filePath.split('/')
-  const fileName = directories[directories.length - 1]
-
+export function save(filePath) {
   try {
     const data = fs.readFileSync(`${filePath}`)

-    const payload = await zlib.deflateSync(data, {
+    const payload = zlib.deflateSync(data, {
       level: zlib.constants.Z_BEST_COMPRESSION,
       strategy: zlib.constants.Z_FILTERED
     })
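save likewise drops the pointless await around the synchronous zlib.deflateSync and the unused path splitting. A sketch of the whole helper; everything after deflateSync is assumed (it is outside the hunk), including the boolean return that the zip script relies on via isSaved:

import fs from 'fs'
import zlib from 'zlib'

export function save(filePath) {
  try {
    const data = fs.readFileSync(`${filePath}`)

    const payload = zlib.deflateSync(data, {
      level: zlib.constants.Z_BEST_COMPRESSION,
      strategy: zlib.constants.Z_FILTERED
    })

    // Assumed tail: write the compressed copy next to the original and report success.
    fs.writeFileSync(`${filePath}.gz`, payload)
    return true
  } catch (err) {
    console.error('save failed for', filePath, err.message) // assumed error handling
    return false
  }
}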
@@ -3,12 +3,12 @@ import 'dotenv/config'
 import fs from 'fs'
 import { uniqBy } from 'lodash'

-import networkConfig from '../networkConfig'
+import networkConfig, { enabledChains } from '../networkConfig'
 import ABI from '../abis/TornadoProxy.abi.json'

 import { getPastEvents, loadCachedEvents } from './helpers'

 const EVENTS_PATH = './static/events/'
-const enabledChains = ['1', '5', '56', '100', '137']

 async function saveEncryptedNote(netId) {
   const {
@@ -23,7 +23,7 @@ async function saveEncryptedNote(netId) {
   let encryptedEvents = []
   const name = `encrypted_notes_${netId}.json`

-  const cachedEvents = await loadCachedEvents({
+  const cachedEvents = loadCachedEvents({
     name,
     directory: EVENTS_PATH,
     deployedBlock: constants.ENCRYPTED_NOTES_BLOCK
@@ -57,11 +57,13 @@ async function saveEncryptedNote(netId) {
   freshEvents = uniqBy(freshEvents, 'encryptedNote').sort((a, b) => b.blockNumber - a.blockNumber)

   const eventsJson = JSON.stringify(freshEvents, null, 2) + '\n'

   fs.writeFileSync(`${EVENTS_PATH}${name}`, eventsJson)
 }

 async function main() {
   const [, , , chain] = process.argv

   if (!enabledChains.includes(chain)) {
     throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
   }
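The script now validates its chain argument against the shared enabledChains export instead of a local list. A small sketch of that guard; how the script is invoked (and therefore why the chain id sits at process.argv[3]) is outside this diff:

import { enabledChains } from '../networkConfig'

function requireSupportedChain(chain) {
  // Throws the same error the scripts above use when an unknown id is passed.
  if (!enabledChains.includes(chain)) {
    throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
  }
  return chain
}

const [, , , chain] = process.argv // e.g. '1' for mainnet
requireSupportedChain(chain)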
@@ -3,87 +3,120 @@ import 'dotenv/config'
 import fs from 'fs'
 import { uniqBy } from 'lodash'

-import networkConfig from '../networkConfig'
+import networkConfig, { enabledChains } from '../networkConfig'
 import ABI from '../abis/Instance.abi.json'

 import { loadCachedEvents, getPastEvents } from './helpers'

 const EVENTS_PATH = './static/events/'
-const EVENTS = ['Deposit', 'Withdrawal']
-const enabledChains = ['1', '56', '100', '137']

-async function main(type, netId) {
-  const { tokens, nativeCurrency, deployedBlock } = networkConfig[`netId${netId}`]
-  const CONTRACTS = tokens[nativeCurrency].instanceAddress
-
-  for (const [instance, _contract] of Object.entries(CONTRACTS)) {
-    const cachedEvents = await loadCachedEvents({
-      name: `${type.toLowerCase()}s_${nativeCurrency}_${instance}.json`,
-      directory: EVENTS_PATH,
-      deployedBlock
-    })
-
-    console.log('Update events for', instance, nativeCurrency.toUpperCase(), `${type.toLowerCase()}s`)
-    console.log('cachedEvents count - ', cachedEvents.events.length)
-    console.log('lastBlock - ', cachedEvents.lastBlock)
-
-    let events = []
-
-    events = await getPastEvents({
-      type,
-      netId,
-      events,
-      contractAttrs: [ABI, _contract],
-      fromBlock: cachedEvents.lastBlock + 1
-    })
-
-    if (type === 'Deposit') {
-      events = events.map(({ blockNumber, transactionHash, returnValues }) => {
-        const { commitment, leafIndex, timestamp } = returnValues
-        return {
-          timestamp,
-          commitment,
-          blockNumber,
-          transactionHash,
-          leafIndex: Number(leafIndex)
-        }
-      })
-    }
-
-    if (type === 'Withdrawal') {
-      events = events.map(({ blockNumber, transactionHash, returnValues }) => {
-        const { nullifierHash, to, fee } = returnValues
-        return {
-          to,
-          fee,
-          blockNumber,
-          nullifierHash,
-          transactionHash
-        }
-      })
-    }
-
-    let freshEvents = cachedEvents.events.concat(events)
-
-    if (type === 'Withdrawal') {
-      freshEvents = uniqBy(freshEvents, 'nullifierHash').sort((a, b) => a.blockNumber - b.blockNumber)
-    } else {
-      freshEvents = freshEvents.filter((e, index) => Number(e.leafIndex) === index)
-    }
-
-    const eventsJson = JSON.stringify(freshEvents, null, 2) + '\n'
-    fs.writeFileSync(`${EVENTS_PATH}${type.toLowerCase()}s_${nativeCurrency}_${instance}.json`, eventsJson)
+function parseArg(netId, tokenOrEvent) {
+  const { tokens } = networkConfig[`netId${netId}`]
+  const keys = Object.keys(tokens)
+  if (tokenOrEvent !== undefined) {
+    const lower = tokenOrEvent.toLowerCase()
+    return keys.includes(lower)
+      ? { token: lower }
+      : { event: lower[0].toUpperCase() + lower.slice(1).toLowerCase() }
+  } else return undefined
+}
+
+function parseDepositEvent({ blockNumber, transactionHash, returnValues }) {
+  const { commitment, leafIndex, timestamp } = returnValues
+  return {
+    timestamp,
+    commitment,
+    blockNumber,
+    transactionHash,
+    leafIndex: Number(leafIndex)
+  }
+}
+
+function parseWithdrawalEvent({ blockNumber, transactionHash, returnValues }) {
+  const { nullifierHash, to, fee } = returnValues
+  return {
+    to,
+    fee,
+    blockNumber,
+    nullifierHash,
+    transactionHash
+  }
+}
+
+function filterWithdrawalEvents(events) {
+  return uniqBy(events, 'nullifierHash').sort((a, b) => a.blockNumber - b.blockNumber)
+}
+
+function filterDepositEvents(events) {
+  return events.filter((e, index) => Number(e.leafIndex) === index)
+}
+
+async function main(netId, chosenToken, chosenEvent) {
+  const { tokens, deployedBlock } = networkConfig[`netId${netId}`]
+
+  const tokenSymbols = chosenToken !== undefined ? [chosenToken] : Object.keys(tokens)
+  const eventNames = chosenEvent !== undefined ? [chosenEvent] : ['Deposit', 'Withdrawal']
+
+  for (const eventName of eventNames) {
+    // Get the parser that we need
+    const parser = eventName === 'Deposit' ? parseDepositEvent : parseWithdrawalEvent
+    // Get the parser that we need
+    const filter = eventName === 'Deposit' ? filterDepositEvents : filterWithdrawalEvents
+
+    for (const tokenSymbol of tokenSymbols) {
+      // Now load the denominations and address
+      const instanceData = Object.entries(tokens[tokenSymbol].instanceAddress)
+
+      // And now sync
+      for (const data of instanceData) {
+        const denom = data[0]
+        const address = data[1]
+
+        // Now load cached events
+        const cachedEvents = loadCachedEvents({
+          name: `${eventName.toLowerCase()}s_${netId}_${tokenSymbol}_${denom}.json`,
+          directory: EVENTS_PATH,
+          deployedBlock
+        })
+
+        console.log('Update events for', denom, tokenSymbol.toUpperCase(), `${eventName.toLowerCase()}s`)
+        console.log('cachedEvents count - ', cachedEvents.events.length)
+        console.log('lastBlock - ', cachedEvents.lastBlock)
+
+        let events = await getPastEvents({
+          type: eventName,
+          fromBlock: cachedEvents.lastBlock + 1,
+          netId: netId,
+          events: [],
+          contractAttrs: [ABI, address]
+        })
+
+        events = filter(cachedEvents.events.concat(events.map(parser)))
+
+        fs.writeFileSync(
+          `${EVENTS_PATH}${eventName.toLowerCase()}s_${netId}_${tokenSymbol}_${denom}.json`,
+          JSON.stringify(events, null, 2) + '\n'
+        )
+      }
+    }
   }
 }

+/**
+ * @param netId ID of the network for which event(s) should be synced.
+ * @param tokenOrEvent Optional token or event.
+ * @param eventOrToken Optional token or event. Overwrites the former option.
+ */
 async function start() {
-  const [, , , chain] = process.argv
+  const [, , , netId, tokenOrEvent, eventOrToken] = process.argv

-  if (!enabledChains.includes(chain)) {
+  const args = { ...parseArg(netId, tokenOrEvent), ...parseArg(netId, eventOrToken) }
+
+  if (!enabledChains.includes(netId)) {
     throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
   }

-  for await (const event of EVENTS) {
-    await main(event, chain)
-  }
+  await main(netId, args.token, args.event)
 }

 start()
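The rewritten updater accepts an optional token and/or event name in either order; parseArg classifies each argument by checking it against the token symbols of the chosen network, and the two results are merged in start(). A standalone sketch of that disambiguation with a stand-in token list (the real one comes from networkConfig[`netId${netId}`].tokens):

// Stand-in token list for illustration only.
const tokens = { eth: {}, dai: {}, cdai: {}, usdc: {}, usdt: {}, wbtc: {} }

function parseArg(tokenOrEvent) {
  if (tokenOrEvent === undefined) return undefined
  const lower = tokenOrEvent.toLowerCase()
  return Object.keys(tokens).includes(lower)
    ? { token: lower }                                   // e.g. 'dai'
    : { event: lower[0].toUpperCase() + lower.slice(1) } // e.g. 'deposit' -> 'Deposit'
}

// Both orders work because the two results are merged:
console.log({ ...parseArg('dai'), ...parseArg('deposit') })    // { token: 'dai', event: 'Deposit' }
console.log({ ...parseArg('withdrawal'), ...parseArg('eth') }) // { event: 'Withdrawal', token: 'eth' }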
@@ -1,11 +1,12 @@
 import 'dotenv/config'

 import fs from 'fs'

 import BloomFilter from 'bloomfilter.js'
 import { MerkleTree } from 'fixed-merkle-tree'
 import { buildMimcSponge } from 'circomlibjs'

-import networkConfig from '../networkConfig'
+import networkConfig, { enabledChains } from '../networkConfig'

 import { loadCachedEvents, save } from './helpers'
@@ -14,7 +15,7 @@ const TREES_PATH = './static/trees/'
 const EVENTS_PATH = './static/events/'

 const EVENTS = ['deposit']
-const enabledChains = ['1', '56', '100', '137' ]
+
 let mimcHash

 const trees = {
@@ -22,8 +23,8 @@ const trees = {
   LEVELS: 20 // const from contract
 }

-function getName({ path, type, instance, format = '.json', currName = 'eth' }) {
-  return `${path}${type.toLowerCase()}s_${currName}_${instance}${format}`
+function getName({ path, type, netId, instance, format = '.json', currName = 'eth' }) {
+  return `${path}${type.toLowerCase()}s_${netId}_${currName}_${instance}${format}`
 }

 function createTreeZip(netId) {
@@ -36,6 +37,7 @@ function createTreeZip(netId) {
       const baseFilename = getName({
         type,
         instance,
+        netId,
         format: '',
         path: TREES_PATH,
         currName: currencyName.toLowerCase()
@@ -45,6 +47,7 @@ function createTreeZip(netId) {

       treesFolder.forEach((fileName) => {
         fileName = `${TREES_PATH}${fileName}`

         const isInstanceFile = !fileName.includes('.gz') && fileName.includes(baseFilename)

         if (isInstanceFile) {
@@ -67,6 +70,7 @@ async function createTree(netId) {
       const filePath = getName({
         type,
         instance,
+        netId,
         format: '',
         path: TREES_PATH,
         currName: currencyName.toLowerCase()
@@ -74,8 +78,8 @@ async function createTree(netId) {

       console.log('createTree', { type, instance })

-      const { events } = await loadCachedEvents({
-        name: `${type}s_${nativeCurrency}_${instance}.json`,
+      const { events } = loadCachedEvents({
+        name: `${type}s_${netId}_${nativeCurrency}_${instance}.json`,
         directory: EVENTS_PATH,
         deployedBlock
       })
@@ -118,10 +122,12 @@ async function createTree(netId) {
        }, [])

        const sliceJson = JSON.stringify(slice, null, 2) + '\n'
+
        fs.writeFileSync(`${filePath}_slice${index + 1}.json`, sliceJson)
      })

      const bloomCache = bloom.serialize()
+
      fs.writeFileSync(`${filePath}_bloom.json`, bloomCache)
    }
  }
@@ -137,13 +143,16 @@ async function initMimc() {

 async function main() {
   const [, , , chain] = process.argv

   if (!enabledChains.includes(chain)) {
     throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
   }

   await initMimc()

   await createTree(chain)
-  await createTreeZip(chain)
+
+  createTreeZip(chain)
 }

 main()
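getName now embeds the network id, so every generated tree and event file is unambiguous across chains. A short sketch of the naming and the paths it produces; the concrete arguments are examples, not values taken from the PR:

function getName({ path, type, netId, instance, format = '.json', currName = 'eth' }) {
  return `${path}${type.toLowerCase()}s_${netId}_${currName}_${instance}${format}`
}

// './static/trees/deposits_1_eth_0.1' (base name, no extension)
console.log(getName({ path: './static/trees/', type: 'deposit', netId: 1, instance: '0.1', format: '' }))
// './static/events/deposits_56_bnb_1.json'
console.log(getName({ path: './static/events/', type: 'deposit', netId: 56, instance: '1', currName: 'bnb' }))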
@@ -1,6 +1,7 @@
 import { uniqBy } from 'lodash'

-import networkConfig from '../networkConfig'
+import networkConfig, { enabledChains, chainsWithEncryptedNotes } from '../networkConfig'

 import { loadCachedEvents, save } from './helpers'

 const EVENTS_PATH = './static/events/'
@@ -9,22 +10,26 @@ const EVENTS = ['Deposit', 'Withdrawal']
 function updateEncrypted(netId) {
   try {
     const file = `${EVENTS_PATH}encrypted_notes_${netId}.json`

     save(file)
   } catch {}
 }

 async function updateCommon(netId) {
   const { nativeCurrency, tokens } = networkConfig[`netId${netId}`]

   console.log(Object.keys(tokens[nativeCurrency].instanceAddress))

   for await (const type of EVENTS) {
     for await (const instance of Object.keys(tokens[nativeCurrency].instanceAddress)) {
       console.warn('instance', instance)
-      const filename = `${type.toLowerCase()}s_${nativeCurrency}_${instance}.json`
+
+      const filename = `${type.toLowerCase()}s_${netId}_${nativeCurrency}_${instance}.json`
+
       const isSaved = save(`${EVENTS_PATH}${filename}`)
+
       if (isSaved) {
         try {
-          await testCommon(netId, type, filename)
+          testCommon(netId, type, filename)
         } catch (err) {
           console.error(err.message)
         }
@@ -33,10 +38,10 @@ async function updateCommon(netId) {
     }
   }

-async function testCommon(netId, type, filename) {
+function testCommon(netId, type, filename) {
   const { deployedBlock } = networkConfig[`netId${netId}`]

-  const cachedEvents = await loadCachedEvents({
+  const cachedEvents = loadCachedEvents({
     name: filename,
     directory: EVENTS_PATH,
     deployedBlock
@@ -45,11 +50,13 @@ async function testCommon(netId, type, filename) {
   console.log('cachedEvents', cachedEvents.events.length, type)

   let events = cachedEvents.events

   if (type === 'Withdrawal') {
     events = uniqBy(cachedEvents.events, 'nullifierHash')
   } else if (type === 'Deposit') {
     events = cachedEvents.events.filter((e, index) => Number(e.leafIndex) === index)
   }

   if (events.length !== cachedEvents.events.length) {
     console.error('events.length', events.length)
     console.error('cachedEvents.events.length', cachedEvents.events.length)
@@ -58,10 +65,11 @@ async function testCommon(netId, type, filename) {
 }

 async function main() {
-  const NETWORKS = [1, 5, 56, 100, 137 ]
-
-  for await (const netId of NETWORKS) {
-    updateEncrypted(netId)
+  for (let i = 0; i < enabledChains.length; i++) {
+    const netId = enabledChains[i]
+
+    if (netId === chainsWithEncryptedNotes[i]) updateEncrypted(netId)
+
     await updateCommon(netId)
   }
 }
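testCommon re-applies the same dedup rules used when the caches are built: withdrawals must be unique by nullifierHash and deposits must form a gapless leafIndex sequence. A minimal sketch of those two checks:

import { uniqBy } from 'lodash'

function isConsistent(type, events) {
  if (type === 'Withdrawal') {
    // No two withdrawals may share a nullifierHash.
    return uniqBy(events, 'nullifierHash').length === events.length
  }
  if (type === 'Deposit') {
    // Deposits must be a gapless sequence: the i-th event has leafIndex i.
    return events.every((e, index) => Number(e.leafIndex) === index)
  }
  return true
}

console.log(isConsistent('Deposit', [{ leafIndex: 0 }, { leafIndex: 1 }])) // true
console.log(isConsistent('Deposit', [{ leafIndex: 0 }, { leafIndex: 2 }])) // false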
@@ -2,13 +2,11 @@ import Web3 from 'web3'

 import graph from '@/services/graph'
 import { download } from '@/store/snark'
-import networkConfig from '@/networkConfig'
+import networkConfig, { enabledChains } from '@/networkConfig'
 import InstanceABI from '@/abis/Instance.abi.json'
 import { CONTRACT_INSTANCES, eventsType, httpConfig } from '@/constants'
 import { sleep, flattenNArray, formatEvents, capitalizeFirstLetter } from '@/utils'

-const supportedCaches = ['1', '56', '100', '137']
-
 let store
 if (process.browser) {
   window.onNuxtReady(({ $store }) => {
@@ -21,7 +19,7 @@ class EventService {
     this.idb = window.$nuxt.$indexedDB(netId)

     const { nativeCurrency } = networkConfig[`netId${netId}`]
-    const hasCache = supportedCaches.includes(netId.toString())
+    const hasCache = enabledChains.includes(netId.toString())

     this.netId = netId
     this.amount = amount
@@ -35,7 +33,7 @@ class EventService {
   }

   getInstanceName(type) {
-    return `${type}s_${this.currency}_${this.amount}`
+    return `${type}s_${this.netId}_${this.currency}_${this.amount}`
   }

   updateEventProgress(percentage, type) {
@@ -466,7 +464,7 @@ class EventsFactory {
   }

   getService = (payload) => {
-    const instanceName = `${payload.currency}_${payload.amount}`
+    const instanceName = `${payload.netId}_${payload.currency}_${payload.amount}`

     if (this.instances.has(instanceName)) {
       return this.instances.get(instanceName)
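Both factories key their per-instance service cache with the network id as well, so the same denomination on two chains maps to two cached services. A minimal sketch of that keying; EventService here is a placeholder, not the real class:

class EventService {
  constructor({ netId, amount, currency }) {
    Object.assign(this, { netId, amount, currency })
  }
}

class EventsFactory {
  instances = new Map()

  getService = (payload) => {
    // The key now includes the network id, so '1_eth_0.1' and '10_eth_0.1'
    // resolve to different service instances.
    const instanceName = `${payload.netId}_${payload.currency}_${payload.amount}`
    if (this.instances.has(instanceName)) {
      return this.instances.get(instanceName)
    }
    const instance = new EventService(payload)
    this.instances.set(instanceName, instance)
    return instance
  }
}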
@@ -16,18 +16,19 @@ class MerkleTreeService {
     this.instanceName = instanceName

     this.idb = window.$nuxt.$indexedDB(netId)

     this.bloomService = bloomService({
       netId,
       amount,
       commitment,
       instanceName,
       fileFolder: 'trees',
-      fileName: `deposits_${currency}_${amount}_bloom.json.gz`
+      fileName: `deposits_${netId}_${currency}_${amount}_bloom.json.gz`
     })
   }

   getFileName(partNumber = trees.PARTS_COUNT) {
-    return `trees/deposits_${this.currency}_${this.amount}_slice${partNumber}.json.gz`
+    return `trees/deposits_${this.netId}_${this.currency}_${this.amount}_slice${partNumber}.json.gz`
   }

   createTree({ events }) {
@@ -185,7 +186,7 @@ class TreesFactory {
   instances = new Map()

   getService = (payload) => {
-    const instanceName = `${payload.currency}_${payload.amount}`
+    const instanceName = `${payload.netId}_${payload.currency}_${payload.amount}`
     if (this.instances.has(instanceName)) {
       return this.instances.get(instanceName)
     }
8 binary files changed but are not shown. Some files were not shown because too many files have changed in this diff.