🎨 improved scripts for updating events and trees
parent 8fbbe4c67b
commit f0e38035e7
55 changed files with 437 additions and 477 deletions
97 scripts/helpers/download.js Normal file
@@ -0,0 +1,97 @@
import fs from 'fs'
import Jszip from 'jszip'
import Web3 from 'web3'

const jszip = new Jszip()

export async function download({ name, directory, contentType }) {
  const path = `${directory}${name}.zip`.toLowerCase()

  const data = fs.readFileSync(path)
  const zip = await jszip.loadAsync(data)

  const file = zip.file(
    path
      .replace(directory, '')
      .slice(0, -4)
      .toLowerCase()
  )

  const content = await file.async(contentType)

  return content
}

export async function loadCachedEvents({ name, directory, deployedBlock }) {
  try {
    const module = await download({ contentType: 'string', directory, name })

    if (module) {
      const events = JSON.parse(module)

      const [lastEvent] = JSON.parse(module).sort(
        (a, b) => (b.block || b.blockNumber) - (a.block || a.blockNumber)
      )
      const lastBlock = lastEvent.block || lastEvent.blockNumber

      return {
        events,
        lastBlock
      }
    }
  } catch (err) {
    console.error(`Method loadCachedEvents has error: ${err.message}`)
    return {
      events: [],
      lastBlock: deployedBlock
    }
  }
}

export async function getPastEvents({ type, fromBlock, netId, events, rpcUrl, contractAttrs }) {
  let downloadedEvents = events

  const provider = new Web3.providers.HttpProvider(rpcUrl)
  const web3 = new Web3(provider)
  const contract = new web3.eth.Contract(...contractAttrs)

  const currentBlockNumber = await web3.eth.getBlockNumber()
  const blockDifference = Math.ceil(currentBlockNumber - fromBlock)

  const blockRange = Number(netId) === 56 ? 4950 : blockDifference / 20

  let chunksCount = blockDifference === 0 ? 1 : Math.ceil(blockDifference / blockRange)
  const chunkSize = Math.ceil(blockDifference / chunksCount)

  let toBlock = fromBlock + chunkSize

  if (fromBlock < currentBlockNumber) {
    if (toBlock >= currentBlockNumber) {
      toBlock = currentBlockNumber
      chunksCount = 1
    }

    console.log(`Fetching ${type}, chainId - ${netId}`, `chunksCount - ${chunksCount}`)
    for (let i = 0; i < chunksCount; i++)
      try {
        await new Promise((resolve) => setTimeout(resolve, 200))

        console.log(`fromBlock - ${fromBlock}`)
        console.log(`toBlock - ${toBlock}`)

        const eventsChunk = await contract.getPastEvents(type, { fromBlock, toBlock })

        if (eventsChunk) {
          downloadedEvents = downloadedEvents.concat(eventsChunk)
          console.log('downloaded events count - ', eventsChunk.length)
          console.log('____________________________________________')
        }
        fromBlock = toBlock
        toBlock += chunkSize
      } catch (err) {
        console.log('getPastEvents events', `chunk number - ${i}, has error: ${err.message}`)
        chunksCount = chunksCount + 1
      }
  }
  return downloadedEvents
}
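For reference, the cached files that loadCachedEvents reads appear to be zipped JSON arrays of event objects carrying either a block or a blockNumber field (that is what the sort above keys on). A minimal illustrative shape, with placeholder values:

// illustrative only; values are placeholders, and deposit caches also carry commitment/leafIndex
// [
//   { "blockNumber": 11223344, "transactionHash": "0x...", "commitment": "0x...", "leafIndex": 0 },
//   { "blockNumber": 11223360, "transactionHash": "0x...", "commitment": "0x...", "leafIndex": 1 }
// ]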
2 scripts/helpers/index.js Normal file
@@ -0,0 +1,2 @@
export { download, loadCachedEvents, getPastEvents } from './download'
export { save } from './save'
17 scripts/helpers/save.js Normal file
@@ -0,0 +1,17 @@
import fs from 'fs'
import zipper from 'zip-local'

export function save(fileName) {
  try {
    zipper.sync
      .zip(`${fileName}`)
      .compress()
      .save(`${fileName}.zip`)

    fs.unlinkSync(fileName)
    return true
  } catch (err) {
    console.log('on save error', fileName, err.message)
    return false
  }
}
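Since save zips a file in place and then deletes the original, a call is expected to behave roughly as sketched below (the path is illustrative):

// illustrative usage; the file name is a placeholder
// save('./static/events/deposits_eth_0.1.json')
// -> writes ./static/events/deposits_eth_0.1.json.zip and removes the original .json
// -> returns true on success, false (after logging) on failure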
75 scripts/ipfsUpload.js Normal file
@@ -0,0 +1,75 @@
// debug
// date +%s > dist/index.html & node ipfsUpload.js

require('dotenv').config()
const fs = require('fs')
const axios = require('axios')
const FormData = require('form-data')
const recursive = require('recursive-fs')
const basePathConverter = require('base-path-converter')

// it's dangerous to set MAX_PINS to 1
const MAX_PINS = 5

const baseUrl = `https://api.pinata.cloud`
const src = process.argv[2] || './dist'
const headers = {
  pinata_api_key: process.env.PINATA_API_KEY,
  pinata_secret_api_key: process.env.PINATA_SECRET_API_KEY
}

async function removeOldPins() {
  const maxPins = MAX_PINS - 1

  const res = await axios.get(`${baseUrl}/data/pinList?pageOffset=${maxPins}&status=pinned`, {
    headers
  })

  const { count, rows } = res.data

  if (count && count > maxPins) {
    for (const { ipfs_pin_hash: ipfsPinHash } of rows) {
      await axios.delete(`${baseUrl}/pinning/unpin/${ipfsPinHash}`, {
        headers
      })
      console.log(`Successfully removed pin: ${ipfsPinHash}`)
    }
  }
}

async function pinBuild() {
  console.log('Make sure you have latest build. Run `npm run generate` if necessary.')
  const { files } = await recursive.readdirr(src)
  const data = new FormData()
  files.forEach((file) => {
    // for each file stream, we need to include the correct relative file path
    data.append(`file`, fs.createReadStream(file), {
      filepath: basePathConverter(src, file)
    })
  })

  const res = await axios.post(`${baseUrl}/pinning/pinFileToIPFS`, data, {
    maxContentLength: 'Infinity', // this is needed to prevent axios from erroring out with large directories
    headers: {
      'Content-Type': `multipart/form-data; boundary=${data._boundary}`,
      ...headers
    }
  })

  const ipfsHash = res.data.IpfsHash

  console.log(`Your site is ready! IPFS hash: ${ipfsHash}`)
  console.log(`output for github-actions:`)
  console.log(`::set-output name=ipfs_hash::${ipfsHash}`)
  console.log(`https://gateway.pinata.cloud/ipfs/${ipfsHash}`)
}

async function main() {
  await removeOldPins()
  await pinBuild()
}

main().catch((e) => {
  console.log(e)
  process.exit(1)
})
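ipfsUpload.js takes its Pinata credentials from the environment via dotenv, so the .env file is assumed to provide at least the following keys (values are placeholders):

PINATA_API_KEY=<pinata api key>
PINATA_SECRET_API_KEY=<pinata secret api key>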
79 scripts/updateEncryptedEvents.js Normal file
@@ -0,0 +1,79 @@
import 'dotenv/config'

import fs from 'fs'
import { uniqBy } from 'lodash'

import networkConfig from '../networkConfig'
import ABI from '../abis/TornadoProxy.abi.json'
import { getPastEvents, loadCachedEvents } from './helpers'

const EVENTS_PATH = './static/events/'
const enabledChains = ['1', '5', '56']

async function saveEncryptedNote(netId) {
  const {
    constants,
    'tornado-proxy.contract.tornadocash.eth': tornadoProxy,
    'tornado-router.contract.tornadocash.eth': tornadoRouter,
    'tornado-proxy-light.contract.tornadocash.eth': lightProxy
  } = networkConfig[`netId${netId}`]

  let [{ url: rpcUrl }] = Object.values(networkConfig[`netId${netId}`].rpcUrls)

  if (netId === '5') {
    rpcUrl = `https://goerli.infura.io/v3/${process.env.INFURA_KEY}`
  }

  const contractAddress = tornadoRouter || tornadoProxy || lightProxy

  let encryptedEvents = []
  const name = `encrypted_notes_${netId}.json`

  const cachedEvents = await loadCachedEvents({
    name,
    directory: EVENTS_PATH,
    deployedBlock: constants.ENCRYPTED_NOTES_BLOCK
  })

  console.log('cachedEvents', cachedEvents.events.length)

  encryptedEvents = await getPastEvents({
    netId,
    rpcUrl,
    type: 'EncryptedNote',
    events: encryptedEvents,
    fromBlock: cachedEvents.lastBlock + 1,
    contractAttrs: [ABI, contractAddress]
  })

  console.log('Encrypted note', netId, encryptedEvents.length)

  encryptedEvents = encryptedEvents.reduce((acc, curr) => {
    if (curr.returnValues.encryptedNote) {
      acc.push({
        txHash: curr.transactionHash,
        blockNumber: Number(curr.blockNumber),
        encryptedNote: curr.returnValues.encryptedNote
      })
    }
    return acc
  }, [])

  let freshEvents = cachedEvents.events.concat(encryptedEvents)

  freshEvents = uniqBy(freshEvents, 'encryptedNote').sort((a, b) => b.blockNumber - a.blockNumber)

  const eventsJson = JSON.stringify(freshEvents, null, 2) + '\n'
  fs.writeFileSync(`${EVENTS_PATH}${name}`, eventsJson)
}

async function main() {
  const [, , , chain] = process.argv
  if (!enabledChains.includes(chain)) {
    throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
  }

  await saveEncryptedNote(chain)
}

main()
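For reference, each record that saveEncryptedNote writes to encrypted_notes_<netId>.json has the shape built in the reduce above (values are placeholders):

// { "txHash": "0x...", "blockNumber": 12345678, "encryptedNote": "0x..." }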
95 scripts/updateEvents.js Normal file
@@ -0,0 +1,95 @@
import 'dotenv/config'

import fs from 'fs'
import { uniqBy } from 'lodash'

import networkConfig from '../networkConfig'
import ABI from '../abis/Instance.abi.json'
import { loadCachedEvents, getPastEvents } from './helpers'

const EVENTS_PATH = './static/events/'
const EVENTS = ['Deposit', 'Withdrawal']
const enabledChains = ['1', '56']

async function main(type, netId) {
  const { tokens, nativeCurrency, deployedBlock } = networkConfig[`netId${netId}`]
  const CONTRACTS = tokens[nativeCurrency].instanceAddress

  for (const [instance, _contract] of Object.entries(CONTRACTS)) {
    const cachedEvents = await loadCachedEvents({
      name: `${type.toLowerCase()}s_${nativeCurrency}_${instance}.json`,
      directory: EVENTS_PATH,
      deployedBlock
    })

    console.log('cachedEvents count - ', cachedEvents.events.length)
    console.log('lastBlock - ', cachedEvents.lastBlock)

    let [{ url: rpcUrl }] = Object.values(networkConfig[`netId${netId}`].rpcUrls)

    if (netId === '5') {
      rpcUrl = `https://goerli.infura.io/v3/${process.env.INFURA_KEY}`
    }

    let events = []

    events = await getPastEvents({
      type,
      netId,
      rpcUrl,
      events,
      contractAttrs: [ABI, _contract],
      fromBlock: cachedEvents.lastBlock + 1
    })

    if (type === 'Deposit') {
      events = events.map(({ blockNumber, transactionHash, returnValues }) => {
        const { commitment, leafIndex, timestamp } = returnValues
        return {
          timestamp,
          commitment,
          blockNumber,
          transactionHash,
          leafIndex: Number(leafIndex)
        }
      })
    }

    if (type === 'Withdrawal') {
      events = events.map(({ blockNumber, transactionHash, returnValues }) => {
        const { nullifierHash, to, fee } = returnValues
        return {
          to,
          fee,
          blockNumber,
          nullifierHash,
          transactionHash
        }
      })
    }

    let freshEvents = cachedEvents.events.concat(events)

    if (type === 'Withdrawal') {
      freshEvents = uniqBy(freshEvents, 'nullifierHash').sort((a, b) => b.blockNumber - a.blockNumber)
    } else {
      freshEvents = freshEvents.filter((e, index) => Number(e.leafIndex) === index)
    }

    const eventsJson = JSON.stringify(freshEvents, null, 2) + '\n'
    fs.writeFileSync(`${EVENTS_PATH}${type.toLowerCase()}s_${nativeCurrency}_${instance}.json`, eventsJson)
  }
}

async function start() {
  const [, , , chain] = process.argv
  if (!enabledChains.includes(chain)) {
    throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
  }

  for await (const event of EVENTS) {
    await main(event, chain)
  }
}

start()
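For reference, the records this script writes follow the two map calls above (values are placeholders):

// deposits_<currency>_<instance>.json entries:
// { "timestamp": "1610000000", "commitment": "0x...", "blockNumber": 11223344, "transactionHash": "0x...", "leafIndex": 0 }
// withdrawals_<currency>_<instance>.json entries:
// { "to": "0x...", "fee": "50000000000000000", "blockNumber": 11223355, "nullifierHash": "0x...", "transactionHash": "0x..." }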
150 scripts/updateTree.js Normal file
@@ -0,0 +1,150 @@
import 'dotenv/config'

import fs from 'fs'
import BloomFilter from 'bloomfilter.js'
import { MerkleTree } from 'fixed-merkle-tree'
import { buildMimcSponge } from 'circomlibjs'

import networkConfig from '../networkConfig'

import { loadCachedEvents, save } from './helpers'

const TREES_FOLDER = 'static/trees'
const TREES_PATH = './static/trees/'
const EVENTS_PATH = './static/events/'

const EVENTS = ['deposit']
const enabledChains = ['1']
let mimcHash

const trees = {
  PARTS_COUNT: 4,
  LEVELS: 20 // const from contract
}

function getName({ path, type, instance, format = '.json', currName = 'eth' }) {
  return `${path}${type.toLowerCase()}s_${currName}_${instance}${format}`
}

function createTreeZip(netId) {
  try {
    const config = networkConfig[`netId${netId}`]
    const { instanceAddress: CONTRACTS } = config.tokens.eth

    for (const type of EVENTS) {
      for (const [instance] of Object.entries(CONTRACTS)) {
        const baseFilename = getName({
          type,
          instance,
          format: '',
          path: TREES_PATH,
          currName: config.currencyName.toLowerCase()
        })

        const treesFolder = fs.readdirSync(TREES_FOLDER)

        treesFolder.forEach((fileName) => {
          fileName = `${TREES_PATH}${fileName}`
          const isInstanceFile = !fileName.includes('.zip') && fileName.includes(baseFilename)

          if (isInstanceFile) {
            save(fileName)
          }
        })
      }
    }
  } catch {}
}

async function createTree(netId) {
  try {
    const { currencyName, tokens, deployedBlock } = networkConfig[`netId${netId}`]

    const currName = currencyName.toLowerCase()
    const { instanceAddress: CONTRACTS } = tokens.eth

    for (const type of EVENTS) {
      for (const [instance] of Object.entries(CONTRACTS)) {
        const filePath = getName({
          type,
          instance,
          currName,
          format: '',
          path: TREES_PATH
        })

        console.log('createTree', { type, instance })

        const { events } = await loadCachedEvents({
          name: `${type}s_${currName}_${instance}.json`,
          directory: EVENTS_PATH,
          deployedBlock
        })

        console.log('events', events.length)

        const bloom = new BloomFilter(events.length) // to reduce the number of false positives

        const eventsData = events.reduce(
          (acc, { leafIndex, commitment, ...rest }, i) => {
            if (leafIndex !== i) {
              throw new Error(`leafIndex (${leafIndex}) !== i (${i})`)
            }

            const leave = commitment.toString()
            acc.leaves.push(leave)
            acc.metadata[leave] = { ...rest, leafIndex }

            return acc
          },
          { leaves: [], metadata: {} }
        )

        console.log('leaves', eventsData.leaves.length)

        const tree = new MerkleTree(trees.LEVELS, eventsData.leaves, {
          zeroElement: '21663839004416932945382355908790599225266501822907911457504978515578255421292',
          hashFunction: mimcHash
        })

        const slices = tree.getTreeSlices(trees.PARTS_COUNT) // [edge(PARTS_COUNT)]

        slices.forEach((slice, index) => {
          slice.metadata = slice.elements.reduce((acc, curr) => {
            if (index < trees.PARTS_COUNT - 1) {
              bloom.add(curr)
            }
            acc.push(eventsData.metadata[curr])
            return acc
          }, [])

          const sliceJson = JSON.stringify(slice, null, 2) + '\n'
          fs.writeFileSync(`${filePath}_slice${index + 1}.json`, sliceJson)
        })

        const bloomCache = bloom.serialize()
        fs.writeFileSync(`${filePath}_bloom.json`, bloomCache)
      }
    }
  } catch (e) {
    console.error(e.message)
  }
}

async function initMimc() {
  const mimcSponge = await buildMimcSponge()
  mimcHash = (left, right) => mimcSponge.F.toString(mimcSponge.multiHash([BigInt(left), BigInt(right)]))
}

async function main() {
  const [, , , chain] = process.argv
  if (!enabledChains.includes(chain)) {
    throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
  }
  await initMimc()

  await createTree(chain)
  await createTreeZip(chain)
}

main()
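For each instance, createTree is expected to emit PARTS_COUNT slice files plus one serialized bloom filter, which createTreeZip then compresses via save (file names below are illustrative, assuming netId 1 where currencyName is eth):

// static/trees/deposits_eth_<instance>_slice1.json through _slice4.json
// static/trees/deposits_eth_<instance>_bloom.json
// after createTreeZip: the same files zipped, e.g. deposits_eth_<instance>_slice1.json.zip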
71 scripts/updateZip.js Normal file
@@ -0,0 +1,71 @@
import { uniqBy } from 'lodash'

import networkConfig from '../networkConfig'
import { loadCachedEvents, save } from './helpers'

const EVENTS_PATH = './static/events/'
const EVENTS = ['Deposit', 'Withdrawal']

function updateEncrypted(netId) {
  try {
    const file = `${EVENTS_PATH}encrypted_notes_${netId}.json`

    save(file)
  } catch {
    console.warn('No events files detected for chainId - ', netId)
  }
}
async function updateCommon(netId) {
  const { nativeCurrency, tokens } = networkConfig[`netId${netId}`]

  console.log(Object.keys(tokens[nativeCurrency].instanceAddress))
  for await (const type of EVENTS) {
    for await (const instance of Object.keys(tokens[nativeCurrency].instanceAddress)) {
      console.warn('instance', instance)
      const filename = `${type.toLowerCase()}s_${nativeCurrency}_${instance}.json`
      const isSaved = save(`${EVENTS_PATH}${filename}`)
      if (isSaved) {
        try {
          await testCommon(netId, type, filename)
        } catch (err) {
          console.error(err.message)
        }
      }
    }
  }
}

async function testCommon(netId, type, filename) {
  const { deployedBlock } = networkConfig[`netId${netId}`]

  const cachedEvents = await loadCachedEvents({
    name: filename,
    directory: EVENTS_PATH,
    deployedBlock
  })

  console.log('cachedEvents', cachedEvents.events.length, type)

  let events = cachedEvents.events
  if (type === 'Withdrawal') {
    events = uniqBy(cachedEvents.events, 'nullifierHash')
  } else if (type === 'Deposit') {
    events = cachedEvents.events.filter((e, index) => Number(e.leafIndex) === index)
  }
  if (events.length !== cachedEvents.events.length) {
    console.error('events.length', events.length)
    console.error('cachedEvents.events.length', cachedEvents.events.length)
    throw new Error(`Duplicates were detected in ${filename} (${events.length - cachedEvents.events.length})`)
  }
}

async function main() {
  const NETWORKS = [1, 5, 56]

  for await (const netId of NETWORKS) {
    updateEncrypted(netId)
    await updateCommon(netId)
  }
}

main()