scripts: find unwanted relayers

Signed-off-by: AlienTornadosaurusHex <>
AlienTornadosaurusHex 2023-05-19 01:19:05 +00:00
parent 937c221e22
commit fe64495581
7 changed files with 187 additions and 11 deletions

package.json

@@ -12,6 +12,7 @@
"start": "nuxt start",
"update:zip": "node -r esm scripts/updateZip.js",
"update:events": "node -r esm scripts/updateEvents.js --network",
"update:unwanted": "node -r esm scripts/updateUnwanted.js --network",
"update:encrypted": "node -r esm scripts/updateEncryptedEvents.js --network",
"update:tree": "node -r esm scripts/updateTree.js --network",
"generate": "NODE_OPTIONS='--max-old-space-size=8192' nuxt generate && cp dist/404.html dist/ipfs-404.html",

scripts/helpers/args.js Normal file

@@ -0,0 +1,3 @@
export function needsArg(argname) {
  throw Error('classic-ui: this function requires the argument: ' + argname)
}
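The helper is a guard clause that aborts a script with a descriptive error whenever a required CLI argument is missing; updateEvents.js below uses it exactly that way:

if (!netId) needsArg('netId')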

scripts/helpers/index.js

@@ -1,2 +1,3 @@
+export { needsArg } from "./args"
export { download, loadCachedEvents, getPastEvents } from './download'
export { save } from './save'

scripts/updateEncryptedEvents.js

@@ -61,14 +61,17 @@ async function saveEncryptedNote(netId) {
  fs.writeFileSync(`${EVENTS_PATH}${name}`, eventsJson)
}
+/**
+ * @param netId The netId of the chain to synchronize encrypted note events for.
+ */
async function main() {
-  const [, , , chain] = process.argv
+  const [, , , netId] = process.argv

-  if (!enabledChains.includes(chain)) {
+  if (!enabledChains.includes(netId)) {
    throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
  }

-  await saveEncryptedNote(chain)
+  await saveEncryptedNote(netId)
}
main()
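A hypothetical run for chain id 1, via the update:encrypted entry from package.json:

yarn update:encrypted 1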

scripts/updateEvents.js

@@ -6,7 +6,7 @@ import { uniqBy } from 'lodash'
import networkConfig, { enabledChains } from '../networkConfig'
import ABI from '../abis/Instance.abi.json'
-import { loadCachedEvents, getPastEvents } from './helpers'
+import { loadCachedEvents, getPastEvents, needsArg } from './helpers'
const EVENTS_PATH = './static/events/'
const EVENTS = ['Deposit', 'Withdrawal']
@@ -78,15 +78,21 @@ async function main(type, netId, chosenToken) {
  }
}

+/**
+ * @param netId The netId of the chain to synchronize deposit and withdrawal events for.
+ * @param chosenToken Optional. Default is native. The token for which to synchronize the events.
+ */
async function start() {
-  const [, , , chain, chosenToken] = process.argv
+  const [, , , netId, chosenToken] = process.argv

-  if (!enabledChains.includes(chain)) {
+  if (!enabledChains.includes(netId)) {
    throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
  }

+  if (!netId) needsArg('netId')
+
  for (const event of EVENTS) {
-    await main(event, chain, chosenToken)
+    await main(event, netId, chosenToken)
  }
}
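A hypothetical run syncing both event types for chain id 1 (dai is an example token symbol; omit it to default to the native token):

yarn update:events 1 dai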

scripts/updateTree.js

@@ -142,17 +142,17 @@ async function initMimc() {
}
async function main() {
-  const [, , , chain] = process.argv
+  const [, , , netId] = process.argv

-  if (!enabledChains.includes(chain)) {
+  if (!enabledChains.includes(netId)) {
    throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
  }

  await initMimc()
-  await createTree(chain)
-  createTreeZip(chain)
+  await createTree(netId)
+  createTreeZip(netId)
}
main()

scripts/updateUnwanted.js Normal file

@@ -0,0 +1,162 @@
import fs from 'fs'
import Web3 from 'web3'
import { uniqBy } from 'lodash'
import { loadCachedEvents, needsArg, save } from './helpers'
import networkConfig, { enabledChains } from '../networkConfig'
import RELAYER_REGISTRY_ABI from '../abis/RelayerRegistry.abi.json'
import AGGREGATOR_REGISTRY_ABI from '../abis/Aggregator.abi.json'
const RELAYERS_DIR_PATH = './static/relayers/'
const zeroAddress = '0x0000000000000000000000000000000000000000'
const subdomains = Object.values(networkConfig).map(({ ensSubdomainKey }) => ensSubdomainKey)
async function update(netId, untilBlock, rpcIndex) {
  // Get all of the network data we need
  // "Contract" in fact means address
  const { rpcUrls, tokens, registryContract, aggregatorContract, routerContract } = networkConfig[
    `netId${netId}`
  ]

  // Get the rpcUrl we have chosen
  const rpcUrl = Object.values(rpcUrls)[rpcIndex].url

  // Prepare the provider
  const provider = new Web3(new Web3.providers.HttpProvider(rpcUrl))

  // Get the contracts
  const aggregator = new provider.eth.Contract(AGGREGATOR_REGISTRY_ABI, aggregatorContract)
  const registry = new provider.eth.Contract(RELAYER_REGISTRY_ABI, registryContract)

  // Get all tokens for the network
  const tokenSymbols = Object.keys(tokens)

  // All avoiding txs
  const avoids = []

  // Start discovering unwanted relayers
  for (const tokenSymbol of tokenSymbols) {
    // Load denominations
    const denoms = Object.entries(tokens[tokenSymbol].instanceAddress)

    // Go through all denominations
    for (const denom of denoms) {
      // Load the cache, we need it in reverse
      const withdrawals = loadCachedEvents({
        name: `withdrawals_${netId}_${tokenSymbol}_${denom[0]}.json`,
        directory: `./static/events/`,
        deployedBlock: undefined
      }).events.reverse()
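
      // The cache is stored in ascending block order (see the note further
      // down about scanning in reverse), so iterating the reversed copy lets
      // the untilBlock check below break out early instead of walking all history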

      // Start searching
      for (const withdrawal of withdrawals) {
        // If we are at the until block then stop scanning
        if (withdrawal.blockNumber < untilBlock) {
          break
        }

        // Get the tx, we need proper "to" data
        const tx = await provider.eth.getTransaction(withdrawal.transactionHash)

        // Found a sus tx
        if (tx.to !== routerContract) {
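          // Relayed withdrawals are normally submitted through the router
          // contract, so any other "to" address is treated as suspect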
          // Look for the owner
          const owner = await registry.methods.workers(tx.from).call()

          // If not the zero address, tx.from is a registered worker and its owner is misbehaving!
          if (owner != zeroAddress) {
            // Get his ens hash
            const node = await registry.methods.getRelayerEnsHash(owner).call()

            // Get his records
            const data = (await aggregator.methods.relayersData([node], subdomains).call())[0]

            // Since we are doing this in reverse, the last element is going to have the lowest block number
            // We are storing this much data because relayers doing unwanted stuff should be quite rare
            // And when this rolls out, it should become rarer still
            avoids.push({
              address: owner,
              node: node,
              net: netId,
              token: tokenSymbol,
              denomination: denom[0],
              caughtAtTxHash: tx.hash,
              blockNumber: tx.blockNumber,
              records: data.records.filter((record) => record !== '')
            })
          }
        }
      }
    }
  }

  // This is a set of the avoiding data, i.e. unwanted relayer + transaction proof
  // Block numbers are descending
  const cachedAvoids = loadCachedEvents({
    name: 'avoids.json',
    directory: RELAYERS_DIR_PATH,
    deployedBlock: 99999999999999 // Formally this has to lie in the future
  }).events

  // This on the other hand is a set of the unwanted relayers
  const cachedUnwanted = loadCachedEvents({
    name: 'unwanted.json',
    directory: RELAYERS_DIR_PATH,
    deployedBlock: undefined // No such concept
  }).events

  // Now deduplicate the latter data so that it is an actual set
  // (lodash's uniq compares objects by reference, so key on the ENS node instead)
  const unwanted = uniqBy(
    avoids.map((avoidingTx) => {
      return {
        address: avoidingTx.address,
        node: avoidingTx.node,
        records: avoidingTx.records
      }
    }),
    (relayer) => relayer.node
  )

  // Make sure the output directory exists
  if (!fs.existsSync(RELAYERS_DIR_PATH)) fs.mkdirSync(RELAYERS_DIR_PATH)

  // Write down avoids
  fs.writeFileSync(
    `${RELAYERS_DIR_PATH}avoids.json`,
    JSON.stringify(cachedAvoids.concat(avoids), null, 2) + '\n'
  )

  // Write down unwanted relayers
  fs.writeFileSync(
    `${RELAYERS_DIR_PATH}unwanted.json`,
    JSON.stringify(cachedUnwanted.concat(unwanted), null, 2) + '\n'
  )

  // Finally, save both
  save(`${RELAYERS_DIR_PATH}avoids.json`)
  save(`${RELAYERS_DIR_PATH}unwanted.json`)
}
/**
 * @param netId The netId of the chain to scan for misbehaving relayers.
 * @param untilBlock The LOWER block limit down to which all withdrawal event caches are searched for misbehaving relayers.
* @param rpcIndex Optional. The RPC to use according to networkConfig.
*/
async function main() {
  const [, , , netId, untilBlock, rpcIndex] = process.argv

  if (!enabledChains.includes(netId)) {
    throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
  }

  if (!untilBlock) needsArg('untilBlock')

  await update(netId, untilBlock, rpcIndex !== undefined ? rpcIndex : 0)
}
main()
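
A hypothetical full run (all argument values are examples): scan chain 1 back down to block 17000000 using the third RPC endpoint listed for that network in networkConfig:

node -r esm scripts/updateUnwanted.js --network 1 17000000 2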