Compare commits

..

1 Commits

Author SHA1 Message Date
gozzy
197d60430e worker thread rpc batching 2022-11-09 02:55:13 +00:00
181 changed files with 1016 additions and 863 deletions

3
.gitignore vendored
View File

@ -87,6 +87,3 @@ sw.*
# Mac OSX
.DS_Store
static/*/*.json

1
.npmrc
View File

@ -1 +0,0 @@
@tornado:registry=https://git.tornado.ws/api/packages/tornado-packages/npm/

View File

@ -1,6 +1,6 @@
# Tornado Cash Classic UI
> Self-hostable Tornado Cash UI software for interacting with the protocol
> UI for non-custodial Ethereum Privacy solution
## Building locally
@ -31,42 +31,29 @@ For detailed explanation on how things work, checkout [Nuxt.js docs](https://nux
## Update cached files
- To update deposit and withdrawal events use `yarn update:events {chainId} {optional: tokenOrEvent} {optional: tokenOrEvent}`
- To update encrypted notes use `yarn update:encrypted {chainId}`
- To update merkle tree use `yarn update:tree {chainId}`
- To update deposit and withdrawal events, use `yarn update:events {chainId}`
- To update encrypted notes, use `yarn update:encrypted {chainId}`
- To update the merkle tree, use `yarn update:tree {chainId}`
#### NOTE!
After updating cached files do not forget to use `yarn update:zip`.
After updating cached files, do not forget to use `yarn update:zip`
### Example for Ethereum Mainnet:
You may set in [`networkConfig.js`](./networkConfig.js) the `blockSyncInterval` (def: 10_000) to the maximum value allowed by your RPC provider. Command usage follows below.
```bash
# Updating events with just the required chain id parameter
```
yarn update:events 1
# Updating events for only one token across all instances on that network
yarn update:events 1 dai
# Updating events for only one event on only some network
yarn update:events 1 deposit
# Both
yarn update:events 1 dai deposit
# Updating encrypted notes for some chain id
yarn update:encrypted 1
# Updating trees for some chain id
yarn update:tree 1
# Finally zips must be updated
yarn update:zip
```
### Example for Binance Smart Chain:
```bash
```
yarn update:events 56
yarn update:events 56 bnb
yarn update:events 56 bnb deposit
yarn update:encrypted 56
yarn update:tree 56
yarn update:zip
```

View File

@ -1,7 +0,0 @@
# ASSETS
**This directory is not required, you can delete it if you don't want to use it.**
This directory contains your un-compiled assets such as LESS, SASS, or JavaScript.
More information about the usage of this directory in [the documentation](https://nuxtjs.org/guide/assets#webpacked).

57
assets/events.worker.js Normal file
View File

@ -0,0 +1,57 @@
const Web3 = require('web3-eth')
const InstanceABI = require('../abis/Instance.abi.json')
const networkConfig = require('../networkConfig').default
// Build a web3 contract handle for one Tornado pool instance.
// Resolves the instance address for `currency`/`amount` on network `netId`
// from networkConfig and binds it to the Instance ABI over the given RPC URL.
const getContract = (rpcUrl, netId, currency, amount) => {
  const eth = new Web3(rpcUrl)
  const { tokens } = networkConfig[`netId${netId}`]
  const instanceAddress = tokens[currency].instanceAddress[amount]
  return new eth.Contract(InstanceABI, instanceAddress)
}
// Fetch `type` events for one pool instance across a list of block ranges and
// post the combined result back to the main thread through the MessagePort.
//
// payload: { chunkSize, blocks } — blocks[i] is the inclusive upper bound of
// chunk i; each chunk spans [toBlock - chunkSize, toBlock].
// On overall failure posts { errorMessage } instead of { result }.
const getBatchEvents = async ({ netId, rpcUrl, currency, amount, type, payload }, [port]) => {
  try {
    const { chunkSize, blocks } = payload
    const contract = getContract(rpcUrl, netId, currency, amount)
    let batchEvents = []
    console.log('Fetching block ranges', blocks)
    // BUGFIX: the original `for (let x in blocks)` with `x = x - 1` never
    // retried anything — reassigning a for...in key does not change the
    // iteration, so failed chunks were silently dropped. A numeric index
    // loop makes the decrement actually re-run the failed chunk.
    for (let i = 0; i < blocks.length; i++) {
      const toBlock = blocks[i]
      const fromBlock = toBlock - chunkSize
      try {
        const events = await contract.getPastEvents(type, {
          fromBlock,
          toBlock
        })
        batchEvents = batchEvents.concat(events)
      } catch (e) {
        console.log(`Failed to fetch ${toBlock}: ${e}`)
        // Retry the same chunk on the next iteration (unbounded, matching
        // the original intent of looping until the RPC call succeeds).
        i -= 1
      }
    }
    port.postMessage({ result: batchEvents })
  } catch (e) {
    port.postMessage({ errorMessage: e })
  }
}
// Dispatch an incoming worker message to its handler.
// Only the `batch_events` request is supported; anything else is ignored.
const handlePayload = ({ data, ports }) => {
  if (data.eventName === 'batch_events') {
    getBatchEvents(data, ports)
  }
}

// Listen for requests posted to this worker by the main thread.
self.addEventListener('message', handlePayload, false)

View File

@ -293,10 +293,6 @@
mask-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 448 512'%3E%3Cpath d='M297.216 243.2c0 15.616-11.52 28.416-26.112 28.416-14.336 0-26.112-12.8-26.112-28.416s11.52-28.416 26.112-28.416c14.592 0 26.112 12.8 26.112 28.416zm-119.552-28.416c-14.592 0-26.112 12.8-26.112 28.416s11.776 28.416 26.112 28.416c14.592 0 26.112-12.8 26.112-28.416.256-15.616-11.52-28.416-26.112-28.416zM448 52.736V512c-64.494-56.994-43.868-38.128-118.784-107.776l13.568 47.36H52.48C23.552 451.584 0 428.032 0 398.848V52.736C0 23.552 23.552 0 52.48 0h343.04C424.448 0 448 23.552 448 52.736zm-72.96 242.688c0-82.432-36.864-149.248-36.864-149.248-36.864-27.648-71.936-26.88-71.936-26.88l-3.584 4.096c43.52 13.312 63.744 32.512 63.744 32.512-60.811-33.329-132.244-33.335-191.232-7.424-9.472 4.352-15.104 7.424-15.104 7.424s21.248-20.224 67.328-33.536l-2.56-3.072s-35.072-.768-71.936 26.88c0 0-36.864 66.816-36.864 149.248 0 0 21.504 37.12 78.08 38.912 0 0 9.472-11.52 17.152-21.248-32.512-9.728-44.8-30.208-44.8-30.208 3.766 2.636 9.976 6.053 10.496 6.4 43.21 24.198 104.588 32.126 159.744 8.96 8.96-3.328 18.944-8.192 29.44-15.104 0 0-12.8 20.992-46.336 30.464 7.68 9.728 16.896 20.736 16.896 20.736 56.576-1.792 78.336-38.912 78.336-38.912z'%3E%3C/path%3E%3C/svg%3E");
}
&-git {
mask-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' width='92pt' height='92pt' viewBox='0 0 92 92'%3E%3Cpath style='stroke:none;fill-rule:nonzero;fill:%6b6b6b;fill-opacity:1' d='M90.156 41.965 50.036 1.848a5.918 5.918 0 0 0-8.372 0l-8.328 8.332 10.566 10.566a7.03 7.03 0 0 1 7.23 1.684 7.034 7.034 0 0 1 1.669 7.277l10.187 10.184a7.028 7.028 0 0 1 7.278 1.672 7.04 7.04 0 0 1 0 9.957 7.05 7.05 0 0 1-9.965 0 7.044 7.044 0 0 1-1.528-7.66l-9.5-9.497V59.36a7.04 7.04 0 0 1 1.86 11.29 7.04 7.04 0 0 1-9.957 0 7.04 7.04 0 0 1 0-9.958 7.06 7.06 0 0 1 2.304-1.539V33.926a7.049 7.049 0 0 1-3.82-9.234L29.242 14.272 1.73 41.777a5.925 5.925 0 0 0 0 8.371L41.852 90.27a5.925 5.925 0 0 0 8.37 0l39.934-39.934a5.925 5.925 0 0 0 0-8.371'/%3E%3C/svg%3E");
}
&-discourse {
mask-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 448 512'%3E%3Cpath d='M225.9 32C103.3 32 0 130.5 0 252.1 0 256 .1 480 .1 480l225.8-.2c122.7 0 222.1-102.3 222.1-223.9C448 134.3 348.6 32 225.9 32zM224 384c-19.4 0-37.9-4.3-54.4-12.1L88.5 392l22.9-75c-9.8-18.1-15.4-38.9-15.4-61 0-70.7 57.3-128 128-128s128 57.3 128 128-57.3 128-128 128z'%3E%3C/path%3E%3C/svg%3E");
}

View File

@ -37,15 +37,47 @@
<b-button
tag="a"
type="is-icon"
href="https://git.tornado.ws/tornadocash/classic-ui"
href="https://forums.tornadocash.community"
target="_blank"
rel="noopener noreferrer"
icon-right="git"
icon-right="discourse"
></b-button>
<b-button
tag="a"
type="is-icon"
href="https://github.com/tornadocash"
href="https://matrix.tornadocash.community"
target="_blank"
rel="noopener noreferrer"
icon-right="matrix"
></b-button>
<b-button
tag="a"
type="is-icon"
href="https://tornado-cash.medium.com"
target="_blank"
rel="noopener noreferrer"
icon-right="medium"
></b-button>
<b-button
tag="a"
type="is-icon"
href="https://twitter.com/TornadoCashOrg"
target="_blank"
rel="noopener noreferrer"
icon-right="twitter"
></b-button>
<b-button
tag="a"
type="is-icon"
href="https://t.me/TornadoCashOrg"
target="_blank"
rel="noopener noreferrer"
icon-right="telegram"
></b-button>
<b-button
tag="a"
type="is-icon"
href="https://development.tornadocash.community"
target="_blank"
rel="noopener noreferrer"
icon-right="github"

View File

@ -20,7 +20,7 @@
{{ $t('compliance') }}
</b-navbar-item>
<b-navbar-item
href="https://docs.tornado.ws"
href="https://docs.tornado.cash"
target="_blank"
data-test="docs_link"
rel="noopener noreferrer"

View File

@ -18,7 +18,12 @@
</template>
<template v-slot:description>{{ notice.description }}</template>
</i18n>
<a v-if="notice.nova" href="https://nova.tornado.ws/" target="_blank" rel="noopener noreferrer">
<a
v-if="notice.nova"
href="https://nova.tornadocash.eth.link"
target="_blank"
rel="noopener noreferrer"
>
Tornado Cash Nova
</a>
<a

View File

@ -47,51 +47,6 @@
{{ hasErrorRpc.msg }}
</p>
</div>
<template v-if="!isEthereumNetwork">
<div class="field">
<b-field label="Ethereum RPC provider" class="has-custom-field" data-test="rpc_endpoint_eth_dropdown">
<b-dropdown v-model="selectedEthRpc" expanded aria-role="list">
<div slot="trigger" class="control" :class="{ 'is-loading': checkingRpc && !isCustomEthRpc }">
<div class="input">
<span>{{ isCustomEthRpc ? $t('customRpc') : selectedEthRpc }}</span>
</div>
</div>
<b-dropdown-item
v-for="{ name, url } in Object.values(ethNetworkConfig.rpcUrls)"
:key="name"
:value="name"
aria-role="listitem"
:data-test="`rpc_endpoint_eth_${name}`"
@click="checkEthRpc({ name, url })"
>
{{ name }}
</b-dropdown-item>
<b-dropdown-item
value="custom"
aria-role="listitem"
data-test="rpc_endpoint_eth_custom"
@click="checkEthRpc({ name: 'custom' })"
>
{{ $t('customRpc') }}
</b-dropdown-item>
</b-dropdown>
</b-field>
<div v-if="isCustomEthRpc" class="field has-custom-field">
<b-input
ref="customInputTwo"
v-model="customEthRpcUrl"
type="url"
:placeholder="$t('customRpcPlaceholder')"
:custom-class="hasErrorEthRpc.type"
:use-html5-validation="false"
@input="checkCustomEthRpc"
></b-input>
</div>
<p v-if="hasErrorEthRpc.msg" class="help" :class="hasErrorEthRpc.type">
{{ hasErrorEthRpc.msg }}
</p>
</div>
</template>
<div class="buttons buttons__halfwidth">
<b-button type="is-primary" outlined data-test="button_reset_rpc" @mousedown.prevent @click="onReset">
{{ $t('reset') }}
@ -120,13 +75,9 @@ export default {
return {
checkingRpc: false,
hasErrorRpc: { type: '', msg: '' },
hasErrorEthRpc: { type: '', msg: '' },
customRpcUrl: '',
customEthUrl: '',
selectedRpc: 'custom',
selectedEthRpc: 'custom',
rpc: { name: 'custom', url: '' },
ethRpc: { name: 'custom', url: '' }
rpc: { name: 'custom', url: '' }
}
},
computed: {
@ -134,18 +85,9 @@ export default {
networkConfig() {
return networkConfig[`netId${this.netId}`]
},
ethNetworkConfig() {
return networkConfig.netId1
},
isEthereumNetwork() {
return this.netId === 1
},
isCustomRpc() {
return this.selectedRpc === 'custom'
},
isCustomEthRpc() {
return this.selectedEthRpc === 'custom'
},
isDisabledSave() {
return (
this.hasErrorRpc.type === 'is-warning' || this.checkingRpc || (this.isCustomRpc && !this.customRpcUrl)
@ -153,24 +95,16 @@ export default {
}
},
created() {
this.ethRpc = this.getRpc(1)
this.rpc = this.getRpc(this.netId)
this.selectedRpc = this.rpc.name
this.selectedEthRpc = this.ethRpc.name
if (this.selectedRpc === 'custom') {
this.$nextTick(() => {
this.customRpcUrl = this.rpc.url
})
}
if (this.selectedEthRpc === 'custom') {
this.$nextTick(() => {
this.customEthRpcUrl = this.ethRpc.url
})
}
this.checkRpc(this.rpc)
this.checkEthRpc(this.ethRpc)
},
methods: {
...mapMutations('settings', ['SAVE_RPC']),
@ -179,40 +113,25 @@ export default {
this.hasErrorRpc = { type: '', msg: '' }
this.rpc = Object.entries(this.networkConfig.rpcUrls)[0][1]
this.ethRpc = Object.entries(this.ethNetworkConfig.rpcUrls)[0][1]
this.selectedRpc = this.rpc.name
this.selectedEthRpc = this.ethRpc.name
this.checkEthRpc(this.ethRpc)
this.checkRpc(this.rpc)
},
onSave() {
this.SAVE_RPC({ ...this.rpc, netId: this.netId })
if (this.netId !== 1) {
this.SAVE_RPC({ ...this.ethRpc, netId: 1 })
}
this.$emit('close')
},
onCancel() {
this.$emit('cancel')
},
checkRpc({ name, url = '' }) {
this.checkingRpc = true
if (name === 'custom') {
this.customRpcUrl = ''
this.hasErrorRpc = { type: '', msg: '' }
}
this._checkRpc({ name, url })
},
checkEthRpc({ name, url = '' }) {
this.checkingRpc = true
if (name === 'custom') {
this.customEthRpcUrl = ''
this.hasErrorEthRpc = { type: '', msg: '' }
this.checkingRpc = true
return
}
this._checkEthRpc({ name, url })
this._checkRpc({ name, url })
},
checkCustomRpc(url) {
const trimmedUrl = url.trim()
@ -222,14 +141,6 @@ export default {
}
debounce(this._checkRpc, { name: 'custom', url: trimmedUrl })
},
checkCustomEthRpc(url) {
const trimmedUrl = url.trim()
if (!trimmedUrl) {
this.hasErrorEthRpc = { type: '', msg: '' }
return
}
debounce(this._checkEthRpc, { name: 'custom', url: trimmedUrl })
},
async _checkRpc({ name, url }) {
this.checkingRpc = true
this.hasErrorRpc = { type: '', msg: '' }
@ -248,27 +159,6 @@ export default {
this.hasErrorRpc.msg = error
}
this.checkingRpc = false
},
async _checkEthRpc({ name, url }) {
this.checkingRpc = true
this.hasErrorEthRpc = { type: '', msg: '' }
const { isValid, error } = await this.$store.dispatch('settings/checkRpc', {
url,
netId: 1,
isEthRpc: true
})
if (isValid) {
this.hasErrorEthRpc.type = 'is-primary'
this.hasErrorEthRpc.msg = this.$t('rpcStatusOk')
this.ethRpc = { name, url }
} else {
this.hasErrorEthRpc.type = 'is-warning'
this.hasErrorEthRpc.msg = error
}
this.checkingRpc = false
}
}

View File

@ -93,7 +93,9 @@ export default {
}
},
mounted() {
this.updateEvents()
if (!this.timer) {
this.updateEvents()
}
},
beforeDestroy() {
clearTimeout(this.timer)
@ -101,6 +103,10 @@ export default {
methods: {
updateEvents() {
this.$store.dispatch('application/updateSelectEvents')
this.timer = setTimeout(() => {
this.updateEvents()
}, 60 * 1000)
}
}
}

View File

@ -59,13 +59,8 @@ export default {
}
},
computed: {
...mapState('governance/gov', ['proposals']),
...mapGetters('governance/gov', [
'isFetchingProposals',
'constants',
'isFetchingBalances',
'votingPower'
]),
...mapState('governance/gov', ['lockedBalance', 'proposals']),
...mapGetters('governance/gov', ['isFetchingProposals', 'constants', 'isFetchingBalances']),
...mapGetters('token', ['toDecimals']),
filteredProposals() {
return this.proposals
@ -79,7 +74,7 @@ export default {
},
hasProposalThreshold() {
const PROPOSAL_THRESHOLD = toBN(this.constants.PROPOSAL_THRESHOLD)
return toBN(this.votingPower).gte(PROPOSAL_THRESHOLD)
return toBN(this.lockedBalance).gte(PROPOSAL_THRESHOLD)
},
proposalThreshold() {
return this.toDecimals(this.constants.PROPOSAL_THRESHOLD, 18)

View File

@ -60,12 +60,6 @@ export const cachedEventsLength = {
}
}
export const httpConfig = {
// buffer for tor connections
timeout: 30000,
keepAlive: true
}
export const PROVIDERS = {
walletConnect: {
name: 'WalletConnect',

View File

@ -151,7 +151,7 @@
"nullifierHash": "Nullifier Hash",
"verified": "Verified",
"generatePdfReport": "Generate PDF report",
"compliancePrintWarning": "This Compliance Report is for informational purposes only. You should confirm the validity of this report by using Tornados Compliance Tool (https://tornado.ws/compliance) or via any other cryptographic software that can compute and verify the information contained herein(the \"Tornado Compliance Tool\"). Any discrepancies between information found in this report and provided by the above tool indicate that the information in this report may be inaccurate and/or fraudulent.{newline}THE COMPLIANCE REPORT IS PROVIDED \"AS IS,\" WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OF THE TORNADO.CASH COMPLIANCE TOOL BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THIS COMPLIANCE REPORT.",
"compliancePrintWarning": "This Compliance Report is for informational purposes only. You should confirm the validity of this report by using Tornados Compliance Tool (https://tornadocash.eth.link/compliance) or via any other cryptographic software that can compute and verify the information contained herein(the \"Tornado Compliance Tool\"). Any discrepancies between information found in this report and provided by the above tool indicate that the information in this report may be inaccurate and/or fraudulent.{newline}THE COMPLIANCE REPORT IS PROVIDED \"AS IS,\" WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE, AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OF THE TORNADO.CASH COMPLIANCE TOOL BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THIS COMPLIANCE REPORT.",
"relayRequestFailed": "Relayer {relayerName} is down. Please choose a different relayer.",
"selectProvider": "Select provider",
"walletDoesNotSupported": "The wallet is not supported",

View File

@ -151,7 +151,7 @@
"nullifierHash": "Hash del Nullifier",
"verified": "Verificador",
"generatePdfReport": "Genere informe PDF",
"compliancePrintWarning": "Este Informe de Compromiso es para propósito informativo unicamente. Debería confirmar la validez de este informe utilizando la Herramienta de Cumplimiento de Tornado (https://tornado.ws/compliance) o cualquier otro software criptográfico que pueda procesar y verificar la información contenida aquí(la \"Tornado Compliance Tool\"). Cualquier discrepancia entre la información recogida en este informe y entregado por la herramienta anterior indica que este informe no es riguroso y/o fraudulento.{newline}EL INFORME DE CUMPLIMIENTO SE PRESENTA \"COMO TAL,\" SIN GARANTÍA DE NINGÚN TIPO, EXPRESA O IMPLÍCITAMENTE, INCLUYENDO PERO NO LIMITADA A LAS GARANTÍAS MERCANTILES, ADECUADAS PARA UN PROPÓSITO PARTICULAR Y LA NO INFRACCIÓN. EN NINGÚN CASO DEBERÍAN LOS AUTORES DE LA HERRAMIENTA DE CUMPLIMIENTO DE TORNADO.CASH SER RESPONSABLES U OBJETO DE CUALQUIER RECLAMO, DAÑO U OTRA RESPONSABILIDAD, YA SEA EN ACCIÓN CONTRACTUAL, AGRAVIADO O DE CUALQUIER OTRO MODO, DERIVADO DE, PRODUCTO DE O EN CONEXIÓN CON EL MENCIONADO INFORME DE CUMPLIMIENTO.",
"compliancePrintWarning": "Este Informe de Compromiso es para propósito informativo unicamente. Debería confirmar la validez de este informe utilizando la Herramienta de Cumplimiento de Tornado (https://tornadocash.eth.link/compliance) o cualquier otro software criptográfico que pueda procesar y verificar la información contenida aquí(la \"Tornado Compliance Tool\"). Cualquier discrepancia entre la información recogida en este informe y entregado por la herramienta anterior indica que este informe no es riguroso y/o fraudulento.{newline}EL INFORME DE CUMPLIMIENTO SE PRESENTA \"COMO TAL,\" SIN GARANTÍA DE NINGÚN TIPO, EXPRESA O IMPLÍCITAMENTE, INCLUYENDO PERO NO LIMITADA A LAS GARANTÍAS MERCANTILES, ADECUADAS PARA UN PROPÓSITO PARTICULAR Y LA NO INFRACCIÓN. EN NINGÚN CASO DEBERÍAN LOS AUTORES DE LA HERRAMIENTA DE CUMPLIMIENTO DE TORNADO.CASH SER RESPONSABLES U OBJETO DE CUALQUIER RECLAMO, DAÑO U OTRA RESPONSABILIDAD, YA SEA EN ACCIÓN CONTRACTUAL, AGRAVIADO O DE CUALQUIER OTRO MODO, DERIVADO DE, PRODUCTO DE O EN CONEXIÓN CON EL MENCIONADO INFORME DE CUMPLIMIENTO.",
"relayRequestFailed": "El retransmisor {relayerName} no responde. Por favor escoja uno diferente.",
"selectProvider": "Seleccione proveedor",
"walletDoesNotSupported": "El monedero no es compatible",

View File

@ -151,7 +151,7 @@
"nullifierHash": "Hash Nullifié",
"verified": "Verifié",
"generatePdfReport": "Générer un rapport PDF",
"compliancePrintWarning": "Ce rapport de conformité est uniquement destiné à des fins d'information. Vous devez confirmer la validité de ce rapport en utilisant l'outil de conformité de Tornado (https://tornado.ws/compliance) ou tout autre logiciel cryptographique capable de calculer et de vérifier les informations contenues dans ce document (l' \"Outil de Conformité Tornado\"). Toute divergence entre les informations trouvées dans ce rapport et celles fournies par l'outil susmentionné indique que les informations contenues dans ce rapport sont inexactes et/ou frauduleuses.{newline}LE RAPPORT DE CONFORMITÉ EST FOURNI \"EN L'ÉTAT\", SANS GARANTIE D'AUCUNE SORTE, EXPRESSE OU IMPLICITE, Y COMPRIS, MAIS SANS S'Y LIMITER, LES GARANTIES DE QUALITÉ MARCHANDE, D'ADÉQUATION À UN USAGE PARTICULIER ET D'ABSENCE DE CONTREFAÇON. EN AUCUN CAS, LES AUTEURS DE L'OUTIL DE CONFORMITÉ TORNADO.CASH NE POURRONT ÊTRE TENUS RESPONSABLES DE TOUTE RÉCLAMATION, DE TOUT DOMMAGE OU DE TOUTE AUTRE RESPONSABILITÉ, QUE CE SOIT DANS LE CADRE D'UNE ACTION CONTRACTUELLE, DÉLICTUELLE OU AUTRE, DÉCOULANT DE, EN DEHORS DE OU EN RELATION AVEC CE RAPPORT DE CONFORMITÉ.",
"compliancePrintWarning": "Ce rapport de conformité est uniquement destiné à des fins d'information. Vous devez confirmer la validité de ce rapport en utilisant l'outil de conformité de Tornado (https://tornadocash.eth.link/compliance) ou tout autre logiciel cryptographique capable de calculer et de vérifier les informations contenues dans ce document (l' \"Outil de Conformité Tornado\"). Toute divergence entre les informations trouvées dans ce rapport et celles fournies par l'outil susmentionné indique que les informations contenues dans ce rapport sont inexactes et/ou frauduleuses.{newline}LE RAPPORT DE CONFORMITÉ EST FOURNI \"EN L'ÉTAT\", SANS GARANTIE D'AUCUNE SORTE, EXPRESSE OU IMPLICITE, Y COMPRIS, MAIS SANS S'Y LIMITER, LES GARANTIES DE QUALITÉ MARCHANDE, D'ADÉQUATION À UN USAGE PARTICULIER ET D'ABSENCE DE CONTREFAÇON. EN AUCUN CAS, LES AUTEURS DE L'OUTIL DE CONFORMITÉ TORNADO.CASH NE POURRONT ÊTRE TENUS RESPONSABLES DE TOUTE RÉCLAMATION, DE TOUT DOMMAGE OU DE TOUTE AUTRE RESPONSABILITÉ, QUE CE SOIT DANS LE CADRE D'UNE ACTION CONTRACTUELLE, DÉLICTUELLE OU AUTRE, DÉCOULANT DE, EN DEHORS DE OU EN RELATION AVEC CE RAPPORT DE CONFORMITÉ.",
"relayRequestFailed": "Le relais {relayerName} est en panne. Veuillez choisir un autre relais.",
"selectProvider": "Sélectionner le fournisseur",
"walletDoesNotSupported": "Le portefeuille n'est pas supporté",

View File

@ -151,7 +151,7 @@
"nullifierHash": "Nullifier Hash",
"verified": "Подтверждено",
"generatePdfReport": "Сгенерировать PDF отчёт",
"compliancePrintWarning": "Настоящий отчет о соответствии носит исключительно информационный характер. Вы должны подтвердить действительность этого отчета с помощью средства проверки соответствия Tornado (https://tornado.ws/compliance) или с помощью любого другого криптографического программного обеспечения, которое может обработать и проверить информацию, содержащуюся в этом отчете(\"Tornado Compliance Tool\"). Любые расхождения между информацией, приведенной в данном отчете и предоставленной вышеуказанным инструментом, указывают на то, что информация, содержащаяся в этом отчете, является неточной и/или мошеннической.{newline}ОТЧЕТ О СООТВЕТСТВИИ ПРЕДОСТАВЛЯЕТСЯ \"КАК ЕСТЬ,\" БЕЗ ГАРАНТИЙ ЛЮБОГО РОДА, ЯВНЫХ ИЛИ ПОДРАЗУМЕВАЕМЫХ, ВКЛЮЧАЯ, НО НЕ ОГРАНИЧИВАЯСЬ ГАРАНТИЯМИ ТОВАРНОГО КАЧЕСТВА, ПРИГОДНОСТЬЮ К КОНКРЕТНОЙ ЦЕЛИ. НИ ПРИ КАКИХ ОБСТОЯТЕЛЬСТВАХ АВТОРЫ ИНСТРУМЕНТА СООТВЕТСТВИЯ TORNADO.CASH НЕ НЕСУТ ОТВЕТСТВЕННОСТИ ЗА ЛЮБЫЕ ПРЕТЕНЗИИ, УЩЕРБ ИЛИ ДРУГУЮ ОТВЕТСТВЕННОСТЬ, ОТНОСЯЩУЮСЯ К ДЕЙСТВИЮ ДОГОВОРОВ, ГРАЖДАНСКИМ ПРАВОНАРУШЕНИЯМ, А ТАКЖЕ ВЫТЕКАЮЩУЮ ИЗ НАСТОЯЩЕГО ОТЧЕТА О СООТВЕТСТВИИ ИЛИ СВЯЗАННУЮ С НИМ.",
"compliancePrintWarning": "Настоящий отчет о соответствии носит исключительно информационный характер. Вы должны подтвердить действительность этого отчета с помощью средства проверки соответствия Tornado (https://tornadocash.eth.link/compliance) или с помощью любого другого криптографического программного обеспечения, которое может обработать и проверить информацию, содержащуюся в этом отчете(\"Tornado Compliance Tool\"). Любые расхождения между информацией, приведенной в данном отчете и предоставленной вышеуказанным инструментом, указывают на то, что информация, содержащаяся в этом отчете, является неточной и/или мошеннической.{newline}ОТЧЕТ О СООТВЕТСТВИИ ПРЕДОСТАВЛЯЕТСЯ \"КАК ЕСТЬ,\" БЕЗ ГАРАНТИЙ ЛЮБОГО РОДА, ЯВНЫХ ИЛИ ПОДРАЗУМЕВАЕМЫХ, ВКЛЮЧАЯ, НО НЕ ОГРАНИЧИВАЯСЬ ГАРАНТИЯМИ ТОВАРНОГО КАЧЕСТВА, ПРИГОДНОСТЬЮ К КОНКРЕТНОЙ ЦЕЛИ. НИ ПРИ КАКИХ ОБСТОЯТЕЛЬСТВАХ АВТОРЫ ИНСТРУМЕНТА СООТВЕТСТВИЯ TORNADO.CASH НЕ НЕСУТ ОТВЕТСТВЕННОСТИ ЗА ЛЮБЫЕ ПРЕТЕНЗИИ, УЩЕРБ ИЛИ ДРУГУЮ ОТВЕТСТВЕННОСТЬ, ОТНОСЯЩУЮСЯ К ДЕЙСТВИЮ ДОГОВОРОВ, ГРАЖДАНСКИМ ПРАВОНАРУШЕНИЯМ, А ТАКЖЕ ВЫТЕКАЮЩУЮ ИЗ НАСТОЯЩЕГО ОТЧЕТА О СООТВЕТСТВИИ ИЛИ СВЯЗАННУЮ С НИМ.",
"relayRequestFailed": "Relayer {relayerName} не отвечает. Попробуйте сменить Relayer.",
"selectProvider": "Выберите кошелёк",
"walletDoesNotSupported": "Выбранный кошелёк не поддерживается",

View File

@ -151,7 +151,7 @@
"nullifierHash": "Nullifier Hash",
"verified": "Onaylanmış",
"generatePdfReport": "PDF rapora dönüştür.",
"compliancePrintWarning": "Bu Uyumluluk Raporu yalnızca bilgilendirme amaçlıdır. Bu raporun geçerliliğini Tornadonun Uyumluluk Aracını (https://tornado.ws/compliance) veya burada yer alan bilgileri hesaplayabilen ve doğrulayabilen diğer herhangi bir şifreleme yazılımıyla (\"Tornado Uyumluluk Aracı\") kullanarak onaylamalısınız.) Bu raporda bulunan ve yukarıdaki araç tarafından sağlanan bilgiler arasındaki herhangi bir tutarsızlık, rapordaki bilgilerin yanlış ve/veya sahte olduğunu gösterir.{newline} UYGUNLUK RAPORU, HERHANGİ BİR GARANTİ OLMADAN tamamen\"OLDUĞU GİBİ\" SUNULMAKTADIR. BELİRLİ BİR AMACA UYGUNLUK VE İHLAL ETMEME GARANTİLERİ DAHİLDİR ANCAK BUNLARLA SINIRLI OLMAMAK ÜZERE ZIMNİ VEYA ZIMNİ OLARAK GEÇERLİDİR. TORNADO.CASH UYUM ARACININ YAZARLARI RAPORDAN KAYNAKLANAN, UYUMLULUKTAN KAYNAKLANAN VEYA BAĞLANTILI OLARAK SÖZLEŞME, HAKSIZ YA DA BAŞKA BİR DURUMDA OLAN HERHANGİ BİR İDDİADAN, ZARAR VEYA BAŞKA SORUMLULUKTAN SORUMLU TUTULAMAZ.",
"compliancePrintWarning": "Bu Uyumluluk Raporu yalnızca bilgilendirme amaçlıdır. Bu raporun geçerliliğini Tornadonun Uyumluluk Aracını (https://tornadocash.eth.link/compliance) veya burada yer alan bilgileri hesaplayabilen ve doğrulayabilen diğer herhangi bir şifreleme yazılımıyla (\"Tornado Uyumluluk Aracı\") kullanarak onaylamalısınız.) Bu raporda bulunan ve yukarıdaki araç tarafından sağlanan bilgiler arasındaki herhangi bir tutarsızlık, rapordaki bilgilerin yanlış ve/veya sahte olduğunu gösterir.{newline} UYGUNLUK RAPORU, HERHANGİ BİR GARANTİ OLMADAN tamamen\"OLDUĞU GİBİ\" SUNULMAKTADIR. BELİRLİ BİR AMACA UYGUNLUK VE İHLAL ETMEME GARANTİLERİ DAHİLDİR ANCAK BUNLARLA SINIRLI OLMAMAK ÜZERE ZIMNİ VEYA ZIMNİ OLARAK GEÇERLİDİR. TORNADO.CASH UYUM ARACININ YAZARLARI RAPORDAN KAYNAKLANAN, UYUMLULUKTAN KAYNAKLANAN VEYA BAĞLANTILI OLARAK SÖZLEŞME, HAKSIZ YA DA BAŞKA BİR DURUMDA OLAN HERHANGİ BİR İDDİADAN, ZARAR VEYA BAŞKA SORUMLULUKTAN SORUMLU TUTULAMAZ.",
"relayRequestFailed": "Relayer {relayerName} çöktü. lütfen başka bir relayer seçin.",
"selectProvider": "Sağlayıcı seçin",
"walletDoesNotSupported": "Bu cüzdan desteklenmiyor",

View File

@ -151,7 +151,7 @@
"nullifierHash": "无效符",
"verified": "已验证",
"generatePdfReport": "生成 PDF 报告",
"compliancePrintWarning": "这本来源证明报告仅供参考的。 你应该使用Tornado的来源证明工具来确认报告 (https://tornado.ws/compliance) 的有效性,或者与可以算出和验证此处包含信息的任何其他密码学软件 (\"Tornado来源证明工具\") 一起使用。 报告中发现的信息与上述工具提供的信息之间存在任何差异,表明报告中的信息是不正确的{newline} 来源证明报告按 \"原样,\" 提供,不提供任何明示或暗示担保,包括但不限于对适销性,用途的适用性和非侵权专利的担保。 无论是出于合同要求、侵权或其他原因由本来源证明报告引起与相关的任何索赔损害或其他责任Tornado.cash的作者概不负责。",
"compliancePrintWarning": "这本来源证明报告仅供参考的。 你应该使用Tornado的来源证明工具来确认报告 (https://tornadocash.eth.link/compliance) 的有效性,或者与可以算出和验证此处包含信息的任何其他密码学软件 (\"Tornado来源证明工具\") 一起使用。 报告中发现的信息与上述工具提供的信息之间存在任何差异,表明报告中的信息是不正确的{newline} 来源证明报告按 \"原样,\" 提供,不提供任何明示或暗示担保,包括但不限于对适销性,用途的适用性和非侵权专利的担保。 无论是出于合同要求、侵权或其他原因由本来源证明报告引起与相关的任何索赔损害或其他责任Tornado.cash的作者概不负责。",
"relayRequestFailed": "中继者 {relayerName} 无法使用,请选择其他中继者。",
"selectProvider": "请选择钱包",
"walletDoesNotSupported": "此钱包不受支持",

View File

@ -25,7 +25,7 @@ export async function _encryptFormatTx({ dispatch, getters, rootGetters }, { eve
if (!instance) {
return acc
}
const name = `${netId}${instance.amount}${instance.currency}`
const name = `${instance.amount}${instance.currency}`
if (!acc[name]) {
const service = eventsInterface.getService({ netId, ...instance })
acc[name] = { ...instance, service }
@ -49,7 +49,7 @@ export async function _encryptFormatTx({ dispatch, getters, rootGetters }, { eve
if (!instance) {
return
}
const { service } = instances[`${netId}${instance.amount}${instance.currency}`]
const { service } = instances[`${instance.amount}${instance.currency}`]
return getDeposit({ event, netId, service, instance })
})

View File

@ -1,6 +1,3 @@
export const blockSyncInterval = 10000
export const enabledChains = ['1', '10', '56', '100', '137', '43114', '42161']
export const chainsWithEncryptedNotes = ['1', '5', '56', '100', '137']
export default {
netId1: {
rpcCallRetryAttempt: 15,
@ -22,21 +19,12 @@ export default {
networkName: 'Ethereum Mainnet',
deployedBlock: 9116966,
rpcUrls: {
chainnodes: {
name: 'Tornado RPC',
url: 'https://mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607'
},
mevblockerRPC: {
name: 'MevblockerRPC',
url: 'https://rpc.mevblocker.io'
},
llamaRPC: {
name: 'llamarpc',
url: 'https://eth.llamarpc.com'
SecureRPC: {
name: 'SecureRPC',
url: 'https://api.securerpc.com/v1'
}
},
multicall: '0xeefba1e63905ef1d7acba5a8513c70307c1ce441',
routerContract: '0xd90e2f925DA726b50C4Ed8D0Fb90Ad053324F31b',
registryContract: '0x58E8dCC13BE9780fC42E8723D8EaD4CF46943dF2',
echoContractAccount: '0x9B27DD5Bb15d42DC224FCD0B7caEbBe16161Df42',
aggregatorContract: '0xE8F47A78A6D52D317D0D2FFFac56739fE14D1b49',
@ -123,7 +111,7 @@ export default {
'torn.contract.tornadocash.eth': '0x77777FeDdddFfC19Ff86DB637967013e6C6A116C',
'governance.contract.tornadocash.eth': '0x5efda50f22d34F262c29268506C5Fa42cB56A1Ce',
'tornado-router.contract.tornadocash.eth': '0xd90e2f925DA726b50C4Ed8D0Fb90Ad053324F31b',
'staking-rewards.contract.tornadocash.eth': '0x5B3f656C80E8ddb9ec01Dd9018815576E9238c29'
'staking-rewards.contract.tornadocash.eth': '0x2FC93484614a34f26F7970CBB94615bA109BB4bf'
},
netId56: {
rpcCallRetryAttempt: 15,
@ -147,13 +135,9 @@ export default {
multicall: '0x41263cba59eb80dc200f3e2544eda4ed6a90e76c',
echoContractAccount: '0xa75BF2815618872f155b7C4B0C81bF990f5245E4',
rpcUrls: {
chainnodes: {
name: 'Tornado RPC',
url: 'https://bsc-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607'
},
oneRPC: {
name: '1RPC',
url: 'https://1rpc.io/bnb'
bscRpc: {
name: 'BSC RPC',
url: 'https://bscrpc.com/'
}
},
tokens: {
@ -198,13 +182,9 @@ export default {
multicall: '0x11ce4B23bD875D7F5C6a31084f55fDe1e9A87507',
echoContractAccount: '0xa75BF2815618872f155b7C4B0C81bF990f5245E4',
rpcUrls: {
chainnodes: {
name: 'Tornado RPC',
url: 'https://polygon-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607'
},
oneRpc: {
name: '1RPC',
url: 'https://1rpc.io/matic'
polygonRPC: {
name: 'Polygon RPC',
url: `https://polygon-rpc.com`
}
},
tokens: {
@ -250,13 +230,9 @@ export default {
echoContractAccount: '0xa75BF2815618872f155b7C4B0C81bF990f5245E4',
ovmGasPriceOracleContract: '0x420000000000000000000000000000000000000F',
rpcUrls: {
chainnodes: {
name: 'Tornado RPC',
url: 'https://optimism-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607'
},
oneRpc: {
name: '1RPC',
url: 'https://1rpc.io/op'
Optimism: {
name: 'Optimism',
url: `https://mainnet.optimism.io`
}
},
tokens: {
@ -301,16 +277,8 @@ export default {
multicall: '0x842eC2c7D803033Edf55E478F461FC547Bc54EB2',
echoContractAccount: '0xa75BF2815618872f155b7C4B0C81bF990f5245E4',
rpcUrls: {
chainnodes: {
name: 'Tornado RPC',
url: 'https://arbitrum-one.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607'
},
oneRpc: {
name: '1rpc',
url: 'https://1rpc.io/arb'
},
Arbitrum: {
name: 'Arbitrum RPC',
name: 'Arbitrum Public RPC',
url: 'https://arb1.arbitrum.io/rpc'
}
},
@ -356,13 +324,9 @@ export default {
multicall: '0xb5b692a88bdfc81ca69dcb1d924f59f0413a602a',
echoContractAccount: '0xa75BF2815618872f155b7C4B0C81bF990f5245E4',
rpcUrls: {
chainnodes: {
name: 'Tornado RPC',
url: 'https://gnosis-mainnet.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607'
},
blockPi: {
name: 'BlockPi',
url: 'https://gnosis.blockpi.network/v1/rpc/public'
publicRpc: {
name: 'Community RPC',
url: 'https://development.tornadocash.community/rpc/v1'
}
},
tokens: {
@ -407,17 +371,9 @@ export default {
multicall: '0xe86e3989c74293Acc962156cd3F525c07b6a1B6e',
echoContractAccount: '0xa75BF2815618872f155b7C4B0C81bF990f5245E4',
rpcUrls: {
communityRPC: {
name: 'Tornado RPC',
url: 'https://avalanche-rpc.tornado.ws/ext/bc/C/rpc'
},
publicRpc1: {
publicRpc: {
name: 'Avalanche RPC',
url: 'https://api.avax.network/ext/bc/C/rpc'
},
oneRpc: {
name: '1RPC',
url: 'https://1rpc.io/avax/'
}
},
tokens: {
@ -462,9 +418,9 @@ export default {
echoContractAccount: '0x37e6859804b6499d1e4a86d70a5fdd5de6a0ac65',
aggregatorContract: '0x8cb1436F64a3c33aD17bb42F94e255c4c0E871b2',
rpcUrls: {
chainnodes: {
name: 'Tornado RPC',
url: 'https://goerli.chainnodes.org/d692ae63-0a7e-43e0-9da9-fe4f4cc6c607'
Infura: {
name: 'Infura',
url: 'https://goerli.infura.io/v3/9aa3d95b3bc440fa88ea12eaa4456161'
}
},
tokens: {

View File

@ -50,7 +50,7 @@ export default {
{
'http-equiv': 'Content-Security-Policy',
content:
"img-src 'self' data:;font-src data:;style-src 'self' 'unsafe-inline';connect-src *;script-src 'self' 'unsafe-eval' 'unsafe-inline';default-src 'self';object-src 'none';base-uri 'none';upgrade-insecure-requests;child-src blob:;worker-src blob:;"
"img-src 'self' data:;font-src data:;style-src 'self' 'unsafe-inline';connect-src *;script-src 'self' 'unsafe-eval' 'unsafe-inline';default-src 'self';object-src 'none';base-uri 'none';upgrade-insecure-requests; "
},
{
name: 'Referer-Policy',
@ -79,7 +79,7 @@ export default {
{
hid: 'og:url',
property: 'og:url',
content: 'https://tornado.ws'
content: 'https://tornado.cash'
},
{
hid: 'og:type',
@ -89,7 +89,7 @@ export default {
{
hid: 'og:image',
property: 'og:image',
content: 'https://tornado.ws/tw.png'
content: 'https://tornado.cash/tw.png'
},
{
hid: 'description',
@ -178,10 +178,23 @@ export default {
if (ctx.isClient) {
config.devtool = hasSourceMaps
}
config.output.globalObject = 'this'
config.module.rules.push({
test: /\.bin$/,
use: 'arraybuffer-loader'
})
if(ctx.isClient) {
config.module.rules.push({
test: /\.worker\.js$/,
loader: 'worker-loader',
exclude: /(node_modules)/,
options: {
filename: './workers/[name].js',
}
})
}
},
plugins: [
new webpack.IgnorePlugin(/worker_threads/),

View File

@ -7,15 +7,16 @@
"lint": "eslint --ext .js,.vue --ignore-path .gitignore .",
"precommit": "yarn lint",
"test": "jest",
"dev": "cross-env NODE_OPTIONS='--max-old-space-size=8192' nuxt",
"dev": "NODE_OPTIONS='--max-old-space-size=8192' nuxt",
"build": "nuxt build",
"start": "nuxt start",
"update:zip": "node -r esm scripts/updateZip.js",
"update:events": "node -r esm scripts/updateEvents.js --network",
"update:encrypted": "node -r esm scripts/updateEncryptedEvents.js --network",
"update:tree": "node -r esm scripts/updateTree.js --network",
"generate": "cross-env NODE_OPTIONS='--max-old-space-size=8192' nuxt generate && cp dist/404.html dist/ipfs-404.html",
"generate": "NODE_OPTIONS='--max-old-space-size=8192' nuxt generate && cp dist/404.html dist/ipfs-404.html",
"check:sync": "node -r esm scripts/checkEventsSync.js",
"worker:compile": "babel dist/_nuxt/workers/events.worker.js --out-file static/events.worker.js",
"ipfsUpload": "node scripts/ipfsUpload.js",
"deploy:ipfs": "yarn generate && yarn ipfsUpload"
},
@ -49,14 +50,14 @@
"nuxt-web3-provider": "0.1.4",
"push-dir": "^0.4.1",
"recursive-fs": "^2.1.0",
"snarkjs": "git+https://development.tornadocash.community/tornadocash/snarkjs.git#869181cfaf7526fe8972073d31655493a04326d5",
"v-idle": "^0.2.0",
"vue-clipboard2": "^0.3.1",
"vue-i18n": "^8.15.4",
"vuex-persistedstate": "^2.7.0",
"web3": "1.5.2",
"cross-env": "7.0.3",
"@tornado/snarkjs": "0.1.20-p2",
"@tornado/websnark": "0.0.4-p1"
"web3-eth": "1.8.1-rc.0",
"websnark": "git+https://development.tornadocash.community/tornadocash/websnark.git#671762fab73f01771d0e7ebcf6b6a3123e193fb4"
},
"devDependencies": {
"@nuxtjs/eslint-config": "^1.1.2",
@ -86,6 +87,7 @@
"raw-loader": "^3.1.0",
"sass": "^1.49.9",
"sass-loader": "^8.0.0",
"vue-jest": "^3.0.5"
"vue-jest": "^3.0.5",
"worker-loader": "^3.0.8"
}
}

View File

@ -24,7 +24,7 @@
>
<i18n path="trustBanner.trustLess">
<template v-slot:link>
<a href="https://tornado.ws/">{{ $t('trustBanner.link') }}</a>
<a href="https://tornado.cash/">{{ $t('trustBanner.link') }}</a>
</template>
</i18n>
</b-notification>
@ -73,7 +73,7 @@
</template>
<template v-slot:linkThree>
<a
href="https://docs.tornado.ws/general/guides/post-censorship#RPC"
href="https://hackmd.io/@gozzy/tornado-cash-post-censorship#RPC"
target="_blank"
rel="noopener noreferrer"
>
@ -83,6 +83,31 @@
</i18n>
</b-notification>
<b-notification
v-if="isEthLink"
:active="isActiveNotification.ethLink"
class="main-notification"
type="is-warning"
icon-pack="icon"
has-icon
:aria-close-label="$t('closeNotification')"
@close="disableNotification({ key: 'ethLink' })"
>
<i18n path="ethLinkBanner.notification">
<template v-slot:issue>
<a
href="https://discuss.ens.domains/t/eth-link-expiry/13899"
target="_blank"
rel="noopener noreferrer"
>{{ $t('ethLinkBanner.issue') }}</a
>
</template>
<template v-slot:alternative>
<a href="https://tornado.cash/">{{ $t('ethLinkBanner.alternative') }}</a>
</template>
</i18n>
</b-notification>
<div class="columns">
<div class="column is-half">
<b-tabs v-model="activeTab" class="is-tornado" :animated="false" @input="tabChanged">
@ -115,7 +140,8 @@ export default {
data() {
return {
activeTab: 0,
isActive: false
isActive: false,
isEthLink: window.location.host === 'tornadocash.eth.link'
}
},
computed: {
@ -158,19 +184,8 @@ export default {
}
}
} else {
const userSelection = this.selectedInstance
const stateSelection = this.selectedStatistic
if (
!stateSelection ||
userSelection.amount !== stateSelection.amount ||
userSelection.currency !== stateSelection.currency
) {
this.$store.dispatch('application/setAndUpdateStatistic', {
currency: userSelection.currency,
amount: userSelection.amount
})
}
const { currency, amount } = this.selectedInstance
this.$store.dispatch('application/setAndUpdateStatistic', { currency, amount })
}
}
}

View File

@ -3,24 +3,13 @@ export default ({ store, isHMR, app }, inject) => {
inject('isLoadedFromIPFS', main)
}
function main() {
const whiteListedDomains = [
'tornadocash.eth.link',
'tornadocash.eth.limo',
'tornadocashcommunity.eth.link',
'tornadocashcommunity.eth.limo'
]
const whiteListedDomains = ['localhost:3000', 'tornadocash.eth.link', 'tornadocash.eth.limo']
const IPFS_GATEWAY_REGEXP = /.ipfs./
const IPFS_LOCAL_REGEXP = /.ipfs.localhost:/
const IPFS_SOP_GATEWAY_REGEXP = /\/ipfs\//
const NETLIFY_REGEXP = /deploy-preview-(\d+)--tornadocash\.netlify\.app/
if (IPFS_LOCAL_REGEXP.test(window.location.host)) {
if (NETLIFY_REGEXP.test(window.location.host)) {
return false
} else if (
IPFS_GATEWAY_REGEXP.test(window.location.host) ||
IPFS_SOP_GATEWAY_REGEXP.test(window.location.host) ||
whiteListedDomains.includes(window.location.host)
) {
} else if (!whiteListedDomains.includes(window.location.host)) {
console.warn('The page has been loaded from ipfs.io. LocalStorage is disabled')
return true
}

View File

@ -264,24 +264,24 @@ export default async (ctx, inject) => {
Object.keys(tokens[token].instanceAddress).forEach((amount) => {
if (nativeCurrency === token && netId === 1) {
stores.push({
name: `stringify_bloom_${netId}_${token}_${amount}`,
name: `stringify_bloom_${token}_${amount}`,
keyPath: 'hashBloom'
})
}
stores.push(
{
name: `deposits_${netId}_${token}_${amount}`,
name: `deposits_${token}_${amount}`,
keyPath: 'leafIndex', // the key by which it refers to the object must be in all instances of the storage
indexes: DEPOSIT_INDEXES
},
{
name: `withdrawals_${netId}_${token}_${amount}`,
name: `withdrawals_${token}_${amount}`,
keyPath: 'blockNumber',
indexes: WITHDRAWAL_INDEXES
},
{
name: `stringify_tree_${netId}_${token}_${amount}`,
name: `stringify_tree_${token}_${amount}`,
keyPath: 'hashTree'
}
)

View File

@ -23,9 +23,9 @@ function main(store) {
window.multipleTabsDetected = true
window.onbeforeunload = null
window.alert(
'Multiple tabs opened. Your page will be closed. Please only use single instance of https://tornado.ws'
'Multiple tabs opened. Your page will be closed. Please only use single instance of https://tornado.cash'
)
window.location = 'https://t.me/TornadoOfficial'
window.location = 'https://twitter.com/tornadocash'
}
}

View File

@ -1,21 +1,24 @@
import networkConfig, { enabledChains } from '../networkConfig'
import { loadCachedEvents } from './helpers'
import networkConfig from '../networkConfig'
import ABI from '../abis/Instance.abi.json'
import { loadCachedEvents, getPastEvents } from './helpers'
const EVENTS_PATH = './static/events/'
const enabledChains = ['1', '56', '100', '137' ]
function main() {
for (const netId of enabledChains) {
async function main() {
for (let network in enabledChains) {
const netId = enabledChains[network]
const config = networkConfig[`netId${netId}`]
const { constants, tokens, nativeCurrency, deployedBlock } = config
const CONTRACTS = tokens[nativeCurrency].instanceAddress
console.log(`\n ::: ${netId} [${nativeCurrency.toUpperCase()}] :::`)
for (const [instance] of Object.entries(CONTRACTS)) {
for (const [instance, _contract] of Object.entries(CONTRACTS)) {
console.log(`\n instanceDenomation - ${instance}`)
const withdrawalCachedEvents = loadCachedEvents({
name: `withdrawals_${netId}_${nativeCurrency}_${instance}.json`,
const withdrawalCachedEvents = await loadCachedEvents({
name: `withdrawals_${nativeCurrency}_${instance}.json`,
directory: EVENTS_PATH,
deployedBlock
})
@ -24,8 +27,8 @@ function main() {
console.log('cachedEvents count - ', withdrawalCachedEvents.events.length)
console.log('lastBlock - ', withdrawalCachedEvents.lastBlock)
const depositCachedEvents = loadCachedEvents({
name: `deposits_${netId}_${nativeCurrency}_${instance}.json`,
const depositCachedEvents = await loadCachedEvents({
name: `deposits_${nativeCurrency}_${instance}.json`,
directory: EVENTS_PATH,
deployedBlock
})
@ -34,7 +37,7 @@ function main() {
console.log('cachedEvents count - ', depositCachedEvents.events.length)
console.log('lastBlock - ', depositCachedEvents.lastBlock)
const notesCachedEvents = loadCachedEvents({
const notesCachedEvents = await loadCachedEvents({
name: `encrypted_notes_${netId}.json`,
directory: EVENTS_PATH,
deployedBlock: constants.ENCRYPTED_NOTES_BLOCK
@ -43,6 +46,7 @@ function main() {
console.log('- Notes')
console.log('cachedEvents count - ', notesCachedEvents.events.length)
console.log('lastBlock - ', notesCachedEvents.lastBlock)
}
}
}

View File

@ -2,27 +2,27 @@ import fs from 'fs'
import zlib from 'zlib'
import Web3 from 'web3'
import networkConfig, { blockSyncInterval } from '../../networkConfig'
import networkConfig from '../../networkConfig'
export function download({ name, directory }) {
export async function download({ name, directory, contentType }) {
const path = `${directory}${name}.gz`.toLowerCase()
const data = fs.readFileSync(path, { flag: 'as+' })
const data = fs.readFileSync(path)
const content = zlib.inflateSync(data)
return content
}
export function loadCachedEvents({ name, directory, deployedBlock }) {
export async function loadCachedEvents({ name, directory, deployedBlock }) {
try {
const module = download({ contentType: 'string', directory, name })
const module = await download({ contentType: 'string', directory, name })
if (module) {
const events = JSON.parse(module)
return {
events,
lastBlock: events[events.length - 1].blockNumber
lastBlock: events[events.length - 1].blockNumber
}
}
} catch (err) {
@ -53,7 +53,7 @@ export async function getPastEvents({ type, fromBlock, netId, events, contractAt
const blockDifference = Math.ceil(blockNumberBuffer - fromBlock)
// eth_logs and eth_filter are restricted > 10,000 block queries
const blockRange = blockSyncInterval ? blockSyncInterval : 10_000
const blockRange = 10000
let chunksCount = blockDifference === 0 ? 1 : Math.ceil(blockDifference / blockRange)
const chunkSize = Math.ceil(blockDifference / chunksCount)
@ -67,7 +67,6 @@ export async function getPastEvents({ type, fromBlock, netId, events, contractAt
}
console.log(`Fetching ${type}, chainId - ${netId}`, `chunksCount - ${chunksCount}`)
for (let i = 0; i < chunksCount; i++)
try {
await new Promise((resolve) => setTimeout(resolve, 200))

View File

@ -1,11 +1,14 @@
import fs from 'fs'
import zlib from 'zlib'
export function save(filePath) {
export async function save(filePath) {
const directories = filePath.split('/')
const fileName = directories[directories.length - 1]
try {
const data = fs.readFileSync(`${filePath}`)
const payload = zlib.deflateSync(data, {
const payload = await zlib.deflateSync(data, {
level: zlib.constants.Z_BEST_COMPRESSION,
strategy: zlib.constants.Z_FILTERED
})

View File

@ -3,12 +3,12 @@ import 'dotenv/config'
import fs from 'fs'
import { uniqBy } from 'lodash'
import networkConfig, { enabledChains } from '../networkConfig'
import networkConfig from '../networkConfig'
import ABI from '../abis/TornadoProxy.abi.json'
import { getPastEvents, loadCachedEvents } from './helpers'
const EVENTS_PATH = './static/events/'
const enabledChains = ['1', '5', '56', '100', '137']
async function saveEncryptedNote(netId) {
const {
@ -23,7 +23,7 @@ async function saveEncryptedNote(netId) {
let encryptedEvents = []
const name = `encrypted_notes_${netId}.json`
const cachedEvents = loadCachedEvents({
const cachedEvents = await loadCachedEvents({
name,
directory: EVENTS_PATH,
deployedBlock: constants.ENCRYPTED_NOTES_BLOCK
@ -57,13 +57,11 @@ async function saveEncryptedNote(netId) {
freshEvents = uniqBy(freshEvents, 'encryptedNote').sort((a, b) => b.blockNumber - a.blockNumber)
const eventsJson = JSON.stringify(freshEvents, null, 2) + '\n'
fs.writeFileSync(`${EVENTS_PATH}${name}`, eventsJson)
}
async function main() {
const [, , , chain] = process.argv
if (!enabledChains.includes(chain)) {
throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
}

View File

@ -3,120 +3,86 @@ import 'dotenv/config'
import fs from 'fs'
import { uniqBy } from 'lodash'
import networkConfig, { enabledChains } from '../networkConfig'
import networkConfig from '../networkConfig'
import ABI from '../abis/Instance.abi.json'
import { loadCachedEvents, getPastEvents } from './helpers'
const EVENTS_PATH = './static/events/'
const EVENTS = ['Deposit', 'Withdrawal']
const enabledChains = ['1', '56', '5', '100', '137' ]
function parseArg(netId, tokenOrEvent) {
const { tokens } = networkConfig[`netId${netId}`]
const keys = Object.keys(tokens)
if (tokenOrEvent !== undefined) {
const lower = tokenOrEvent.toLowerCase()
return keys.includes(lower)
? { token: lower }
: { event: lower[0].toUpperCase() + lower.slice(1).toLowerCase() }
} else return undefined
}
async function main(type, netId) {
const { tokens, nativeCurrency, deployedBlock } = networkConfig[`netId${netId}`]
const CONTRACTS = tokens[nativeCurrency].instanceAddress
function parseDepositEvent({ blockNumber, transactionHash, returnValues }) {
const { commitment, leafIndex, timestamp } = returnValues
return {
timestamp,
commitment,
blockNumber,
transactionHash,
leafIndex: Number(leafIndex)
}
}
for (const [instance, _contract] of Object.entries(CONTRACTS)) {
const cachedEvents = await loadCachedEvents({
name: `${type.toLowerCase()}s_${nativeCurrency}_${instance}.json`,
directory: EVENTS_PATH,
deployedBlock
})
function parseWithdrawalEvent({ blockNumber, transactionHash, returnValues }) {
const { nullifierHash, to, fee } = returnValues
return {
to,
fee,
blockNumber,
nullifierHash,
transactionHash
}
}
console.log('cachedEvents count - ', cachedEvents.events.length)
console.log('lastBlock - ', cachedEvents.lastBlock)
function filterWithdrawalEvents(events) {
return uniqBy(events, 'nullifierHash').sort((a, b) => a.blockNumber - b.blockNumber)
}
let events = []
function filterDepositEvents(events) {
return events.filter((e, index) => Number(e.leafIndex) === index)
}
events = await getPastEvents({
type,
netId,
events,
contractAttrs: [ABI, _contract],
fromBlock: cachedEvents.lastBlock + 1
})
async function main(netId, chosenToken, chosenEvent) {
const { tokens, deployedBlock } = networkConfig[`netId${netId}`]
const tokenSymbols = chosenToken !== undefined ? [chosenToken] : Object.keys(tokens)
const eventNames = chosenEvent !== undefined ? [chosenEvent] : ['Deposit', 'Withdrawal']
for (const eventName of eventNames) {
// Get the parser that we need
const parser = eventName === 'Deposit' ? parseDepositEvent : parseWithdrawalEvent
// Get the filter that we need
const filter = eventName === 'Deposit' ? filterDepositEvents : filterWithdrawalEvents
for (const tokenSymbol of tokenSymbols) {
// Now load the denominations and address
const instanceData = Object.entries(tokens[tokenSymbol].instanceAddress)
// And now sync
for (const data of instanceData) {
const denom = data[0]
const address = data[1]
// Now load cached events
const cachedEvents = loadCachedEvents({
name: `${eventName.toLowerCase()}s_${netId}_${tokenSymbol}_${denom}.json`,
directory: EVENTS_PATH,
deployedBlock
})
console.log('Update events for', denom, tokenSymbol.toUpperCase(), `${eventName.toLowerCase()}s`)
console.log('cachedEvents count - ', cachedEvents.events.length)
console.log('lastBlock - ', cachedEvents.lastBlock)
let events = await getPastEvents({
type: eventName,
fromBlock: cachedEvents.lastBlock + 1,
netId: netId,
events: [],
contractAttrs: [ABI, address]
})
events = filter(cachedEvents.events.concat(events.map(parser)))
fs.writeFileSync(
`${EVENTS_PATH}${eventName.toLowerCase()}s_${netId}_${tokenSymbol}_${denom}.json`,
JSON.stringify(events, null, 2) + '\n'
)
}
if (type === 'Deposit') {
events = events.map(({ blockNumber, transactionHash, returnValues }) => {
const { commitment, leafIndex, timestamp } = returnValues
return {
timestamp,
commitment,
blockNumber,
transactionHash,
leafIndex: Number(leafIndex)
}
})
}
if (type === 'Withdrawal') {
events = events.map(({ blockNumber, transactionHash, returnValues }) => {
const { nullifierHash, to, fee } = returnValues
return {
to,
fee,
blockNumber,
nullifierHash,
transactionHash
}
})
}
let freshEvents = cachedEvents.events.concat(events)
if (type === 'Withdrawal') {
freshEvents = uniqBy(freshEvents, 'nullifierHash').sort((a, b) => a.blockNumber - b.blockNumber)
} else {
freshEvents = freshEvents.filter((e, index) => Number(e.leafIndex) === index)
}
const eventsJson = JSON.stringify(freshEvents, null, 2) + '\n'
fs.writeFileSync(`${EVENTS_PATH}${type.toLowerCase()}s_${nativeCurrency}_${instance}.json`, eventsJson)
}
}
/**
* @param netId ID of the network for which event(s) should be synced.
* @param tokenOrEvent Optional token or event.
* @param eventOrToken Optional token or event. Overwrites the former option.
*/
async function start() {
const [, , , netId, tokenOrEvent, eventOrToken] = process.argv
const args = { ...parseArg(netId, tokenOrEvent), ...parseArg(netId, eventOrToken) }
if (!enabledChains.includes(netId)) {
const [, , , chain] = process.argv
if (!enabledChains.includes(chain)) {
throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
}
await main(netId, args.token, args.event)
for await (const event of EVENTS) {
await main(event, chain)
}
}
start()

View File

@ -1,12 +1,11 @@
import 'dotenv/config'
import fs from 'fs'
import BloomFilter from 'bloomfilter.js'
import { MerkleTree } from 'fixed-merkle-tree'
import { buildMimcSponge } from 'circomlibjs'
import networkConfig, { enabledChains } from '../networkConfig'
import networkConfig from '../networkConfig'
import { loadCachedEvents, save } from './helpers'
@ -15,7 +14,7 @@ const TREES_PATH = './static/trees/'
const EVENTS_PATH = './static/events/'
const EVENTS = ['deposit']
const enabledChains = ['1', '56', '100', '137' ]
let mimcHash
const trees = {
@ -23,8 +22,8 @@ const trees = {
LEVELS: 20 // const from contract
}
function getName({ path, type, netId, instance, format = '.json', currName = 'eth' }) {
return `${path}${type.toLowerCase()}s_${netId}_${currName}_${instance}${format}`
function getName({ path, type, instance, format = '.json', currName = 'eth' }) {
return `${path}${type.toLowerCase()}s_${currName}_${instance}${format}`
}
function createTreeZip(netId) {
@ -37,7 +36,6 @@ function createTreeZip(netId) {
const baseFilename = getName({
type,
instance,
netId,
format: '',
path: TREES_PATH,
currName: currencyName.toLowerCase()
@ -47,7 +45,6 @@ function createTreeZip(netId) {
treesFolder.forEach((fileName) => {
fileName = `${TREES_PATH}${fileName}`
const isInstanceFile = !fileName.includes('.gz') && fileName.includes(baseFilename)
if (isInstanceFile) {
@ -70,7 +67,6 @@ async function createTree(netId) {
const filePath = getName({
type,
instance,
netId,
format: '',
path: TREES_PATH,
currName: currencyName.toLowerCase()
@ -78,8 +74,8 @@ async function createTree(netId) {
console.log('createTree', { type, instance })
const { events } = loadCachedEvents({
name: `${type}s_${netId}_${nativeCurrency}_${instance}.json`,
const { events } = await loadCachedEvents({
name: `${type}s_${nativeCurrency}_${instance}.json`,
directory: EVENTS_PATH,
deployedBlock
})
@ -122,12 +118,10 @@ async function createTree(netId) {
}, [])
const sliceJson = JSON.stringify(slice, null, 2) + '\n'
fs.writeFileSync(`${filePath}_slice${index + 1}.json`, sliceJson)
})
const bloomCache = bloom.serialize()
fs.writeFileSync(`${filePath}_bloom.json`, bloomCache)
}
}
@ -143,16 +137,13 @@ async function initMimc() {
async function main() {
const [, , , chain] = process.argv
if (!enabledChains.includes(chain)) {
throw new Error(`Supported chain ids ${enabledChains.join(', ')}`)
}
await initMimc()
await createTree(chain)
createTreeZip(chain)
await createTreeZip(chain)
}
main()

View File

@ -1,7 +1,6 @@
import { uniqBy } from 'lodash'
import networkConfig, { enabledChains, chainsWithEncryptedNotes } from '../networkConfig'
import networkConfig from '../networkConfig'
import { loadCachedEvents, save } from './helpers'
const EVENTS_PATH = './static/events/'
@ -10,26 +9,22 @@ const EVENTS = ['Deposit', 'Withdrawal']
function updateEncrypted(netId) {
try {
const file = `${EVENTS_PATH}encrypted_notes_${netId}.json`
save(file)
} catch {}
}
async function updateCommon(netId) {
const { nativeCurrency, tokens } = networkConfig[`netId${netId}`]
console.log(Object.keys(tokens[nativeCurrency].instanceAddress))
for await (const type of EVENTS) {
for await (const instance of Object.keys(tokens[nativeCurrency].instanceAddress)) {
console.warn('instance', instance)
const filename = `${type.toLowerCase()}s_${netId}_${nativeCurrency}_${instance}.json`
const filename = `${type.toLowerCase()}s_${nativeCurrency}_${instance}.json`
const isSaved = save(`${EVENTS_PATH}${filename}`)
if (isSaved) {
try {
testCommon(netId, type, filename)
await testCommon(netId, type, filename)
} catch (err) {
console.error(err.message)
}
@ -38,10 +33,10 @@ async function updateCommon(netId) {
}
}
function testCommon(netId, type, filename) {
async function testCommon(netId, type, filename) {
const { deployedBlock } = networkConfig[`netId${netId}`]
const cachedEvents = loadCachedEvents({
const cachedEvents = await loadCachedEvents({
name: filename,
directory: EVENTS_PATH,
deployedBlock
@ -50,13 +45,11 @@ function testCommon(netId, type, filename) {
console.log('cachedEvents', cachedEvents.events.length, type)
let events = cachedEvents.events
if (type === 'Withdrawal') {
events = uniqBy(cachedEvents.events, 'nullifierHash')
} else if (type === 'Deposit') {
events = cachedEvents.events.filter((e, index) => Number(e.leafIndex) === index)
}
if (events.length !== cachedEvents.events.length) {
console.error('events.length', events.length)
console.error('cachedEvents.events.length', cachedEvents.events.length)
@ -65,11 +58,10 @@ function testCommon(netId, type, filename) {
}
async function main() {
for (let i = 0; i < enabledChains.length; i++) {
const netId = enabledChains[i]
if (netId === chainsWithEncryptedNotes[i]) updateEncrypted(netId)
const NETWORKS = [1, 5, 56, 100, 137 ]
for await (const netId of NETWORKS) {
updateEncrypted(netId)
await updateCommon(netId)
}
}

View File

@ -1,31 +1,35 @@
import Web3 from 'web3'
import EWorker from '@/assets/events.worker.js'
import graph from '@/services/graph'
import { download } from '@/store/snark'
import networkConfig, { enabledChains } from '@/networkConfig'
import networkConfig from '@/networkConfig'
import InstanceABI from '@/abis/Instance.abi.json'
import { CONTRACT_INSTANCES, eventsType, httpConfig } from '@/constants'
import { sleep, flattenNArray, formatEvents, capitalizeFirstLetter } from '@/utils'
import { CONTRACT_INSTANCES, eventsType } from '@/constants'
import { sleep, formatEvents, capitalizeFirstLetter, flattenNArray } from '@/utils'
let store
if (process.browser) {
window.onNuxtReady(({ $store }) => {
store = $store
})
}
const MIN_CORES = 2
const WORKERS_ALLOC = 2
const HARDWARE_CORES = window.navigator.hardwareConcurrency
const AVAILABLE_CORES = HARDWARE_CORES / WORKERS_ALLOC || MIN_CORES
const CORES = Math.max(AVAILABLE_CORES, MIN_CORES)
const supportedCaches = ['1', '56', '100', '137']
class EventService {
constructor({ netId, amount, currency, factoryMethods }) {
this.idb = window.$nuxt.$indexedDB(netId)
const { nativeCurrency } = networkConfig[`netId${netId}`]
const hasCache = enabledChains.includes(netId.toString())
const hasCache = supportedCaches.includes(netId.toString())
this.netId = netId
this.amount = amount
this.currency = currency
this.factoryMethods = factoryMethods
this.rpcUrl = this.factoryMethods.getProviderUrl()
this.contract = this.getContract({ netId, amount, currency })
this.isNative = nativeCurrency === this.currency
@ -33,15 +37,17 @@ class EventService {
}
getInstanceName(type) {
return `${type}s_${this.netId}_${this.currency}_${this.amount}`
return `${type}s_${this.currency}_${this.amount}`
}
updateEventProgress(percentage, type) {
if (store) {
store.dispatch('loading/updateProgress', {
message: `Fetching past ${type} events`,
progress: Math.ceil(percentage * 100)
})
getMessageParams(eventName, type) {
return {
type: capitalizeFirstLetter(type),
currency: this.currency,
rpcUrl: this.rpcUrl,
amount: this.amount,
netId: this.netId,
eventName
}
}
@ -78,6 +84,7 @@ class EventService {
}
return a.blockNumber - b.blockNumber
})
const lastBlock = allEvents[allEvents.length - 1].blockNumber
this.saveEvents({ events: allEvents, lastBlock, type })
@ -244,150 +251,107 @@ class EventService {
}
}
getPastEvents({ fromBlock, toBlock, type }, shouldRetry = false, i = 0) {
return new Promise((resolve, reject) => {
this.contract
.getPastEvents(capitalizeFirstLetter(type), {
fromBlock,
toBlock
})
.then((events) => resolve(events))
.catch((err) => {
i = i + 1
// maximum 5 second buffer for rate-limiting
if (shouldRetry) {
const isRetry = i !== 5
sleep(1000 * i).then(() =>
this.getPastEvents({ fromBlock, toBlock, type }, isRetry, i)
.then((events) => resolve(events))
.catch((_) => resolve(undefined))
)
} else {
reject(new Error(err))
}
})
})
}
async getEventsPartFromRpc(parameters, shouldRetry = false) {
async getEventsPartFromRpc({ fromBlock, toBlock, type }) {
try {
const { fromBlock, type } = parameters
const { currentBlockNumber } = await this.getBlocksDiff({ fromBlock })
if (fromBlock < currentBlockNumber) {
const eventsPart = await this.getPastEvents(parameters, shouldRetry)
if (eventsPart) {
if (eventsPart.length > 0) {
return {
events: formatEvents(eventsPart, type),
lastBlock: eventsPart[eventsPart.length - 1].blockNumber
}
} else {
return {
events: [],
lastBlock: fromBlock
}
}
} else {
return undefined
}
} else {
if (fromBlock > currentBlockNumber) {
return {
events: [],
lastBlock: fromBlock
}
}
} catch (err) {
return undefined
}
}
createBatchRequest(batchArray) {
return batchArray.map(
(e, i) =>
new Promise((resolve) =>
sleep(20 * i).then(() =>
this.getEventsPartFromRpc({ ...e }, true).then((batch) => {
if (!batch) {
resolve([{ isFailedBatch: true, ...e }])
} else {
resolve(batch.events)
}
})
)
)
)
}
async getBatchEventsFromRpc({ fromBlock, type }) {
try {
const batchSize = 10
const blockRange = 10000
let [events, failed] = [[], []]
let lastBlock = fromBlock
const { blockDifference, currentBlockNumber } = await this.getBlocksDiff({ fromBlock })
const batchDigest = blockDifference === 0 ? 1 : Math.ceil(blockDifference / blockRange)
const blockDenom = Math.ceil(blockDifference / batchDigest)
const batchCount = Math.ceil(batchDigest / batchSize)
if (fromBlock < currentBlockNumber) {
await this.updateEventProgress(0, type)
for (let batchIndex = 0; batchIndex < batchCount; batchIndex++) {
const isLastBatch = batchIndex === batchCount - 1
const params = new Array(batchSize).fill('').map((_, i) => {
const toBlock = (i + 1) * blockDenom + lastBlock
const fromBlock = toBlock - blockDenom
return { fromBlock, toBlock, type }
})
const batch = await Promise.all(this.createBatchRequest(params))
const requests = flattenNArray(batch)
events = events.concat(requests.filter((e) => !e.isFailedBatch))
failed = failed.concat(requests.filter((e) => e.isFailedBatch))
lastBlock = params[batchSize - 1].toBlock
const progressIndex = batchIndex - failed.length / batchSize
if (isLastBatch && failed.length !== 0) {
const failedBatch = await Promise.all(this.createBatchRequest(failed))
const failedReqs = flattenNArray(failedBatch)
const failedRept = failedReqs.filter((e) => e.isFailedBatch)
if (failedRept.length === 0) {
events = events.concat(failedReqs)
} else {
throw new Error('Failed to batch events')
}
}
await this.updateEventProgress(progressIndex / batchCount, type)
}
const events = await this.contract.getPastEvents(capitalizeFirstLetter(type), {
fromBlock,
toBlock
})
if (!events?.length) {
return {
lastBlock: events[events.length - 1].blockNumber,
events
events: [],
lastBlock: fromBlock
}
} else {
return undefined
}
return {
events: formatEvents(events, type),
lastBlock: events[events.length - 1].blockNumber
}
} catch (err) {
return undefined
}
}
async getBatchEventsFromRpc({ fromBlock, type }) {
try {
const blockRange = 10000
const { blockDifference, currentBlockNumber } = await this.getBlocksDiff({ fromBlock })
const chunks = blockDifference === 0 ? 1 : Math.ceil(blockDifference / blockRange)
const chunkSize = Math.ceil(blockDifference / chunks)
if (fromBlock < currentBlockNumber) {
const chunk = Math.ceil(chunks / CORES)
const digest = new Array(chunks).fill('')
const workers = new Array(CORES).fill('')
const blocks = digest.map((e, i) => chunkSize * (i + 1) + fromBlock)
const workerBatches = workers.map((e, i) => {
const endIndex = (i + 1) * chunk
const startIndex = endIndex - chunk
return this.openEventThreadPool({
...this.getMessageParams('batch_events', type),
payload: {
blocks: blocks.slice(startIndex, endIndex),
chunkSize
}
})
})
const results = flattenNArray(await Promise.all(workerBatches))
const events = results.map((e) => ({ ...e.returnValues, ...e }))
return {
lastBlock: events[events.length - 1].blockNumber,
events
}
}
return undefined
} catch (err) {
return undefined
}
}
openEventThreadPool(message) {
return new Promise((resolve, reject) => {
// const ipfsPathPrefix = getIPFSPrefix()
// const basePath = `${window.location.origin}${ipfsPathPrefix}`
// const worker = new Worker(basePath + '/assets/events.workers.js')
const worker = new EWorker()
const channel = new MessageChannel()
worker.postMessage(message, [channel.port2])
channel.port1.onmessage = ({ data }) => {
const { result, errorMessage } = data
channel.port1.close()
if (result) {
resolve(result)
} else {
reject(errorMessage)
}
}
})
}
async getEventsFromRpc({ fromBlock, type }) {
try {
const { blockDifference } = await this.getBlocksDiff({ fromBlock })
const blockRange = 10000
let events
if (blockDifference < blockRange) {
if (blockDifference < 10000) {
const rpcEvents = await this.getEventsPartFromRpc({ fromBlock, toBlock: 'latest', type })
events = rpcEvents?.events || []
} else {
@ -450,9 +414,7 @@ class EventsFactory {
instances = new Map()
constructor(rpcUrl) {
const httpProvider = new Web3.providers.HttpProvider(rpcUrl, httpConfig)
this.provider = new Web3(httpProvider).eth
this.provider = new Web3(rpcUrl).eth
}
getBlockNumber = () => {
@ -463,8 +425,12 @@ class EventsFactory {
return new this.provider.Contract(InstanceABI, address)
}
getProviderUrl = () => {
return this.provider.currentProvider.host
}
getService = (payload) => {
const instanceName = `${payload.netId}_${payload.currency}_${payload.amount}`
const instanceName = `${payload.currency}_${payload.amount}`
if (this.instances.has(instanceName)) {
return this.instances.get(instanceName)
@ -474,7 +440,8 @@ class EventsFactory {
...payload,
factoryMethods: {
getContract: this.getContract,
getBlockNumber: this.getBlockNumber
getBlockNumber: this.getBlockNumber,
getProviderUrl: this.getProviderUrl
}
})
this.instances.set(instanceName, instance)

View File

@ -39,7 +39,6 @@ const defaultOptions = {
const client = new ApolloClient({
uri: link,
credentials: 'omit',
cache: new InMemoryCache(),
defaultOptions
})
@ -47,7 +46,6 @@ const client = new ApolloClient({
const registryClient = new ApolloClient({
uri: 'https://api.thegraph.com/subgraphs/name/tornadocash/tornado-relayer-registry',
cache: new InMemoryCache(),
credentials: 'omit',
defaultOptions
})

View File

@ -5,7 +5,7 @@ import { download } from '@/store/snark'
import networkConfig from '@/networkConfig'
import { mimc, bloomService } from '@/services'
const supportedCaches = ['1', '56', '100', '137']
const supportedCaches = ['1', '56', '100', '137', '5']
class MerkleTreeService {
constructor({ netId, amount, currency, commitment, instanceName }) {
@ -16,19 +16,18 @@ class MerkleTreeService {
this.instanceName = instanceName
this.idb = window.$nuxt.$indexedDB(netId)
this.bloomService = bloomService({
netId,
amount,
commitment,
instanceName,
fileFolder: 'trees',
fileName: `deposits_${netId}_${currency}_${amount}_bloom.json.gz`
fileName: `deposits_${currency}_${amount}_bloom.json.zip`
})
}
getFileName(partNumber = trees.PARTS_COUNT) {
return `trees/deposits_${this.netId}_${this.currency}_${this.amount}_slice${partNumber}.json.gz`
return `trees/deposits_${this.currency}_${this.amount}_slice${partNumber}.json.zip`
}
createTree({ events }) {
@ -186,7 +185,7 @@ class TreesFactory {
instances = new Map()
getService = (payload) => {
const instanceName = `${payload.netId}_${payload.currency}_${payload.amount}`
const instanceName = `${payload.currency}_${payload.amount}`
if (this.instances.has(instanceName)) {
return this.instances.get(instanceName)
}

View File

@ -1,16 +1,15 @@
import Web3 from 'web3'
import namehash from 'eth-ens-namehash'
import { BigNumber as BN } from 'bignumber.js'
import { toChecksumAddress, isAddress } from 'web3-utils'
import { toChecksumAddress } from 'web3-utils'
import { graph } from '@/services'
import networkConfig from '@/networkConfig'
import { REGISTRY_DEPLOYED_BLOCK } from '@/constants'
import { sleep, flattenNArray } from '@/utils'
import AggregatorABI from '@/abis/Aggregator.abi.json'
import RelayerRegistryABI from '@/abis/RelayerRegistry.abi.json'
const MIN_STAKE_BALANCE = '0X1B1AE4D6E2EF500000' // 500 TORN
const MIN_STAKE_BALANCE = '0x22B1C8C1227A00000' // 40 TORN
const subdomains = Object.values(networkConfig).map(({ ensSubdomainKey }) => ensSubdomainKey)
@ -25,88 +24,28 @@ class RelayerRegister {
this.relayerRegistry = new this.provider.Contract(RelayerRegistryABI, registryContract)
}
fetchEvents = ({ fromBlock, toBlock }, shouldRetry = false) => {
return new Promise((resolve, reject) => {
if (fromBlock <= toBlock) {
this.relayerRegistry
.getPastEvents('RelayerRegistered', { fromBlock, toBlock })
.then((events) => resolve(events))
.catch((_) => {
if (shouldRetry) {
sleep(500).then(() =>
this.fetchEvents({ fromBlock, toBlock })
.then((events) => resolve(events))
.catch((_) => resolve(undefined))
)
} else {
resolve(undefined)
}
})
} else {
resolve(undefined)
}
})
}
batchFetchEvents = async ({ fromBlock, toBlock }) => {
const batchSize = 10
const blockRange = 10000
const blockDifference = toBlock - fromBlock
const chunkCount = Math.ceil(blockDifference / blockRange)
const blockDenom = Math.ceil(blockDifference / chunkCount)
const chunkSize = Math.ceil(chunkCount / batchSize)
let failed = []
let events = []
let lastBlock = fromBlock
for (let batchIndex = 0; batchIndex < chunkSize; batchIndex++) {
const params = new Array(batchSize).fill('').map((_, i) => {
const toBlock = (i + 1) * blockDenom + lastBlock
const fromBlock = toBlock - blockDenom
return { fromBlock, toBlock }
})
const promises = new Array(batchSize).fill('').map(
(_, i) =>
new Promise((resolve) =>
sleep(i * 20).then(() => {
this.fetchEvents(params[i], true).then((batch) => {
if (!batch) {
resolve([{ isFailedBatch: true, fromBlock, toBlock }])
} else {
resolve(batch)
}
})
})
)
)
const requests = flattenNArray(await Promise.all(promises))
const failedIndexes = requests
.filter((e) => e.isFailedBatch)
.map((e) => {
const reqIndex = requests.indexOf(e)
return params[reqIndex]
fetchEvents = async (fromBlock, toBlock) => {
if (fromBlock <= toBlock) {
try {
const registeredEventsPart = await this.relayerRegistry.getPastEvents('RelayerRegistered', {
fromBlock,
toBlock
})
failed = failed.concat(failedIndexes || [])
events = events.concat(requests.filter((e) => !e.isFailedBatch))
lastBlock = params[batchSize - 1].toBlock
return registeredEventsPart
} catch (error) {
const midBlock = (fromBlock + toBlock) >> 1
if (midBlock - fromBlock < 2) {
throw new Error(`error fetching events: ${error.message}`)
}
const arr1 = await this.fetchEvents(fromBlock, midBlock)
const arr2 = await this.fetchEvents(midBlock + 1, toBlock)
return [...arr1, ...arr2]
}
}
if (failed.length !== 0) {
const failedReqs = failed.map((e) => this.fetchEvents(e))
const failedBatch = flattenNArray(await Promise.all(failedReqs))
events = events.concat(failedBatch || [])
}
events = events.map((e) => ({ ...e.returnValues }))
if (events.length === 0) {
throw new Error('Failed to fetch registry events')
}
return events
return []
}
saveEvents = async ({ events, lastSyncBlock, storeName }) => {
@ -157,70 +96,51 @@ class RelayerRegister {
}
}
// Resolve an ENS name to its on-chain address via the first configured
// mainnet RPC endpoint (ENS lives on Ethereum mainnet, netId1).
getENSAddress = async (ensName) => {
  const [{ url }] = Object.values(networkConfig.netId1.rpcUrls)
  const web3 = new Web3(url)
  return web3.eth.ens.getAddress(ensName)
}
fetchRelayers = async () => {
const blockRange = 10000
// eslint-disable-next-line prefer-const
let { blockTo, cachedEvents } = await this.getCachedData()
let { blockFrom, blockTo, cachedEvents } = await this.getCachedData()
let allRelayers = cachedEvents
const currentBlockNumber = await this.provider.getBlockNumber()
const fromBlock = cachedEvents.length === 0 ? REGISTRY_DEPLOYED_BLOCK[1] : blockTo
const blockDifference = currentBlockNumber - fromBlock
if (blockFrom !== blockTo) {
const registeredRelayersEvents = await graph.getAllRegisters(blockFrom)
try {
let toBlock
let registerRelayerEvents
let lastSyncBlock = blockTo
if (cachedEvents.length > 0 || blockDifference === 0) {
return cachedEvents
} else if (blockDifference >= blockRange) {
toBlock = currentBlockNumber
registerRelayerEvents = await this.batchFetchEvents({ fromBlock, toBlock })
lastSyncBlock = toBlock
} else {
toBlock = fromBlock + blockRange
registerRelayerEvents = await this.fetchEvents({ fromBlock, toBlock }, true)
lastSyncBlock = toBlock
let relayers = {
lastSyncBlock: registeredRelayersEvents.lastSyncBlock,
events: registeredRelayersEvents.events.map((el) => ({
ensName: el.ensName,
relayerAddress: toChecksumAddress(el.address)
}))
}
const relayerEvents = cachedEvents.concat(registerRelayerEvents || [])
const events = []
const isGraphLate = relayers.lastSyncBlock && blockTo > Number(relayers.lastSyncBlock)
for (let x = 0; x < relayerEvents.length; x++) {
const { ensName, relayerAddress } = relayerEvents[x]
let ensAddress
if (isGraphLate) {
blockFrom = relayers.lastSyncBlock
}
if (!isAddress(relayerAddress)) {
ensAddress = await this.getENSAddress(ensName)
ensAddress = toChecksumAddress(ensAddress)
} else {
ensAddress = relayerAddress
if (!relayers.events.length || isGraphLate) {
const multicallEvents = await this.fetchEvents(blockFrom, blockTo)
const eventsRelayers = multicallEvents.map(({ returnValues }) => ({
ensName: returnValues.ensName,
relayerAddress: returnValues.relayerAddress
}))
relayers = {
lastSyncBlock: blockTo,
events: relayers.events.concat(eventsRelayers)
}
events.push({ ensName, relayerAddress: ensAddress })
}
await this.saveEvents({ storeName: 'register_events', lastSyncBlock, events })
allRelayers = allRelayers.concat(events)
} catch (err) {
console.log(err)
await this.saveEvents({ storeName: 'register_events', ...relayers })
allRelayers = allRelayers.concat(relayers.events)
}
return allRelayers
}
filterRelayer = (acc, curr, ensSubdomainKey, relayer) => {
const subdomainIndex = subdomains.indexOf(ensSubdomainKey)
const mainnetSubdomain = curr.records[0]
const hostname = curr.records[subdomainIndex]
const isHostWithProtocol = hostname.includes('http')
@ -271,6 +191,7 @@ class RelayerRegister {
getRelayers = async (ensSubdomainKey) => {
const relayers = await this.fetchRelayers()
const validRelayers = await this.getValidRelayers(relayers, ensSubdomainKey)
return validRelayers

Binary file not shown.

Binary file not shown.

Some files were not shown because too many files have changed in this diff Show More