mirror of https://github.com/autistic-symposium/blockchain-data-engineering-toolkit.git (synced 2025-05-02 14:56:17 -04:00)

first commit (commit bb17a2a56e)
29 changed files with 1238 additions and 0 deletions
131 .gitignore vendored Normal file
@@ -0,0 +1,131 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
pip-wheel-metadata/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# IPython
profile_default/
ipython_config.py

# pyenv
.python-version

# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock

# PEP 582; used by e.g. github.com/David-OConnor/pyflow
__pypackages__/

# Celery stuff
celerybeat-schedule
celerybeat.pid

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
.dmypy.json
dmypy.json

# Pyre type checker
.pyre/
.vercel
.DS_Store
18 README.md Normal file
@@ -0,0 +1,18 @@
# ⛓🛠 blockchain data engineering

<br>

##### scalable event scanners and infrastructure for on-chain analysis and machine learning model training.

<br>

## infrastructure

* **[💎 token-scanner-api](token-scanner-api)**:
  - an mvp for a **scalable event scanner cli and api for ethereum**, built by indexing and parsing block data. this is the first step towards training **machine learning models on on-chain data** (e.g., high-frequency trading with deep learning).
  - check my mirror post, **[building a scalable event scanner for ethereum](https://mirror.xyz/steinkirch.eth/vSF18xcLyfXLIWwxjreRa3I_XskwgnjSc6pScegNJWI)**.

<br>
25 token-scanner-api/.env.example Normal file
@@ -0,0 +1,25 @@
#########################
##### requests settings
#########################

RPC_PROVIDER_URL=
TOKEN_CONTRACT=
TOKEN_CONTRACT_ABI=

#########################
##### indexing token data
#########################

MAX_RETRIES=
RETRIES_TIMEOUT=
SIZE_CHUNK_NEXT=

#########################
##### system settings
#########################

LOG_LEVEL=info
OUTPUT_DIR=./output
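for reference, a hypothetical filled-in `.env` (all values below are placeholders, not recommendations; note that `MAX_RETRIES`, `RETRIES_TIMEOUT`, and `SIZE_CHUNK_NEXT` are parsed as numbers by the indexer, and `TOKEN_CONTRACT_ABI` is a path to a json abi file such as the one shipped in `abi/`):

```
RPC_PROVIDER_URL=https://mainnet.infura.io/v3/<your-api-key>
TOKEN_CONTRACT=0x0000000000000000000000000000000000000000
TOKEN_CONTRACT_ABI=./abi/dog.json

MAX_RETRIES=10
RETRIES_TIMEOUT=3
SIZE_CHUNK_NEXT=1000

LOG_LEVEL=info
OUTPUT_DIR=./output
```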
26 token-scanner-api/Makefile Normal file
@@ -0,0 +1,26 @@
.PHONY: clean
clean:
	@find . -iname '*.py[co]' -delete
	@find . -iname '__pycache__' -delete
	@rm -rf '.pytest_cache'
	@rm -rf dist/
	@rm -rf build/
	@rm -rf *.egg-info
	@rm -rf .tox
	@rm -rf venv/lib/python*/site-packages/*.egg

.PHONY: install
install:
	python3 setup.py install

.PHONY: install_deps
install_deps:
	pip3 install -r requirements.txt

.PHONY: lint
lint:
	tox -e lint

.PHONY: local_server
local_server:
	indexer -a
56 token-scanner-api/README.md Normal file
@@ -0,0 +1,56 @@
## token scanner api and cli

<br>

#### installing dependencies

create a venv using virtualenv, pipenv, or poetry.

because of some of the dependencies in this code, we will be developing in a python3.9 environment (install that version if you don't have it on disk):

```
virtualenv -p /usr/local/bin/python3.9 venv
source venv/bin/activate
pip3 install -r requirements.txt
```

<br>

#### add environment variables

now, create an .env file and add an RPC_PROVIDER_URL to connect to an ethereum mainnet node (you can pick any node-as-a-service provider):

```
cp .env.example .env
vim .env
```

<br>

#### installing the package

```
make install
indexer -h
```

<br>

#### deploying to production

we use vercel to deploy this app at .

to deploy new changes, first install vercel:

```
yarn
```

then run:

```
vercel login
vercel .
```
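for reference, a hypothetical end-to-end run of the cli (the flags come from `src/main.py`; the json filenames are placeholders for whatever the previous step wrote to `OUTPUT_DIR`):

```
indexer -e                                    # index historical transfer events from the chain
indexer -p output/raw_data_<timestamp>.json   # process raw events into balances
indexer -d output/balances_<timestamp>.json   # populate the local database
indexer -a                                    # serve the api locally on port 8000
```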
1 token-scanner-api/abi/dog.json Normal file
File diff suppressed because one or more lines are too long
98 token-scanner-api/output/balances_2023-03-06_22-03-41.json Normal file
@@ -0,0 +1,98 @@
{
    "0xDEF171Fe48CF0115B1d80b88dc8eAB59176FEe57": 0.0,
    "0x45a0c3C8a4d4F0E8c978E9FE3d6c0A361049891A": 0.0,
    "0xfa35113163bFD33c18A01d1A62d4D14a1Ed30a42": 0.0,
    "0xe66B31678d6C16E9ebf358268a790B763C133750": 0.0,
    "0x10Fe1dF1554A3df8EB14c675f7c96f2FD1976319": 0.0,
    "0x893837Ee290413F5349F3Ce47a665f531DF56C72": 0.0,
    "0x74de5d4FCbf63E00296fd95d33236B9794016631": 0.0,
    "0x7A6A59588B8106045303E1923227a2cefbEC2B66": 0.0,
    "0x6571d6be3d8460CF5F7d6711Cd9961860029D85F": 0.0,
    "0xd12bcdFB9A39BE79DA3bDF02557EFdcD5CA59e77": 0.0,
    "0xFd4F24676eD4588928213F37B126B53c07186F45": 0.0,
    "0xd9e1cE17f2641f24aE83637ab66a2cca9C378B9F": 0.0,
    "0x093599e111A14aaEFEf98573f26a7AA2Cc58eBff": 0.0,
    "0xd51b308c5BB8aE26f9F8cAeAC6907d39F73Bb94c": 0.0,
    "0x220bdA5c8994804Ac96ebe4DF184d25e5c2196D4": 0.0,
    "0x1111111254fb6c44bAC0beD2854e76F90643097d": 0.0,
    "0x7f7778A5A1D40423cF4D2ae276c7044F7D43848F": 0.0,
    "0x8D76a976A71b73829F4DDEAaFc48C9Cfa60E1971": 0.0,
    "0x00d5299a42033CeF812E209CE7100059f7Bd1f13": 0.0,
    "0x20d6aeaede89307c62Ee8F97653d627b15a06157": 0.0,
    "0x5c2FBe8585e8C1aBcfCBC2765629c3d479fAE077": 0.0,
    "0xa7be8b7C4819eC8edd05178673575F76974B4EaA": 0.0,
    "0xB1b419581be9a2D63e56d76F74aD0343e02D6A7C": 0.0,
    "0x451aDe6606AeD956a843235E763208Af0080385E": 0.0,
    "0x033F60907bD2b5F4EbB48DF2B13e9411E8e4959c": 0.0,
    "0xA079748329182d35e539EcA79274ed4ffBD33066": 0.0,
    "0xC5A2f28BAcF1425BFAEA14834486F0e9d0832155": 0.0,
    "0x2fEa965e184B36A364C19fBc4740D491A87E6584": 0.0,
    "0xFa71551F146873c30731C3e3d9CAF2AFbc607240": 0.0,
    "0x131e1D3507A1Bc81BF203E3161C2Ec9Fea0BA28B": 0.0,
    "0xd6Bb072838349CD6fBd69076f88F5596A590A268": 0.0,
    "0x2b92BFB9355051c0d565a93dFa42285b3FC3b5e0": 0.0,
    "0x8CEb117F98CebF18b3fB97181A8a2067FaD700d7": 0.0,
    "0x000000000035B5e5ad9019092C665357240f594e": 0.0,
    "0x839d4641F97153b0ff26aB837860c479E2Bd0242": 0.0,
    "0xF71530c1f043703085B42608ff9DCcCc43210a8E": 0.0,
    "0x15058aC839cB1Aea3fF142b1593E5882263bADaD": 0.0,
    "0xCb018790c03bDFd33A82354DEE4e320f1398d66F": 0.0,
    "0xC20003D3581DbFEa983EF36c46fa4Bb1d7D17135": 327.0,
    "0xdD9f24EfC84D93deeF3c8745c837ab63E80Abd27": 51783216.0,
    "0x36a7Ead0dbc0eA6ea766E529b4648548be1c6678": 262115697.0,
    "0xf6890e3114EbC79f56F49D0072b3aA0AF85af949": 2071972176.0,
    "0xAF838Fe6196A08f4575dB0FA7f1904137112ab3f": 2.2095959596e+20,
    "0x1671B592610FB7427Ed788B66fa3E9217fF41047": 3.1109093362476705e+20,
    "0xCb20a54C4Ed357bF7E28D1966e3F0f5215e25B37": 3.3143939394000355e+20,
    "0x5FD62c381959cc244EAd76989A60785f87E3A980": 4.024611112680783e+20,
    "0x0A0ed6600A19D3b349311e513fB9D84e288FF33C": 1e+21,
    "0xF3B23BB212567c174f356437F98544bf13164436": 1.0185572346239809e+21,
    "0xe1A16186A008846B35B39A1F9491aDDAD921B33e": 1.4577243788174188e+21,
    "0xF27a6a05D4e9B1eF9C6A90B15Aa759a135570f38": 2.1303131361012063e+21,
    "0x382fFCe2287252F930E1C8DC9328dac5BF282bA1": 2.2617665932790881e+21,
    "0x87a6877fb21d63c5a36eC894aa87a622007D31D9": 2.664156410446225e+21,
    "0xF05Abab203216009F6E41920845eD940C947Acc1": 3.238508252017349e+21,
    "0x86daab6Be2Cb675eb7496bD12a0253313430167f": 4.6494219983757855e+21,
    "0xa3A7B6F88361F48403514059F1F16C8E78d60EeC": 4.849621203487032e+21,
    "0x2256D57cb823Dde3a0E30bb229C61DAC6a2b69cF": 5.309869340974531e+21,
    "0x94c018A427e873eB4BcF0edE3889Ac0EE76911C5": 6.967942793815712e+21,
    "0xA3EB02Ffa9bF1965629fa2731b16fFEc87b86848": 7.311843276793911e+21,
    "0xD4007A323735520326CF86Be18C8089C9F99bB0c": 7.708736964902699e+21,
    "0x67F60b0891EBD842Ebe55E4CCcA1098d7Aac1A55": 1e+22,
    "0x815DcD0734daA2a4c83E0c66EFEfa8F5a0f69DEB": 1.285622336699151e+22,
    "0x934CC72Be3d9822022B77DAD69B3ca694748603A": 2.749956787645025e+22,
    "0xBCFE8EDa7801953962D0f4a2406E9F89c0Ea17e2": 2.9273948523736032e+22,
    "0x16d0259e9E1064366066cCAAa3D254fd76efdd28": 3e+22,
    "0xA2A8FF2AC4f5045aF0cdAB6F8A37Bc0072b13Fa5": 3.164844691759826e+22,
    "0x759a8ED13955a4d151405310d39a9Bb678607C9F": 4.2069e+22,
    "0x9BA7BB8c7c5BaA33C293488f8b5603c7b5353CEE": 4.2069e+22,
    "0x085A3de28ee31299f2e1316E2406fC81B8A9ebd1": 4.2069e+22,
    "0x13E8F79B9dE61e7Eb5ECc2F745f79bA52c93A71F": 4.2069e+22,
    "0x5e5F651a0520F5bFF3422F4EACB515aC5cAb6d88": 4.2069e+22,
    "0x7b78a416e7cac8541a45c8ff3CFd4c06B3F8F48a": 4.2069e+22,
    "0x0FFf73D4c54fc6460CDA981fB39a04E7D3dB195B": 4.2069e+22,
    "0xB0192C8986EbaACD70bf94FF3E174cD573440fe7": 4.228180359536077e+22,
    "0xE5B472B52C63A042435bD8621dC362B1015F7896": 5.351464971399304e+22,
    "0xAB015ED794FdE18bc5777DE278755D7Eb2230204": 7.386241568926839e+22,
    "0x9008D19f58AAbD9eD0D60971565AA8510560ab41": 8.379311479264368e+22,
    "0xB332ea3297b9175F1247c092166Bd967940454E6": 8.4138e+22,
    "0x1bfE0ADe71F4cB43E01818f0168EaF67D67741cB": 9.78977199057551e+22,
    "0x896190C4427018b453dbFbd6aCFAa28a73e73E29": 1.2337317127566247e+23,
    "0x67b14017725CD9A16908AAB37D5c6B5C026C295C": 1.42223e+23,
    "0xFbf784C8178DF8857FDe1b4b1e14870fD345F693": 1.5e+23,
    "0x9C1c6fd4C3D9b557A70486CC8F4179fBbee72606": 1.703742735332284e+23,
    "0x4BBA5F077a8fCFaF8d6d07c6cfBCE553E2d94C9c": 5.546038271087333e+23,
    "0x75e89d5979E4f6Fba9F97c104c2F0AFB3F1dcB88": 1.0772039258359035e+24,
    "0x02e7Ac540409D32C90BfB51114003a9E1fF0249c": 1.9059159056965411e+24,
    "0x2796317b0fF8538F253012862c06787Adfb8cEb6": 2.6420753459304336e+24,
    "0xe84A5A8Fa61e75b3a7F26dE3bD6F296279DaA306": 3.333333e+24,
    "0x07887Ee0Bd24E774903963d50cF4Ec6a0a16977D": 6.13162878789e+24,
    "0x17D05C8f7f495A652fE506595ecE459bFDF3Ee83": 9.75181310010596e+24,
    "0xFd8DBa176FaEF03903E0c9c4aB8b3898A6A1e409": 1.1281331903648701e+25,
    "0xe4D1664b3E3768Fb92E2A53f1D1652fDe4a1AE4a": 1.1513024024336068e+25,
    "0xCD18eAa163733Da39c232722cBC4E8940b1D8888": 1.2e+25,
    "0xE93381fB4c4F14bDa253907b18faD305D799241a": 1.6325658502264383e+25,
    "0x7731CA4d00800b6a681D031F565DeB355c5b77dA": 1.9568325182066836e+25,
    "0xc96F20099d96b37D7Ede66fF9E4DE59b9B1065b1": 2.4509601850814657e+25,
    "0x3eAB9C85718df9454aA7A1ca6BC765Bcee361617": 6.839334571914754e+25
}
98 token-scanner-api/output/balances_2023-03-06_22-04-40.json Normal file
@@ -0,0 +1,98 @@
{
    "0xDEF171Fe48CF0115B1d80b88dc8eAB59176FEe57": 0.0,
    "0x45a0c3C8a4d4F0E8c978E9FE3d6c0A361049891A": 0.0,
    "0xfa35113163bFD33c18A01d1A62d4D14a1Ed30a42": 0.0,
    "0xe66B31678d6C16E9ebf358268a790B763C133750": 0.0,
    "0x10Fe1dF1554A3df8EB14c675f7c96f2FD1976319": 0.0,
    "0x893837Ee290413F5349F3Ce47a665f531DF56C72": 0.0,
    "0x74de5d4FCbf63E00296fd95d33236B9794016631": 0.0,
    "0x7A6A59588B8106045303E1923227a2cefbEC2B66": 0.0,
    "0x6571d6be3d8460CF5F7d6711Cd9961860029D85F": 0.0,
    "0xd12bcdFB9A39BE79DA3bDF02557EFdcD5CA59e77": 0.0,
    "0xFd4F24676eD4588928213F37B126B53c07186F45": 0.0,
    "0xd9e1cE17f2641f24aE83637ab66a2cca9C378B9F": 0.0,
    "0x093599e111A14aaEFEf98573f26a7AA2Cc58eBff": 0.0,
    "0xd51b308c5BB8aE26f9F8cAeAC6907d39F73Bb94c": 0.0,
    "0x220bdA5c8994804Ac96ebe4DF184d25e5c2196D4": 0.0,
    "0x1111111254fb6c44bAC0beD2854e76F90643097d": 0.0,
    "0x7f7778A5A1D40423cF4D2ae276c7044F7D43848F": 0.0,
    "0x8D76a976A71b73829F4DDEAaFc48C9Cfa60E1971": 0.0,
    "0x00d5299a42033CeF812E209CE7100059f7Bd1f13": 0.0,
    "0x20d6aeaede89307c62Ee8F97653d627b15a06157": 0.0,
    "0x5c2FBe8585e8C1aBcfCBC2765629c3d479fAE077": 0.0,
    "0xa7be8b7C4819eC8edd05178673575F76974B4EaA": 0.0,
    "0xB1b419581be9a2D63e56d76F74aD0343e02D6A7C": 0.0,
    "0x451aDe6606AeD956a843235E763208Af0080385E": 0.0,
    "0x033F60907bD2b5F4EbB48DF2B13e9411E8e4959c": 0.0,
    "0xA079748329182d35e539EcA79274ed4ffBD33066": 0.0,
    "0xC5A2f28BAcF1425BFAEA14834486F0e9d0832155": 0.0,
    "0x2fEa965e184B36A364C19fBc4740D491A87E6584": 0.0,
    "0xFa71551F146873c30731C3e3d9CAF2AFbc607240": 0.0,
    "0x131e1D3507A1Bc81BF203E3161C2Ec9Fea0BA28B": 0.0,
    "0xd6Bb072838349CD6fBd69076f88F5596A590A268": 0.0,
    "0x2b92BFB9355051c0d565a93dFa42285b3FC3b5e0": 0.0,
    "0x8CEb117F98CebF18b3fB97181A8a2067FaD700d7": 0.0,
    "0x000000000035B5e5ad9019092C665357240f594e": 0.0,
    "0x839d4641F97153b0ff26aB837860c479E2Bd0242": 0.0,
    "0xF71530c1f043703085B42608ff9DCcCc43210a8E": 0.0,
    "0x15058aC839cB1Aea3fF142b1593E5882263bADaD": 0.0,
    "0xCb018790c03bDFd33A82354DEE4e320f1398d66F": 0.0,
    "0xC20003D3581DbFEa983EF36c46fa4Bb1d7D17135": 327.0,
    "0xdD9f24EfC84D93deeF3c8745c837ab63E80Abd27": 51783216.0,
    "0x36a7Ead0dbc0eA6ea766E529b4648548be1c6678": 262115697.0,
    "0xf6890e3114EbC79f56F49D0072b3aA0AF85af949": 2071972176.0,
    "0xAF838Fe6196A08f4575dB0FA7f1904137112ab3f": 2.2095959596e+20,
    "0x1671B592610FB7427Ed788B66fa3E9217fF41047": 3.1109093362476705e+20,
    "0xCb20a54C4Ed357bF7E28D1966e3F0f5215e25B37": 3.3143939394000355e+20,
    "0x5FD62c381959cc244EAd76989A60785f87E3A980": 4.024611112680783e+20,
    "0x0A0ed6600A19D3b349311e513fB9D84e288FF33C": 1e+21,
    "0xF3B23BB212567c174f356437F98544bf13164436": 1.0185572346239809e+21,
    "0xe1A16186A008846B35B39A1F9491aDDAD921B33e": 1.4577243788174188e+21,
    "0xF27a6a05D4e9B1eF9C6A90B15Aa759a135570f38": 2.1303131361012063e+21,
    "0x382fFCe2287252F930E1C8DC9328dac5BF282bA1": 2.2617665932790881e+21,
    "0x87a6877fb21d63c5a36eC894aa87a622007D31D9": 2.664156410446225e+21,
    "0xF05Abab203216009F6E41920845eD940C947Acc1": 3.238508252017349e+21,
    "0x86daab6Be2Cb675eb7496bD12a0253313430167f": 4.6494219983757855e+21,
    "0xa3A7B6F88361F48403514059F1F16C8E78d60EeC": 4.849621203487032e+21,
    "0x2256D57cb823Dde3a0E30bb229C61DAC6a2b69cF": 5.309869340974531e+21,
    "0x94c018A427e873eB4BcF0edE3889Ac0EE76911C5": 6.967942793815712e+21,
    "0xA3EB02Ffa9bF1965629fa2731b16fFEc87b86848": 7.311843276793911e+21,
    "0xD4007A323735520326CF86Be18C8089C9F99bB0c": 7.708736964902699e+21,
    "0x67F60b0891EBD842Ebe55E4CCcA1098d7Aac1A55": 1e+22,
    "0x815DcD0734daA2a4c83E0c66EFEfa8F5a0f69DEB": 1.285622336699151e+22,
    "0x934CC72Be3d9822022B77DAD69B3ca694748603A": 2.749956787645025e+22,
    "0xBCFE8EDa7801953962D0f4a2406E9F89c0Ea17e2": 2.9273948523736032e+22,
    "0x16d0259e9E1064366066cCAAa3D254fd76efdd28": 3e+22,
    "0xA2A8FF2AC4f5045aF0cdAB6F8A37Bc0072b13Fa5": 3.164844691759826e+22,
    "0x759a8ED13955a4d151405310d39a9Bb678607C9F": 4.2069e+22,
    "0x9BA7BB8c7c5BaA33C293488f8b5603c7b5353CEE": 4.2069e+22,
    "0x085A3de28ee31299f2e1316E2406fC81B8A9ebd1": 4.2069e+22,
    "0x13E8F79B9dE61e7Eb5ECc2F745f79bA52c93A71F": 4.2069e+22,
    "0x5e5F651a0520F5bFF3422F4EACB515aC5cAb6d88": 4.2069e+22,
    "0x7b78a416e7cac8541a45c8ff3CFd4c06B3F8F48a": 4.2069e+22,
    "0x0FFf73D4c54fc6460CDA981fB39a04E7D3dB195B": 4.2069e+22,
    "0xB0192C8986EbaACD70bf94FF3E174cD573440fe7": 4.228180359536077e+22,
    "0xE5B472B52C63A042435bD8621dC362B1015F7896": 5.351464971399304e+22,
    "0xAB015ED794FdE18bc5777DE278755D7Eb2230204": 7.386241568926839e+22,
    "0x9008D19f58AAbD9eD0D60971565AA8510560ab41": 8.379311479264368e+22,
    "0xB332ea3297b9175F1247c092166Bd967940454E6": 8.4138e+22,
    "0x1bfE0ADe71F4cB43E01818f0168EaF67D67741cB": 9.78977199057551e+22,
    "0x896190C4427018b453dbFbd6aCFAa28a73e73E29": 1.2337317127566247e+23,
    "0x67b14017725CD9A16908AAB37D5c6B5C026C295C": 1.42223e+23,
    "0xFbf784C8178DF8857FDe1b4b1e14870fD345F693": 1.5e+23,
    "0x9C1c6fd4C3D9b557A70486CC8F4179fBbee72606": 1.703742735332284e+23,
    "0x4BBA5F077a8fCFaF8d6d07c6cfBCE553E2d94C9c": 5.546038271087333e+23,
    "0x75e89d5979E4f6Fba9F97c104c2F0AFB3F1dcB88": 1.0772039258359035e+24,
    "0x02e7Ac540409D32C90BfB51114003a9E1fF0249c": 1.9059159056965411e+24,
    "0x2796317b0fF8538F253012862c06787Adfb8cEb6": 2.6420753459304336e+24,
    "0xe84A5A8Fa61e75b3a7F26dE3bD6F296279DaA306": 3.333333e+24,
    "0x07887Ee0Bd24E774903963d50cF4Ec6a0a16977D": 6.13162878789e+24,
    "0x17D05C8f7f495A652fE506595ecE459bFDF3Ee83": 9.75181310010596e+24,
    "0xFd8DBa176FaEF03903E0c9c4aB8b3898A6A1e409": 1.1281331903648701e+25,
    "0xe4D1664b3E3768Fb92E2A53f1D1652fDe4a1AE4a": 1.1513024024336068e+25,
    "0xCD18eAa163733Da39c232722cBC4E8940b1D8888": 1.2e+25,
    "0xE93381fB4c4F14bDa253907b18faD305D799241a": 1.6325658502264383e+25,
    "0x7731CA4d00800b6a681D031F565DeB355c5b77dA": 1.9568325182066836e+25,
    "0xc96F20099d96b37D7Ede66fF9E4DE59b9B1065b1": 2.4509601850814657e+25,
    "0x3eAB9C85718df9454aA7A1ca6BC765Bcee361617": 6.839334571914754e+25
}
1 token-scanner-api/output/data.json Normal file
File diff suppressed because one or more lines are too long
58 token-scanner-api/requirements.txt Normal file
@@ -0,0 +1,58 @@
aiohttp==3.8.4
aiosignal==1.3.1
anyio==3.6.2
async-timeout==4.0.2
attrs==22.2.0
base58==2.1.1
bitarray==2.7.3
certifi==2022.12.7
charset-normalizer==3.1.0
click==8.1.3
cytoolz==0.12.1
dnspython==2.3.0
eth-abi==2.2.0
eth-account==0.5.9
eth-hash==0.5.1
eth-keyfile==0.5.1
eth-keys==0.3.4
eth-rlp==0.2.1
eth-typing==2.3.0
eth-utils==1.9.5
fastapi==0.94.0
frozenlist==1.3.3
h11==0.14.0
hexbytes==0.3.0
idna==3.4
indexer==0.0.2
ipfshttpclient==0.8.0a2
jsonschema==4.17.3
lru-dict==1.1.8
multiaddr==0.0.9
multidict==6.0.4
netaddr==0.8.0
numpy==1.24.2
pandas==1.5.3
parsimonious==0.8.1
protobuf==3.19.5
pycryptodome==3.17
pydantic==1.10.6
pymongo==4.3.3
pyrsistent==0.19.3
python-dateutil==2.8.2
python-dotenv==1.0.0
pytz==2022.7.1
requests==2.28.2
rlp==2.0.1
simplejson==3.18.3
six==1.16.0
sniffio==1.3.0
starlette==0.26.0.post1
toolz==0.12.0
tqdm==4.65.0
typing_extensions==4.5.0
urllib3==1.26.15
uvicorn==0.21.0
varint==1.0.2
web3==5.31.3
websockets==9.1
yarl==1.8.2
16 token-scanner-api/setup.py Normal file
@@ -0,0 +1,16 @@
from setuptools import setup, find_packages


setup(
    name="indexer",
    version='0.0.2',
    packages=find_packages(include=[
        'src',
        'src.blockchains',
        'src.server',
        'src.utils']),
    author="steinkirch.eth",
    install_requires=['python-dotenv'],
    entry_points={
        'console_scripts': ['indexer=src.main:run']
    },
)
1 token-scanner-api/src/__init__.py Normal file
@@ -0,0 +1 @@
# -*- encoding: utf-8 -*-
1 token-scanner-api/src/blockchains/__init__.py Normal file
@@ -0,0 +1 @@
# -*- encoding: utf-8 -*-
190 token-scanner-api/src/blockchains/ethereum.py Normal file
@@ -0,0 +1,190 @@
# -*- encoding: utf-8 -*-
# blockchains/ethereum.py
# This class implements a blockchain indexer for Ethereum.

import time
import datetime

from web3 import Web3
from web3.exceptions import BlockNotFound
from web3.providers.rpc import HTTPProvider
from web3._utils.events import get_event_data
from web3._utils.filters import construct_event_filter_params

import src.utils.os_utils as os_utils
from src.utils.arithmetics import wei_to_eth, to_decimal


class TokenIndexer:

    def __init__(self, indexing_type="address"):

        self.env_vars = os_utils.load_config()
        self.web3 = self._set_web3_object()

        if not self._is_connected():
            os_utils.exit_with_error('Cannot connect to the node. Exiting.')

        # contract parameters
        self.contract_address = self.env_vars['TOKEN_CONTRACT']
        self.contract_abi = self._set_contract_abi()
        self.contract_object = self.web3.eth.contract(abi=self.contract_abi)
        self.events = self.contract_object.events.Transfer

        # indexing parameters
        self.indexing_type = self._set_indexing_type(indexing_type)
        self.max_retries = int(self.env_vars['MAX_RETRIES'])
        self.retries_timeout = float(self.env_vars['RETRIES_TIMEOUT'])
        self.size_chunks_next = float(self.env_vars['SIZE_CHUNK_NEXT'])

        # results parameters
        self.result_data = []
        self.result_filepath = self._set_result_destination()


    ###########################################
    #     Private methods: setters            #
    ###########################################

    def _is_connected(self) -> bool:
        """Check whether the node is connected to the network."""

        return self.web3.isConnected()

    def _set_web3_object(self) -> Web3:
        """Set web3 object from the RPC provider."""

        rpc_provider = HTTPProvider(self.env_vars['RPC_PROVIDER_URL'])
        rpc_provider.middlewares.clear()
        return Web3(rpc_provider)

    def _set_result_destination(self) -> str:
        """Set the result destination file."""

        this_result_file = os_utils.create_result_file("raw_data")
        return os_utils.set_output(this_result_file, self.env_vars)

    def _set_contract_abi(self) -> dict:
        """Load the contract ABI."""

        try:
            return os_utils.open_json(self.env_vars['TOKEN_CONTRACT_ABI'])
        except Exception as e:
            os_utils.exit_with_error(f'Cannot parse contract ABI: {e}. Exiting.')

    def _set_indexing_type(self, indexing_type: str) -> dict:
        """Set the filter used for indexing."""

        if indexing_type == "address":
            return {indexing_type: self.contract_address}
        else:
            os_utils.exit_with_error(f'Indexing type {indexing_type} is not implemented yet. Exiting.')


    ###########################################
    #     Private methods: logic              #
    ###########################################

    def _get_end_block(self, start_block) -> int:
        """Get the last block to index."""

        end_block = self.web3.eth.blockNumber - 1

        if start_block > end_block:
            os_utils.exit_with_error(f'Cannot start from block {start_block} and end at block {end_block}. Exiting.')

        return end_block

    def _get_block_timestamp(self, block_number) -> datetime.datetime:
        """Get the timestamp of a given block."""

        try:
            block_timestamp = self.web3.eth.getBlock(block_number)['timestamp']
            return datetime.datetime.utcfromtimestamp(block_timestamp)
        except (BlockNotFound, ValueError):
            return None

    def _fetch_events(self, start_block, end_block) -> list:
        """Fetch Transfer events from a range of blocks."""

        # https://github.com/ethereum/web3.py/blob/master/web3/_utils/filters.py
        # construct_event_filter_params() and get_event_data() expect the Transfer event ABI
        event_abi = self.events._get_event_abi()
        _, event_filter = construct_event_filter_params(event_abi,
                                                        self.web3.codec,
                                                        address=self.contract_address,
                                                        argument_filters=self.indexing_type,
                                                        fromBlock=start_block,
                                                        toBlock=end_block)
        filter_logs = self.web3.eth.get_logs(event_filter)
        return [get_event_data(self.web3.codec, event_abi, event) for event in filter_logs]

    def _web3_retry_call(self, start_block, end_block) -> tuple:
        """Handle eth_getLogs requests by retrying over a shrinking block range."""

        retry = 0
        while retry < self.max_retries - 1:
            try:
                return end_block, self._fetch_events(start_block, end_block)

            except Exception as e:
                os_utils.log_error(f'Failed to index events for blocks range {start_block} to {end_block}: {e}')
                end_block = start_block + ((end_block - start_block) // 2)
                time.sleep(self.retries_timeout)
                retry += 1

    def _run_indexer_by_chunk(self, start_block, end_block_for_chunk) -> tuple:
        """Run the indexer for one chunk of blocks."""

        this_results = []
        this_end_block, events = self._web3_retry_call(start_block, end_block_for_chunk)

        for event in events:
            transfer = {
                "from": event["args"]["from"],
                "to": event["args"]["to"],
                "value": str(to_decimal(wei_to_eth(event["args"]["value"]))),
            }
            this_results.append(transfer)

        return this_end_block, this_results

    def _run_indexer(self, start_block=None, end_block=None) -> None:

        # set up the indexer
        results = []
        start_block = start_block or 0
        end_block = end_block or self._get_end_block(start_block)

        # start the indexer loop
        while start_block <= end_block:

            end_block_for_chunk = int(start_block + self.size_chunks_next)
            os_utils.log_info(f'Indexing transfers for blocks: {start_block} - {end_block_for_chunk}')

            # scan chunk
            this_block_end, this_results = self._run_indexer_by_chunk(start_block, end_block_for_chunk)

            # update indexer parameters
            results += this_results
            start_block = this_block_end + 1

        self.result_data = results


    ###########################
    #     Public methods      #
    ###########################

    def run(self):
        """Run the indexer."""

        start_time = time.time()
        self._run_indexer()
        delta_time = time.time() - start_time
        os_utils.log_info(f'{len(self.result_data)} transfer events were indexed in {delta_time} seconds.')

        os_utils.save_output(self.result_filepath, self.result_data)
        os_utils.log_info(f'Results were saved at {self.result_filepath}.')
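a minimal sketch of driving the indexer class directly from a python shell, assuming the `.env` described above is in place (this is the same call that `indexer -e` wires up in `src/main.py`):

```python
# hypothetical direct usage of the indexer class
from src.blockchains.ethereum import TokenIndexer

indexer = TokenIndexer()   # loads .env and connects to the rpc provider
indexer.run()              # scans Transfer events in chunks and saves raw_data_<timestamp>.json to OUTPUT_DIR
```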
90 token-scanner-api/src/main.py Normal file
@@ -0,0 +1,90 @@
#!/usr/bin/env python3
# -*- encoding: utf-8 -*-
# src/main.py
# Entry point for ethereum-token-api.

import uvicorn
import argparse

from src.utils.db_utils import populate_db
from src.blockchains.ethereum import TokenIndexer
from src.utils.vercel_utils import upload_to_vercel
from src.utils import test_api as f
from src.utils.data_processing import run_data_processing


def run_menu() -> argparse.ArgumentParser:

    parser = argparse.ArgumentParser(description='🪙 Token indexer and API.')

    parser.add_argument('-e', dest='indexer', action='store_true',
                        help="Retrieve historical transfer events data on Ethereum. \
                            Example: indexer -e")
    parser.add_argument('-p', dest='process', nargs=1,
                        help="Process historical transfer events data. \
                            Example: indexer -p <json data file>")
    parser.add_argument('-d', dest='db', nargs=1,
                        help="Populate db with processed event data. \
                            Example: indexer -d <json data file>")

    parser.add_argument('-a', dest='api', action='store_true',
                        help="Run the event scanner api locally. \
                            Example: indexer -a")
    parser.add_argument('-c', dest='vercel', action='store_true',
                        help="Deploy event scanner to Vercel. \
                            Example: indexer -c")

    parser.add_argument('-b', dest='balance', nargs=1,
                        help="Fetch token balance for a given wallet. \
                            Example: indexer -b <wallet address>")
    parser.add_argument('-t', dest='top', nargs=1,
                        help="Fetch top token holders. \
                            Example: indexer -t <number of holders>")
    parser.add_argument('-g', dest='change', nargs=1,
                        help="Fetch weekly balance change for a given wallet. \
                            Example: indexer -g <wallet address>")
    return parser


def run() -> None:
    """Entry point for this module."""

    parser = run_menu()
    args = parser.parse_args()

    #############################
    # Run historical data indexer
    #############################
    if args.indexer:
        indexer = TokenIndexer()
        indexer.run()
    elif args.process:
        run_data_processing(args.process[0])
    elif args.db:
        populate_db(args.db[0])

    #############################
    # Run deployment tools
    #############################
    elif args.api:
        uvicorn.run("src.server.api:app", host="0.0.0.0", port=8000, reload=True)
    elif args.vercel:
        upload_to_vercel()

    #############################
    # Run api tests
    #############################
    elif args.balance:
        f.fetch_token_balance(args.balance[0])
    elif args.top:
        f.fetch_top_holders(args.top[0])
    elif args.change:
        f.fetch_change(args.change[0])

    else:
        parser.print_help()


if __name__ == "__main__":
    run()
1 token-scanner-api/src/server/__init__.py Normal file
@@ -0,0 +1 @@
# -*- encoding: utf-8 -*-
25 token-scanner-api/src/server/api.py Normal file
@@ -0,0 +1,25 @@
from fastapi import FastAPI
from pymongo import MongoClient

from routes import router


app = FastAPI()
app.include_router(router)


url = "mongodb://localhost:27017/"
DB_NAME = "balances"


@app.on_event("startup")
def startup_db_client():
    app.mongodb_client = MongoClient(url)
    app.database = app.mongodb_client[DB_NAME]
    print("Connected to the MongoDB database!")


@app.on_event("shutdown")
def shutdown_db_client():
    app.mongodb_client.close()
52 token-scanner-api/src/server/database.py Normal file
@@ -0,0 +1,52 @@
from pymongo import MongoClient

MONGO_DETAILS = "mongodb://localhost:27017"

# import motor.motor_asyncio
# client = motor.motor_asyncio.AsyncIOMotorClient(MONGO_DETAILS)

client = MongoClient(MONGO_DETAILS)

database = client.balances

collection = database.get_collection("balances")


def wallet_helper(item) -> dict:
    return {
        "wallet": item["wallet"],
    }


async def retrieve_students():
    """Retrieve a few wallets from the balances collection."""

    bals = collection.find()

    res = []
    counter = 0
    for i in bals:
        res.append(wallet_helper(i))
        if counter > 2:
            break
        counter += 1

    return res


def balancer_helper(item) -> dict:
    return {
        "wallet": item["wallet"],
        "balance": item["balance"],
    }


# Retrieve the balance entry for a matching wallet
async def retrieve_student(wallet: str) -> dict:
    student = collection.find_one({"wallet": wallet})
    if student:
        return balancer_helper(student)
27 token-scanner-api/src/server/models.py Normal file
@@ -0,0 +1,27 @@
from pydantic import BaseModel, Field


class WalletsSchema(BaseModel):
    wallet: float = Field(...)

    class Config:
        allow_population_by_field_name = True
        schema_extra = {
            "example": {
                "wallet": "balance"
            }
        }


def ResponseModel(data, message):
    return {
        "data": [data],
        "code": 200,
        "message": message,
    }


def ErrorResponseModel(error, code, message):
    return {"error": error, "code": code, "message": message}
61 token-scanner-api/src/server/routes.py Normal file
@@ -0,0 +1,61 @@
import asyncio
import ethereum as APIEth
from fastapi import APIRouter, Body
from fastapi.encoders import jsonable_encoder

from database import (
    retrieve_students,
    retrieve_student,
)
from models import (
    WalletsSchema,
    ResponseModel,
    ErrorResponseModel,
)


router = APIRouter()


@router.get("/")
async def get_notes() -> dict:
    return {
        "message": "server is up and running!"
    }


@router.get("/balance/{address}")
async def get_token_balance(address: str) -> dict:
    """Get a token balance for a given address."""

    futures = [retrieve_student(address)]
    result = await asyncio.gather(*futures)
    return {"result": result}


@router.get("/top")
async def get_top_holders() -> dict:
    """Get top holders of a given token."""

    futures = [retrieve_students()]
    result = await asyncio.gather(*futures)
    if result:
        return {"top_holders": result}
    else:
        return {"error": "No holders found"}


@router.get("/weekly/{address}")
async def get_holder_weekly_change(address: str) -> dict:
    """Get weekly change of a given address."""

    futures = [APIEth.fetch_weekly_balance_change_by_address(address)]
    result = await asyncio.gather(*futures)
    return {"result": result}
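a quick sketch of querying these routes once the api is running locally (`indexer -a` serves on port 8000); it uses the `requests` package already pinned in `requirements.txt`, and the wallet below is just a placeholder address:

```python
# hypothetical client for the local api
import requests

BASE = "http://localhost:8000"
wallet = "0x9008D19f58AAbD9eD0D60971565AA8510560ab41"  # placeholder wallet

print(requests.get(f"{BASE}/").json())                  # {"message": "server is up and running!"}
print(requests.get(f"{BASE}/balance/{wallet}").json())  # {"result": [...]}
print(requests.get(f"{BASE}/top").json())               # {"top_holders": [...]}
```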
1 token-scanner-api/src/utils/__init__.py Normal file
@@ -0,0 +1 @@
# -*- encoding: utf-8 -*-
30 token-scanner-api/src/utils/arithmetics.py Normal file
@@ -0,0 +1,30 @@
# -*- encoding: utf-8 -*-
# utils/arithmetics.py
# This module implements math methods used by the other classes.


from decimal import Decimal, getcontext
from src.utils.os_utils import log_error


def div(dividend, divisor) -> Decimal:
    """Return higher precision division."""

    if divisor == 0:
        log_error('Found a zero division error. Returning 0.')
        return 0
    return to_decimal(dividend) / to_decimal(divisor)


def to_decimal(value, precision=None) -> Decimal:
    """Return Decimal value for higher (defined) precision."""

    precision = precision or 22
    getcontext().prec = precision
    return Decimal(value)


def wei_to_eth(num) -> float:
    """Convert wei to eth."""

    return num / float(1000000000000000000)
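a tiny sketch of how these helpers compose when converting a raw transfer value (the wei amount below is arbitrary):

```python
# hypothetical quick check of the conversion helpers
from src.utils.arithmetics import wei_to_eth, to_decimal, div

raw_value = 42069000000000000000000       # arbitrary amount in wei
print(to_decimal(wei_to_eth(raw_value)))  # 42069 (token units, via float conversion)
print(div(raw_value, 10**18))             # 42069 (same conversion, kept in Decimal)
```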
36 token-scanner-api/src/utils/data_processing.py Normal file
@@ -0,0 +1,36 @@
# -*- encoding: utf-8 -*-
# utils/data_processing.py
# Data processing for token transfers.

import collections
from decimal import Decimal

import src.utils.os_utils as os_utils


def process_balances(filepath) -> dict:
    """Return the aggregated balance of each address, keyed by address."""

    data = os_utils.open_json(filepath)
    balances = collections.defaultdict(Decimal)

    for _, block_data in data.items():
        for _, tx_data in block_data.items():
            for _, event_data in tx_data.items():
                balances[event_data["from"]] -= Decimal(event_data["value"])
                balances[event_data["to"]] += Decimal(event_data["value"])

    balances = {key: float(value) for key, value in balances.items() if value >= Decimal('0')}
    return dict(sorted(balances.items(), key=lambda x: x[1]))


def run_data_processing(filepath) -> None:
    """Run data processing."""

    balance_data = process_balances(filepath)
    balance_output_file = os_utils.create_result_file("balances")
    balance_output_filepath = os_utils.set_output(balance_output_file)

    os_utils.log_info(f' Writing balances to {balance_output_filepath}')
    os_utils.save_output(balance_output_filepath, balance_data)
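the nested loops in `process_balances()` assume the raw events file is keyed by block, then by transaction, then by event index; a minimal sketch of that assumed shape and the resulting aggregation (addresses and values are made up):

```python
# hypothetical raw-data shape consumed by process_balances (block -> tx -> event index)
raw = {
    "16780000": {
        "0xaaa...": {
            "0": {"from": "0xAlice", "to": "0xBob", "value": "10"},
            "1": {"from": "0xBob", "to": "0xCarol", "value": "4"},
        }
    }
}
# aggregating as above yields {"0xCarol": 4.0, "0xBob": 6.0}
# (0xAlice nets out negative and is filtered from the final dict)
```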
29 token-scanner-api/src/utils/db_utils.py Normal file
@@ -0,0 +1,29 @@
# -*- encoding: utf-8 -*-
# utils/db_utils.py
# Populate the database with processed balance data.

import pymongo
import src.utils.os_utils as os_utils


def populate_db(filepath):
    """Insert the processed balances file into the local mongodb instance."""

    url = "mongodb://localhost:27017/"

    client = pymongo.MongoClient(url)
    database = client["balances"]
    collection = database["balances"]

    data = os_utils.open_json(filepath)

    result = []
    for wallet, balance in data.items():
        result.append({"wallet": wallet, "balance": balance})

    collection.insert_many(result)
138 token-scanner-api/src/utils/os_utils.py Normal file
@@ -0,0 +1,138 @@
# -*- encoding: utf-8 -*-
# utils/os_utils.py
# This module implements OS/file system util methods used by the other classes.

import os
import sys
import json
import logging
from pathlib import Path
from dotenv import load_dotenv
from datetime import datetime


def set_logging(log_level) -> None:
    """Set logging level according to .env config."""

    if log_level == 'info':
        logging.basicConfig(level=logging.INFO, format='%(message)s')

    elif log_level == 'error':
        logging.basicConfig(level=logging.ERROR, format='%(message)s')

    elif log_level == 'debug':
        logging.basicConfig(level=logging.DEBUG, format='%(message)s')

    else:
        print(f'Logging level {log_level} is not available. Setting to ERROR')
        logging.basicConfig(level=logging.ERROR, format='%(message)s')


def load_config() -> dict:
    """Load and set environment variables."""

    env_file = Path('.') / '.env'
    if not os.path.isfile(env_file):
        exit_with_error('Please create an .env file')

    env_vars = {}
    load_dotenv(env_file)

    try:
        env_vars['RPC_PROVIDER_URL'] = os.getenv("RPC_PROVIDER_URL")
        env_vars['TOKEN_CONTRACT'] = os.getenv("TOKEN_CONTRACT")
        env_vars['TOKEN_CONTRACT_ABI'] = os.getenv("TOKEN_CONTRACT_ABI")
        env_vars['MAX_RETRIES'] = os.getenv("MAX_RETRIES")
        env_vars['RETRIES_TIMEOUT'] = os.getenv("RETRIES_TIMEOUT")
        env_vars['SIZE_CHUNK_NEXT'] = os.getenv("SIZE_CHUNK_NEXT")
        env_vars['OUTPUT_DIR'] = os.getenv("OUTPUT_DIR")
        set_logging(os.getenv("LOG_LEVEL"))
        return env_vars

    except KeyError as e:
        exit_with_error(f'Cannot extract env variables: {e}. Exiting.')


def log_error(string) -> None:
    """Print STDOUT error using the logging library."""

    logging.error('⛔️ %s', string)


def log_info(string) -> None:
    """Print STDOUT info using the logging library."""

    logging.info('ℹ️ %s', string)


def log_debug(string) -> None:
    """Print STDOUT debug using the logging library."""

    logging.debug('⚠️ %s', string)


def open_json(filepath) -> dict:
    """Load and parse a JSON file."""

    try:
        with open(filepath, 'r', encoding='utf-8') as infile:
            return json.load(infile)

    except (IOError, FileNotFoundError, TypeError) as e:
        exit_with_error(f'Failed to parse: "{filepath}": {e}')


def format_path(dir_path, filename) -> str:
    """Format an OS full filepath."""

    return os.path.join(dir_path, filename)


def save_output(destination, data, mode="w") -> None:
    """Save data from memory to a destination on disk."""

    try:
        with open(destination, mode, encoding='utf-8') as outfile:
            json.dump(data, outfile, indent=4)

    except (IOError, TypeError) as e:
        log_error(f'Could not save {destination}: {e}')


def create_dir(result_dir) -> None:
    """Check whether a directory exists and create it if needed."""

    try:
        if not os.path.isdir(result_dir):
            os.mkdir(result_dir)

    except OSError as e:
        log_error(f'Could not create {result_dir}: {e}')


def set_output(output_file, env_vars=None) -> str:
    """Create an output destination to save solutions."""

    if env_vars is None:
        env_vars = load_config()

    try:
        output_dir = env_vars['OUTPUT_DIR']
        create_dir(output_dir)
        return format_path(output_dir, output_file)

    except (TypeError, KeyError) as e:
        exit_with_error(f'Could not format output file: {e}')


def exit_with_error(message) -> None:
    """Log an error message and halt the program."""

    log_error(message)
    sys.exit(1)


def create_result_file(prefix) -> str:
    """Create an output file to save solutions."""

    this_time = datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
    return f'{prefix}_{this_time}.json'
8 token-scanner-api/src/utils/test_api.py Normal file
@@ -0,0 +1,8 @@
def fetch_token_balance(wallet):
    pass


def fetch_top_holders(top_n):
    pass


def fetch_change(wallet):
    pass
4 token-scanner-api/src/utils/vercel_utils.py Normal file
@@ -0,0 +1,4 @@
# -*- encoding: utf-8 -*-

def upload_to_vercel():
    pass
8 token-scanner-api/tox.ini Normal file
@@ -0,0 +1,8 @@
[tox]
envlist = py36, py37, py38, py39, py310
skip_missing_interpreters = true

[testenv:lint]
skip_install = true
deps = flake8
commands = flake8 src/
8 token-scanner-api/vercel.json Normal file
@@ -0,0 +1,8 @@
{
    "builds": [
        {"src": "/src/api.py", "use": "@vercel/python"}
    ],
    "routes": [
        {"src": "/(.*)", "dest": "src/api.py"}
    ]
}