From ea0c3b646964bea2c4f809c503714f4ee1096eaa Mon Sep 17 00:00:00 2001
From: TC Johnson
Date: Sun, 16 Mar 2025 11:08:58 -0500
Subject: [PATCH] Converting CICD from Bash to Python Scripts

Calling this Phase 1.

I've switched the build machine philosophy from using a dedicated
DigitalOcean droplet per arch to using one large build machine and the
+package-linux Earthly target, which produces .deb and .rpm packages for
both amd64 and arm64/aarch64.

The script that creates and deletes the build machine has been migrated
to Python. I feel the error handling is better, and the delete function
now works from the specific ID of the running build machine instead of
its name. Using the name would, in rare circumstances, fail when more
than one machine with the same name existed, causing expensive
duplicates to be created and larger than normal DigitalOcean costs.

Lastly, moving the .deb and .rpm packages from the build machine to the
build orchestrator for creating and signing the repositories now uses
the GitLab CICD artifact system instead of SCP. This switch will allow
us to include the packages in the release records and maybe streamline
the Python and Crates distribution jobs in a later phase of this
project.

Changes are made in the Dry Run section of the CICD config for testing,
which will start in a few minutes and will probably result in a bunch of
failed pipelines and tweaking, because there's just no way I got all of
this right on the first try.
---
 .gitignore                                    | 178 ++++++++++++++++++
 .gitlab-ci.yml                                |  50 ++---
 scripts/cicd-python/.env                      |   1 -
 scripts/cicd-python/.gitignore                | 178 ++++++++++++++++++
 scripts/cicd-python/config.json               |   8 +
 scripts/cicd-python/main.py                   |  38 ----
 .../__pycache__/__init__.cpython-313.pyc      | Bin 170 -> 0 bytes
 .../__pycache__/droplets.cpython-313.pyc      | Bin 7566 -> 0 bytes
 .../test_credentials.cpython-313.pyc          | Bin 1810 -> 0 bytes
 .../{droplets.py => build_machine_control.py} | 104 +++++-----
 scripts/cicd-python/utils/repos_builder.py    |   8 +
 scripts/cicd-python/veilid_release_utils.py   |  37 ++++
 12 files changed, 476 insertions(+), 126 deletions(-)
 delete mode 100644 scripts/cicd-python/.env
 create mode 100644 scripts/cicd-python/.gitignore
 create mode 100644 scripts/cicd-python/config.json
 delete mode 100644 scripts/cicd-python/main.py
 delete mode 100644 scripts/cicd-python/utils/__pycache__/__init__.cpython-313.pyc
 delete mode 100644 scripts/cicd-python/utils/__pycache__/droplets.cpython-313.pyc
 delete mode 100644 scripts/cicd-python/utils/__pycache__/test_credentials.cpython-313.pyc
 rename scripts/cicd-python/utils/{droplets.py => build_machine_control.py} (54%)
 create mode 100644 scripts/cicd-python/utils/repos_builder.py
 create mode 100644 scripts/cicd-python/veilid_release_utils.py

diff --git a/.gitignore b/.gitignore
index e1c9c94a..cde65e51 100644
--- a/.gitignore
+++ b/.gitignore
@@ -76,3 +76,181 @@ perf.data.old
 
 # Earthly temporary build output
 .tmp-earthly-out/
+
+###############################################################################
+### Python
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# UV +# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +#uv.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/latest/usage/project/#working-with-version-control +.pdm.toml +.pdm-python +.pdm-build/ + +# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. 
+#.idea/
+
+# Ruff stuff:
+.ruff_cache/
+
+# PyPI configuration file
+.pypirc
\ No newline at end of file
diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index dd48bdce..27b8235c 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -261,13 +261,11 @@ dryrun_create_build_machines:
   tags:
     - build-orchestration
   script:
-    - bash scripts/cicd/build-orchestration/build-machine-ctrl.sh create amd64-deb
-    - bash scripts/cicd/build-orchestration/build-machine-ctrl.sh create arm64-deb
-    - bash scripts/cicd/build-orchestration/build-machine-ctrl.sh create amd64-rpm
+    - uv run scripts/cicd-python/veilid_release_utils.py --create-build-machine
   rules:
     - if: $CI_COMMIT_MESSAGE =~ /\[ci dryrun]/
 
-dryrun_package_amd64_deb:
+dryrun_package_linux:
   stage: build_packages
   needs:
     - dryrun_create_build_machines
@@ -275,34 +273,10 @@ dryrun_package_amd64_deb:
     - build-amd64-deb
   script:
     - earthly bootstrap
-    - earthly +package-linux-amd64-deb
-    - bash scripts/cicd/build-machine/scp-amd64-debs-to-orchestrator.sh
-  rules:
-    - if: $CI_COMMIT_MESSAGE =~ /\[ci dryrun]/
-
-dryrun_package_arm64_deb:
-  stage: build_packages
-  needs:
-    - dryrun_create_build_machines
-  tags:
-    - build-arm64-deb
-  script:
-    - earthly bootstrap
-    - earthly +package-linux-arm64-deb
-    - bash scripts/cicd/build-machine/scp-arm64-debs-to-orchestrator.sh
-  rules:
-    - if: $CI_COMMIT_MESSAGE =~ /\[ci dryrun]/
-
-dryrun_package_amd64_rpm:
-  stage: build_packages
-  needs:
-    - dryrun_create_build_machines
-  tags:
-    - build-amd64-rpm
-  script:
-    - earthly bootstrap
-    - earthly +package-linux-amd64-rpm
-    - bash scripts/cicd/build-machine/scp-amd64-rpms-to-orchestrator.sh
+    - earthly +package-linux
+  artifacts:
+    paths:
+      - target/packages/*
   rules:
     - if: $CI_COMMIT_MESSAGE =~ /\[ci dryrun]/
 
@@ -315,7 +289,6 @@ dryrun_publish_crates:
   script:
     - vlt login
    - vlt run --command="cargo publish -p veilid-tools --dry-run"
-    - vlt run --command="cargo publish -p veilid-core --dry-run"
   rules:
     - if: $CI_COMMIT_MESSAGE =~ /\[ci dryrun]/
 
@@ -338,8 +311,11 @@ dryrun_build_repositories:
     SECURE_FILES_DOWNLOAD_PATH: './'
   script:
     - curl --silent "https://gitlab.com/gitlab-org/incubation-engineering/mobile-devops/download-secure-files/-/raw/main/installer" | bash
-    - cp scripts/cicd/build-orchestration/generate-release.sh ~
-    - bash scripts/cicd/build-orchestration/distribute-packages.sh
+    - cp scripts/cicd/build-orchestration/rpm-repo-building/Dockerfile ~/rpm-build-container
+    - cp scripts/cicd/build-orchestration/rpm-repo-building/repobuild.sh ~/rpm-build-container
+    - cp scripts/cicd/build-orchestration/generate-stable-release.sh ~
+    - bash scripts/cicd/build-orchestration/distribute-stable-packages.sh
+  dependencies: ["dryrun_package_linux"]
   rules:
     - if: $CI_COMMIT_MESSAGE =~ /\[ci dryrun]/
 
@@ -361,9 +337,7 @@ dryrun_delete_build_machines:
   tags:
     - build-orchestration
   script:
-    - bash scripts/cicd/build-orchestration/build-machine-ctrl.sh delete amd64-deb
-    - bash scripts/cicd/build-orchestration/build-machine-ctrl.sh delete arm64-deb
-    - bash scripts/cicd/build-orchestration/build-machine-ctrl.sh delete amd64-rpm
+    - uv run scripts/cicd-python/veilid_release_utils.py --delete-build-machine
   rules:
     - if: $CI_COMMIT_MESSAGE =~ /\[ci dryrun]/
 
diff --git a/scripts/cicd-python/.env b/scripts/cicd-python/.env
deleted file mode 100644
index d8fbf577..00000000
--- a/scripts/cicd-python/.env
+++ /dev/null
@@ -1 +0,0 @@
-DO_API_TOKEN=dop_v1_4cce22b1171e09c37b5a191ab42dde8004b4cb699bf924e5f479d8c9764fb36e
\ No newline at end of file
diff --git a/scripts/cicd-python/.gitignore b/scripts/cicd-python/.gitignore
new file mode 100644
index
00000000..ccb05fe0 --- /dev/null +++ b/scripts/cicd-python/.gitignore @@ -0,0 +1,178 @@ +############################################################################### +### Python + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class +**/__pycache__/ + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. +*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# UV +# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +#uv.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/latest/usage/project/#working-with-version-control +.pdm.toml +.pdm-python +.pdm-build/ + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ + +# PyCharm +# JetBrains specific template is maintained in a separate JetBrains.gitignore that can +# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore +# and can be added to the global gitignore or merged into this file. For a more nuclear +# option (not recommended) you can uncomment the following to ignore the entire idea folder. +#.idea/ + +# Ruff stuff: +.ruff_cache/ + +# PyPI configuration file +.pypirc \ No newline at end of file diff --git a/scripts/cicd-python/config.json b/scripts/cicd-python/config.json new file mode 100644 index 00000000..3e346720 --- /dev/null +++ b/scripts/cicd-python/config.json @@ -0,0 +1,8 @@ +{ + "droplet_config": { + "name": "build-server-tmp", + "image": 181171505, + "size": "c2-16vcpu-32gb" + }, + "droplet_id": 482837155 +} \ No newline at end of file diff --git a/scripts/cicd-python/main.py b/scripts/cicd-python/main.py deleted file mode 100644 index 4d89663f..00000000 --- a/scripts/cicd-python/main.py +++ /dev/null @@ -1,38 +0,0 @@ -#!/usr/bin/env python3 -import os -import sys -from dotenv import load_dotenv -import argparse -import asyncio - -from utils.test_credentials import test_api_credentials -from utils.droplets import create_droplet, delete_droplet - -if __name__ == "__main__": - # Load environment variables from the .env file. - load_dotenv() - token = os.getenv("DO_API_TOKEN") - if not token: - print("Error: DO_API_TOKEN environment variable not found. Please set it in the .env file.", file=sys.stderr) - sys.exit(1) - - # Set up command-line argument parsing. 
- parser = argparse.ArgumentParser(description="DigitalOcean API Utility") - subparsers = parser.add_subparsers(dest="command", required=True) - - subparsers.add_parser("test-credentials", help="Test DigitalOcean API credentials") - - create_parser = subparsers.add_parser("create", help="Create a droplet") - create_parser.add_argument("droplet_type", help="Type of droplet to create (e.g., amd64-deb)") - - delete_parser = subparsers.add_parser("delete", help="Delete a droplet") - delete_parser.add_argument("droplet_type", help="Type of droplet to delete (e.g., amd64-deb)") - - args = parser.parse_args() - - if args.command == "test-credentials": - asyncio.run(test_api_credentials(token)) - elif args.command == "create": - asyncio.run(create_droplet(token, args.droplet_type)) - elif args.command == "delete": - asyncio.run(delete_droplet(token, args.droplet_type)) diff --git a/scripts/cicd-python/utils/__pycache__/__init__.cpython-313.pyc b/scripts/cicd-python/utils/__pycache__/__init__.cpython-313.pyc deleted file mode 100644 index bfdeefdd3c28e0f573143f6b959a629be1d9e9c1..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 170 zcmey&%ge<81n25@r-SInAOZ#$p^VQgK*m&tbOudEzm*I{OhDdekklD1D`iF_4fJXhdiSG==A((U(y-c(&1yy(l1`dOr-Q-F+T&qcJ+t!@?GJSKY$UW= z?YU(aCTV9g>Bq;K)Vb%Ld+#~to_lW9IVYbN6etP!{qx+f#=oa0i2uMH@keHn&(8z$ zbAlyUbcWzFE#i^Tm+%r+!b({gD`!cE^eEz`EcKdV89|}(dqL-pG;XsLk3$b&QlD`}Hn z6zxci0{^Ffth7_Y5QpXn=kesMDuR`X5*dP(dk78DCSggIa!5ESOLH=hgjHbCtP(I4 z#-LRTlmb@ckaO~jYLVxtM#OS$715FHAi@bJdl_~;jeah5%hd!&ddShj+*}bmf6^mo zwVl#DD<}^&S|rZPwXCii?B;cXcGPHbjxJ)N4o>i~Cp4fB*|pF^ZwaSn&6!nPfk(}jbiGLsgUD-xU71B&jh2d7t_8m>73DmArXZKhr}g;+ zYw48a@hOc*lh>!pur`8EX}+>gm48bN#P+oRpsoIqk0N+)aRYT*Bu`xXkt+6HUN~k|3`qo6X4$gMD9Ne z@KXW)N`O-WM3qD%A^3z${L;{bnmBl4!gpc}zKazE1AoBB)FZ7R#t==oKZEB1Jbw!h zT!O!xi+lQj$K(sG`_aL=nHO0%PGr`-EFDAkYWKcoAQOl%I1piM`-L`!3q=_|;0uj& z5U}_iw@_L(7Mut?g54AQRDZzB2Y6;*=M9G^f$X*Z0O$IAWKismlScKfp)QGOUqZ@@Nf~K3WG5 zGoRx55YPDd04T?eGg)a}Oq$A~F~%Jd$xM(N3-Ob}KEN0_Is;RrB|(3zUbI+-RfvPR z!D!GMg$ax}<{SXVWF+d1PDRpEF7W2Q{V&~qbDH$}qQU9FBmAMeg?JnRJX*zjux9oU z4oyrP+5(di)5XMeqWA;KfwF#Y)XPi;BN4nX$T1=hJ`f3qxJV#YDy|Jin5i)1<@^jk z#c?p*5v$9{1v~J;!1Q6!hs*@>4I&R`&U#&TO$3ls6c{=1LcwoCtygu>sIK#HD57P$bHigDKLoXyC0V zk3aMDTa(n*ZENcmZ7F@} zjpK927nIAk+2cvQYg6x9^{2G@pI!a&)rBJoZDXq1_I~Hh&R=vV%N?`I9bIwUd~~UF zNgX%#+&j7Ay5ri?jqK?3H|Tfh`O|bMBI#+gOyrg?ucRW?Xyk~jWvhZr6P1FMRZiAkDlnHMIY-_f~JBp*vaIJ+Is` zm&cjj+p*gdaqF4wQBQL8wawAjw#?t#FF!V2FaEc#&gU-TbM^dgJQc)rIca zt4Y&|P1A{GS4v+F=HArTEJ_plhLo-4{U6@^VZwGSS$S;MwPPrWm-R1qEjPwXN4C%X zRr1{TH_v^4%i!HHm~ITcGc;l)P80r3T0K*wm(h1Cq!_QN8EKP$Kq7Yq{Q=cSL+73rxvS`Vx_$;aA09>S zTKdBl6W}W}a@Q+Y6jF?^;#9oT8Hr#dc8%7 z@m?#ykIK-9R{2pm=J63zGh(71$k0fk@_`&=o2ZWq(1>37v8EK`^_a!SZD^#J{4fqD7fVCTi2z1!c3$Pf^+%S4(7W z6^K$iDo)7|BN7#43y111?N8*C8gV7m<+N>5ksm`?pPQ>bFH0JKRxYj+r9cg(Fq3~n z4MmWm{(zdi@H1x zHhC=u*h_d^s^#%pgRjD%8bHkQth6Em$rGK8Tr=rltn5&3HWr$K?Bva0^fH4B?2IiY zu{roM%opw|@#q0&p3V3C0jRPc%2*&vQ4C~LUR%s$V}uC-KNFeq`5+S*o0^!IaoE*- z4Q5x1K^+EA2qJh~2;l7)9KoOg1DuWUO&AEt2$|t$*XEKC{wN;BS=>R{KZ0`*z75mi zZ9wq&-O0a%!3h9q4Wu2AAzc!KcbtOoFXItqhP{x&sIoZDJ0jT9PbCz5kH9NCOXM{O z#8=>n)WBQnTSx>9z1N2Sq&0py$nT!(p8vsYcT(T8sqa}fWJCOUUqV}%C&X`CaxEQ? 
z8&BMetipg#J4g!m@Qjko6BT2Lej6Z*Qvx+UW+ z$8Yt=Qnihysr7n#me#VEMxGaJ=NZfJ12f_T1YGS7zT%n))|Q{mXBp z^wy-lc2i%ws88q{Q?}Nm?d46|%L!X&va&N99JeoBSQ?I(_C1hpDA$x*2KI1d4C>7* z7U~v_Hy!cHmUzoh-00laxp3rq5(47>$F)x?DTKp4VmQ2~g3&!3Deu+c1%IgQd@Rqd zgvD`F|GvloHX7IWd@14`#u0B{tE(Q}X-7_iywf4Y_%VY^BfYCePAPr2K#Fl)jk8sL z7v_-k-HtvQIv?~Pr-J^Vw+--n9Sp$x#VVM3pFvJFdB0MI@mdwIyMGipHM09Ha*V%Z zhnXK%Ag50GAtS|jb&b|)gr@q1!lc^1UZkAs|}d- zYBOfN+J#(d*=jcl_?i>}eT_sge@!7kwLsAzJgO5YMF{w>83f!&I-68$Wyo2fTPs&% z+)m-OM}<|bq)VY#JBFOynzc?f;OlY<_^$)cdbF+~UA5G@0l7@-^(lw?EOH(>#kCGLUyHZjwxu)IK^`NQ+=`I1yQk&4+ZvO)JcHc%d=nTSRxW$9G Oz1{w2q2*m5<$nRodK>%z diff --git a/scripts/cicd-python/utils/__pycache__/test_credentials.cpython-313.pyc b/scripts/cicd-python/utils/__pycache__/test_credentials.cpython-313.pyc deleted file mode 100644 index e1cf38397e533c786a936791696f627915f10c61..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 1810 zcmb7EO>7%Q6rR~#uQ&e5x;UgXKg}i$jvW!N6x)y=+A74Q3P=XLSZYa7w6ZtJ26xw* zT}O#~iOPusA|REf;?Pt@ZQ(+XNJu~$BXQu6ra=zd915IrOBGd-5C`5mUL{Qn60@3_ zZ{GXfoA=(#&W_*jLC`LJ{M}?TjnJR=!f8wm?1TVZMiP?nn~3bqm|%$-qa<3waYkaA z&@@BnNgQ!)*+wbLW$GU&n#B^uED|k?Yp(2xgZ?4(ozE$RP9VW{jWeho#V83JMkx(T zj7E*x+-ge$?{MotiJAzb5iFr82z~}%rc*8-(&!AGb{)1r=v=$x>ZhK;XEIEheVQ+^ z1LqKuQwV{+?rFUf!3dP(iTzy}*R%GQ+#*WUa?#jPuj5hT61DmW=zz3t)bB7dtiz~B zk|#zxD*?TA7GmQa-v0crvAGZI;LW&41CCAlIS1{}lCPhJnD#jh`#J}8pp1Jz9=Ig% zIa!WSPgX{#YX3_=3D+a_2lQW{--P}i^wxIt5~})wE4g>d9P*)XErc9YZ-Np~8{8!U zrFu`&ZD<06UF84RO`<)!SLg{G@uy1e*A$s3L?{L1LLskaWmDBPaoW(e2xa+)isqC~ z)KXPi@(t^nsc2@;yK@Dl6rM88f-xY9azTye)JfHp^Lkd1wP;qqAkOrPIieTxifKf! z<(^XHoI;GPpCK8Gb2O!)&a$e0vOP;i15>=T|aze=`)J(!g zBwFfvT~_lTHg!RU0Bu`{TM-U98-R_O31CLNmRnYJJ6_8_oL9j>QVatUwdhI3v{=KG z&7xs3cG{M!Kvd1N=sf(`jH%25H0BJO1Q`&EQD#*$!V&lr5pmn;&_$BB+(ykKLlmQ6 zF*(_kiTyWO9}tPq#$y<^&^&tB+_98c>RLV4 zz41c#BNxrH^Jlj?5zH^Q5tni_AK6JZlBiM9h@YBIXV_~oojNc2!zhyS~ z;3pS9y11CV&$n;1wqHAc_52s3m80PWW}~_7THn>ar8n+S_nTu2+-BhU)yCfSKn%d5 z=gZjA?2VI4;+>Y&##6rrPCq=+b*uk+|Bb;)YjnX=<(~erX+52(q{r^1$JPSxZw7)_ zxX-x7ru9JQZV)vjAcS2S1-c-*H9quPF#h||@FNd8F7EPsS|F?j<9|6M;H~nZzm0Do z^Gls)>hMjRtYdG{fWB@>iuCt5>E(W)0WDLV$)IaFgp+=5xogM|i@R~0pP0%&<20Vvv{p82DwrIv%M?f^4$8u6zt9Tk6YAJ{yPJ zHnRuYRw8?bb(a%3+2;wE0 None: - config = DROPLET_CONFIGS.get(droplet_type) - if not config: - print(f"Droplet type '{droplet_type}' not recognized.", file=sys.stderr) +# Load config from file +def load_config(): + try: + with open(CONFIG_FILE, "r") as f: + return json.load(f) + except (FileNotFoundError, json.JSONDecodeError): + return {} + +# Save config to file +def save_config(config): + with open(CONFIG_FILE, "w") as f: + json.dump(config, f, indent=4) + +async def create_build_machine(token: str) -> None: + config = load_config() + droplet_config = config.get("droplet_config", {}) + + if not droplet_config: + print("Droplet configuration not found.", file=sys.stderr) sys.exit(1) headers = { @@ -23,10 +32,10 @@ async def create_droplet(token: str, droplet_type: str) -> None: } create_url = "https://api.digitalocean.com/v2/droplets" payload = { - "name": config["name"], - "region": "nyc1", # Changed default region to "ncy1" - "size": config["size"], - "image": config["image"], + "name": droplet_config["name"], + "region": "nyc1", + "size": droplet_config["size"], + "image": droplet_config["image"], "backups": False, } @@ -42,17 +51,23 @@ async def create_droplet(token: str, droplet_type: str) -> None: print("No droplet information returned.", file=sys.stderr) sys.exit("No droplet information returned.") droplet_id = droplet.get("id") - print(f"Droplet 
creation initiated. Droplet ID: {droplet_id}") + print(f"Droplet created. Droplet ID: {droplet_id}") - # Poll for droplet status until it becomes "active" + # Save droplet ID to config + config["droplet_id"] = droplet_id + save_config(config) + print("Droplet ID saved to config.") + + # Poll every 10 second for droplet status until it becomes "active" status = droplet.get("status", "new") droplet_url = f"https://api.digitalocean.com/v2/droplets/{droplet_id}" while status != "active": - await asyncio.sleep(2) + await asyncio.sleep(10) async with session.get(droplet_url, headers=headers) as poll_resp: if poll_resp.status != 200: error_text = await poll_resp.text() - print(f"Error polling droplet status: {error_text}", file=sys.stderr) + print(f"Error polling droplet status: {error_text}", + file=sys.stderr) sys.exit(error_text) droplet_data = await poll_resp.json() droplet = droplet_data.get("droplet") @@ -60,7 +75,8 @@ async def create_droplet(token: str, droplet_type: str) -> None: status = droplet.get("status", status) print(f"Droplet status: {status}") else: - print("Droplet data missing in polling response", file=sys.stderr) + print("Droplet data missing in polling response", + file=sys.stderr) sys.exit("Droplet data missing in polling response") print("Droplet is up and running.") @@ -68,46 +84,36 @@ async def create_droplet(token: str, droplet_type: str) -> None: async with session.get(droplet_url, headers=headers) as final_resp: if final_resp.status != 200: error_text = await final_resp.text() - print(f"Error retrieving droplet information: {error_text}", file=sys.stderr) + print(f"Error retrieving droplet information: {error_text}", + file=sys.stderr) sys.exit(error_text) final_data = await final_resp.json() print("Droplet Information:") print(final_data) -async def delete_droplet(token: str, droplet_type: str) -> None: - config = DROPLET_CONFIGS.get(droplet_type) - if not config: - print(f"Droplet type '{droplet_type}' not recognized.", file=sys.stderr) - sys.exit(1) +async def delete_build_machine(token: str) -> None: + config = load_config() + droplet_id = config.get("droplet_id") + + if not droplet_id: + print("No droplet ID found in config.", file=sys.stderr) + return headers = { "Authorization": f"Bearer {token}", "Content-Type": "application/json", } - droplets_url = "https://api.digitalocean.com/v2/droplets" + delete_url = f"https://api.digitalocean.com/v2/droplets/{droplet_id}" async with aiohttp.ClientSession() as session: - async with session.get(droplets_url, headers=headers) as resp: - if resp.status != 200: + async with session.delete(delete_url, headers=headers) as resp: + if resp.status != 204: error_text = await resp.text() - print(f"Error retrieving droplets: {error_text}", file=sys.stderr) + print(f"Error deleting droplet: {error_text}", file=sys.stderr) sys.exit(error_text) - data = await resp.json() - droplets = data.get("droplets", []) - target_droplet = None - for droplet in droplets: - if droplet.get("name") == config["name"]: - target_droplet = droplet - break - if not target_droplet: - print(f"No droplet found with name '{config['name']}'.") - return + print(f"Droplet {droplet_id} deleted successfully.") - droplet_id = target_droplet.get("id") - delete_url = f"https://api.digitalocean.com/v2/droplets/{droplet_id}" - async with session.delete(delete_url, headers=headers) as delete_resp: - if delete_resp.status != 204: - error_text = await delete_resp.text() - print(f"Error deleting droplet: {error_text}", file=sys.stderr) - sys.exit(error_text) - 
print(f"Droplet '{config['name']}' deleted successfully.") + # Remove droplet ID from config + config.pop("droplet_id", None) + save_config(config) + print("Droplet ID removed from config.") diff --git a/scripts/cicd-python/utils/repos_builder.py b/scripts/cicd-python/utils/repos_builder.py new file mode 100644 index 00000000..10d656b9 --- /dev/null +++ b/scripts/cicd-python/utils/repos_builder.py @@ -0,0 +1,8 @@ +import subprocess + +def build_deb_repo(): + print("Creating and signing .deb package repository.") + +def build_rpm_repo(): + print("Creating and signing .rpm package repository.") + diff --git a/scripts/cicd-python/veilid_release_utils.py b/scripts/cicd-python/veilid_release_utils.py new file mode 100644 index 00000000..c0ae73d1 --- /dev/null +++ b/scripts/cicd-python/veilid_release_utils.py @@ -0,0 +1,37 @@ +#!/usr/bin/env python3 +import os +import sys +import argparse +import asyncio +from dotenv import load_dotenv +from utils.build_machine_control import create_build_machine, delete_build_machine +from utils.test_credentials import test_api_credentials +from utils.repos_builder import build_deb_repo, build_rpm_repo + +if __name__ == "__main__": + # Load environment variables from the .env file. + load_dotenv() + token = os.getenv("DO_API_TOKEN") + if not token: + print("Error: DO_API_TOKEN environment variable not found. Please set it in the .env file.", file=sys.stderr) + sys.exit(1) + + # Set up command-line argument parsing. + parser = argparse.ArgumentParser(description="Veilid compiling and releasing utility") + parser.add_argument("--create-build-machine", action="store_true", help="Create a build machine") + parser.add_argument("--delete-build-machine", action="store_true", help="Delete the created build machine") + parser.add_argument("--build-deb-repo", action="store_true", help="Creates and signs .deb repository") + parser.add_argument("--build-rpm-repo", action="store_true", help="Creates and signs .rpm repository") + parser.add_argument("--test-api-credentials", action="store_true", help="Test DigitalOcean API credentials") + args = parser.parse_args() + + if args.create_build_machine: + asyncio.run(create_build_machine(token)) + elif args.delete_build_machine: + asyncio.run(delete_build_machine(token)) + elif args.build_deb_repo: + asyncio.run(build_deb_repo()) + elif args.build_rpm_repo: + asyncio.run(build_rpm_repo()) + elif args.test_api_credentials: + asyncio.run(test_api_credentials(token))