mirror of https://gitlab.com/veilid/veilid.git
synced 2025-06-24 14:50:35 -04:00

Merge branch 'main' into add-cicd-scripts [ci skip]

Commit c969df33d8: 31 changed files with 381 additions and 219 deletions

@@ -22,11 +22,11 @@ welcome!

Running the setup script requires:

-* Rust
+- Rust

#### Optionally, to build for Android:

-* Android SDK and NDK
+- Android SDK and NDK

You may decide to use Android Studio [here](https://developer.android.com/studio)
to maintain your Android dependencies. If so, use the dependency manager
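As a side note on the Rust requirement above: the setup script later in this diff installs Rust through rustup with the following invocation. This is only a reference sketch of that step, not part of the commit itself.

```shell
# Install Rust (with clippy) the same way the Veilid setup script does
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y -c clippy --profile default
# Load cargo into the current shell
source "$HOME/.cargo/env"
```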
@@ -35,11 +35,11 @@ method is highly recommended as you may run into path problems with the 'flutter
command line without it. If you do so, you may skip to
[Run Veilid setup script](#Run Veilid setup script).

-* build-tools;33.0.1
+- build-tools;34.0.0
-* ndk;25.1.8937393
+- ndk;26.3.11579264
-* cmake;3.22.1
+- cmake;3.22.1
-* platform-tools
+- platform-tools
-* platforms;android-33
+- platforms;android-34

#### Setup Dependencies using the CLI

@@ -56,9 +56,9 @@ the command line to install the requisite package versions:

```shell
sdkmanager --install "platform-tools"
-sdkmanager --install "platforms;android-33"
+sdkmanager --install "platforms;android-34"
-sdkmanager --install "build-tools;33.0.1"
+sdkmanager --install "build-tools;34.0.0"
-sdkmanager --install "ndk;25.1.8937393"
+sdkmanager --install "ndk;26.3.11579264"
sdkmanager --install "cmake;3.22.1"
```

@@ -66,7 +66,7 @@ Export environment variables and add the Android SDK platform-tools directory to
your path. See [instructions here](https://developer.android.com/tools/variables).

```shell
cat << EOF >> ~/.profile
export ANDROID_HOME=<path to sdk>
export PATH=$PATH:$ANDROID_HOME/tools:$ANDROID_HOME/tools/bin:$ANDROID_HOME/platform-tools
EOF
@@ -97,23 +97,23 @@ Development of Veilid on MacOS is possible on both Intel and ARM hardware.

Development requires:

-* Xcode, preferably latest version
+- Xcode, preferably latest version
-* Homebrew [here](https://brew.sh)
+- Homebrew [here](https://brew.sh)
-* Rust
+- Rust

#### Optionally, to build for Android:

-* Android Studio
+- Android Studio
-* Android SDK and NDK
+- Android SDK and NDK

You will need to use Android Studio [here](https://developer.android.com/studio)
to maintain your Android dependencies. Use the SDK Manager in the IDE to install the following packages (use package details view to select version):

-* Android SDK Build Tools (33.0.1)
+- Android SDK Build Tools (34.0.0)
-* NDK (Side-by-side) (25.1.8937393)
+- NDK (Side-by-side) (26.3.11579264)
-* Cmake (3.22.1)
+- Cmake (3.22.1)
-* Android SDK 33
+- Android SDK 34
-* Android SDK Command Line Tools (latest) (7.0/latest)
+- Android SDK Command Line Tools (latest) (7.0/latest)

#### Setup command line environment
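For readers who prefer the command line over the Android Studio SDK Manager, the package list above can also be installed with sdkmanager. This is only a sketch mirroring the versions in the updated list and the call made in the macOS setup script later in this diff; it assumes ANDROID_HOME already points at your SDK and that the command line tools are installed under it.

```shell
# Versions match the updated package list above (SDK 34 / NDK 26.3.11579264)
"$ANDROID_HOME/cmdline-tools/latest/bin/sdkmanager" \
    "build-tools;34.0.0" "ndk;26.3.11579264" "cmake;3.22.1" \
    "platform-tools" "platforms;android-34"
```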
@@ -68,7 +68,7 @@ deps-android:
RUN mkdir /Android; mkdir /Android/Sdk
RUN curl -o /Android/cmdline-tools.zip https://dl.google.com/android/repository/commandlinetools-linux-9123335_latest.zip
RUN cd /Android; unzip /Android/cmdline-tools.zip
-RUN yes | /Android/cmdline-tools/bin/sdkmanager --sdk_root=/Android/Sdk build-tools\;33.0.1 ndk\;25.1.8937393 cmake\;3.22.1 platform-tools platforms\;android-33 cmdline-tools\;latest
+RUN yes | /Android/cmdline-tools/bin/sdkmanager --sdk_root=/Android/Sdk build-tools\;34.0.0 ndk\;26.3.11579264 cmake\;3.22.1 platform-tools platforms\;android-34 cmdline-tools\;latest
RUN rm -rf /Android/cmdline-tools
RUN apt-get clean

@@ -155,7 +155,7 @@ build-linux-arm64:
build-android:
FROM +code-android
WORKDIR /veilid/veilid-core
-ENV PATH=$PATH:/Android/Sdk/ndk/25.1.8937393/toolchains/llvm/prebuilt/linux-x86_64/bin/
+ENV PATH=$PATH:/Android/Sdk/ndk/26.3.11579264/toolchains/llvm/prebuilt/linux-x86_64/bin/
RUN cargo build --target aarch64-linux-android --release
RUN cargo build --target armv7-linux-androideabi --release
RUN cargo build --target i686-linux-android --release

@@ -261,4 +261,4 @@ package-linux-arm64:

package-linux:
BUILD +package-linux-amd64
BUILD +package-linux-arm64
@@ -20,13 +20,13 @@ Releases happen via a CI/CD pipeline. The release process flows as follows:

2.1 Update your local copy of `main` to mirror the newly merged upstream `main`

-2.2 Ensure the [CHANGELOG](./CHANGELOG.md) is updated
+2.2 Ensure the [CHANGELOG](./CHANGELOG.md) is updated. Include `[ci skip]` in the commit message so that the testing pipeline is skipped.

2.3 Activate your bumpversion Python venv (see bumpversion setup section for details)

2.4 Execute version_bump.sh with the appropriate parameter (patch, minor, or major). This results in all version entries being updated and a matching git tag created locally.

-2.5 Add all changes `git add *`
+2.5 Add all changes `git add .`

2.6 Git commit the changes with the following message: `Version update: v{current_version} → v{new_version}`
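A minimal sketch of steps 2.3 through 2.6 above, for orientation only. The venv path and the script location are assumptions; the commands otherwise mirror the steps in this document, and the version placeholders must be replaced with the real version numbers.

```shell
# 2.3: activate the bumpversion venv (assumed location)
source ~/.venv/bumpversion/bin/activate
# 2.4: bump the version (patch, minor, or major); assumes the script is in the repo root
./version_bump.sh patch
# 2.5: stage all changes
git add .
# 2.6: commit with the documented message format (substitute the real versions)
git commit -m "Version update: v{current_version} → v{new_version}"
```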
@@ -1,7 +1,7 @@
#!/bin/bash
set -eo pipefail

if [ $(id -u) -eq 0 ]; then
echo "Don't run this as root"
exit
fi

@@ -27,40 +27,45 @@ elif [ ! -z "$(command -v dnf)" ]; then
sudo dnf groupinstall -y 'Development Tools'
fi

# Install Rust
curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y -c clippy --profile default
source "$HOME/.cargo/env"

#ask if they want to install optional android sdk (and install if yes)
while true; do
read -p "Do you want to install Android SDK (optional) Y/N) " response

case $response in
-[yY] ) echo Installing Android SDK...;
-# Install Android SDK
-mkdir $HOME/Android; mkdir $HOME/Android/Sdk
-curl -o $HOME/Android/cmdline-tools.zip https://dl.google.com/android/repository/commandlinetools-linux-9123335_latest.zip
-cd $HOME/Android; unzip $HOME/Android/cmdline-tools.zip
-$HOME/Android/cmdline-tools/bin/sdkmanager --sdk_root=$HOME/Android/Sdk build-tools\;33.0.1 ndk\;25.1.8937393 cmake\;3.22.1 platform-tools platforms\;android-33 cmdline-tools\;latest emulator
-cd $HOME
-rm -rf $HOME/Android/cmdline-tools $HOME/Android/cmdline-tools.zip
+[yY])
+echo Installing Android SDK...
+# Install Android SDK
+mkdir $HOME/Android
+mkdir $HOME/Android/Sdk
+curl -o $HOME/Android/cmdline-tools.zip https://dl.google.com/android/repository/commandlinetools-linux-9123335_latest.zip
+cd $HOME/Android
+unzip $HOME/Android/cmdline-tools.zip
+$HOME/Android/cmdline-tools/bin/sdkmanager --sdk_root=$HOME/Android/Sdk build-tools\;34.0.0 ndk\;26.3.11579264 cmake\;3.22.1 platform-tools platforms\;android-34 cmdline-tools\;latest emulator
+cd $HOME
+rm -rf $HOME/Android/cmdline-tools $HOME/Android/cmdline-tools.zip

# Add environment variables
-cat >> $HOME/.profile <<END
+cat >>$HOME/.profile <<END
source "\$HOME/.cargo/env"
-export PATH=\$PATH:\$HOME/Android/Sdk/ndk/25.1.8937393/toolchains/llvm/prebuilt/linux-x86_64/bin:\$HOME/Android/Sdk/platform-tools:\$HOME/Android/Sdk/cmdline-tools/latest/bin
+export PATH=\$PATH:\$HOME/Android/Sdk/ndk/26.3.11579264/toolchains/llvm/prebuilt/linux-x86_64/bin:\$HOME/Android/Sdk/platform-tools:\$HOME/Android/Sdk/cmdline-tools/latest/bin
export ANDROID_HOME=\$HOME/Android/Sdk
END
-break ;;
-[nN] ) echo Skipping Android SDK;
-cat >> $HOME/.profile <<END
+break
+;;
+[nN])
+echo Skipping Android SDK
+cat >>$HOME/.profile <<END
source "\$HOME/.cargo/env"
END
-break;;
+break
+;;

-* ) echo invalid response;;
+*) echo invalid response ;;
esac
done

echo Complete! Exit and reopen the shell and continue with ./setup_linux.sh
@@ -1,14 +1,14 @@
#!/bin/bash
set -eo pipefail

if [ $(id -u) -eq 0 ]; then
echo "Don't run this as root"
exit
fi

-SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+SCRIPTDIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"

if [[ "$(uname)" != "Linux" ]]; then
echo Not running Linux
exit 1
fi

@@ -22,7 +22,8 @@ while true; do
read -p "Did you install Android SDK? Y/N " response

case $response in
-[yY] ) echo Checking android setup...;
+[yY])
+echo Checking android setup...

# ensure ANDROID_HOME is defined and exists
if [ -d "$ANDROID_HOME" ]; then

@@ -34,16 +35,16 @@ while true; do

# ensure Android Command Line Tools exist
if [ -d "$ANDROID_HOME/cmdline-tools/latest/bin" ]; then
echo '[X] Android command line tools are installed'
else
echo 'Android command line tools are not installed'
exit 1
fi

# ensure ndk is installed
-ANDROID_NDK_HOME="$ANDROID_HOME/ndk/25.1.8937393"
+ANDROID_NDK_HOME="$ANDROID_HOME/ndk/26.3.11579264"
if [ -f "$ANDROID_NDK_HOME/ndk-build" ]; then
echo '[X] Android NDK is installed at the location $ANDROID_NDK_HOME'
else
echo 'Android NDK is not installed at the location $ANDROID_NDK_HOME'
exit 1

@@ -51,7 +52,7 @@ while true; do

# ensure cmake is installed
if [ -d "$ANDROID_HOME/cmake" ]; then
echo '[X] Android SDK CMake is installed'
else
echo 'Android SDK CMake is not installed'
exit 1

@@ -59,28 +60,31 @@ while true; do

# ensure emulator is installed
if [ -d "$ANDROID_HOME/emulator" ]; then
echo '[X] Android SDK emulator is installed'
else
echo 'Android SDK emulator is not installed'
exit 1
fi

# ensure adb is installed
-if command -v adb &> /dev/null; then
+if command -v adb &>/dev/null; then
echo '[X] adb is available in the path'
else
echo 'adb is not available in the path'
exit 1
fi
-break;;
-[nN] ) echo Skipping Android SDK config check...;
-break;;
-* ) echo invalid response;;
+break
+;;
+[nN])
+echo Skipping Android SDK config check...
+break
+;;
+*) echo invalid response ;;
esac
done

# ensure rustup is installed
-if command -v rustup &> /dev/null; then
+if command -v rustup &>/dev/null; then
echo '[X] rustup is available in the path'
else
echo 'rustup is not available in the path'

@@ -88,7 +92,7 @@ else
fi

# ensure cargo is installed
-if command -v cargo &> /dev/null; then
+if command -v cargo &>/dev/null; then
echo '[X] cargo is available in the path'
else
echo 'cargo is not available in the path'

@@ -96,7 +100,7 @@ else
fi

# ensure pip3 is installed
-if command -v pip3 &> /dev/null; then
+if command -v pip3 &>/dev/null; then
echo '[X] pip3 is available in the path'
else
echo 'pip3 is not available in the path'

@@ -117,14 +121,18 @@ while true; do
read -p "Will you be modifying the capnproto schema? Y/N (say N if unsure)" response

case $response in
-[yY] ) echo Installing capnproto...;
+[yY])
+echo Installing capnproto...

# Install capnproto using the same mechanism as our earthly build
$SCRIPTDIR/../scripts/earthly/install_capnproto.sh

-break;;
-[nN] ) echo Skipping capnproto installation...;
-break;;
-* ) echo invalid response;;
+break
+;;
+[nN])
+echo Skipping capnproto installation...
+break
+;;
+*) echo invalid response ;;
esac
done
@@ -1,7 +1,7 @@
#!/bin/bash
set -eo pipefail

-SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
+SCRIPTDIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" >/dev/null 2>&1 && pwd)"

if [ ! "$(uname)" == "Darwin" ]; then
echo Not running on MacOS

@@ -12,7 +12,8 @@ while true; do

read -p "Did you install Android SDK? Y/N " response
case $response in
-[yY] ) echo Checking android setup...;
+[yY])
+echo Checking android setup...
# ensure ANDROID_HOME is defined and exists
if [ -d "$ANDROID_HOME" ]; then
echo '[X] $ANDROID_HOME is defined and exists'

@@ -23,17 +24,17 @@ while true; do

# ensure Android Command Line Tools exist
if [ -d "$ANDROID_HOME/cmdline-tools/latest/bin" ]; then
echo '[X] Android command line tools are installed'
else
echo 'Android command line tools are not installed'
exit 1
fi

# ensure Android SDK packages are installed
-$ANDROID_HOME/cmdline-tools/latest/bin/sdkmanager build-tools\;33.0.1 ndk\;25.1.8937393 cmake\;3.22.1 platform-tools platforms\;android-33
+$ANDROID_HOME/cmdline-tools/latest/bin/sdkmanager build-tools\;34.0.0 ndk\;26.3.11579264 cmake\;3.22.1 platform-tools platforms\;android-34

# ensure ANDROID_NDK_HOME is defined and exists
-ANDROID_NDK_HOME="$ANDROID_HOME/ndk/25.1.8937393"
+ANDROID_NDK_HOME="$ANDROID_HOME/ndk/26.3.11579264"
if [ -d "$ANDROID_NDK_HOME" ]; then
echo '[X] Android NDK is defined and exists'
else

@@ -43,7 +44,7 @@ while true; do

# ensure ndk is installed
if [ -f "$ANDROID_NDK_HOME/ndk-build" ]; then
echo '[X] Android NDK is installed at the location $ANDROID_NDK_HOME'
else
echo 'Android NDK is not installed at the location $ANDROID_NDK_HOME'
exit 1

@@ -51,7 +52,7 @@ while true; do

# ensure cmake is installed
if [ -d "$ANDROID_HOME/cmake" ]; then
echo '[X] Android SDK CMake is installed'
else
echo 'Android SDK CMake is not installed'
exit 1

@@ -59,29 +60,32 @@ while true; do

# ensure emulator is installed
if [ -d "$ANDROID_HOME/emulator" ]; then
echo '[X] Android SDK emulator is installed'
else
echo 'Android SDK emulator is not installed'
exit 1
fi

# ensure adb is installed
-if command -v adb &> /dev/null; then
+if command -v adb &>/dev/null; then
echo '[X] adb is available in the path'
else
echo 'adb is not available in the path'
exit 1
fi
-break;;
-[nN] ) echo Skipping Android SDK config check...;
-break;;
+break
+;;
+[nN])
+echo Skipping Android SDK config check...
+break
+;;

-* ) echo invalid response;;
+*) echo invalid response ;;
esac
done

# ensure brew is installed
-if command -v brew &> /dev/null; then
+if command -v brew &>/dev/null; then
echo '[X] brew is available in the path'
else
echo 'brew is not available in the path'

@@ -89,7 +93,7 @@ else
fi

# ensure xcode is installed
-if command -v xcode-select &> /dev/null; then
+if command -v xcode-select &>/dev/null; then
echo '[X] XCode is available in the path'
else
echo 'XCode is not available in the path'

@@ -97,7 +101,7 @@ else
fi

# ensure rustup is installed
-if command -v rustup &> /dev/null; then
+if command -v rustup &>/dev/null; then
echo '[X] rustup is available in the path'
else
echo 'rustup is not available in the path'

@@ -105,7 +109,7 @@ else
fi

# ensure cargo is installed
-if command -v cargo &> /dev/null; then
+if command -v cargo &>/dev/null; then
echo '[X] cargo is available in the path'
else
echo 'cargo is not available in the path'

@@ -113,7 +117,7 @@ else
fi

# ensure pip3 is installed
-if command -v pip3 &> /dev/null; then
+if command -v pip3 &>/dev/null; then
echo '[X] pip3 is available in the path'
else
echo 'pip3 is not available in the path'

@@ -130,9 +134,9 @@ else
fi

# ensure we have command line tools
-xcode-select --install 2> /dev/null || true
+xcode-select --install 2>/dev/null || true
until [ -d /Library/Developer/CommandLineTools/usr/bin ]; do
-sleep 5;
+sleep 5
done

# install packages

@@ -155,7 +159,7 @@ cargo install wasm-bindgen-cli wasm-pack cargo-edit
# install pip packages
pip3 install --upgrade bumpversion

-if command -v pod &> /dev/null; then
+if command -v pod &>/dev/null; then
echo '[X] CocoaPods is available in the path'
else
echo 'CocoaPods is not available in the path, installing it now'
@@ -1,8 +1,8 @@
[target.aarch64-linux-android]
-linker = "/Android/Sdk/ndk/25.1.8937393/toolchains/llvm/prebuilt/linux-x86_64/bin/aarch64-linux-android33-clang"
+linker = "/Android/Sdk/ndk/26.3.11579264/toolchains/llvm/prebuilt/linux-x86_64/bin/aarch64-linux-android34-clang"
[target.armv7-linux-androideabi]
-linker = "/Android/Sdk/ndk/25.1.8937393/toolchains/llvm/prebuilt/linux-x86_64/bin/armv7a-linux-androideabi33-clang"
+linker = "/Android/Sdk/ndk/26.3.11579264/toolchains/llvm/prebuilt/linux-x86_64/bin/armv7a-linux-androideabi33-clang"
[target.x86_64-linux-android]
-linker = "/Android/Sdk/ndk/25.1.8937393/toolchains/llvm/prebuilt/linux-x86_64/bin/x86_64-linux-android33-clang"
+linker = "/Android/Sdk/ndk/26.3.11579264/toolchains/llvm/prebuilt/linux-x86_64/bin/x86_64-linux-android34-clang"
[target.i686-linux-android]
-linker = "/Android/Sdk/ndk/25.1.8937393/toolchains/llvm/prebuilt/linux-x86_64/bin/i686-linux-android33-clang"
+linker = "/Android/Sdk/ndk/26.3.11579264/toolchains/llvm/prebuilt/linux-x86_64/bin/i686-linux-android34-clang"
@@ -1,10 +1,9 @@
-use glob::glob;
use sha2::{Digest, Sha256};
use std::fs::OpenOptions;
use std::io::BufRead;
use std::io::Write;
use std::{
-env, io,
+io,
path::Path,
process::{Command, Stdio},
};

@@ -126,26 +125,6 @@ fn do_capnp_build() {
append_hash("proto/veilid.capnp", "proto/veilid_capnp.rs");
}

-// Fix for missing __extenddftf2 on Android x86_64 Emulator
-fn fix_android_emulator() {
-let target_os = env::var("CARGO_CFG_TARGET_OS").unwrap();
-let target_arch = env::var("CARGO_CFG_TARGET_ARCH").unwrap();
-if target_arch == "x86_64" && target_os == "android" {
-let missing_library = "clang_rt.builtins-x86_64-android";
-let android_home = env::var("ANDROID_HOME").expect("ANDROID_HOME not set");
-let lib_path = glob(&format!(
-"{android_home}/ndk/25.1.8937393/**/lib{missing_library}.a"
-))
-.expect("failed to glob")
-.next()
-.expect("Need libclang_rt.builtins-x86_64-android.a")
-.unwrap();
-let lib_dir = lib_path.parent().unwrap();
-println!("cargo:rustc-link-search={}", lib_dir.display());
-println!("cargo:rustc-link-lib=static={missing_library}");
-}
-}

fn main() {
if std::env::var("DOCS_RS").is_ok()
|| std::env::var("CARGO_CFG_DOC").is_ok()

@@ -158,6 +137,4 @@ fn main() {
println!("cargo:warning=rebuilding proto/veilid_capnp.rs because it has changed from the last generation of proto/veilid.capnp");
do_capnp_build();
}
-
-fix_android_emulator();
}
@@ -284,14 +284,13 @@ where
};

// Initialize closest nodes list
-if init_fanout_queue.is_empty() {
-if let Err(e) = self.clone().init_closest_nodes() {
-return TimeoutOr::value(Err(e));
-}
-} else {
-self.clone().add_to_fanout_queue(&init_fanout_queue);
+if let Err(e) = self.clone().init_closest_nodes() {
+return TimeoutOr::value(Err(e));
}

+// Ensure we include the most recent nodes
+self.clone().add_to_fanout_queue(&init_fanout_queue);

// Do a quick check to see if we're already done
{
let mut ctx = self.context.lock();
@@ -43,6 +43,12 @@ impl StorageManager {
)
};

+// Get the nodes we know are caching this value to seed the fanout
+let init_fanout_queue = {
+let inner = self.inner.lock().await;
+inner.get_value_nodes(key)?.unwrap_or_default()
+};
+
// Make do-get-value answer context
let schema = if let Some(d) = &last_get_result.opt_descriptor {
Some(d.schema()?)

@@ -179,7 +185,7 @@ impl StorageManager {
check_done,
);

-let kind = match fanout_call.run(vec![]).await {
+let kind = match fanout_call.run(init_fanout_queue).await {
// If we don't finish in the timeout (too much time passed checking for consensus)
TimeoutOr::Timeout => FanoutResultKind::Timeout,
// If we finished with or without consensus (enough nodes returning the same value)
@@ -82,6 +82,12 @@ impl StorageManager {
}
};

+// Get the nodes we know are caching this value to seed the fanout
+let init_fanout_queue = {
+let inner = self.inner.lock().await;
+inner.get_value_nodes(key)?.unwrap_or_default()
+};
+
// Make do-inspect-value answer context
let opt_descriptor_info = if let Some(descriptor) = &local_inspect_result.opt_descriptor {
// Get the descriptor info. This also truncates the subkeys list to what can be returned from the network.

@@ -253,7 +259,7 @@ impl StorageManager {
check_done,
);

-let kind = match fanout_call.run(vec![]).await {
+let kind = match fanout_call.run(init_fanout_queue).await {
// If we don't finish in the timeout (too much time passed checking for consensus)
TimeoutOr::Timeout => FanoutResultKind::Timeout,
// If we finished with or without consensus (enough nodes returning the same value)
@@ -795,10 +795,19 @@ impl StorageManager {
"more subkeys returned locally than requested"
);

+// Get the offline subkeys for this record still only returning the ones we're inspecting
+let offline_subkey_writes = inner
+.offline_subkey_writes
+.get(&key)
+.map(|o| o.subkeys.clone())
+.unwrap_or_default()
+.intersect(&subkeys);
+
// If this is the maximum scope we're interested in, return the report
if matches!(scope, DHTReportScope::Local) {
return Ok(DHTRecordReport::new(
local_inspect_result.subkeys,
+offline_subkey_writes,
local_inspect_result.seqs,
vec![],
));

@@ -864,6 +873,7 @@ impl StorageManager {

Ok(DHTRecordReport::new(
result.inspect_result.subkeys,
+offline_subkey_writes,
local_inspect_result.seqs,
result.inspect_result.seqs,
))
@@ -1037,7 +1037,7 @@ where
let Some(member_check) = self.with_record(key, |record| {
let schema = record.schema();
let owner = *record.owner();
-Box::new(move |watcher| owner == params.watcher || schema.is_member(&watcher))
+Box::new(move |watcher| owner == watcher || schema.is_member(&watcher))
}) else {
// Record not found
return Ok(WatchResult::Rejected);
@@ -44,6 +44,12 @@ impl StorageManager {
)
};

+// Get the nodes we know are caching this value to seed the fanout
+let init_fanout_queue = {
+let inner = self.inner.lock().await;
+inner.get_value_nodes(key)?.unwrap_or_default()
+};
+
// Make do-set-value answer context
let schema = descriptor.schema()?;
let context = Arc::new(Mutex::new(OutboundSetValueContext {

@@ -100,11 +106,17 @@ impl StorageManager {
return Ok(NetworkResult::invalid_message("Schema validation failed"));
}

+// If we got a value back it should be different than the one we are setting
+if ctx.value.value_data() == value.value_data() {
+// Move to the next node
+return Ok(NetworkResult::invalid_message("same value returned"));
+}
+
// We have a prior value, ensure this is a newer sequence number
let prior_seq = ctx.value.value_data().seq();
let new_seq = value.value_data().seq();
-if new_seq > prior_seq {
-// If the sequence number is greater, keep it
+if new_seq >= prior_seq {
+// If the sequence number is greater or equal, keep it
ctx.value = Arc::new(value);
// One node has shown us this value so far
ctx.value_nodes = vec![next_node];

@@ -164,7 +176,7 @@ impl StorageManager {
check_done,
);

-let kind = match fanout_call.run(vec![]).await {
+let kind = match fanout_call.run(init_fanout_queue).await {
// If we don't finish in the timeout (too much time passed checking for consensus)
TimeoutOr::Timeout => FanoutResultKind::Timeout,
// If we finished with or without consensus (enough nodes returning the same value)
@@ -10,13 +10,17 @@ impl StorageManager {
_last_ts: Timestamp,
_cur_ts: Timestamp,
) -> EyreResult<()> {
-let offline_subkey_writes = {
-let inner = self.lock().await?;
-inner.offline_subkey_writes.clone()
+let (mut offline_subkey_writes, opt_update_callback) = {
+let mut inner = self.lock().await?;
+let out = (
+inner.offline_subkey_writes.clone(),
+inner.update_callback.clone(),
+);
+inner.offline_subkey_writes.clear();
+out
};

-// make a safety selection that is conservative
-for (key, osw) in offline_subkey_writes {
+for (key, osw) in offline_subkey_writes.iter_mut() {
if poll!(stop_token.clone()).is_ready() {
log_stor!(debug "Offline subkey writes cancelled.");
break;

@@ -25,10 +29,12 @@ impl StorageManager {
log_stor!(debug "Offline subkey writes stopped for network.");
break;
};

+let mut written_subkeys = ValueSubkeyRangeSet::new();
for subkey in osw.subkeys.iter() {
let get_result = {
let mut inner = self.lock().await?;
-inner.handle_get_local_value(key, subkey, true).await
+inner.handle_get_local_value(*key, subkey, true).await
};
let Ok(get_result) = get_result else {
log_stor!(debug "Offline subkey write had no subkey result: {}:{}", key, subkey);

@@ -43,22 +49,52 @@ impl StorageManager {
continue;
};
log_stor!(debug "Offline subkey write: {}:{} len={}", key, subkey, value.value_data().data().len());
-if let Err(e) = self
+let osvres = self
.outbound_set_value(
rpc_processor.clone(),
-key,
+*key,
subkey,
osw.safety_selection,
value,
descriptor,
)
-.await
-{
-log_stor!(debug "failed to write offline subkey: {}", e);
+.await;
+match osvres {
+Ok(osv) => {
+if let Some(update_callback) = opt_update_callback.clone() {
+// Send valuechange with dead count and no subkeys
+update_callback(VeilidUpdate::ValueChange(Box::new(
+VeilidValueChange {
+key: *key,
+subkeys: ValueSubkeyRangeSet::single(subkey),
+count: u32::MAX,
+value: Some(osv.signed_value_data.value_data().clone()),
+},
+)));
+}
+written_subkeys.insert(subkey);
+}
+Err(e) => {
+log_stor!(debug "failed to write offline subkey: {}", e);
+}
}
}
-let mut inner = self.lock().await?;
-inner.offline_subkey_writes.remove(&key);
+osw.subkeys = osw.subkeys.difference(&written_subkeys);
+}

+// Add any subkeys back in that were not successfully written
+let mut inner = self.lock().await?;
+for (key, osw) in offline_subkey_writes {
+if !osw.subkeys.is_empty() {
+inner
+.offline_subkey_writes
+.entry(key)
+.and_modify(|x| {
+x.subkeys = x.subkeys.union(&osw.subkeys);
+})
+.or_insert(osw);
+}
+}
}

Ok(())
@@ -3,13 +3,13 @@ plugins {
}

android {
-compileSdkVersion 33
+compileSdkVersion 34
-buildToolsVersion "33.0.1"
+buildToolsVersion "34.0.0"

defaultConfig {
applicationId "com.veilid.veilid_core_android_tests"
minSdkVersion 24
-targetSdkVersion 33
+targetSdkVersion 34
versionCode 1
versionName "1.0"

@@ -38,7 +38,7 @@ android {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
-ndkVersion '25.1.8937393'
+ndkVersion '26.3.11579264'

// Required to copy libc++_shared.so
externalNativeBuild {

@@ -82,5 +82,4 @@ afterEvaluate {
def buildType = "${variant.buildType.name.capitalize()}"
tasks["generate${productFlavor}${buildType}Assets"].dependsOn(tasks["cargoBuild"])
}
}
@@ -12,6 +12,8 @@ pub struct DHTRecordReport {
/// This may be a subset of the requested range if it exceeds the schema limits
/// or has more than 512 subkeys
subkeys: ValueSubkeyRangeSet,
+/// The subkeys that have been writen offline that still need to be flushed
+offline_subkeys: ValueSubkeyRangeSet,
/// The sequence numbers of each subkey requested from a locally stored DHT Record
local_seqs: Vec<ValueSeqNum>,
/// The sequence numbers of each subkey requested from the DHT over the network

@@ -22,11 +24,13 @@ from_impl_to_jsvalue!(DHTRecordReport);
impl DHTRecordReport {
pub fn new(
subkeys: ValueSubkeyRangeSet,
+offline_subkeys: ValueSubkeyRangeSet,
local_seqs: Vec<ValueSeqNum>,
network_seqs: Vec<ValueSeqNum>,
) -> Self {
Self {
subkeys,
+offline_subkeys,
local_seqs,
network_seqs,
}

@@ -35,6 +39,9 @@ impl DHTRecordReport {
pub fn subkeys(&self) -> &ValueSubkeyRangeSet {
&self.subkeys
}
+pub fn offline_subkeys(&self) -> &ValueSubkeyRangeSet {
+&self.offline_subkeys
+}
pub fn local_seqs(&self) -> &[ValueSeqNum] {
&self.local_seqs
}

@@ -47,8 +54,9 @@ impl fmt::Debug for DHTRecordReport {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(
f,
-"DHTRecordReport {{\n subkeys: {:?}\n local_seqs:\n{}\n remote_seqs:\n{}\n}}\n",
+"DHTRecordReport {{\n subkeys: {:?}\n offline_subkeys: {:?}\n local_seqs:\n{}\n remote_seqs:\n{}\n}}\n",
&self.subkeys,
+&self.offline_subkeys,
&debug_seqs(&self.local_seqs),
&debug_seqs(&self.network_seqs)
)
@@ -63,7 +63,7 @@ android {
}
}

-ndkVersion '25.1.8937393'
+ndkVersion '26.3.11579264'

// Required to copy libc++_shared.so
externalNativeBuild {

@@ -99,4 +99,4 @@ afterEvaluate {
def buildType = "${variant.buildType.name.capitalize()}"
tasks["generate${productFlavor}${buildType}Assets"].dependsOn(tasks["cargoBuild"])
}
}
@@ -27,7 +27,7 @@ apply from: "$flutterRoot/packages/flutter_tools/gradle/flutter.gradle"

android {
compileSdkVersion flutter.compileSdkVersion
-ndkVersion '25.1.8937393'
+ndkVersion '26.3.11579264'
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8

@@ -71,4 +71,4 @@ flutter {

dependencies {
implementation "org.jetbrains.kotlin:kotlin-stdlib-jdk8:$kotlin_version"
}
@@ -55,7 +55,7 @@ class HistoryWrapper {
}
});
},
-focusNode: FocusNode(onKey: (FocusNode node, RawKeyEvent event) {
+focusNode: FocusNode(onKeyEvent: (FocusNode node, KeyEvent event) {
if (event.logicalKey == LogicalKeyboardKey.arrowDown ||
event.logicalKey == LogicalKeyboardKey.arrowUp) {
return KeyEventResult.handled;
@@ -246,6 +246,7 @@ class RouteBlob with _$RouteBlob {
class DHTRecordReport with _$DHTRecordReport {
const factory DHTRecordReport({
required List<ValueSubkeyRange> subkeys,
+required List<ValueSubkeyRange> offlineSubkeys,
required List<int> localSeqs,
required List<int> networkSeqs,
}) = _DHTRecordReport;
@@ -1363,6 +1363,8 @@ DHTRecordReport _$DHTRecordReportFromJson(Map<String, dynamic> json) {
/// @nodoc
mixin _$DHTRecordReport {
List<ValueSubkeyRange> get subkeys => throw _privateConstructorUsedError;
+List<ValueSubkeyRange> get offlineSubkeys =>
+throw _privateConstructorUsedError;
List<int> get localSeqs => throw _privateConstructorUsedError;
List<int> get networkSeqs => throw _privateConstructorUsedError;

@@ -1380,6 +1382,7 @@ abstract class $DHTRecordReportCopyWith<$Res> {
@useResult
$Res call(
{List<ValueSubkeyRange> subkeys,
+List<ValueSubkeyRange> offlineSubkeys,
List<int> localSeqs,
List<int> networkSeqs});
}

@@ -1398,6 +1401,7 @@ class _$DHTRecordReportCopyWithImpl<$Res, $Val extends DHTRecordReport>
@override
$Res call({
Object? subkeys = null,
+Object? offlineSubkeys = null,
Object? localSeqs = null,
Object? networkSeqs = null,
}) {

@@ -1406,6 +1410,10 @@ class _$DHTRecordReportCopyWithImpl<$Res, $Val extends DHTRecordReport>
? _value.subkeys
: subkeys // ignore: cast_nullable_to_non_nullable
as List<ValueSubkeyRange>,
+offlineSubkeys: null == offlineSubkeys
+? _value.offlineSubkeys
+: offlineSubkeys // ignore: cast_nullable_to_non_nullable
+as List<ValueSubkeyRange>,
localSeqs: null == localSeqs
? _value.localSeqs
: localSeqs // ignore: cast_nullable_to_non_nullable

@@ -1428,6 +1436,7 @@ abstract class _$$DHTRecordReportImplCopyWith<$Res>
@useResult
$Res call(
{List<ValueSubkeyRange> subkeys,
+List<ValueSubkeyRange> offlineSubkeys,
List<int> localSeqs,
List<int> networkSeqs});
}

@@ -1444,6 +1453,7 @@ class __$$DHTRecordReportImplCopyWithImpl<$Res>
@override
$Res call({
Object? subkeys = null,
+Object? offlineSubkeys = null,
Object? localSeqs = null,
Object? networkSeqs = null,
}) {

@@ -1452,6 +1462,10 @@ class __$$DHTRecordReportImplCopyWithImpl<$Res>
? _value._subkeys
: subkeys // ignore: cast_nullable_to_non_nullable
as List<ValueSubkeyRange>,
+offlineSubkeys: null == offlineSubkeys
+? _value._offlineSubkeys
+: offlineSubkeys // ignore: cast_nullable_to_non_nullable
+as List<ValueSubkeyRange>,
localSeqs: null == localSeqs
? _value._localSeqs
: localSeqs // ignore: cast_nullable_to_non_nullable

@@ -1469,9 +1483,11 @@ class __$$DHTRecordReportImplCopyWithImpl<$Res>
class _$DHTRecordReportImpl implements _DHTRecordReport {
const _$DHTRecordReportImpl(
{required final List<ValueSubkeyRange> subkeys,
+required final List<ValueSubkeyRange> offlineSubkeys,
required final List<int> localSeqs,
required final List<int> networkSeqs})
: _subkeys = subkeys,
+_offlineSubkeys = offlineSubkeys,
_localSeqs = localSeqs,
_networkSeqs = networkSeqs;

@@ -1486,6 +1502,14 @@ class _$DHTRecordReportImpl implements _DHTRecordReport {
return EqualUnmodifiableListView(_subkeys);
}

+final List<ValueSubkeyRange> _offlineSubkeys;
+@override
+List<ValueSubkeyRange> get offlineSubkeys {
+if (_offlineSubkeys is EqualUnmodifiableListView) return _offlineSubkeys;
+// ignore: implicit_dynamic_type
+return EqualUnmodifiableListView(_offlineSubkeys);
+}
+
final List<int> _localSeqs;
@override
List<int> get localSeqs {

@@ -1504,7 +1528,7 @@ class _$DHTRecordReportImpl implements _DHTRecordReport {

@override
String toString() {
-return 'DHTRecordReport(subkeys: $subkeys, localSeqs: $localSeqs, networkSeqs: $networkSeqs)';
+return 'DHTRecordReport(subkeys: $subkeys, offlineSubkeys: $offlineSubkeys, localSeqs: $localSeqs, networkSeqs: $networkSeqs)';
}

@override

@@ -1513,6 +1537,8 @@ class _$DHTRecordReportImpl implements _DHTRecordReport {
(other.runtimeType == runtimeType &&
other is _$DHTRecordReportImpl &&
const DeepCollectionEquality().equals(other._subkeys, _subkeys) &&
+const DeepCollectionEquality()
+.equals(other._offlineSubkeys, _offlineSubkeys) &&
const DeepCollectionEquality()
.equals(other._localSeqs, _localSeqs) &&
const DeepCollectionEquality()

@@ -1524,6 +1550,7 @@ class _$DHTRecordReportImpl implements _DHTRecordReport {
int get hashCode => Object.hash(
runtimeType,
const DeepCollectionEquality().hash(_subkeys),
+const DeepCollectionEquality().hash(_offlineSubkeys),
const DeepCollectionEquality().hash(_localSeqs),
const DeepCollectionEquality().hash(_networkSeqs));

@@ -1545,6 +1572,7 @@ class _$DHTRecordReportImpl implements _DHTRecordReport {
abstract class _DHTRecordReport implements DHTRecordReport {
const factory _DHTRecordReport(
{required final List<ValueSubkeyRange> subkeys,
+required final List<ValueSubkeyRange> offlineSubkeys,
required final List<int> localSeqs,
required final List<int> networkSeqs}) = _$DHTRecordReportImpl;

@@ -1554,6 +1582,8 @@ abstract class _DHTRecordReport implements DHTRecordReport {
@override
List<ValueSubkeyRange> get subkeys;
@override
+List<ValueSubkeyRange> get offlineSubkeys;
+@override
List<int> get localSeqs;
@override
List<int> get networkSeqs;
@@ -116,6 +116,9 @@ _$DHTRecordReportImpl _$$DHTRecordReportImplFromJson(
       subkeys: (json['subkeys'] as List<dynamic>)
           .map(ValueSubkeyRange.fromJson)
           .toList(),
+      offlineSubkeys: (json['offline_subkeys'] as List<dynamic>)
+          .map(ValueSubkeyRange.fromJson)
+          .toList(),
       localSeqs:
           (json['local_seqs'] as List<dynamic>).map((e) => e as int).toList(),
       networkSeqs:
@@ -126,6 +129,8 @@ Map<String, dynamic> _$$DHTRecordReportImplToJson(
         _$DHTRecordReportImpl instance) =>
     <String, dynamic>{
       'subkeys': instance.subkeys.map((e) => e.toJson()).toList(),
+      'offline_subkeys':
+          instance.offlineSubkeys.map((e) => e.toJson()).toList(),
       'local_seqs': instance.localSeqs,
       'network_seqs': instance.networkSeqs,
     };
@@ -2805,6 +2805,7 @@
       "required": [
         "local_seqs",
         "network_seqs",
+        "offline_subkeys",
         "subkeys"
       ],
       "properties": {
@@ -2826,6 +2827,27 @@
             "minimum": 0.0
           }
         },
+        "offline_subkeys": {
+          "description": "The subkeys that have been writen offline that still need to be flushed",
+          "type": "array",
+          "items": {
+            "type": "array",
+            "items": [
+              {
+                "type": "integer",
+                "format": "uint32",
+                "minimum": 0.0
+              },
+              {
+                "type": "integer",
+                "format": "uint32",
+                "minimum": 0.0
+              }
+            ],
+            "maxItems": 2,
+            "minItems": 2
+          }
+        },
         "subkeys": {
           "description": "The actual subkey range within the schema being reported on This may be a subset of the requested range if it exceeds the schema limits or has more than 512 subkeys",
           "type": "array",
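The schema hunk above encodes each offline range as a two-element `[start, end]` array of `uint32` subkey indices. Below is a minimal sketch of reading that shape, assuming the `serde` (with the derive feature) and `serde_json` crates are available; `ReportSlice` is a hypothetical illustration type, not a veilid-core item.

```rust
use serde::Deserialize;

// Mirrors only the field added in the schema hunk above: an array of
// two-element [start, end] arrays of u32 subkey indices.
#[derive(Debug, Deserialize)]
struct ReportSlice {
    // serde maps each JSON pair [a, b] onto a Rust (u32, u32) tuple.
    offline_subkeys: Vec<(u32, u32)>,
}

fn main() {
    let json = r#"{ "offline_subkeys": [[0, 3], [7, 7]] }"#;
    let report: ReportSlice =
        serde_json::from_str(json).expect("well-formed report JSON");
    // Prints: offline ranges: [(0, 3), (7, 7)]
    println!("offline ranges: {:?}", report.offline_subkeys);
}
```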
@@ -382,26 +382,30 @@ class DHTRecordDescriptor:


 class DHTRecordReport:
     subkeys: list[tuple[ValueSubkey, ValueSubkey]]
+    offline_subkeys: list[tuple[ValueSubkey, ValueSubkey]]
     local_seqs: list[ValueSeqNum]
     network_seqs: list[ValueSeqNum]

     def __init__(
         self,
         subkeys: list[tuple[ValueSubkey, ValueSubkey]],
+        offline_subkeys: list[tuple[ValueSubkey, ValueSubkey]],
         local_seqs: list[ValueSeqNum],
         network_seqs: list[ValueSeqNum],
     ):
         self.subkeys = subkeys
+        self.offline_subkeys = offline_subkeys
         self.local_seqs = local_seqs
         self.network_seqs = network_seqs

     def __repr__(self) -> str:
-        return f"<{self.__class__.__name__}(subkeys={self.subkeys!r}, local_seqs={self.local_seqs!r}, network_seqs={self.network_seqs!r})>"
+        return f"<{self.__class__.__name__}(subkeys={self.subkeys!r}, offline_subkeys={self.offline_subkeys!r}, local_seqs={self.local_seqs!r}, network_seqs={self.network_seqs!r})>"

     @classmethod
     def from_json(cls, j: dict) -> Self:
         return cls(
             [[p[0], p[1]] for p in j["subkeys"]],
+            [[p[0], p[1]] for p in j["offline_subkeys"]],
             [ValueSeqNum(s) for s in j["local_seqs"]],
             [ValueSeqNum(s) for s in j["network_seqs"]],
         )
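For callers of the bindings above, `offline_subkeys` answers one question: have all local writes to the record reached the network yet? A hedged sketch of that interpretation follows, assuming the ranges are inclusive `[start, end]` pairs as the schema suggests; the helper names are illustrative and not part of any Veilid API.

```rust
// Illustration only: interpret the offline ranges reported for a DHT record.
// Assumes inclusive [start, end] subkey ranges.
fn needs_flush(offline_subkeys: &[(u32, u32)]) -> bool {
    !offline_subkeys.is_empty()
}

fn count_offline(offline_subkeys: &[(u32, u32)]) -> u64 {
    offline_subkeys
        .iter()
        .map(|(start, end)| u64::from(end - start) + 1) // assumed inclusive ranges
        .sum()
}

fn main() {
    let offline = vec![(0u32, 3u32), (7, 7)];
    assert!(needs_flush(&offline));
    assert_eq!(count_offline(&offline), 5);
    println!("{} subkeys still waiting to be flushed", count_offline(&offline));
}
```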
@@ -26,7 +26,7 @@ cfg_if! {
         use std::convert::TryInto;
         use std::ffi::CStr;
         use std::io;
-        use std::os::raw::{c_char, c_int};
+        use std::os::raw::c_int;
         use tools::*;

 fn get_interface_name(index: u32) -> io::Result<String> {
@@ -37,6 +37,7 @@ fn get_interface_name(index: u32) -> io::Result<String> {
                bail_io_error_other!("if_indextoname returned null");
            }
        } else {
+           use std::os::raw::c_char;
            if unsafe { if_indextoname(index, ifnamebuf.as_mut_ptr() as *mut c_char) }.is_null() {
                bail_io_error_other!("if_indextoname returned null");
            }
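The hunk above narrows the shared import to `c_int` and pulls `c_char` into the one `cfg_if!` branch that still uses it, so configurations compiled through the other branch no longer carry an unused import. A small sketch of the same scoping idea, using only the standard library; the cfg predicates and function are illustrative, not taken from the Veilid sources.

```rust
// Keep an item's `use` next to the only configuration that needs it, so the
// other configuration does not accumulate an unused import.
#[cfg(unix)]
fn name_buffer_len() -> usize {
    use std::os::raw::c_char; // only this cfg branch touches the C type
    std::mem::size_of::<c_char>() * 16
}

#[cfg(not(unix))]
fn name_buffer_len() -> usize {
    16
}

fn main() {
    println!("interface name buffer: {} bytes", name_buffer_len());
}
```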
@@ -4,7 +4,7 @@ plugins {

 android {
     compileSdkVersion 33
-    buildToolsVersion "33.0.1"
+    buildToolsVersion "34.0.0"

     defaultConfig {
         applicationId "com.veilid.veilid_tools_android_tests"
@@ -38,7 +38,7 @@ android {
         sourceCompatibility JavaVersion.VERSION_1_8
         targetCompatibility JavaVersion.VERSION_1_8
     }
-    ndkVersion '25.1.8937393'
+    ndkVersion '26.3.11579264'

     // Required to copy libc++_shared.so
     externalNativeBuild {
@@ -82,4 +82,4 @@ afterEvaluate {
         def buildType = "${variant.buildType.name.capitalize()}"
         tasks["generate${productFlavor}${buildType}Assets"].dependsOn(tasks["cargoBuild"])
     }
 }
@@ -86,17 +86,23 @@ pub async fn test_one_frag_out_in() {
     // Sending
     info!("sending");
     for _ in 0..10000 {
-        let random_len = (get_random_u32() % 1000) as usize + FRAGMENT_LEN;
-        let mut message = vec![1u8; random_len];
-        random_bytes(&mut message);
-        let remote_addr = random_sockaddr();
+        let to_send = loop {
+            let random_len = (get_random_u32() % 1000) as usize + FRAGMENT_LEN;
+            let mut message = vec![1u8; random_len];
+            random_bytes(&mut message);
+            let remote_addr = random_sockaddr();
+
+            let to_send = (message, remote_addr);
+
+            if !all_sent.contains(&to_send) {
+                break to_send;
+            }
+        };

         // Send single message above fragmentation limit
-        all_sent.insert((message.clone(), remote_addr));
+        all_sent.insert(to_send.clone());
         assert!(matches!(
-            assbuf_out
-                .split_message(message.clone(), remote_addr, sender)
-                .await,
+            assbuf_out.split_message(to_send.0, to_send.1, sender).await,
             Ok(NetworkResult::Value(()))
         ));
     }
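Each of these test hunks applies the same change: wrap the random message and address generation in a `loop` that retries until the candidate is not already in `all_sent`, so a duplicate random draw can no longer make the bookkeeping disagree with what was actually sent. Below is a self-contained illustration of the pattern using only the standard library; the deterministic generator is a stand-in for the tests' random data, not code from the Veilid test suite.

```rust
use std::collections::HashSet;

fn main() {
    let mut all_sent: HashSet<(Vec<u8>, u16)> = HashSet::new();
    let mut counter: u16 = 0;

    for _ in 0..10 {
        // `loop` + `break value` hands the first non-duplicate candidate to `to_send`.
        let to_send = loop {
            let value = counter / 2; // stand-in generator: emits every value twice
            counter += 1;
            let candidate = (vec![value as u8], 5678 + value);
            if !all_sent.contains(&candidate) {
                break candidate;
            }
        };

        // Only now does the bookkeeping record the message as sent.
        all_sent.insert(to_send.clone());
        println!("sending {} bytes to port {}", to_send.0.len(), to_send.1);
    }
    assert_eq!(all_sent.len(), 10);
}
```

The remaining hunks below apply the same retry shape; they differ only in how the candidate message and destination address are generated and in when the byte totals are tallied.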
@@ -150,18 +156,24 @@ pub async fn test_many_frags_out_in() {
     let mut total_sent_size = 0usize;
     info!("sending");
     for _ in 0..1000 {
-        let random_len = (get_random_u32() % 65536) as usize;
-        total_sent_size += random_len;
-        let mut message = vec![1u8; random_len];
-        random_bytes(&mut message);
-        let remote_addr = random_sockaddr();
+        let to_send = loop {
+            let random_len = (get_random_u32() % 65536) as usize;
+            let mut message = vec![1u8; random_len];
+            random_bytes(&mut message);
+            let remote_addr = random_sockaddr();
+
+            let to_send = (message, remote_addr);
+
+            if !all_sent.contains(&to_send) {
+                break to_send;
+            }
+        };

         // Send single message
-        all_sent.insert((message.clone(), remote_addr));
+        all_sent.insert(to_send.clone());
+        total_sent_size += to_send.0.len();
+
         assert!(matches!(
-            assbuf_out
-                .split_message(message.clone(), remote_addr, sender)
-                .await,
+            assbuf_out.split_message(to_send.0, to_send.1, sender).await,
             Ok(NetworkResult::Value(()))
         ));
     }
@@ -215,18 +227,24 @@ pub async fn test_many_frags_out_in_single_host() {
     let mut total_sent_size = 0usize;
     info!("sending");
     for _ in 0..1000 {
-        let random_len = (get_random_u32() % 65536) as usize;
-        total_sent_size += random_len;
-        let mut message = vec![1u8; random_len];
-        random_bytes(&mut message);
-        let remote_addr = SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(1, 2, 3, 4), 5678));
+        let to_send = loop {
+            let remote_addr = random_sockaddr();
+            let random_len = (get_random_u32() % 65536) as usize;
+            let mut message = vec![1u8; random_len];
+            random_bytes(&mut message);
+
+            let to_send = (message.clone(), remote_addr);
+
+            if !all_sent.contains(&to_send) {
+                break to_send;
+            }
+        };

         // Send single message
-        all_sent.insert((message.clone(), remote_addr));
+        all_sent.insert(to_send.clone());
+        total_sent_size += to_send.0.len();
         assert!(matches!(
-            assbuf_out
-                .split_message(message.clone(), remote_addr, sender)
-                .await,
+            assbuf_out.split_message(to_send.0, to_send.1, sender).await,
             Ok(NetworkResult::Value(()))
         ));
     }
|
||||||
let mut total_fragged = 0usize;
|
let mut total_fragged = 0usize;
|
||||||
info!("sending");
|
info!("sending");
|
||||||
for _ in 0..1000 {
|
for _ in 0..1000 {
|
||||||
let random_len = (get_random_u32() % 65536) as usize;
|
let to_send = loop {
|
||||||
if random_len > FRAGMENT_LEN {
|
let remote_addr = random_sockaddr();
|
||||||
total_fragged += 1;
|
let random_len = (get_random_u32() % 65536) as usize;
|
||||||
}
|
if random_len > FRAGMENT_LEN {
|
||||||
total_sent_size += random_len;
|
total_fragged += 1;
|
||||||
let mut message = vec![1u8; random_len];
|
}
|
||||||
random_bytes(&mut message);
|
let mut message = vec![1u8; random_len];
|
||||||
let remote_addr = random_sockaddr();
|
random_bytes(&mut message);
|
||||||
|
|
||||||
|
let to_send = (message.clone(), remote_addr);
|
||||||
|
|
||||||
|
if !all_sent.contains(&to_send) {
|
||||||
|
break to_send;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
// Send single message
|
// Send single message
|
||||||
all_sent.insert((message.clone(), remote_addr));
|
all_sent.insert(to_send.clone());
|
||||||
|
total_sent_size += to_send.0.len();
|
||||||
|
|
||||||
assert!(matches!(
|
assert!(matches!(
|
||||||
assbuf_out
|
assbuf_out.split_message(to_send.0, to_send.1, sender).await,
|
||||||
.split_message(message.clone(), remote_addr, sender)
|
|
||||||
.await,
|
|
||||||
Ok(NetworkResult::Value(()))
|
Ok(NetworkResult::Value(()))
|
||||||
));
|
));
|
||||||
|
|
||||||
|
@ -358,18 +383,24 @@ pub async fn test_many_frags_reordered() {
|
||||||
let mut rng = rand::thread_rng();
|
let mut rng = rand::thread_rng();
|
||||||
info!("sending");
|
info!("sending");
|
||||||
for _ in 0..1000 {
|
for _ in 0..1000 {
|
||||||
let random_len = (get_random_u32() % 65536) as usize;
|
let to_send = loop {
|
||||||
total_sent_size += random_len;
|
let random_len = (get_random_u32() % 65536) as usize;
|
||||||
let mut message = vec![1u8; random_len];
|
let mut message = vec![1u8; random_len];
|
||||||
random_bytes(&mut message);
|
random_bytes(&mut message);
|
||||||
let remote_addr = SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(1, 2, 3, 4), 5678));
|
let remote_addr = SocketAddr::V4(SocketAddrV4::new(Ipv4Addr::new(1, 2, 3, 4), 5678));
|
||||||
|
|
||||||
|
let to_send = (message.clone(), remote_addr);
|
||||||
|
|
||||||
|
if !all_sent.contains(&to_send) {
|
||||||
|
break to_send;
|
||||||
|
}
|
||||||
|
};
|
||||||
|
|
||||||
// Send single message
|
// Send single message
|
||||||
all_sent.insert((message.clone(), remote_addr));
|
all_sent.insert(to_send.clone());
|
||||||
|
total_sent_size += to_send.0.len();
|
||||||
assert!(matches!(
|
assert!(matches!(
|
||||||
assbuf_out
|
assbuf_out.split_message(to_send.0, to_send.1, sender).await,
|
||||||
.split_message(message.clone(), remote_addr, sender)
|
|
||||||
.await,
|
|
||||||
Ok(NetworkResult::Value(()))
|
Ok(NetworkResult::Value(()))
|
||||||
));
|
));
|
||||||
|
|
||||||
|
|
|
@@ -272,7 +272,7 @@ pub fn change_log_ignore(layer: String, log_ignore: String) {
         // Change all layers
         for f in filters.values() {
             f.set_ignore_list(Some(VeilidLayerFilter::apply_ignore_change(
-                f.ignore_list(),
+                &f.ignore_list(),
                 log_ignore.clone(),
             )));
         }
@@ -280,7 +280,7 @@ pub fn change_log_ignore(layer: String, log_ignore: String) {
         // Change a specific layer
         let f = filters.get(layer.as_str()).unwrap();
         f.set_ignore_list(Some(VeilidLayerFilter::apply_ignore_change(
-            f.ignore_list(),
+            &f.ignore_list(),
             log_ignore.clone(),
         )));
     }
@@ -136,18 +136,16 @@ impl VeilidClient {
        if layer.is_empty() {
            // Change all layers
            for f in filters.values() {
-               f.set_ignore_list(Some(VeilidLayerFilter::apply_ignore_change(
-                   f.ignore_list(),
-                   changes.clone(),
-               )));
+               let mut ignore_list = f.ignore_list();
+               VeilidLayerFilter::apply_ignore_change_list(&mut ignore_list, &changes);
+               f.set_ignore_list(Some(ignore_list));
            }
        } else {
            // Change a specific layer
            let f = filters.get(layer.as_str()).unwrap();
-           f.set_ignore_list(Some(VeilidLayerFilter::apply_ignore_change(
-               f.ignore_list(),
-               changes.clone(),
-           )));
+           let mut ignore_list = f.ignore_list();
+           VeilidLayerFilter::apply_ignore_change_list(&mut ignore_list, &changes);
+           f.set_ignore_list(Some(ignore_list));
        }
    }

    /// Shut down Veilid and terminate the API.
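The native logging path above keeps `apply_ignore_change` but now passes the current ignore list by reference, while the WASM path switches to `apply_ignore_change_list`, which edits a list in place before it is set back on the filter. A hedged sketch contrasting the two call shapes follows; both functions here have assumed signatures and simplified semantics and are not the actual veilid-core items.

```rust
// Stand-in: builds a new list from a borrowed one (the native call shape).
fn apply_ignore_change(current: &[String], change: String) -> Vec<String> {
    let mut next = current.to_vec();
    if !change.is_empty() && !next.contains(&change) {
        next.push(change);
    }
    next
}

// Stand-in: edits the list in place (the WASM call shape).
fn apply_ignore_change_list(list: &mut Vec<String>, changes: &[String]) {
    for change in changes {
        if !change.is_empty() && !list.contains(change) {
            list.push(change.clone());
        }
    }
}

fn main() {
    // Borrow the current list, then set the returned list back.
    let current = vec!["hyper".to_string()];
    let updated = apply_ignore_change(&current, "quinn".to_string());
    assert_eq!(updated, ["hyper", "quinn"]);
    assert_eq!(current.len(), 1); // still usable: it was only borrowed

    // Copy the list out, mutate it in place, then set it back.
    let mut ignore_list = updated;
    apply_ignore_change_list(&mut ignore_list, &["veilid_api".to_string()]);
    assert_eq!(ignore_list.len(), 3);
}
```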
veilid-wasm/tests/package-lock.json (generated, 2 changes)
@@ -21,7 +21,7 @@
     },
     "../pkg": {
       "name": "veilid-wasm",
-      "version": "0.2.5",
+      "version": "0.3.1",
       "dev": true,
       "license": "MPL-2.0"
     },