Clippy and other fixes for Rust 1.85

This commit is contained in:
Christien Rioux 2025-03-28 13:17:15 -04:00
parent a46a2e51d4
commit 4f8765d25e
38 changed files with 138 additions and 506 deletions

53
Cargo.lock generated
View File

@ -1006,9 +1006,9 @@ dependencies = [
[[package]]
name = "clap"
version = "4.5.32"
version = "4.5.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6088f3ae8c3608d19260cd7445411865a485688711b78b5be70d78cd96136f83"
checksum = "e958897981290da2a852763fe9cdb89cd36977a5d729023127095fa94d95e2ff"
dependencies = [
"clap_builder",
"clap_derive",
@ -1016,9 +1016,9 @@ dependencies = [
[[package]]
name = "clap_builder"
version = "4.5.32"
version = "4.5.34"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "22a7ef7f676155edfb82daa97f99441f3ebf4a58d5e32f295a56259f1b6facc8"
checksum = "83b0f35019843db2160b5bb19ae09b4e6411ac33fc6a712003c33e03090e2489"
dependencies = [
"anstream",
"anstyle",
@ -1143,7 +1143,7 @@ dependencies = [
"pathdiff",
"serde",
"winnow 0.7.4",
"yaml-rust2 0.10.0",
"yaml-rust2 0.10.1",
]
[[package]]
@ -1846,9 +1846,9 @@ dependencies = [
[[package]]
name = "event-listener-strategy"
version = "0.5.3"
version = "0.5.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3c3e4e0dd3673c1139bf041f3008816d9cf2946bbfac2945c09e523b8d7b05b2"
checksum = "8be9f3dfaaffdae2972880079a491a1a8bb7cbed0b8dd7a347f668b4150a3b93"
dependencies = [
"event-listener 5.4.0",
"pin-project-lite",
@ -2657,14 +2657,15 @@ dependencies = [
[[package]]
name = "iana-time-zone"
version = "0.1.61"
version = "0.1.62"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "235e081f3925a06703c2d0117ea8b91f042756fd6e7a6e5d901e8ca1a996b220"
checksum = "b2fd658b06e56721792c5df4475705b6cda790e9298d19d2f8af083457bcd127"
dependencies = [
"android_system_properties",
"core-foundation-sys",
"iana-time-zone-haiku",
"js-sys",
"log",
"wasm-bindgen",
"windows-core 0.52.0",
]
@ -2719,9 +2720,9 @@ dependencies = [
[[package]]
name = "icu_locid_transform_data"
version = "1.5.0"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fdc8ff3388f852bede6b579ad4e978ab004f139284d7b28715f773507b946f6e"
checksum = "7515e6d781098bf9f7205ab3fc7e9709d34554ae0b21ddbcb5febfa4bc7df11d"
[[package]]
name = "icu_normalizer"
@ -2743,9 +2744,9 @@ dependencies = [
[[package]]
name = "icu_normalizer_data"
version = "1.5.0"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f8cafbf7aa791e9b22bec55a167906f9e1215fd475cd22adfcf660e03e989516"
checksum = "c5e8338228bdc8ab83303f16b797e177953730f601a96c25d10cb3ab0daa0cb7"
[[package]]
name = "icu_properties"
@ -2764,9 +2765,9 @@ dependencies = [
[[package]]
name = "icu_properties_data"
version = "1.5.0"
version = "1.5.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "67a8effbc3dd3e4ba1afa8ad918d5684b8868b3b26500753effea8d2eed19569"
checksum = "85fb8799753b75aee8d2a21d7c14d9f38921b54b3dbda10f5a3c7a7b82dba5e2"
[[package]]
name = "icu_provider"
@ -3214,9 +3215,9 @@ dependencies = [
[[package]]
name = "log"
version = "0.4.26"
version = "0.4.27"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "30bde2b3dc3671ae49d8e2e9f044c7c005836e7a023ee57cffa25ab82764bb9e"
checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94"
dependencies = [
"value-bag",
]
@ -3821,9 +3822,9 @@ dependencies = [
[[package]]
name = "once_cell"
version = "1.21.1"
version = "1.21.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d75b0bedcc4fe52caa0e03d9f1151a323e4aa5e2d78ba3580400cd3c9e2bc4bc"
checksum = "c2806eaa3524762875e21c3dcd057bc4b7bfa01ce4da8d46be1cd43649e1cc6b"
[[package]]
name = "opaque-debug"
@ -6341,9 +6342,9 @@ checksum = "ba73ea9cf16a25df0c8caa16c51acb937d5712a8429db78a3ee29d5dcacd3a65"
[[package]]
name = "value-bag"
version = "1.10.0"
version = "1.11.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3ef4c4aa54d5d05a279399bfa921ec387b7aba77caf7a682ae8d86785b8fdad2"
checksum = "943ce29a8a743eb10d6082545d861b24f9d1b160b7d741e0f2cdf726bec909c5"
[[package]]
name = "vcpkg"
@ -6375,7 +6376,7 @@ dependencies = [
"async-tungstenite 0.23.0",
"cfg-if 1.0.0",
"chrono",
"clap 4.5.32",
"clap 4.5.34",
"config 0.15.11",
"console",
"crossbeam-channel",
@ -6578,7 +6579,7 @@ dependencies = [
"backtrace",
"cfg-if 1.0.0",
"chrono",
"clap 4.5.32",
"clap 4.5.34",
"color-eyre",
"config 0.14.1",
"console-subscriber",
@ -6636,7 +6637,7 @@ dependencies = [
"backtrace",
"cfg-if 1.0.0",
"chrono",
"clap 4.5.32",
"clap 4.5.34",
"console_error_panic_hook",
"ctrlc",
"eyre",
@ -7441,9 +7442,9 @@ dependencies = [
[[package]]
name = "yaml-rust2"
version = "0.10.0"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "232bdb534d65520716bef0bbb205ff8f2db72d807b19c0bc3020853b92a0cd4b"
checksum = "818913695e83ece1f8d2a1c52d54484b7b46d0f9c06beeb2649b9da50d9b512d"
dependencies = [
"arraydeque",
"encoding_rs",

11
scripts/clippy_all.sh Executable file
View File

@ -0,0 +1,11 @@
#!/bin/bash
# Run clippy across every supported target so platform-specific lints are caught.
SCRIPTDIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
# Quote the path: an unquoted $SCRIPTDIR breaks if the checkout path contains spaces.
pushd "$SCRIPTDIR/.." >/dev/null
cargo-zigbuild clippy --target x86_64-unknown-linux-gnu
cargo-zigbuild clippy --target x86_64-unknown-linux-gnu --manifest-path=veilid-server/Cargo.toml --no-default-features --features=default-async-std
cargo-zigbuild clippy --target x86_64-pc-windows-gnu
cargo-zigbuild clippy --target aarch64-apple-darwin
cargo clippy --manifest-path=veilid-wasm/Cargo.toml --target wasm32-unknown-unknown
popd >/dev/null

View File

@ -409,26 +409,20 @@ Core Debug Commands:
let ipc_path_opt = self.inner_mut().ipc_path.clone();
if let Some(ipc_path) = ipc_path_opt {
if first {
info!(
"Connecting to server at {}",
ipc_path.to_string_lossy().to_string()
);
info!("Connecting to server at {}", ipc_path.to_string_lossy());
self.set_connection_state(ConnectionState::RetryingIPC(
ipc_path.clone(),
SystemTime::now(),
));
} else {
debug!(
"Retrying connection to {}",
ipc_path.to_string_lossy().to_string()
);
debug!("Retrying connection to {}", ipc_path.to_string_lossy());
}
let capi = self.capi();
let res = capi.ipc_connect(ipc_path.clone()).await;
if res.is_ok() {
info!(
"Connection to server at {} terminated normally",
ipc_path.to_string_lossy().to_string()
ipc_path.to_string_lossy()
);
break;
}

View File

@ -286,5 +286,8 @@ reqwest = { version = "0.11", features = ["blocking"], optional = true }
[package.metadata.wasm-pack.profile.release]
wasm-opt = ["-O", "--enable-mutable-globals"]
[package.metadata.wasm-pack.profile.dev.wasm-bindgen]
dwarf-debug-info = true
[lints]
workspace = true

View File

@ -50,7 +50,7 @@ pub struct VeilidComponentGuard<'a, T: VeilidComponent + Send + Sync + 'static>
_phantom: core::marker::PhantomData<&'a T>,
}
impl<'a, T> core::ops::Deref for VeilidComponentGuard<'a, T>
impl<T> core::ops::Deref for VeilidComponentGuard<'_, T>
where
T: VeilidComponent + Send + Sync + 'static,
{

View File

@ -3,6 +3,10 @@ use super::*;
use crate::*;
use core::convert::TryInto;
pub const MAX_ENVELOPE_SIZE: usize = 65507;
pub const MIN_ENVELOPE_SIZE: usize = 0x6A + 0x40; // Header + Signature
pub const ENVELOPE_MAGIC: &[u8; 3] = b"VLD";
/// Envelopes are versioned
///
/// These are the formats for the on-the-wire serialization performed by this module
@ -29,11 +33,6 @@ use core::convert::TryInto;
/// signature: [u8; 64], // 0x?? (end-0x40): Signature of the entire envelope including header is appended to the packet
/// // entire header needs to be included in message digest, relays are not allowed to modify the envelope without invalidating the signature.
/// }
pub const MAX_ENVELOPE_SIZE: usize = 65507;
pub const MIN_ENVELOPE_SIZE: usize = 0x6A + 0x40; // Header + Signature
pub const ENVELOPE_MAGIC: &[u8; 3] = b"VLD";
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct Envelope {
version: EnvelopeVersion,

View File

@ -19,7 +19,7 @@ impl<'a> CryptoSystemGuard<'a> {
}
}
impl<'a> core::ops::Deref for CryptoSystemGuard<'a> {
impl core::ops::Deref for CryptoSystemGuard<'_> {
type Target = dyn CryptoSystem + Send + Sync;
fn deref(&self) -> &Self::Target {
@ -39,7 +39,7 @@ async fn yielding<R, T: FnOnce() -> R>(x: T) -> R {
out
}
impl<'a> AsyncCryptoSystemGuard<'a> {
impl AsyncCryptoSystemGuard<'_> {
// Accessors
pub fn kind(&self) -> CryptoKind {
self.guard.kind()

View File

@ -3,6 +3,11 @@ use super::*;
use crate::*;
use core::convert::TryInto;
pub const MAX_RECEIPT_SIZE: usize = 1380;
pub const MAX_EXTRA_DATA_SIZE: usize = MAX_RECEIPT_SIZE - MIN_RECEIPT_SIZE; // 1250
pub const MIN_RECEIPT_SIZE: usize = 130;
pub const RECEIPT_MAGIC: &[u8; 3] = b"RCP";
/// Out-of-band receipts are versioned along with envelope versions
///
/// These are the formats for the on-the-wire serialization performed by this module
@ -26,12 +31,6 @@ use core::convert::TryInto;
/// extra_data: [u8; ??], // 0x42: Extra data is appended (arbitrary extra data, not encrypted by receipt itself, maximum size is 1250 bytes)
/// signature: [u8; 64], // 0x?? (end-0x40): Signature of the entire receipt including header and extra data is appended to the packet
/// }
pub const MAX_RECEIPT_SIZE: usize = 1380;
pub const MAX_EXTRA_DATA_SIZE: usize = MAX_RECEIPT_SIZE - MIN_RECEIPT_SIZE; // 1250
pub const MIN_RECEIPT_SIZE: usize = 130;
pub const RECEIPT_MAGIC: &[u8; 3] = b"RCP";
#[derive(Debug, Clone, PartialEq, Eq, Default)]
pub struct Receipt {
version: u8,

View File

@ -53,7 +53,7 @@ where
}
}
impl<'a, F, Out> VisitOutput<Out> for FmtStripVisitor<'a, F, Out>
impl<F, Out> VisitOutput<Out> for FmtStripVisitor<'_, F, Out>
where
F: Visit + VisitOutput<Out>,
{
@ -70,7 +70,7 @@ where
}
}
impl<'a, F, Out> Visit for FmtStripVisitor<'a, F, Out>
impl<F, Out> Visit for FmtStripVisitor<'_, F, Out>
where
F: Visit + VisitOutput<Out>,
{

View File

@ -13,7 +13,7 @@ pub const ADDRESS_CONSISTENCY_DETECTION_COUNT: usize = 5;
/// Length of consistent/inconsistent result cache for detection
pub const ADDRESS_CHECK_CACHE_SIZE: usize = 10;
/// Length of consistent/inconsistent result cache for detection
// /// Length of consistent/inconsistent result cache for detection
// pub const ADDRESS_CHECK_PEER_COUNT: usize = 256;
// /// Frequency of address checks
// pub const PUBLIC_ADDRESS_CHECK_TASK_INTERVAL_SECS: u32 = 60;

View File

@ -521,7 +521,7 @@ impl ConnectionManager {
}
///////////////////////////////////////////////////////////////////////////////////////////////////////
/// Asynchronous Event Processor
// Asynchronous Event Processor
async fn process_connection_manager_event(
&self,

View File

@ -351,7 +351,7 @@ impl Network {
// Add static public dialinfo if it's configured
if let Some(url) = url.as_ref() {
let mut split_url = SplitUrl::from_str(url).wrap_err("couldn't split url")?;
if split_url.scheme.to_ascii_lowercase() != "ws" {
if !split_url.scheme.eq_ignore_ascii_case("ws") {
bail!("WS URL must use 'ws://' scheme");
}
"ws".clone_into(&mut split_url.scheme);
@ -478,7 +478,7 @@ impl Network {
if let Some(url) = url.as_ref() {
// Add static public dialinfo if it's configured
let mut split_url = SplitUrl::from_str(url)?;
if split_url.scheme.to_ascii_lowercase() != "wss" {
if !split_url.scheme.eq_ignore_ascii_case("wss") {
bail!("WSS URL must use 'wss://' scheme");
}
"wss".clone_into(&mut split_url.scheme);

View File

@ -981,7 +981,7 @@ impl BucketEntryInner {
self.peer_stats.rpc_stats.last_question_ts
}
/// Return the last time we asked a node a question
// /// Return the last time we asked a node a question
// fn last_question_time(&self) -> Option<Timestamp> {
// self.peer_stats.rpc_stats.last_question_ts
// }
@ -1112,7 +1112,7 @@ impl BucketEntryInner {
}
////////////////////////////////////////////////////////////////
/// Called when rpc processor things happen
// Called when rpc processor things happen
pub(super) fn question_sent(
&mut self,

View File

@ -209,7 +209,7 @@ impl RoutingTable {
}
/////////////////////////////////////
/// Initialization
// Initialization
/// Called to initialize the routing table after it is created
async fn init_async(&self) -> EyreResult<()> {
@ -517,7 +517,7 @@ impl RoutingTable {
}
/////////////////////////////////////
/// Locked operations
// Locked operations
pub fn routing_domain_for_address(&self, address: Address) -> Option<RoutingDomain> {
self.inner.read().routing_domain_for_address(address)

View File

@ -16,14 +16,13 @@ pub(crate) struct NodeRefLock<
}
impl<
'a,
N: NodeRefAccessorsTrait
+ NodeRefOperateTrait
+ VeilidComponentRegistryAccessor
+ fmt::Debug
+ fmt::Display
+ Clone,
> VeilidComponentRegistryAccessor for NodeRefLock<'a, N>
> VeilidComponentRegistryAccessor for NodeRefLock<'_, N>
{
fn registry(&self) -> VeilidComponentRegistry {
self.nr.registry()
@ -45,8 +44,8 @@ impl<'a, N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Disp
}
}
impl<'a, N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Display + Clone>
NodeRefAccessorsTrait for NodeRefLock<'a, N>
impl<N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Display + Clone>
NodeRefAccessorsTrait for NodeRefLock<'_, N>
{
fn entry(&self) -> Arc<BucketEntry> {
self.nr.entry()
@ -73,8 +72,8 @@ impl<'a, N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Disp
}
}
impl<'a, N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Display + Clone>
NodeRefOperateTrait for NodeRefLock<'a, N>
impl<N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Display + Clone>
NodeRefOperateTrait for NodeRefLock<'_, N>
{
fn operate<T, F>(&self, f: F) -> T
where
@ -107,21 +106,21 @@ impl<'a, N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Disp
}
}
impl<'a, N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Display + Clone>
NodeRefCommonTrait for NodeRefLock<'a, N>
impl<N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Display + Clone>
NodeRefCommonTrait for NodeRefLock<'_, N>
{
}
impl<'a, N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Display + Clone>
fmt::Display for NodeRefLock<'a, N>
impl<N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Display + Clone>
fmt::Display for NodeRefLock<'_, N>
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.nr)
}
}
impl<'a, N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Display + Clone>
fmt::Debug for NodeRefLock<'a, N>
impl<N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Display + Clone> fmt::Debug
for NodeRefLock<'_, N>
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("NodeRefLock").field("nr", &self.nr).finish()

View File

@ -16,14 +16,13 @@ pub(crate) struct NodeRefLockMut<
}
impl<
'a,
N: NodeRefAccessorsTrait
+ NodeRefOperateTrait
+ VeilidComponentRegistryAccessor
+ fmt::Debug
+ fmt::Display
+ Clone,
> VeilidComponentRegistryAccessor for NodeRefLockMut<'a, N>
> VeilidComponentRegistryAccessor for NodeRefLockMut<'_, N>
{
fn registry(&self) -> VeilidComponentRegistry {
self.nr.registry()
@ -46,8 +45,8 @@ impl<'a, N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Disp
}
}
impl<'a, N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Display + Clone>
NodeRefAccessorsTrait for NodeRefLockMut<'a, N>
impl<N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Display + Clone>
NodeRefAccessorsTrait for NodeRefLockMut<'_, N>
{
fn entry(&self) -> Arc<BucketEntry> {
self.nr.entry()
@ -74,8 +73,8 @@ impl<'a, N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Disp
}
}
impl<'a, N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Display + Clone>
NodeRefOperateTrait for NodeRefLockMut<'a, N>
impl<N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Display + Clone>
NodeRefOperateTrait for NodeRefLockMut<'_, N>
{
fn operate<T, F>(&self, f: F) -> T
where
@ -110,21 +109,21 @@ impl<'a, N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Disp
}
}
impl<'a, N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Display + Clone>
NodeRefCommonTrait for NodeRefLockMut<'a, N>
impl<N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Display + Clone>
NodeRefCommonTrait for NodeRefLockMut<'_, N>
{
}
impl<'a, N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Display + Clone>
fmt::Display for NodeRefLockMut<'a, N>
impl<N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Display + Clone>
fmt::Display for NodeRefLockMut<'_, N>
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.nr)
}
}
impl<'a, N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Display + Clone>
fmt::Debug for NodeRefLockMut<'a, N>
impl<N: NodeRefAccessorsTrait + NodeRefOperateTrait + fmt::Debug + fmt::Display + Clone> fmt::Debug
for NodeRefLockMut<'_, N>
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("NodeRefLockMut")

View File

@ -1337,12 +1337,12 @@ impl RoutingTableInner {
}
// reliable nodes come first, pessimistically treating our own node as unreliable
let ra = a_entry.as_ref().map_or(false, |x| {
x.with_inner(|x| x.check_unreliable(cur_ts).is_none())
});
let rb = b_entry.as_ref().map_or(false, |x| {
x.with_inner(|x| x.check_unreliable(cur_ts).is_none())
});
let ra = a_entry
.as_ref()
.is_some_and(|x| x.with_inner(|x| x.check_unreliable(cur_ts).is_none()));
let rb = b_entry
.as_ref()
.is_some_and(|x| x.with_inner(|x| x.check_unreliable(cur_ts).is_none()));
if ra != rb {
if ra {
return core::cmp::Ordering::Less;

View File

@ -30,7 +30,7 @@ impl<'a> RoutingDomainEditorLocalNetwork<'a> {
}
}
impl<'a> RoutingDomainEditorCommonTrait for RoutingDomainEditorLocalNetwork<'a> {
impl RoutingDomainEditorCommonTrait for RoutingDomainEditorLocalNetwork<'_> {
#[instrument(level = "debug", skip(self))]
fn clear_dial_info_details(
&mut self,

View File

@ -41,7 +41,7 @@ impl<'a> RoutingDomainEditorPublicInternet<'a> {
}
}
impl<'a> RoutingDomainEditorCommonTrait for RoutingDomainEditorPublicInternet<'a> {
impl RoutingDomainEditorCommonTrait for RoutingDomainEditorPublicInternet<'_> {
#[instrument(level = "debug", skip(self))]
fn clear_dial_info_details(
&mut self,

View File

@ -97,8 +97,7 @@ impl SignedNodeInfo {
}
}
// Check our relay if we have one
return self
.relay_info()
self.relay_info()
.map(|relay_ni| {
for did in relay_ni.dial_info_detail_list() {
match sequencing {
@ -112,7 +111,7 @@ impl SignedNodeInfo {
}
false
})
.unwrap_or_default();
.unwrap_or_default()
}
#[cfg(feature = "geolocation")]

View File

@ -207,7 +207,7 @@ impl RPCProcessor {
}
/////////////////////////////////////
/// Initialization
// Initialization
#[expect(clippy::unused_async)]
async fn init_async(&self) -> EyreResult<()> {

View File

@ -234,7 +234,7 @@ impl RoutingContext {
}
///////////////////////////////////
/// DHT Records
// DHT Records
/// Deterministically builds the record key for a given schema and owner public key
#[instrument(target = "veilid_api", level = "debug", fields(__VEILID_LOG_KEY = self.log_key()), ret, err)]

View File

@ -5,7 +5,6 @@ use super::*;
/// Required on 32-bit platforms for serialization because Rust aligns u64 on 4 byte boundaries.
/// Some zero-copy serialization frameworks also want 8-byte alignment.
/// Supports serializing to string for JSON as well, since JSON can't handle 64-bit numbers to Javascript.
macro_rules! aligned_u64_type {
($name:ident) => {
#[derive(

View File

@ -209,7 +209,7 @@ pub extern "C" fn initialize_veilid_flutter(
}
//////////////////////////////////////////////////////////////////////////////////
/// C-compatible FFI Functions
// C-compatible FFI Functions
#[no_mangle]
#[instrument]

View File

@ -192,6 +192,9 @@ veilid-tracing-wasm = "^0"
[package.metadata.wasm-pack.profile.release]
wasm-opt = ["-O", "--enable-mutable-globals"]
[package.metadata.wasm-pack.profile.dev.wasm-bindgen]
dwarf-debug-info = true
[package.metadata.ios]
build_targets = [
"aarch64-apple-ios",

View File

@ -63,7 +63,7 @@ pub struct IpcIncoming<'a> {
internal: Incoming<'a>,
}
impl<'a> Drop for IpcIncoming<'a> {
impl Drop for IpcIncoming<'_> {
fn drop(&mut self) {
// Clean up IPC path
if let Err(e) = std::fs::remove_file(&self.path) {
@ -72,7 +72,7 @@ impl<'a> Drop for IpcIncoming<'a> {
}
}
impl<'a> Stream for IpcIncoming<'a> {
impl Stream for IpcIncoming<'_> {
type Item = io::Result<IpcStream>;
fn poll_next(
@ -124,7 +124,7 @@ impl IpcListener {
}
/// Returns a stream of incoming connections.
pub fn incoming<'a>(&'a mut self) -> io::Result<IpcIncoming<'a>> {
pub fn incoming(&mut self) -> io::Result<IpcIncoming<'_>> {
if self.path.is_none() {
return Err(io::Error::from(io::ErrorKind::NotConnected));
}

View File

@ -71,7 +71,7 @@ pub struct IpcIncoming<'a> {
phantom: std::marker::PhantomData<&'a ()>,
}
impl<'a> Stream for IpcIncoming<'a> {
impl Stream for IpcIncoming<'_> {
type Item = io::Result<IpcStream>;
fn poll_next(
@ -88,7 +88,7 @@ impl<'a> Stream for IpcIncoming<'a> {
}
}
impl<'a> Drop for IpcIncoming<'a> {
impl Drop for IpcIncoming<'_> {
fn drop(&mut self) {
// Clean up IPC path
if let Err(e) = std::fs::remove_file(&self.path) {

View File

@ -121,7 +121,7 @@ pub struct IpcIncoming<'a> {
phantom: std::marker::PhantomData<&'a ()>,
}
impl<'t> Stream for IpcIncoming<'t> {
impl Stream for IpcIncoming<'_> {
type Item = io::Result<IpcStream>;
fn poll_next<'a>(

View File

@ -293,7 +293,7 @@ pub struct WindowsInterfacesIterator<'a> {
_phantom: std::marker::PhantomData<&'a u8>,
}
impl<'a> Iterator for WindowsInterfacesIterator<'a> {
impl Iterator for WindowsInterfacesIterator<'_> {
type Item = IpAdapterAddresses;
#[allow(unsafe_code)]

View File

@ -129,6 +129,7 @@ cfg_if! {
}
}
#[must_use]
pub fn async_tcp_listener_incoming(
tcp_listener: TcpListener,
) -> Pin<Box<impl futures_util::stream::Stream<Item = std::io::Result<TcpStream>> + Send>> {
@ -143,6 +144,7 @@ pub fn async_tcp_listener_incoming(
}
}
#[must_use]
pub fn split_async_tcp_stream(tcp_stream: TcpStream) -> (ReadHalf, WriteHalf) {
cfg_if! {
if #[cfg(feature="rt-async-std")] {

View File

@ -101,7 +101,7 @@ fn url_decode<S: AsRef<str>>(s: S) -> Result<String, SplitUrlError> {
if (i + 1) >= end {
return Err(SplitUrlError::new("Invalid URL encoding"));
}
b = hex_decode(url_bytes[i])? << 4 | hex_decode(url_bytes[i + 1])?;
b = (hex_decode(url_bytes[i])? << 4) | hex_decode(url_bytes[i + 1])?;
i += 2;
}
dec_bytes.push(b);

View File

@ -22,7 +22,7 @@ pub struct StartupLockGuard<'a> {
success_value: bool,
}
impl<'a> StartupLockGuard<'a> {
impl StartupLockGuard<'_> {
/// Call this function at the end of a successful startup or shutdown
/// operation to switch the state of the StartupLock.
pub fn success(mut self) {

View File

@ -465,7 +465,7 @@ struct AlignToEight([u8; 8]);
/// Ensure you immediately initialize this vector as it could contain sensitive data
#[must_use]
pub unsafe fn aligned_8_u8_vec_uninit(n_bytes: usize) -> Vec<u8> {
let n_units = (n_bytes + mem::size_of::<AlignToEight>() - 1) / mem::size_of::<AlignToEight>();
let n_units = n_bytes.div_ceil(mem::size_of::<AlignToEight>());
let mut aligned: Vec<AlignToEight> = Vec::with_capacity(n_units);
let ptr = aligned.as_mut_ptr();
let cap_units = aligned.capacity();

View File

@ -50,3 +50,6 @@ parking_lot = "0.12.3"
[lints]
workspace = true
[package.metadata.wasm-pack.profile.dev.wasm-bindgen]
dwarf-debug-info = true

View File

@ -190,7 +190,7 @@ impl VeilidRoutingContext {
}
///////////////////////////////////
/// DHT Records
// DHT Records
/// Deterministically builds the record key for a given schema and owner public key
#[allow(clippy::unused_async)]

View File

@ -1,377 +0,0 @@
#!/usr/bin/env python3
# Copyright 2018 The Emscripten Authors. All rights reserved.
# Emscripten is available under two separate licenses, the MIT license and the
# University of Illinois/NCSA Open Source License. Both these licenses can be
# found in the LICENSE file.
"""Utility tools that extracts DWARF information encoded in a wasm output
produced by the LLVM tools, and encodes it as a wasm source map. Additionally,
it can collect original sources, change files prefixes, and strip debug
sections from a wasm file.
"""
import argparse
from collections import OrderedDict
import json
import logging
from math import floor, log
import os
import re
from subprocess import Popen, PIPE
from pathlib import Path
import sys
__scriptdir__ = os.path.dirname(os.path.abspath(__file__))
__rootdir__ = os.path.dirname(__scriptdir__)
sys.path.append(__rootdir__)
logger = logging.getLogger('wasm-sourcemap')
def parse_args():
    """Build and run the command-line parser for the wasm-sourcemap tool."""
    ap = argparse.ArgumentParser(prog='wasm-sourcemap.py', description=__doc__)
    ap.add_argument('wasm', help='wasm file')
    ap.add_argument('-o', '--output', help='output source map')
    ap.add_argument('-p', '--prefix', nargs='*', default=[],
                    help='replace source debug filename prefix for source map')
    ap.add_argument('-s', '--sources', action='store_true',
                    help='read and embed source files from file system into source map')
    ap.add_argument('-l', '--load-prefix', nargs='*', default=[],
                    help='replace source debug filename prefix for reading sources from file system (see also --sources)')
    ap.add_argument('-w', nargs='?', help='set output wasm file')
    ap.add_argument('-x', '--strip', action='store_true',
                    help='removes debug and linking sections')
    ap.add_argument('-u', '--source-map-url', nargs='?',
                    help='specifies sourceMappingURL section contest')
    ap.add_argument('--dwarfdump', help="path to llvm-dwarfdump executable")
    ap.add_argument('--dwarfdump-output', nargs='?', help=argparse.SUPPRESS)
    ap.add_argument('--basepath',
                    help='base path for source files, which will be relative to this')
    return ap.parse_args()
class Prefixes:
    """Resolves debug-info filename prefixes: each rule either rewrites a
    prefix ("old=new") or strips it (bare "old"). Resolutions are memoized."""

    def __init__(self, args):
        # Parse "prefix=replacement" pairs; a bare prefix means "strip it".
        # split('=', 1) so a replacement containing '=' does not crash the
        # two-target unpacking (bug fix).
        prefixes = []
        for p in args:
            if '=' in p:
                prefix, replacement = p.split('=', 1)
                prefixes.append({'prefix': prefix, 'replacement': replacement})
            else:
                prefixes.append({'prefix': p, 'replacement': None})
        self.prefixes = prefixes
        self.cache = {}

    def resolve(self, name):
        """Apply the first matching prefix rule to `name`; unmatched names
        are returned unchanged."""
        if name in self.cache:
            return self.cache[name]
        # Default to the unchanged name: without this, a name matching no
        # prefix left `result` unbound and raised UnboundLocalError (bug fix).
        result = name
        for p in self.prefixes:
            if name.startswith(p['prefix']):
                if p['replacement'] is None:
                    result = name[len(p['prefix'])::]
                else:
                    result = p['replacement'] + name[len(p['prefix'])::]
                break
        self.cache[name] = result
        return result
# SourceMapPrefixes contains resolver for file names that are:
# - "sources" is for names that output to source maps JSON
# - "load" is for paths that used to load source text
class SourceMapPrefixes:
    """Bundles the two filename resolvers used while emitting a source map:
    `sources` rewrites names written into the JSON, `load` rewrites paths
    used to read source text from disk."""

    def __init__(self, sources, load):
        self.sources = sources
        self.load = load

    def provided(self):
        # True when at least one of the two resolvers has any rule configured.
        has_any = self.sources.prefixes or self.load.prefixes
        return bool(has_any)
def encode_vlq(n):
    """Encode an integer as a base64 VLQ string (source-map `mappings` format)."""
    chars = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"
    # The sign is folded into the lowest bit: value = |n|*2 (+1 if negative).
    value = (n << 1) if n >= 0 else ((-n << 1) + 1)
    pieces = []
    while value > 31:
        # 5 payload bits per digit; bit 5 (the +32) marks a continuation.
        pieces.append(chars[32 + (value & 31)])
        value >>= 5
    pieces.append(chars[value])
    return ''.join(pieces)
def read_var_uint(wasm, pos):
    """Decode an unsigned LEB128 integer from `wasm` at `pos`.

    Returns (value, position just past the encoded integer).
    """
    value = 0
    shift = 0
    byte = ord(wasm[pos:pos + 1])
    pos += 1
    # Bytes >= 128 carry a continuation flag; low 7 bits are payload.
    while byte >= 128:
        value |= (byte - 128) << shift
        byte = ord(wasm[pos:pos + 1])
        pos += 1
        shift += 7
    return value + (byte << shift), pos
def strip_debug_sections(wasm):
    """Return `wasm` (bytes) with debug/linking-related custom sections removed.

    The 8-byte module header is copied verbatim; every section is kept except
    custom (id 0) sections whose name identifies DWARF or linking metadata.
    """
    logger.debug('Strip debug sections')
    pos = 8
    stripped = wasm[:pos]
    while pos < len(wasm):
        section_start = pos
        section_id, pos_ = read_var_uint(wasm, pos)
        section_size, section_body = read_var_uint(wasm, pos_)
        pos = section_body + section_size
        if section_id == 0:
            name_len, name_pos = read_var_uint(wasm, section_body)
            name_end = name_pos + name_len
            name = wasm[name_pos:name_end]
            # `wasm` is bytes (the file is opened 'rb'), so section names must
            # be compared against bytes literals: on Python 3 `bytes == str`
            # is always False and bytes.startswith(str) raises TypeError
            # (bug fix).
            if name in (b"linking", b"sourceMappingURL") or \
                    name.startswith(b"reloc..debug_") or name.startswith(b".debug_"):
                continue  # skip debug related sections
        stripped = stripped + wasm[section_start:pos]
    return stripped
def encode_uint_var(n):
    """Encode a non-negative integer as unsigned LEB128 bytes."""
    out = bytearray()
    # Emit 7 payload bits at a time, setting bit 7 while more bits remain.
    while n > 127:
        out.append(128 | (n & 127))
        n >>= 7
    out.append(n)
    return bytes(out)
def append_source_mapping(wasm, url):
    """Return `wasm` (bytes) with a custom `sourceMappingURL` section appended.

    `url` may be str or bytes; it is the source-map URL browsers will fetch.
    """
    logger.debug('Append sourceMappingURL section')
    # The section payload is built as bytes; concatenating str with the bytes
    # returned by encode_uint_var raises TypeError on Python 3, so both the
    # section name and the URL are encoded first (bug fix).
    section_name = b"sourceMappingURL"
    url_bytes = url.encode('utf-8') if isinstance(url, str) else url
    section_content = (encode_uint_var(len(section_name)) + section_name +
                       encode_uint_var(len(url_bytes)) + url_bytes)
    return wasm + encode_uint_var(0) + encode_uint_var(len(section_content)) + section_content
def get_code_section_offset(wasm):
    """Return the file offset of the code section payload, or None if the
    module has no code section."""
    logger.debug('Read sections index')
    pos = 8  # skip the 8-byte wasm header (magic + version)
    while pos < len(wasm):
        section_id, after_id = read_var_uint(wasm, pos)
        section_size, pos = read_var_uint(wasm, after_id)
        if section_id == 10:  # id 10 is the code section
            return pos
        pos += section_size
def remove_dead_entries(entries):
    """Drop debug-line blocks that belong to dead (removed) functions, in place.

    Heuristic: a live function's starting address cannot be smaller than
    1 byte + the length of its own LEB128-encoded size field, so blocks that
    start closer to 0 than that must describe stripped code.
    """
    start = 0
    i = 0
    while i < len(entries):
        if not entries[i]['eos']:
            i += 1
            continue
        # `i` points at the end-of-sequence entry of the block begun at `start`.
        fn_start = entries[start]['address']
        # Length of the LEB128-encoded function size (including size field)
        size_field_len = floor(
            log(entries[i]['address'] - fn_start + 1, 128)) + 1
        min_live_offset = 1 + size_field_len  # 1 byte is for code section entries
        if fn_start < min_live_offset:
            # Dead code: delete the whole debug-info block and rescan from here.
            del entries[start:i + 1]
            i = start
            continue
        i += 1
        start = i
def read_dwarf_entries(wasm, options):
    # Collect DWARF line-table entries for `wasm`, either from a pre-captured
    # llvm-dwarfdump output file or by invoking llvm-dwarfdump directly.
    # Returns a list of {'address', 'line', 'column', 'file', 'eos'} dicts
    # sorted by address, with dead-function blocks removed.
    if options.dwarfdump_output:
        output = Path(options.dwarfdump_output).read_bytes()
    elif options.dwarfdump:
        logger.debug('Reading DWARF information from %s' % wasm)
        if not os.path.exists(options.dwarfdump):
            logger.error('llvm-dwarfdump not found: ' + options.dwarfdump)
            sys.exit(1)
        process = Popen([options.dwarfdump, '-debug-info',
                         '-debug-line', '--recurse-depth=0', wasm], stdout=PIPE)
        output, err = process.communicate()
        exit_code = process.wait()
        if exit_code != 0:
            logger.error(
                'Error during llvm-dwarfdump execution (%s)' % exit_code)
            sys.exit(1)
    else:
        logger.error('Please specify either --dwarfdump or --dwarfdump-output')
        sys.exit(1)
    entries = []
    # The dump alternates "debug_line[0x...]" headers and their table bodies;
    # re.split with a capture group yields [info, offset, chunk, offset, chunk...].
    debug_line_chunks = re.split(
        r"debug_line\[(0x[0-9a-f]*)\]", output.decode('utf-8'))
    maybe_debug_info_content = debug_line_chunks[0]
    for i in range(1, len(debug_line_chunks), 2):
        stmt_list = debug_line_chunks[i]
        # Recover the compilation directory for this statement list by matching
        # the DW_AT_stmt_list/DW_AT_comp_dir pair in the .debug_info dump.
        comp_dir_match = re.search(r"DW_AT_stmt_list\s+\(" + stmt_list + r"\)\s+" +
                                   r"DW_AT_comp_dir\s+\(\"([^\"]+)", maybe_debug_info_content)
        comp_dir = comp_dir_match.group(
            1) if comp_dir_match is not None else ""
        line_chunk = debug_line_chunks[i + 1]
        # include_directories[  1] = "/Users/yury/Work/junk/sqlite-playground/src"
        # file_names[  1]:
        #            name: "playground.c"
        #       dir_index: 1
        #        mod_time: 0x00000000
        #          length: 0x00000000
        #
        # Address            Line   Column File   ISA Discriminator Flags
        # ------------------ ------ ------ ------ --- ------------- -------------
        # 0x0000000000000006     22      0      1   0             0  is_stmt
        # 0x0000000000000007     23     10      1   0             0  is_stmt prologue_end
        # 0x000000000000000f     23      3      1   0             0
        # 0x0000000000000010     23      3      1   0             0  end_sequence
        # 0x0000000000000011     28      0      1   0             0  is_stmt
        # Directory index 0 is the compilation directory by DWARF convention.
        include_directories = {'0': comp_dir}
        for dir in re.finditer(r"include_directories\[\s*(\d+)\] = \"([^\"]*)", line_chunk):
            include_directories[dir.group(1)] = dir.group(2)
        # Map file indices to full paths, prepending the directory unless the
        # file name is already absolute.
        files = {}
        for file in re.finditer(r"file_names\[\s*(\d+)\]:\s+name: \"([^\"]*)\"\s+dir_index: (\d+)", line_chunk):
            dir = include_directories[file.group(3)]
            file_path = (dir + '/' if file.group(2)
                         [0] != '/' else '') + file.group(2)
            files[file.group(1)] = file_path
        # Parse the line-table rows; group(5) is set on end_sequence rows.
        for line in re.finditer(r"\n0x([0-9a-f]+)\s+(\d+)\s+(\d+)\s+(\d+)(.*?end_sequence)?", line_chunk):
            entry = {'address': int(line.group(1), 16), 'line': int(line.group(2)), 'column': int(
                line.group(3)), 'file': files[line.group(4)], 'eos': line.group(5) is not None}
            if not entry['eos']:
                entries.append(entry)
            else:
                # move end of function to the last END operator
                entry['address'] -= 1
                if entries[-1]['address'] == entry['address']:
                    # last entry has the same address, reusing
                    entries[-1]['eos'] = True
                else:
                    entries.append(entry)
    remove_dead_entries(entries)
    # return entries sorted by the address field
    return sorted(entries, key=lambda entry: entry['address'])
def normalize_path(path):
    """Normalize a file path: use forward slashes and collapse doubled separators."""
    forward = path.replace('\\', '/')
    return forward.replace('//', '/')
def build_sourcemap(entries, code_section_offset, prefixes, collect_sources, base_path):
    """Convert DWARF line-table entries into a source-map (v3) dictionary.

    entries: records with 'address', 'line', 'column' and 'file' keys.
    code_section_offset: byte offset of the wasm code section, added to
        every entry address so mappings are relative to the module start.
    prefixes: SourceMapPrefixes used to rewrite source paths (and, when
        collecting sources, to locate the files on disk).
    collect_sources: when True, embed each file's contents in 'sourcesContent'.
    base_path: base directory for relative source paths when no prefixes
        were supplied.
    """
    sources = []
    sources_content = [] if collect_sources else None
    mappings = []
    sources_map = {}
    # Source-map VLQ fields are delta-encoded against the previous entry.
    prev_address = 0
    prev_source_id = 0
    prev_line = 1
    prev_column = 1
    for entry in entries:
        line = entry['line']
        column = entry['column']
        # ignore entries with line 0
        if line == 0:
            continue
        # start at least at column 1
        if column == 0:
            column = 1
        address = entry['address'] + code_section_offset
        file_name = normalize_path(entry['file'])
        # if prefixes were provided, we use that; otherwise, we emit a
        # relative path
        if prefixes.provided():
            source_name = prefixes.sources.resolve(file_name)
        else:
            try:
                file_name = os.path.relpath(file_name, base_path)
            except ValueError:
                file_name = os.path.abspath(file_name)
            file_name = normalize_path(file_name)
            source_name = file_name

        source_id = sources_map.get(source_name)
        if source_id is None:
            # First time we see this source: register it and, if asked,
            # load its contents from disk.
            source_id = len(sources)
            sources_map[source_name] = source_id
            sources.append(source_name)
            if collect_sources:
                load_name = prefixes.load.resolve(file_name)
                try:
                    with open(load_name, 'r') as infile:
                        sources_content.append(infile.read())
                except IOError:
                    print('Failed to read source: %s' % load_name)
                    sources_content.append(None)

        mappings.append(encode_vlq(address - prev_address) +
                        encode_vlq(source_id - prev_source_id) +
                        encode_vlq(line - prev_line) +
                        encode_vlq(column - prev_column))
        prev_address, prev_source_id = address, source_id
        prev_line, prev_column = line, column
    return OrderedDict([('version', 3),
                        ('names', []),
                        ('sources', sources),
                        ('sourcesContent', sources_content),
                        ('mappings', ','.join(mappings))])
def main():
    """Generate a source map for a wasm binary and optionally rewrite the wasm.

    Reads the input wasm, extracts DWARF line information, builds a v3
    source map, and writes it to options.output. Depending on flags, also
    strips debug sections, appends a sourceMappingURL custom section, and
    saves the modified wasm to options.w.

    Returns 0 on success (used as the process exit code).
    """
    options = parse_args()

    wasm_input = options.wasm
    with open(wasm_input, 'rb') as infile:
        wasm = infile.read()

    entries = read_dwarf_entries(wasm_input, options)

    code_section_offset = get_code_section_offset(wasm)

    prefixes = SourceMapPrefixes(sources=Prefixes(
        options.prefix), load=Prefixes(options.load_prefix))

    # Lazy logging args: the message is only formatted if DEBUG is enabled.
    logger.debug('Saving to %s', options.output)
    # Renamed from 'map' to avoid shadowing the builtin.
    source_map = build_sourcemap(entries, code_section_offset,
                                 prefixes, options.sources, options.basepath)
    with open(options.output, 'w') as outfile:
        # Compact separators keep the generated map small.
        json.dump(source_map, outfile, separators=(',', ':'))

    if options.strip:
        wasm = strip_debug_sections(wasm)

    if options.source_map_url:
        wasm = append_source_mapping(wasm, options.source_map_url)

    if options.w:
        logger.debug('Saving wasm to %s', options.w)
        with open(options.w, 'wb') as outfile:
            outfile.write(wasm)

    logger.debug('Done')
    return 0
if __name__ == '__main__':
    # Enable verbose logging when EMCC_DEBUG is set in the environment.
    log_level = logging.DEBUG if os.environ.get('EMCC_DEBUG') else logging.INFO
    logging.basicConfig(level=log_level)
    sys.exit(main())

View File

@ -10,18 +10,6 @@ get_abs_filename() {
# Work from the script's own directory.
pushd $SCRIPTDIR &> /dev/null
# Locate llvm-dwarfdump, preferring well-known Homebrew/LLVM install paths.
if [ -f /usr/local/opt/llvm/bin/llvm-dwarfdump ]; then
DWARFDUMP=/usr/local/opt/llvm/bin/llvm-dwarfdump
elif [ -f /opt/homebrew/llvm/bin/llvm-dwarfdump ]; then
DWARFDUMP=/opt/homebrew/llvm/bin/llvm-dwarfdump
else
# Some systems suffix the major LLVM version on the binaries (e.g.
# llvm-dwarfdump-17), so fall back to searching each PATH directory for
# any match. The trailing `true` is needed because the whole script
# would otherwise fail with a nonzero return if nothing is found here.
DWARFDUMP=`which llvm-dwarfdump || find ${PATH//:/\/ } -name 'llvm-dwarfdump*' 2>/dev/null | head -n1 || true`
if [[ "${DWARFDUMP}" == "" ]]; then
echo "llvm-dwarfdump not found"
fi
fi
if [[ "$1" == "release" ]]; then
OUTPUTDIR=$SCRIPTDIR/../target/wasm32-unknown-unknown/release/pkg
INPUTDIR=$SCRIPTDIR/../target/wasm32-unknown-unknown/release
@ -38,15 +26,9 @@ else
RUSTFLAGS="-O -g $RUSTFLAGS" cargo build --target wasm32-unknown-unknown
mkdir -p $OUTPUTDIR
wasm-bindgen --out-dir $OUTPUTDIR --target web --weak-refs --keep-debug --debug $INPUTDIR/veilid_wasm.wasm
if [[ -f "$DWARFDUMP" ]]; then
./wasm-sourcemap.py $OUTPUTDIR/veilid_wasm_bg.wasm -o $OUTPUTDIR/veilid_wasm_bg.wasm.map --dwarfdump $DWARFDUMP
else
echo "not generating sourcemaps because llvm-dwarfdump was not found"
fi
# wasm-strip $OUTPUTDIR/veilid_wasm_bg.wasm
fi
popd &> /dev/null
popd &> /dev/null
# Print for use with scripts
echo SUCCESS:OUTPUTDIR=$(get_abs_filename $OUTPUTDIR)

View File

@ -28,7 +28,6 @@ else
exit 1
fi
if command -v npm &> /dev/null; then
echo '[X] npm is available in the path'
else
@ -38,4 +37,21 @@ else
exit 1
fi
# Verify wasm-tools is installed; abort the environment check if missing.
if ! command -v wasm-tools &> /dev/null; then
    echo -e 'wasm-tools is not available in the path.
Install wasm-tools: cargo install wasm-tools'
    exit 1
fi
echo '[X] wasm-tools is available in the path'
# Verify wasm-bindgen is installed; abort the environment check if missing.
if ! command -v wasm-bindgen &> /dev/null; then
    echo -e 'wasm-bindgen is not available in the path.
Install wasm-bindgen: cargo install wasm-bindgen'
    exit 1
fi
echo '[X] wasm-bindgen is available in the path'
popd &> /dev/null