refactor some more

Christien Rioux 2023-12-27 22:56:24 -05:00
parent 2adc958128
commit c516323e7d
91 changed files with 1237 additions and 748 deletions

packages/veilid_support/.gitignore

@@ -0,0 +1,56 @@
# Miscellaneous
*.class
*.log
*.pyc
*.swp
.DS_Store
.atom/
.buildlog/
.history
.svn/
migrate_working_dir/
# IntelliJ related
*.iml
*.ipr
*.iws
.idea/
# The .vscode folder contains launch configuration and tasks you configure in
# VS Code which you may wish to be included in version control, so this line
# is commented out by default.
.vscode/
# Flutter/Dart/Pub related
**/doc/api/
**/ios/Flutter/.last_build_id
.dart_tool/
.flutter-plugins
.flutter-plugins-dependencies
.packages
.pub-cache/
.pub/
/build/
# Flutter generated files
# Not doing this at this time: https://stackoverflow.com/questions/56110386/should-i-commit-generated-code-in-flutter-dart-to-vcs
# *.g.dart
# *.freezed.dart
# *.pb.dart
# *.pbenum.dart
# *.pbjson.dart
# *.pbserver.dart
# Symbolication related
app.*.symbols
# Obfuscation related
app.*.map.json
# Android Studio will place build artifacts here
/android/app/debug
/android/app/profile
/android/app/release
# WASM
/web/wasm/


@@ -0,0 +1,15 @@
include: package:lint_hard/all.yaml
analyzer:
errors:
invalid_annotation_target: ignore
exclude:
- '**/*.g.dart'
- '**/*.freezed.dart'
- '**/*.pb.dart'
- '**/*.pbenum.dart'
- '**/*.pbjson.dart'
- '**/*.pbserver.dart'
linter:
rules:
unawaited_futures: true
avoid_positional_boolean_parameters: false


@@ -0,0 +1,7 @@
@echo off
dart run build_runner build --delete-conflicting-outputs
pushd lib
protoc --dart_out=proto -I proto -I dht_support\proto dht.proto
protoc --dart_out=proto -I proto veilid.proto
popd


@@ -0,0 +1,8 @@
#!/bin/bash
set -e
dart run build_runner build --delete-conflicting-outputs
pushd lib > /dev/null
protoc --dart_out=proto -I proto -I dht_support/proto dht.proto
protoc --dart_out=proto -I proto veilid.proto
popd > /dev/null


@@ -0,0 +1,8 @@
/// Support functions for Veilid DHT data structures
library dht_support;
export 'src/dht_record.dart';
export 'src/dht_record_crypto.dart';
export 'src/dht_record_pool.dart';
export 'src/dht_short_array.dart';


@@ -0,0 +1,84 @@
syntax = "proto3";
package dht;
import "veilid.proto";
// DHTData - represents chunked blob data in the DHT
// Header in subkey 0 follows this structure
//
// stride = descriptor subkey count on first key - 1
// Subkeys 1..=stride on the first key are concatenated chunks
// Subkeys 0..stride on the 'keys' keys are concatenated chunks
//
// Keys must use writable schema in order to make this data mutable
message DHTData {
// Other keys to concatenate
// Uses the same writer as this DHTData with SMPL schema
repeated veilid.TypedKey keys = 1;
// Hash of reassembled data to verify contents
veilid.TypedKey hash = 2;
// Chunk size per subkey
uint32 chunk = 3;
// Total data size
uint32 size = 4;
}
// DHTShortArray - represents a re-orderable collection of up to 256 individual elements
// Header in subkey 0 of first key follows this structure
//
// stride = descriptor subkey count on first key - 1
// Subkeys 1..=stride on the first key are individual elements
// Subkeys 0..stride on the 'keys' keys are also individual elements
//
// Keys must use writable schema in order to make this list mutable
message DHTShortArray {
// Other keys to concatenate
// Uses the same writer as this DHTShortArray with SMPL schema
repeated veilid.TypedKey keys = 1;
// Item position index (uint8[256])
// Actual item location is:
// idx = index[n] + 1 (offset for header at idx 0)
// key = idx / stride
// subkey = idx % stride
bytes index = 2;
// Free items are not represented in the list but can be
// calculated through iteration
}
// DHTLog - represents an appendable/truncatable log collection of individual elements
// Header in subkey 0 of first key follows this structure
//
// stride = descriptor subkey count on first key - 1
// Subkeys 1..=stride on the first key are individual elements
// Subkeys 0..stride on the 'keys' keys are also individual elements
//
// Keys must use writable schema in order to make this list mutable
message DHTLog {
// Other keys to concatenate
repeated veilid.TypedKey keys = 1;
// Back link to another DHTLog further back
veilid.TypedKey back = 2;
// Count of subkeys in all keys in this DHTLog
repeated uint32 subkey_counts = 3;
// Total count of subkeys in all keys in this DHTLog including all backlogs
uint32 total_subkeys = 4;
}
// DataReference
// Pointer to data somewhere in Veilid
// Abstraction over DHTData and BlockStore
message DataReference {
oneof kind {
veilid.TypedKey dht_data = 1;
// TypedKey block = 2;
}
}
// A pointer to a child DHT record
message OwnedDHTRecordPointer {
// DHT Record key
veilid.TypedKey record_key = 1;
// DHT record owner key
veilid.KeyPair owner = 2;
}


@@ -0,0 +1,25 @@
import '../../proto/dht.pb.dart' as dhtproto;
import '../../proto/proto.dart' as veilidproto;
import '../dht_support.dart';
export '../../proto/dht.pb.dart';
export '../../proto/dht.pbenum.dart';
export '../../proto/dht.pbjson.dart';
export '../../proto/dht.pbserver.dart';
export '../../proto/proto.dart';
/// OwnedDHTRecordPointer protobuf marshaling
///
extension OwnedDHTRecordPointerProto on OwnedDHTRecordPointer {
dhtproto.OwnedDHTRecordPointer toProto() {
final out = dhtproto.OwnedDHTRecordPointer()
..recordKey = recordKey.toProto()
..owner = owner.toProto();
return out;
}
static OwnedDHTRecordPointer fromProto(dhtproto.OwnedDHTRecordPointer p) =>
OwnedDHTRecordPointer(
recordKey: veilidproto.TypedKeyProto.fromProto(p.recordKey),
owner: veilidproto.KeyPairProto.fromProto(p.owner));
}
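
A quick illustration of the marshaling extension above, as a fragment that could sit in the same file (it reuses this file's dhtproto alias; ptr stands in for an existing pointer):

// Sketch only: round-trip a pointer through its protobuf form.
dhtproto.OwnedDHTRecordPointer roundTripExample(OwnedDHTRecordPointer ptr) {
  // Marshal the freezed value type to the generated protobuf message...
  final wire = ptr.toProto();
  // ...and back; the reconstructed value should compare equal.
  assert(OwnedDHTRecordPointerProto.fromProto(wire) == ptr);
  return wire;
}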


@@ -0,0 +1,256 @@
import 'dart:async';
import 'dart:typed_data';
import 'package:protobuf/protobuf.dart';
import '../../../../veilid_support.dart';
class DHTRecord {
DHTRecord(
{required VeilidRoutingContext routingContext,
required DHTRecordDescriptor recordDescriptor,
int defaultSubkey = 0,
KeyPair? writer,
DHTRecordCrypto crypto = const DHTRecordCryptoPublic()})
: _crypto = crypto,
_routingContext = routingContext,
_recordDescriptor = recordDescriptor,
_defaultSubkey = defaultSubkey,
_writer = writer,
_open = true,
_valid = true,
_subkeySeqCache = {};
final VeilidRoutingContext _routingContext;
final DHTRecordDescriptor _recordDescriptor;
final int _defaultSubkey;
final KeyPair? _writer;
final Map<int, int> _subkeySeqCache;
final DHTRecordCrypto _crypto;
bool _open;
bool _valid;
int subkeyOrDefault(int subkey) => (subkey == -1) ? _defaultSubkey : subkey;
VeilidRoutingContext get routingContext => _routingContext;
TypedKey get key => _recordDescriptor.key;
PublicKey get owner => _recordDescriptor.owner;
KeyPair? get ownerKeyPair => _recordDescriptor.ownerKeyPair();
DHTSchema get schema => _recordDescriptor.schema;
KeyPair? get writer => _writer;
OwnedDHTRecordPointer get ownedDHTRecordPointer =>
OwnedDHTRecordPointer(recordKey: key, owner: ownerKeyPair!);
Future<void> close() async {
if (!_valid) {
throw StateError('already deleted');
}
if (!_open) {
return;
}
final pool = await DHTRecordPool.instance();
await _routingContext.closeDHTRecord(_recordDescriptor.key);
pool.recordClosed(_recordDescriptor.key);
_open = false;
}
Future<void> delete() async {
if (!_valid) {
throw StateError('already deleted');
}
if (_open) {
await close();
}
final pool = await DHTRecordPool.instance();
await pool.deleteDeep(key);
_valid = false;
}
Future<T> scope<T>(Future<T> Function(DHTRecord) scopeFunction) async {
try {
return await scopeFunction(this);
} finally {
if (_valid) {
await close();
}
}
}
Future<T> deleteScope<T>(Future<T> Function(DHTRecord) scopeFunction) async {
try {
final out = await scopeFunction(this);
if (_valid && _open) {
await close();
}
return out;
} on Exception catch (_) {
if (_valid) {
await delete();
}
rethrow;
}
}
Future<T> maybeDeleteScope<T>(
bool delete, Future<T> Function(DHTRecord) scopeFunction) async {
if (delete) {
return deleteScope(scopeFunction);
} else {
return scope(scopeFunction);
}
}
Future<Uint8List?> get(
{int subkey = -1,
bool forceRefresh = false,
bool onlyUpdates = false}) async {
subkey = subkeyOrDefault(subkey);
final valueData = await _routingContext.getDHTValue(
_recordDescriptor.key, subkey, forceRefresh);
if (valueData == null) {
return null;
}
final lastSeq = _subkeySeqCache[subkey];
if (onlyUpdates && lastSeq != null && valueData.seq <= lastSeq) {
return null;
}
final out = _crypto.decrypt(valueData.data, subkey);
_subkeySeqCache[subkey] = valueData.seq;
return out;
}
Future<T?> getJson<T>(T Function(dynamic) fromJson,
{int subkey = -1,
bool forceRefresh = false,
bool onlyUpdates = false}) async {
final data = await get(
subkey: subkey, forceRefresh: forceRefresh, onlyUpdates: onlyUpdates);
if (data == null) {
return null;
}
return jsonDecodeBytes(fromJson, data);
}
Future<T?> getProtobuf<T extends GeneratedMessage>(
T Function(List<int> i) fromBuffer,
{int subkey = -1,
bool forceRefresh = false,
bool onlyUpdates = false}) async {
final data = await get(
subkey: subkey, forceRefresh: forceRefresh, onlyUpdates: onlyUpdates);
if (data == null) {
return null;
}
return fromBuffer(data.toList());
}
Future<Uint8List?> tryWriteBytes(Uint8List newValue,
{int subkey = -1}) async {
subkey = subkeyOrDefault(subkey);
newValue = await _crypto.encrypt(newValue, subkey);
// Set the new data if possible
var valueData = await _routingContext.setDHTValue(
_recordDescriptor.key, subkey, newValue);
if (valueData == null) {
// Get the data to check its sequence number
valueData = await _routingContext.getDHTValue(
_recordDescriptor.key, subkey, false);
assert(valueData != null, "can't get value that was just set");
_subkeySeqCache[subkey] = valueData!.seq;
return null;
}
_subkeySeqCache[subkey] = valueData.seq;
return valueData.data;
}
Future<void> eventualWriteBytes(Uint8List newValue, {int subkey = -1}) async {
subkey = subkeyOrDefault(subkey);
newValue = await _crypto.encrypt(newValue, subkey);
ValueData? valueData;
do {
// Set the new data
valueData = await _routingContext.setDHTValue(
_recordDescriptor.key, subkey, newValue);
// Repeat if newer data on the network was found
} while (valueData != null);
// Get the data to check its sequence number
valueData =
await _routingContext.getDHTValue(_recordDescriptor.key, subkey, false);
assert(valueData != null, "can't get value that was just set");
_subkeySeqCache[subkey] = valueData!.seq;
}
Future<void> eventualUpdateBytes(
Future<Uint8List> Function(Uint8List oldValue) update,
{int subkey = -1}) async {
subkey = subkeyOrDefault(subkey);
// Get the existing value, do not allow force refresh here
// because if we need a refresh the setDHTValue will fail anyway
var valueData =
await _routingContext.getDHTValue(_recordDescriptor.key, subkey, false);
// Ensure it exists already
if (valueData == null) {
throw const FormatException('value does not exist');
}
do {
// Update cache
_subkeySeqCache[subkey] = valueData!.seq;
// Update the data
final oldData = await _crypto.decrypt(valueData.data, subkey);
final updatedData = await update(oldData);
final newData = await _crypto.encrypt(updatedData, subkey);
// Set it back
valueData = await _routingContext.setDHTValue(
_recordDescriptor.key, subkey, newData);
// Repeat if newer data on the network was found
} while (valueData != null);
// Get the data to check its sequence number
valueData =
await _routingContext.getDHTValue(_recordDescriptor.key, subkey, false);
assert(valueData != null, "can't get value that was just set");
_subkeySeqCache[subkey] = valueData!.seq;
}
Future<T?> tryWriteJson<T>(T Function(dynamic) fromJson, T newValue,
{int subkey = -1}) =>
tryWriteBytes(jsonEncodeBytes(newValue), subkey: subkey).then((out) {
if (out == null) {
return null;
}
return jsonDecodeBytes(fromJson, out);
});
Future<T?> tryWriteProtobuf<T extends GeneratedMessage>(
T Function(List<int>) fromBuffer, T newValue,
{int subkey = -1}) =>
tryWriteBytes(newValue.writeToBuffer(), subkey: subkey).then((out) {
if (out == null) {
return null;
}
return fromBuffer(out);
});
Future<void> eventualWriteJson<T>(T newValue, {int subkey = -1}) =>
eventualWriteBytes(jsonEncodeBytes(newValue), subkey: subkey);
Future<void> eventualWriteProtobuf<T extends GeneratedMessage>(T newValue,
{int subkey = -1}) =>
eventualWriteBytes(newValue.writeToBuffer(), subkey: subkey);
Future<void> eventualUpdateJson<T>(
T Function(dynamic) fromJson, Future<T> Function(T) update,
{int subkey = -1}) =>
eventualUpdateBytes(jsonUpdate(fromJson, update), subkey: subkey);
Future<void> eventualUpdateProtobuf<T extends GeneratedMessage>(
T Function(List<int>) fromBuffer, Future<T> Function(T) update,
{int subkey = -1}) =>
eventualUpdateBytes(protobufUpdate(fromBuffer, update), subkey: subkey);
}
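
To make the write semantics above concrete, a small usage sketch; the package import path and the already-open record rec are assumptions:

import 'dart:convert';
import 'dart:typed_data';
// Assumed package path; adjust to the actual layout of veilid_support.
import 'package:veilid_support/veilid_support.dart';

Future<void> writeExample(DHTRecord rec) async {
  // tryWriteBytes returns null if our value was accepted, or the conflicting
  // value already on the network if the write lost the race.
  final conflict =
      await rec.tryWriteBytes(Uint8List.fromList(utf8.encode('hello')));
  if (conflict != null) {
    // Another writer got there first; decide whether to merge or retry.
  }
  // eventualUpdateBytes runs a read-modify-write loop until no newer value
  // is found on the network.
  await rec.eventualUpdateBytes((oldValue) async => oldValue);
}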


@@ -0,0 +1,53 @@
import 'dart:async';
import 'dart:typed_data';
import '../../../../veilid_support.dart';
abstract class DHTRecordCrypto {
FutureOr<Uint8List> encrypt(Uint8List data, int subkey);
FutureOr<Uint8List> decrypt(Uint8List data, int subkey);
}
////////////////////////////////////
/// Private DHT Record: Encrypted for a specific symmetric key
class DHTRecordCryptoPrivate implements DHTRecordCrypto {
DHTRecordCryptoPrivate._(
VeilidCryptoSystem cryptoSystem, SharedSecret secretKey)
: _cryptoSystem = cryptoSystem,
_secretKey = secretKey;
final VeilidCryptoSystem _cryptoSystem;
final SharedSecret _secretKey;
static Future<DHTRecordCryptoPrivate> fromTypedKeyPair(
TypedKeyPair typedKeyPair) async {
final cryptoSystem =
await Veilid.instance.getCryptoSystem(typedKeyPair.kind);
final secretKey = typedKeyPair.secret;
return DHTRecordCryptoPrivate._(cryptoSystem, secretKey);
}
static Future<DHTRecordCryptoPrivate> fromSecret(
CryptoKind kind, SharedSecret secretKey) async {
final cryptoSystem = await Veilid.instance.getCryptoSystem(kind);
return DHTRecordCryptoPrivate._(cryptoSystem, secretKey);
}
@override
FutureOr<Uint8List> encrypt(Uint8List data, int subkey) =>
_cryptoSystem.encryptNoAuthWithNonce(data, _secretKey);
@override
FutureOr<Uint8List> decrypt(Uint8List data, int subkey) =>
_cryptoSystem.decryptNoAuthWithNonce(data, _secretKey);
}
////////////////////////////////////
/// Public DHT Record: No encryption
class DHTRecordCryptoPublic implements DHTRecordCrypto {
const DHTRecordCryptoPublic();
@override
FutureOr<Uint8List> encrypt(Uint8List data, int subkey) => data;
@override
FutureOr<Uint8List> decrypt(Uint8List data, int subkey) => data;
}
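
A brief sketch of choosing between the two implementations above when attaching crypto to a record; kind and secret are assumed inputs and the imports are the same as this file's:

// Sketch only: pick symmetric encryption or plaintext for a record's subkeys.
Future<DHTRecordCrypto> pickCrypto(
    {required bool encrypted, CryptoKind? kind, SharedSecret? secret}) async {
  if (encrypted) {
    // Private record: subkey data encrypted with a shared secret
    return DHTRecordCryptoPrivate.fromSecret(kind!, secret!);
  }
  // Public record: subkey data stored as-is
  return const DHTRecordCryptoPublic();
}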


@@ -0,0 +1,327 @@
import 'package:fast_immutable_collections/fast_immutable_collections.dart';
import 'package:freezed_annotation/freezed_annotation.dart';
import 'package:mutex/mutex.dart';
import '../../../../veilid_support.dart';
part 'dht_record_pool.freezed.dart';
part 'dht_record_pool.g.dart';
/// Record pool that manages DHTRecords and allows for tagged deletion
@freezed
class DHTRecordPoolAllocations with _$DHTRecordPoolAllocations {
const factory DHTRecordPoolAllocations({
required IMap<String, ISet<TypedKey>>
childrenByParent, // String key due to IMap<> json unsupported in key
required IMap<String, TypedKey>
parentByChild, // String key due to IMap<> json unsupported in key
required ISet<TypedKey> rootRecords,
}) = _DHTRecordPoolAllocations;
factory DHTRecordPoolAllocations.fromJson(dynamic json) =>
_$DHTRecordPoolAllocationsFromJson(json as Map<String, dynamic>);
}
/// Pointer to an owned record, with key, owner key and owner secret
/// Ensure that these are only serialized encrypted
@freezed
class OwnedDHTRecordPointer with _$OwnedDHTRecordPointer {
const factory OwnedDHTRecordPointer({
required TypedKey recordKey,
required KeyPair owner,
}) = _OwnedDHTRecordPointer;
factory OwnedDHTRecordPointer.fromJson(dynamic json) =>
_$OwnedDHTRecordPointerFromJson(json as Map<String, dynamic>);
}
class DHTRecordPool with TableDBBacked<DHTRecordPoolAllocations> {
DHTRecordPool._(Veilid veilid, VeilidRoutingContext routingContext)
: _state = DHTRecordPoolAllocations(
childrenByParent: IMap(),
parentByChild: IMap(),
rootRecords: ISet()),
_opened = <TypedKey, Mutex>{},
_routingContext = routingContext,
_veilid = veilid;
// Persistent DHT record list
DHTRecordPoolAllocations _state;
// Which DHT records are currently open
final Map<TypedKey, Mutex> _opened;
// Default routing context to use for new keys
final VeilidRoutingContext _routingContext;
// Convenience accessor
final Veilid _veilid;
static DHTRecordPool? _singleton;
//////////////////////////////////////////////////////////////
/// TableDBBacked
@override
String tableName() => 'dht_record_pool';
@override
String tableKeyName() => 'pool_allocations';
@override
DHTRecordPoolAllocations valueFromJson(Object? obj) => obj != null
? DHTRecordPoolAllocations.fromJson(obj)
: DHTRecordPoolAllocations(
childrenByParent: IMap(), parentByChild: IMap(), rootRecords: ISet());
@override
Object? valueToJson(DHTRecordPoolAllocations val) => val.toJson();
//////////////////////////////////////////////////////////////
static Mutex instanceSetupMutex = Mutex();
// ignore: prefer_expression_function_bodies
static Future<DHTRecordPool> instance() async {
return instanceSetupMutex.protect(() async {
if (_singleton == null) {
final routingContext = await Veilid.instance.routingContext();
final globalPool = DHTRecordPool._(Veilid.instance, routingContext);
globalPool._state = await globalPool.load();
_singleton = globalPool;
}
return _singleton!;
});
}
Veilid get veilid => _veilid;
Future<void> _recordOpened(TypedKey key) async {
// No race here because Dart is single-threaded between await points
final m = _opened[key] ?? Mutex();
_opened[key] = m;
await m.acquire();
_opened[key] = m;
}
void recordClosed(TypedKey key) {
final m = _opened.remove(key);
if (m == null) {
throw StateError('record already closed');
}
m.release();
}
Future<void> deleteDeep(TypedKey parent) async {
// Collect all dependencies
final allDeps = <TypedKey>[];
final currentDeps = [parent];
while (currentDeps.isNotEmpty) {
final nextDep = currentDeps.removeLast();
// Ensure we get the exclusive lock on this record
await _recordOpened(nextDep);
// Remove this child from its parent
await _removeDependency(nextDep);
allDeps.add(nextDep);
final childDeps =
_state.childrenByParent[nextDep.toJson()]?.toList() ?? [];
currentDeps.addAll(childDeps);
}
// Delete all records
final allFutures = <Future<void>>[];
for (final dep in allDeps) {
allFutures.add(_routingContext.deleteDHTRecord(dep));
recordClosed(dep);
}
await Future.wait(allFutures);
}
void _validateParent(TypedKey? parent, TypedKey child) {
final childJson = child.toJson();
final existingParent = _state.parentByChild[childJson];
if (parent == null) {
if (existingParent != null) {
throw StateError('Child is already parented: $child');
}
} else {
if (_state.rootRecords.contains(child)) {
throw StateError('Child already added as root: $child');
}
if (existingParent != null && existingParent != parent) {
throw StateError('Child has two parents: $child <- $parent');
}
}
}
Future<void> _addDependency(TypedKey? parent, TypedKey child) async {
if (parent == null) {
if (_state.rootRecords.contains(child)) {
// Dependency already added
return;
}
_state = await store(
_state.copyWith(rootRecords: _state.rootRecords.add(child)));
} else {
final childrenOfParent =
_state.childrenByParent[parent.toJson()] ?? ISet<TypedKey>();
if (childrenOfParent.contains(child)) {
// Dependency already added (consecutive opens, etc)
return;
}
_state = await store(_state.copyWith(
childrenByParent: _state.childrenByParent
.add(parent.toJson(), childrenOfParent.add(child)),
parentByChild: _state.parentByChild.add(child.toJson(), parent)));
}
}
Future<void> _removeDependency(TypedKey child) async {
if (_state.rootRecords.contains(child)) {
_state = await store(
_state.copyWith(rootRecords: _state.rootRecords.remove(child)));
} else {
final parent = _state.parentByChild[child.toJson()];
if (parent == null) {
return;
}
final children = _state.childrenByParent[parent.toJson()]!.remove(child);
late final DHTRecordPoolAllocations newState;
if (children.isEmpty) {
newState = _state.copyWith(
childrenByParent: _state.childrenByParent.remove(parent.toJson()),
parentByChild: _state.parentByChild.remove(child.toJson()));
} else {
newState = _state.copyWith(
childrenByParent:
_state.childrenByParent.add(parent.toJson(), children),
parentByChild: _state.parentByChild.remove(child.toJson()));
}
_state = await store(newState);
}
}
///////////////////////////////////////////////////////////////////////
/// Create a new DHTRecord, either as a root record or under an optional parent
Future<DHTRecord> create({
VeilidRoutingContext? routingContext,
TypedKey? parent,
DHTSchema schema = const DHTSchema.dflt(oCnt: 1),
int defaultSubkey = 0,
DHTRecordCrypto? crypto,
KeyPair? writer,
}) async {
final dhtctx = routingContext ?? _routingContext;
final recordDescriptor = await dhtctx.createDHTRecord(schema);
final rec = DHTRecord(
routingContext: dhtctx,
recordDescriptor: recordDescriptor,
defaultSubkey: defaultSubkey,
writer: writer ?? recordDescriptor.ownerKeyPair(),
crypto: crypto ??
await DHTRecordCryptoPrivate.fromTypedKeyPair(
recordDescriptor.ownerTypedKeyPair()!));
await _addDependency(parent, rec.key);
await _recordOpened(rec.key);
return rec;
}
/// Open a DHTRecord readonly
Future<DHTRecord> openRead(TypedKey recordKey,
{VeilidRoutingContext? routingContext,
TypedKey? parent,
int defaultSubkey = 0,
DHTRecordCrypto? crypto}) async {
await _recordOpened(recordKey);
late final DHTRecord rec;
try {
// If we are opening a key that already exists
// make sure we are using the same parent if one was specified
_validateParent(parent, recordKey);
// Open from the veilid api
final dhtctx = routingContext ?? _routingContext;
final recordDescriptor = await dhtctx.openDHTRecord(recordKey, null);
rec = DHTRecord(
routingContext: dhtctx,
recordDescriptor: recordDescriptor,
defaultSubkey: defaultSubkey,
crypto: crypto ?? const DHTRecordCryptoPublic());
// Register the dependency
await _addDependency(parent, rec.key);
} on Exception catch (_) {
recordClosed(recordKey);
rethrow;
}
return rec;
}
/// Open a DHTRecord writable
Future<DHTRecord> openWrite(
TypedKey recordKey,
KeyPair writer, {
VeilidRoutingContext? routingContext,
TypedKey? parent,
int defaultSubkey = 0,
DHTRecordCrypto? crypto,
}) async {
await _recordOpened(recordKey);
late final DHTRecord rec;
try {
// If we are opening a key that already exists
// make sure we are using the same parent if one was specified
_validateParent(parent, recordKey);
// Open from the veilid api
final dhtctx = routingContext ?? _routingContext;
final recordDescriptor = await dhtctx.openDHTRecord(recordKey, writer);
rec = DHTRecord(
routingContext: dhtctx,
recordDescriptor: recordDescriptor,
defaultSubkey: defaultSubkey,
writer: writer,
crypto: crypto ??
await DHTRecordCryptoPrivate.fromTypedKeyPair(
TypedKeyPair.fromKeyPair(recordKey.kind, writer)));
// Register the dependency if specified
await _addDependency(parent, rec.key);
} on Exception catch (_) {
recordClosed(recordKey);
rethrow;
}
return rec;
}
/// Open a DHTRecord owned
/// This is the same as writable but uses an OwnedDHTRecordPointer
/// for convenience and uses symmetric encryption on the key
/// This is primarily used for backing up private content onto the DHT
/// and synchronizing it between devices. Because it is 'owned', the correct
/// parent must be specified.
Future<DHTRecord> openOwned(
OwnedDHTRecordPointer ownedDHTRecordPointer, {
required TypedKey parent,
VeilidRoutingContext? routingContext,
int defaultSubkey = 0,
DHTRecordCrypto? crypto,
}) =>
openWrite(
ownedDHTRecordPointer.recordKey,
ownedDHTRecordPointer.owner,
routingContext: routingContext,
parent: parent,
defaultSubkey: defaultSubkey,
crypto: crypto,
);
/// Get the parent of a DHTRecord key if it exists
TypedKey? getParentRecord(TypedKey child) {
final childJson = child.toJson();
return _state.parentByChild[childJson];
}
}
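
A usage sketch of the pool's parent/child tracking (assumed package import path; error handling omitted):

import 'package:veilid_support/veilid_support.dart'; // assumed package path

Future<void> poolExample() async {
  final pool = await DHTRecordPool.instance();
  // A root record with no parent...
  final parentRec = await pool.create();
  // ...and a child registered under it in the pool's allocation table
  final childRec = await pool.create(parent: parentRec.key);
  await childRec.eventualWriteJson({'hello': 'world'});
  await childRec.close();
  // Deleting the parent also deletes its registered children via deleteDeep()
  await parentRec.delete();
}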


@@ -0,0 +1,374 @@
// coverage:ignore-file
// GENERATED CODE - DO NOT MODIFY BY HAND
// ignore_for_file: type=lint
// ignore_for_file: unused_element, deprecated_member_use, deprecated_member_use_from_same_package, use_function_type_syntax_for_parameters, unnecessary_const, avoid_init_to_null, invalid_override_different_default_values_named, prefer_expression_function_bodies, annotate_overrides, invalid_annotation_target, unnecessary_question_mark
part of 'dht_record_pool.dart';
// **************************************************************************
// FreezedGenerator
// **************************************************************************
T _$identity<T>(T value) => value;
final _privateConstructorUsedError = UnsupportedError(
'It seems like you constructed your class using `MyClass._()`. This constructor is only meant to be used by freezed and you are not supposed to need it nor use it.\nPlease check the documentation here for more information: https://github.com/rrousselGit/freezed#custom-getters-and-methods');
DHTRecordPoolAllocations _$DHTRecordPoolAllocationsFromJson(
Map<String, dynamic> json) {
return _DHTRecordPoolAllocations.fromJson(json);
}
/// @nodoc
mixin _$DHTRecordPoolAllocations {
IMap<String, ISet<Typed<FixedEncodedString43>>> get childrenByParent =>
throw _privateConstructorUsedError; // String key due to IMap<> json unsupported in key
IMap<String, Typed<FixedEncodedString43>> get parentByChild =>
throw _privateConstructorUsedError; // String key due to IMap<> json unsupported in key
ISet<Typed<FixedEncodedString43>> get rootRecords =>
throw _privateConstructorUsedError;
Map<String, dynamic> toJson() => throw _privateConstructorUsedError;
@JsonKey(ignore: true)
$DHTRecordPoolAllocationsCopyWith<DHTRecordPoolAllocations> get copyWith =>
throw _privateConstructorUsedError;
}
/// @nodoc
abstract class $DHTRecordPoolAllocationsCopyWith<$Res> {
factory $DHTRecordPoolAllocationsCopyWith(DHTRecordPoolAllocations value,
$Res Function(DHTRecordPoolAllocations) then) =
_$DHTRecordPoolAllocationsCopyWithImpl<$Res, DHTRecordPoolAllocations>;
@useResult
$Res call(
{IMap<String, ISet<Typed<FixedEncodedString43>>> childrenByParent,
IMap<String, Typed<FixedEncodedString43>> parentByChild,
ISet<Typed<FixedEncodedString43>> rootRecords});
}
/// @nodoc
class _$DHTRecordPoolAllocationsCopyWithImpl<$Res,
$Val extends DHTRecordPoolAllocations>
implements $DHTRecordPoolAllocationsCopyWith<$Res> {
_$DHTRecordPoolAllocationsCopyWithImpl(this._value, this._then);
// ignore: unused_field
final $Val _value;
// ignore: unused_field
final $Res Function($Val) _then;
@pragma('vm:prefer-inline')
@override
$Res call({
Object? childrenByParent = null,
Object? parentByChild = null,
Object? rootRecords = null,
}) {
return _then(_value.copyWith(
childrenByParent: null == childrenByParent
? _value.childrenByParent
: childrenByParent // ignore: cast_nullable_to_non_nullable
as IMap<String, ISet<Typed<FixedEncodedString43>>>,
parentByChild: null == parentByChild
? _value.parentByChild
: parentByChild // ignore: cast_nullable_to_non_nullable
as IMap<String, Typed<FixedEncodedString43>>,
rootRecords: null == rootRecords
? _value.rootRecords
: rootRecords // ignore: cast_nullable_to_non_nullable
as ISet<Typed<FixedEncodedString43>>,
) as $Val);
}
}
/// @nodoc
abstract class _$$DHTRecordPoolAllocationsImplCopyWith<$Res>
implements $DHTRecordPoolAllocationsCopyWith<$Res> {
factory _$$DHTRecordPoolAllocationsImplCopyWith(
_$DHTRecordPoolAllocationsImpl value,
$Res Function(_$DHTRecordPoolAllocationsImpl) then) =
__$$DHTRecordPoolAllocationsImplCopyWithImpl<$Res>;
@override
@useResult
$Res call(
{IMap<String, ISet<Typed<FixedEncodedString43>>> childrenByParent,
IMap<String, Typed<FixedEncodedString43>> parentByChild,
ISet<Typed<FixedEncodedString43>> rootRecords});
}
/// @nodoc
class __$$DHTRecordPoolAllocationsImplCopyWithImpl<$Res>
extends _$DHTRecordPoolAllocationsCopyWithImpl<$Res,
_$DHTRecordPoolAllocationsImpl>
implements _$$DHTRecordPoolAllocationsImplCopyWith<$Res> {
__$$DHTRecordPoolAllocationsImplCopyWithImpl(
_$DHTRecordPoolAllocationsImpl _value,
$Res Function(_$DHTRecordPoolAllocationsImpl) _then)
: super(_value, _then);
@pragma('vm:prefer-inline')
@override
$Res call({
Object? childrenByParent = null,
Object? parentByChild = null,
Object? rootRecords = null,
}) {
return _then(_$DHTRecordPoolAllocationsImpl(
childrenByParent: null == childrenByParent
? _value.childrenByParent
: childrenByParent // ignore: cast_nullable_to_non_nullable
as IMap<String, ISet<Typed<FixedEncodedString43>>>,
parentByChild: null == parentByChild
? _value.parentByChild
: parentByChild // ignore: cast_nullable_to_non_nullable
as IMap<String, Typed<FixedEncodedString43>>,
rootRecords: null == rootRecords
? _value.rootRecords
: rootRecords // ignore: cast_nullable_to_non_nullable
as ISet<Typed<FixedEncodedString43>>,
));
}
}
/// @nodoc
@JsonSerializable()
class _$DHTRecordPoolAllocationsImpl implements _DHTRecordPoolAllocations {
const _$DHTRecordPoolAllocationsImpl(
{required this.childrenByParent,
required this.parentByChild,
required this.rootRecords});
factory _$DHTRecordPoolAllocationsImpl.fromJson(Map<String, dynamic> json) =>
_$$DHTRecordPoolAllocationsImplFromJson(json);
@override
final IMap<String, ISet<Typed<FixedEncodedString43>>> childrenByParent;
// String key due to IMap<> json unsupported in key
@override
final IMap<String, Typed<FixedEncodedString43>> parentByChild;
// String key due to IMap<> json unsupported in key
@override
final ISet<Typed<FixedEncodedString43>> rootRecords;
@override
String toString() {
return 'DHTRecordPoolAllocations(childrenByParent: $childrenByParent, parentByChild: $parentByChild, rootRecords: $rootRecords)';
}
@override
bool operator ==(Object other) {
return identical(this, other) ||
(other.runtimeType == runtimeType &&
other is _$DHTRecordPoolAllocationsImpl &&
(identical(other.childrenByParent, childrenByParent) ||
other.childrenByParent == childrenByParent) &&
(identical(other.parentByChild, parentByChild) ||
other.parentByChild == parentByChild) &&
const DeepCollectionEquality()
.equals(other.rootRecords, rootRecords));
}
@JsonKey(ignore: true)
@override
int get hashCode => Object.hash(runtimeType, childrenByParent, parentByChild,
const DeepCollectionEquality().hash(rootRecords));
@JsonKey(ignore: true)
@override
@pragma('vm:prefer-inline')
_$$DHTRecordPoolAllocationsImplCopyWith<_$DHTRecordPoolAllocationsImpl>
get copyWith => __$$DHTRecordPoolAllocationsImplCopyWithImpl<
_$DHTRecordPoolAllocationsImpl>(this, _$identity);
@override
Map<String, dynamic> toJson() {
return _$$DHTRecordPoolAllocationsImplToJson(
this,
);
}
}
abstract class _DHTRecordPoolAllocations implements DHTRecordPoolAllocations {
const factory _DHTRecordPoolAllocations(
{required final IMap<String, ISet<Typed<FixedEncodedString43>>>
childrenByParent,
required final IMap<String, Typed<FixedEncodedString43>> parentByChild,
required final ISet<Typed<FixedEncodedString43>>
rootRecords}) = _$DHTRecordPoolAllocationsImpl;
factory _DHTRecordPoolAllocations.fromJson(Map<String, dynamic> json) =
_$DHTRecordPoolAllocationsImpl.fromJson;
@override
IMap<String, ISet<Typed<FixedEncodedString43>>> get childrenByParent;
@override // String key due to IMap<> json unsupported in key
IMap<String, Typed<FixedEncodedString43>> get parentByChild;
@override // String key due to IMap<> json unsupported in key
ISet<Typed<FixedEncodedString43>> get rootRecords;
@override
@JsonKey(ignore: true)
_$$DHTRecordPoolAllocationsImplCopyWith<_$DHTRecordPoolAllocationsImpl>
get copyWith => throw _privateConstructorUsedError;
}
OwnedDHTRecordPointer _$OwnedDHTRecordPointerFromJson(
Map<String, dynamic> json) {
return _OwnedDHTRecordPointer.fromJson(json);
}
/// @nodoc
mixin _$OwnedDHTRecordPointer {
Typed<FixedEncodedString43> get recordKey =>
throw _privateConstructorUsedError;
KeyPair get owner => throw _privateConstructorUsedError;
Map<String, dynamic> toJson() => throw _privateConstructorUsedError;
@JsonKey(ignore: true)
$OwnedDHTRecordPointerCopyWith<OwnedDHTRecordPointer> get copyWith =>
throw _privateConstructorUsedError;
}
/// @nodoc
abstract class $OwnedDHTRecordPointerCopyWith<$Res> {
factory $OwnedDHTRecordPointerCopyWith(OwnedDHTRecordPointer value,
$Res Function(OwnedDHTRecordPointer) then) =
_$OwnedDHTRecordPointerCopyWithImpl<$Res, OwnedDHTRecordPointer>;
@useResult
$Res call({Typed<FixedEncodedString43> recordKey, KeyPair owner});
}
/// @nodoc
class _$OwnedDHTRecordPointerCopyWithImpl<$Res,
$Val extends OwnedDHTRecordPointer>
implements $OwnedDHTRecordPointerCopyWith<$Res> {
_$OwnedDHTRecordPointerCopyWithImpl(this._value, this._then);
// ignore: unused_field
final $Val _value;
// ignore: unused_field
final $Res Function($Val) _then;
@pragma('vm:prefer-inline')
@override
$Res call({
Object? recordKey = null,
Object? owner = null,
}) {
return _then(_value.copyWith(
recordKey: null == recordKey
? _value.recordKey
: recordKey // ignore: cast_nullable_to_non_nullable
as Typed<FixedEncodedString43>,
owner: null == owner
? _value.owner
: owner // ignore: cast_nullable_to_non_nullable
as KeyPair,
) as $Val);
}
}
/// @nodoc
abstract class _$$OwnedDHTRecordPointerImplCopyWith<$Res>
implements $OwnedDHTRecordPointerCopyWith<$Res> {
factory _$$OwnedDHTRecordPointerImplCopyWith(
_$OwnedDHTRecordPointerImpl value,
$Res Function(_$OwnedDHTRecordPointerImpl) then) =
__$$OwnedDHTRecordPointerImplCopyWithImpl<$Res>;
@override
@useResult
$Res call({Typed<FixedEncodedString43> recordKey, KeyPair owner});
}
/// @nodoc
class __$$OwnedDHTRecordPointerImplCopyWithImpl<$Res>
extends _$OwnedDHTRecordPointerCopyWithImpl<$Res,
_$OwnedDHTRecordPointerImpl>
implements _$$OwnedDHTRecordPointerImplCopyWith<$Res> {
__$$OwnedDHTRecordPointerImplCopyWithImpl(_$OwnedDHTRecordPointerImpl _value,
$Res Function(_$OwnedDHTRecordPointerImpl) _then)
: super(_value, _then);
@pragma('vm:prefer-inline')
@override
$Res call({
Object? recordKey = null,
Object? owner = null,
}) {
return _then(_$OwnedDHTRecordPointerImpl(
recordKey: null == recordKey
? _value.recordKey
: recordKey // ignore: cast_nullable_to_non_nullable
as Typed<FixedEncodedString43>,
owner: null == owner
? _value.owner
: owner // ignore: cast_nullable_to_non_nullable
as KeyPair,
));
}
}
/// @nodoc
@JsonSerializable()
class _$OwnedDHTRecordPointerImpl implements _OwnedDHTRecordPointer {
const _$OwnedDHTRecordPointerImpl(
{required this.recordKey, required this.owner});
factory _$OwnedDHTRecordPointerImpl.fromJson(Map<String, dynamic> json) =>
_$$OwnedDHTRecordPointerImplFromJson(json);
@override
final Typed<FixedEncodedString43> recordKey;
@override
final KeyPair owner;
@override
String toString() {
return 'OwnedDHTRecordPointer(recordKey: $recordKey, owner: $owner)';
}
@override
bool operator ==(Object other) {
return identical(this, other) ||
(other.runtimeType == runtimeType &&
other is _$OwnedDHTRecordPointerImpl &&
(identical(other.recordKey, recordKey) ||
other.recordKey == recordKey) &&
(identical(other.owner, owner) || other.owner == owner));
}
@JsonKey(ignore: true)
@override
int get hashCode => Object.hash(runtimeType, recordKey, owner);
@JsonKey(ignore: true)
@override
@pragma('vm:prefer-inline')
_$$OwnedDHTRecordPointerImplCopyWith<_$OwnedDHTRecordPointerImpl>
get copyWith => __$$OwnedDHTRecordPointerImplCopyWithImpl<
_$OwnedDHTRecordPointerImpl>(this, _$identity);
@override
Map<String, dynamic> toJson() {
return _$$OwnedDHTRecordPointerImplToJson(
this,
);
}
}
abstract class _OwnedDHTRecordPointer implements OwnedDHTRecordPointer {
const factory _OwnedDHTRecordPointer(
{required final Typed<FixedEncodedString43> recordKey,
required final KeyPair owner}) = _$OwnedDHTRecordPointerImpl;
factory _OwnedDHTRecordPointer.fromJson(Map<String, dynamic> json) =
_$OwnedDHTRecordPointerImpl.fromJson;
@override
Typed<FixedEncodedString43> get recordKey;
@override
KeyPair get owner;
@override
@JsonKey(ignore: true)
_$$OwnedDHTRecordPointerImplCopyWith<_$OwnedDHTRecordPointerImpl>
get copyWith => throw _privateConstructorUsedError;
}


@@ -0,0 +1,57 @@
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'dht_record_pool.dart';
// **************************************************************************
// JsonSerializableGenerator
// **************************************************************************
_$DHTRecordPoolAllocationsImpl _$$DHTRecordPoolAllocationsImplFromJson(
Map<String, dynamic> json) =>
_$DHTRecordPoolAllocationsImpl(
childrenByParent:
IMap<String, ISet<Typed<FixedEncodedString43>>>.fromJson(
json['childrenByParent'] as Map<String, dynamic>,
(value) => value as String,
(value) => ISet<Typed<FixedEncodedString43>>.fromJson(value,
(value) => Typed<FixedEncodedString43>.fromJson(value))),
parentByChild: IMap<String, Typed<FixedEncodedString43>>.fromJson(
json['parentByChild'] as Map<String, dynamic>,
(value) => value as String,
(value) => Typed<FixedEncodedString43>.fromJson(value)),
rootRecords: ISet<Typed<FixedEncodedString43>>.fromJson(
json['rootRecords'],
(value) => Typed<FixedEncodedString43>.fromJson(value)),
);
Map<String, dynamic> _$$DHTRecordPoolAllocationsImplToJson(
_$DHTRecordPoolAllocationsImpl instance) =>
<String, dynamic>{
'childrenByParent': instance.childrenByParent.toJson(
(value) => value,
(value) => value.toJson(
(value) => value,
),
),
'parentByChild': instance.parentByChild.toJson(
(value) => value,
(value) => value,
),
'rootRecords': instance.rootRecords.toJson(
(value) => value,
),
};
_$OwnedDHTRecordPointerImpl _$$OwnedDHTRecordPointerImplFromJson(
Map<String, dynamic> json) =>
_$OwnedDHTRecordPointerImpl(
recordKey: Typed<FixedEncodedString43>.fromJson(json['recordKey']),
owner: KeyPair.fromJson(json['owner']),
);
Map<String, dynamic> _$$OwnedDHTRecordPointerImplToJson(
_$OwnedDHTRecordPointerImpl instance) =>
<String, dynamic>{
'recordKey': instance.recordKey,
'owner': instance.owner,
};


@@ -0,0 +1,615 @@
import 'dart:async';
import 'dart:typed_data';
import 'package:protobuf/protobuf.dart';
import '../../../../veilid_support.dart';
import '../proto/proto.dart' as proto;
class _DHTShortArrayCache {
_DHTShortArrayCache()
: linkedRecords = List<DHTRecord>.empty(growable: true),
index = List<int>.empty(growable: true),
free = List<int>.empty(growable: true);
_DHTShortArrayCache.from(_DHTShortArrayCache other)
: linkedRecords = List.of(other.linkedRecords),
index = List.of(other.index),
free = List.of(other.free);
final List<DHTRecord> linkedRecords;
final List<int> index;
final List<int> free;
proto.DHTShortArray toProto() {
final head = proto.DHTShortArray();
head.keys.addAll(linkedRecords.map((lr) => lr.key.toProto()));
head.index = head.index..addAll(index);
// Do not serialize free list, it gets recreated
return head;
}
}
class DHTShortArray {
DHTShortArray._({required DHTRecord headRecord})
: _headRecord = headRecord,
_head = _DHTShortArrayCache() {
late final int stride;
switch (headRecord.schema) {
case DHTSchemaDFLT(oCnt: final oCnt):
if (oCnt <= 1) {
throw StateError('Invalid DFLT schema in DHTShortArray');
}
stride = oCnt - 1;
case DHTSchemaSMPL(oCnt: final oCnt, members: final members):
if (oCnt != 0 || members.length != 1 || members[0].mCnt <= 1) {
throw StateError('Invalid SMPL schema in DHTShortArray');
}
stride = members[0].mCnt - 1;
}
assert(stride <= maxElements, 'stride too long');
_stride = stride;
}
static const maxElements = 256;
// Head DHT record
final DHTRecord _headRecord;
late final int _stride;
// Cached representation refreshed from head record
_DHTShortArrayCache _head;
// Create a DHTShortArray
// if smplWriter is specified, uses a SMPL schema with a single writer
// rather than the key owner
static Future<DHTShortArray> create(
{int stride = maxElements,
VeilidRoutingContext? routingContext,
TypedKey? parent,
DHTRecordCrypto? crypto,
KeyPair? smplWriter}) async {
assert(stride <= maxElements, 'stride too long');
final pool = await DHTRecordPool.instance();
late final DHTRecord dhtRecord;
if (smplWriter != null) {
final schema = DHTSchema.smpl(
oCnt: 0,
members: [DHTSchemaMember(mKey: smplWriter.key, mCnt: stride + 1)]);
final dhtCreateRecord = await pool.create(
parent: parent,
routingContext: routingContext,
schema: schema,
crypto: crypto,
writer: smplWriter);
// Reopen with SMPL writer
await dhtCreateRecord.close();
dhtRecord = await pool.openWrite(dhtCreateRecord.key, smplWriter,
parent: parent, routingContext: routingContext, crypto: crypto);
} else {
final schema = DHTSchema.dflt(oCnt: stride + 1);
dhtRecord = await pool.create(
parent: parent,
routingContext: routingContext,
schema: schema,
crypto: crypto);
}
try {
final dhtShortArray = DHTShortArray._(headRecord: dhtRecord);
if (!await dhtShortArray._tryWriteHead()) {
throw StateError('Failed to write head at this time');
}
return dhtShortArray;
} on Exception catch (_) {
await dhtRecord.delete();
rethrow;
}
}
static Future<DHTShortArray> openRead(TypedKey headRecordKey,
{VeilidRoutingContext? routingContext,
TypedKey? parent,
DHTRecordCrypto? crypto}) async {
final pool = await DHTRecordPool.instance();
final dhtRecord = await pool.openRead(headRecordKey,
parent: parent, routingContext: routingContext, crypto: crypto);
try {
final dhtShortArray = DHTShortArray._(headRecord: dhtRecord);
await dhtShortArray._refreshHead();
return dhtShortArray;
} on Exception catch (_) {
await dhtRecord.close();
rethrow;
}
}
static Future<DHTShortArray> openWrite(
TypedKey headRecordKey,
KeyPair writer, {
VeilidRoutingContext? routingContext,
TypedKey? parent,
DHTRecordCrypto? crypto,
}) async {
final pool = await DHTRecordPool.instance();
final dhtRecord = await pool.openWrite(headRecordKey, writer,
parent: parent, routingContext: routingContext, crypto: crypto);
try {
final dhtShortArray = DHTShortArray._(headRecord: dhtRecord);
await dhtShortArray._refreshHead();
return dhtShortArray;
} on Exception catch (_) {
await dhtRecord.close();
rethrow;
}
}
static Future<DHTShortArray> openOwned(
OwnedDHTRecordPointer ownedDHTRecordPointer, {
required TypedKey parent,
VeilidRoutingContext? routingContext,
DHTRecordCrypto? crypto,
}) =>
openWrite(
ownedDHTRecordPointer.recordKey,
ownedDHTRecordPointer.owner,
routingContext: routingContext,
parent: parent,
crypto: crypto,
);
DHTRecord get record => _headRecord;
////////////////////////////////////////////////////////////////
/// Serialize and write out the current head record, possibly updating it
/// if a newer copy is available online. Returns true if the write was
/// successful
Future<bool> _tryWriteHead() async {
final head = _head.toProto();
final headBuffer = head.writeToBuffer();
final existingData = await _headRecord.tryWriteBytes(headBuffer);
if (existingData != null) {
// Head write failed, incorporate update
await _newHead(proto.DHTShortArray.fromBuffer(existingData));
return false;
}
return true;
}
/// Validate the head from the DHT is properly formatted
/// and calculate the free list from it while we're here
List<int> _validateHeadCacheData(
List<Typed<FixedEncodedString43>> linkedKeys, List<int> index) {
// Ensure nothing is duplicated in the linked keys set
final newKeys = linkedKeys.toSet();
assert(newKeys.length <= (maxElements + (_stride - 1)) ~/ _stride,
'too many keys');
assert(newKeys.length == linkedKeys.length, 'duplicated linked keys');
final newIndex = index.toSet();
assert(newIndex.length <= maxElements, 'too many indexes');
assert(newIndex.length == index.length, 'duplicated index locations');
// Ensure all the index keys fit into the existing records
final indexCapacity = (linkedKeys.length + 1) * _stride;
int? maxIndex;
for (final idx in newIndex) {
assert(idx >= 0 && idx < indexCapacity, 'index out of range');
if (maxIndex == null || idx > maxIndex) {
maxIndex = idx;
}
}
final free = <int>[];
if (maxIndex != null) {
for (var i = 0; i < maxIndex; i++) {
if (!newIndex.contains(i)) {
free.add(i);
}
}
}
return free;
}
/// Open a linked record for reading or writing, same as the head record
Future<DHTRecord> _openLinkedRecord(TypedKey recordKey) async {
final pool = await DHTRecordPool.instance();
final writer = _headRecord.writer;
return (writer != null)
? await pool.openWrite(
recordKey,
writer,
parent: _headRecord.key,
routingContext: _headRecord.routingContext,
)
: await pool.openRead(
recordKey,
parent: _headRecord.key,
routingContext: _headRecord.routingContext,
);
}
/// Validate a new head record
Future<void> _newHead(proto.DHTShortArray head) async {
// Get the set of new linked keys and validate it
final linkedKeys =
head.keys.map<TypedKey>(proto.TypedKeyProto.fromProto).toList();
final index = head.index;
final free = _validateHeadCacheData(linkedKeys, index);
// See which records are actually new
final oldRecords = Map<TypedKey, DHTRecord>.fromEntries(
_head.linkedRecords.map((lr) => MapEntry(lr.key, lr)));
final newRecords = <TypedKey, DHTRecord>{};
final sameRecords = <TypedKey, DHTRecord>{};
try {
for (var n = 0; n < linkedKeys.length; n++) {
final newKey = linkedKeys[n];
final oldRecord = oldRecords[newKey];
if (oldRecord == null) {
// Open the new record
final newRecord = await _openLinkedRecord(newKey);
newRecords[newKey] = newRecord;
} else {
sameRecords[newKey] = oldRecord;
}
}
} on Exception catch (_) {
// On any exception close the records we have opened
await Future.wait(newRecords.entries.map((e) => e.value.close()));
rethrow;
}
// From this point forward we should not throw an exception, or everything
// is possibly invalid. Just pass any exception up to the caller, which
// will have to delete this short array and reopen it if it can
await Future.wait(oldRecords.entries
.where((e) => !sameRecords.containsKey(e.key))
.map((e) => e.value.close()));
// Figure out which indices are free
// Make the new head cache
_head = _DHTShortArrayCache()
..linkedRecords.addAll(
linkedKeys.map((key) => (sameRecords[key] ?? newRecords[key])!))
..index.addAll(index)
..free.addAll(free);
}
/// Pull the latest or updated copy of the head record from the network
Future<bool> _refreshHead(
{bool forceRefresh = true, bool onlyUpdates = false}) async {
// Get an updated head record copy if one exists
final head = await _headRecord.getProtobuf(proto.DHTShortArray.fromBuffer,
forceRefresh: forceRefresh, onlyUpdates: onlyUpdates);
if (head == null) {
if (onlyUpdates) {
// No update
return false;
}
throw StateError('head missing during refresh');
}
await _newHead(head);
return true;
}
////////////////////////////////////////////////////////////////
Future<void> close() async {
final futures = <Future<void>>[_headRecord.close()];
for (final lr in _head.linkedRecords) {
futures.add(lr.close());
}
await Future.wait(futures);
}
Future<void> delete() async {
final futures = <Future<void>>[_headRecord.close()];
for (final lr in _head.linkedRecords) {
futures.add(lr.delete());
}
await Future.wait(futures);
}
Future<T> scope<T>(Future<T> Function(DHTShortArray) scopeFunction) async {
try {
return await scopeFunction(this);
} finally {
await close();
}
}
Future<T> deleteScope<T>(
Future<T> Function(DHTShortArray) scopeFunction) async {
try {
final out = await scopeFunction(this);
await close();
return out;
} on Exception catch (_) {
await delete();
rethrow;
}
}
DHTRecord? _getRecord(int recordNumber) {
if (recordNumber == 0) {
return _headRecord;
}
recordNumber--;
if (recordNumber >= _head.linkedRecords.length) {
return null;
}
return _head.linkedRecords[recordNumber];
}
int _emptyIndex() {
if (_head.free.isNotEmpty) {
return _head.free.removeLast();
}
if (_head.index.length == maxElements) {
throw StateError('too many elements');
}
return _head.index.length;
}
void _freeIndex(int idx) {
_head.free.add(idx);
// xxx: free list optimization here?
}
int get length => _head.index.length;
Future<Uint8List?> getItem(int pos, {bool forceRefresh = false}) async {
await _refreshHead(forceRefresh: forceRefresh, onlyUpdates: true);
if (pos < 0 || pos >= _head.index.length) {
throw IndexError.withLength(pos, _head.index.length);
}
final index = _head.index[pos];
final recordNumber = index ~/ _stride;
final record = _getRecord(recordNumber);
assert(record != null, 'Record does not exist');
final recordSubkey = (index % _stride) + ((recordNumber == 0) ? 1 : 0);
return record!.get(subkey: recordSubkey, forceRefresh: forceRefresh);
}
Future<T?> getItemJson<T>(T Function(dynamic) fromJson, int pos,
{bool forceRefresh = false}) =>
getItem(pos, forceRefresh: forceRefresh)
.then((out) => jsonDecodeOptBytes(fromJson, out));
Future<T?> getItemProtobuf<T extends GeneratedMessage>(
T Function(List<int>) fromBuffer, int pos,
{bool forceRefresh = false}) =>
getItem(pos, forceRefresh: forceRefresh)
.then((out) => (out == null) ? null : fromBuffer(out));
Future<bool> tryAddItem(Uint8List value) async {
await _refreshHead(onlyUpdates: true);
final oldHead = _DHTShortArrayCache.from(_head);
late final int pos;
try {
// Allocate empty index
final idx = _emptyIndex();
// Add new index
pos = _head.index.length;
_head.index.add(idx);
// Write new head
if (!await _tryWriteHead()) {
// Failed to write head means head got overwritten
return false;
}
} on Exception catch (_) {
// Exception on write means state needs to be reverted
_head = oldHead;
return false;
}
// Head write succeeded, now write item
await eventualWriteItem(pos, value);
return true;
}
Future<bool> tryInsertItem(int pos, Uint8List value) async {
await _refreshHead(onlyUpdates: true);
final oldHead = _DHTShortArrayCache.from(_head);
try {
// Allocate empty index
final idx = _emptyIndex();
// Add new index
_head.index.insert(pos, idx);
// Write new head
if (!await _tryWriteHead()) {
// Failed to write head means head got overwritten
return false;
}
} on Exception catch (_) {
// Exception on write means state needs to be reverted
_head = oldHead;
return false;
}
// Head write succeeded, now write item
await eventualWriteItem(pos, value);
return true;
}
Future<bool> trySwapItem(int aPos, int bPos) async {
await _refreshHead(onlyUpdates: true);
final oldHead = _DHTShortArrayCache.from(_head);
try {
// Add new index
final aIdx = _head.index[aPos];
final bIdx = _head.index[bPos];
_head.index[aPos] = bIdx;
_head.index[bPos] = aIdx;
// Write new head
if (!await _tryWriteHead()) {
// Failed to write head means head got overwritten
return false;
}
} on Exception catch (_) {
// Exception on write means state needs to be reverted
_head = oldHead;
return false;
}
return true;
}
Future<Uint8List?> tryRemoveItem(int pos) async {
await _refreshHead(onlyUpdates: true);
final oldHead = _DHTShortArrayCache.from(_head);
try {
final removedIdx = _head.index.removeAt(pos);
_freeIndex(removedIdx);
final recordNumber = removedIdx ~/ _stride;
final record = _getRecord(recordNumber);
assert(record != null, 'Record does not exist');
final recordSubkey =
(removedIdx % _stride) + ((recordNumber == 0) ? 1 : 0);
// Write new head
if (!await _tryWriteHead()) {
// Failed to write head means head got overwritten
return null;
}
return record!.get(subkey: recordSubkey);
} on Exception catch (_) {
// Exception on write means state needs to be reverted
_head = oldHead;
return null;
}
}
Future<T?> tryRemoveItemJson<T>(
T Function(dynamic) fromJson,
int pos,
) =>
tryRemoveItem(pos).then((out) => jsonDecodeOptBytes(fromJson, out));
Future<T?> tryRemoveItemProtobuf<T extends GeneratedMessage>(
T Function(List<int>) fromBuffer, int pos) =>
tryRemoveItem(pos).then((out) => (out == null) ? null : fromBuffer(out));
Future<bool> tryClear() async {
await _refreshHead(onlyUpdates: true);
final oldHead = _DHTShortArrayCache.from(_head);
try {
_head.index.clear();
_head.free.clear();
// Write new head
if (!await _tryWriteHead()) {
// Failed to write head means head got overwritten
return false;
}
} on Exception catch (_) {
// Exception on write means state needs to be reverted
_head = oldHead;
return false;
}
return true;
}
Future<Uint8List?> tryWriteItem(int pos, Uint8List newValue) async {
if (await _refreshHead(onlyUpdates: true)) {
throw StateError('structure changed');
}
if (pos < 0 || pos >= _head.index.length) {
throw IndexError.withLength(pos, _head.index.length);
}
final index = _head.index[pos];
final recordNumber = index ~/ _stride;
final record = _getRecord(recordNumber);
assert(record != null, 'Record does not exist');
final recordSubkey = (index % _stride) + ((recordNumber == 0) ? 1 : 0);
return record!.tryWriteBytes(newValue, subkey: recordSubkey);
}
Future<void> eventualWriteItem(int pos, Uint8List newValue) async {
Uint8List? oldData;
do {
// Set it back
oldData = await tryWriteItem(pos, newValue);
// Repeat if newer data on the network was found
} while (oldData != null);
}
Future<void> eventualUpdateItem(
int pos, Future<Uint8List> Function(Uint8List oldValue) update) async {
var oldData = await getItem(pos);
// Ensure it exists already
if (oldData == null) {
throw const FormatException('value does not exist');
}
do {
// Update the data
final updatedData = await update(oldData!);
// Set it back
oldData = await tryWriteItem(pos, updatedData);
// Repeat if newer data on the network was found
} while (oldData != null);
}
Future<T?> tryWriteItemJson<T>(
T Function(dynamic) fromJson,
int pos,
T newValue,
) =>
tryWriteItem(pos, jsonEncodeBytes(newValue))
.then((out) => jsonDecodeOptBytes(fromJson, out));
Future<T?> tryWriteItemProtobuf<T extends GeneratedMessage>(
T Function(List<int>) fromBuffer,
int pos,
T newValue,
) =>
tryWriteItem(pos, newValue.writeToBuffer()).then((out) {
if (out == null) {
return null;
}
return fromBuffer(out);
});
Future<void> eventualWriteItemJson<T>(int pos, T newValue) =>
eventualWriteItem(pos, jsonEncodeBytes(newValue));
Future<void> eventualWriteItemProtobuf<T extends GeneratedMessage>(
int pos, T newValue,
{int subkey = -1}) =>
eventualWriteItem(pos, newValue.writeToBuffer());
Future<void> eventualUpdateItemJson<T>(
T Function(dynamic) fromJson,
int pos,
Future<T> Function(T) update,
) =>
eventualUpdateItem(pos, jsonUpdate(fromJson, update));
Future<void> eventualUpdateItemProtobuf<T extends GeneratedMessage>(
T Function(List<int>) fromBuffer,
int pos,
Future<T> Function(T) update,
) =>
eventualUpdateItem(pos, protobufUpdate(fromBuffer, update));
}
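
Finally, a usage sketch of DHTShortArray tying the pieces together (assumed package import path; error handling omitted). Per the code above, deleteScope closes the array when the body succeeds and deletes it if the body throws:

import 'dart:convert';
import 'dart:typed_data';
import 'package:veilid_support/veilid_support.dart'; // assumed package path

Future<void> shortArrayExample() async {
  // A small stride keeps the head record's subkey count modest
  final shortArray = await DHTShortArray.create(stride: 32);
  await shortArray.deleteScope((sa) async {
    // tryAddItem returns false if the head record changed underneath us
    final added =
        await sa.tryAddItem(Uint8List.fromList(utf8.encode('first item')));
    if (added) {
      final item = await sa.getItem(0);
      print(utf8.decode(item!));
    }
  });
}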


@@ -0,0 +1,309 @@
//
// Generated code. Do not modify.
// source: dht.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types
// ignore_for_file: constant_identifier_names, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import
import 'dart:core' as $core;
import 'package:protobuf/protobuf.dart' as $pb;
import 'veilid.pb.dart' as $0;
class DHTData extends $pb.GeneratedMessage {
factory DHTData() => create();
DHTData._() : super();
factory DHTData.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory DHTData.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'DHTData', package: const $pb.PackageName(_omitMessageNames ? '' : 'dht'), createEmptyInstance: create)
..pc<$0.TypedKey>(1, _omitFieldNames ? '' : 'keys', $pb.PbFieldType.PM, subBuilder: $0.TypedKey.create)
..aOM<$0.TypedKey>(2, _omitFieldNames ? '' : 'hash', subBuilder: $0.TypedKey.create)
..a<$core.int>(3, _omitFieldNames ? '' : 'chunk', $pb.PbFieldType.OU3)
..a<$core.int>(4, _omitFieldNames ? '' : 'size', $pb.PbFieldType.OU3)
..hasRequiredFields = false
;
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
DHTData clone() => DHTData()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
DHTData copyWith(void Function(DHTData) updates) => super.copyWith((message) => updates(message as DHTData)) as DHTData;
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static DHTData create() => DHTData._();
DHTData createEmptyInstance() => create();
static $pb.PbList<DHTData> createRepeated() => $pb.PbList<DHTData>();
@$core.pragma('dart2js:noInline')
static DHTData getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<DHTData>(create);
static DHTData? _defaultInstance;
@$pb.TagNumber(1)
$core.List<$0.TypedKey> get keys => $_getList(0);
@$pb.TagNumber(2)
$0.TypedKey get hash => $_getN(1);
@$pb.TagNumber(2)
set hash($0.TypedKey v) { setField(2, v); }
@$pb.TagNumber(2)
$core.bool hasHash() => $_has(1);
@$pb.TagNumber(2)
void clearHash() => clearField(2);
@$pb.TagNumber(2)
$0.TypedKey ensureHash() => $_ensure(1);
@$pb.TagNumber(3)
$core.int get chunk => $_getIZ(2);
@$pb.TagNumber(3)
set chunk($core.int v) { $_setUnsignedInt32(2, v); }
@$pb.TagNumber(3)
$core.bool hasChunk() => $_has(2);
@$pb.TagNumber(3)
void clearChunk() => clearField(3);
@$pb.TagNumber(4)
$core.int get size => $_getIZ(3);
@$pb.TagNumber(4)
set size($core.int v) { $_setUnsignedInt32(3, v); }
@$pb.TagNumber(4)
$core.bool hasSize() => $_has(3);
@$pb.TagNumber(4)
void clearSize() => clearField(4);
}
class DHTShortArray extends $pb.GeneratedMessage {
factory DHTShortArray() => create();
DHTShortArray._() : super();
factory DHTShortArray.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory DHTShortArray.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'DHTShortArray', package: const $pb.PackageName(_omitMessageNames ? '' : 'dht'), createEmptyInstance: create)
..pc<$0.TypedKey>(1, _omitFieldNames ? '' : 'keys', $pb.PbFieldType.PM, subBuilder: $0.TypedKey.create)
..a<$core.List<$core.int>>(2, _omitFieldNames ? '' : 'index', $pb.PbFieldType.OY)
..hasRequiredFields = false
;
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
DHTShortArray clone() => DHTShortArray()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
DHTShortArray copyWith(void Function(DHTShortArray) updates) => super.copyWith((message) => updates(message as DHTShortArray)) as DHTShortArray;
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static DHTShortArray create() => DHTShortArray._();
DHTShortArray createEmptyInstance() => create();
static $pb.PbList<DHTShortArray> createRepeated() => $pb.PbList<DHTShortArray>();
@$core.pragma('dart2js:noInline')
static DHTShortArray getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<DHTShortArray>(create);
static DHTShortArray? _defaultInstance;
@$pb.TagNumber(1)
$core.List<$0.TypedKey> get keys => $_getList(0);
@$pb.TagNumber(2)
$core.List<$core.int> get index => $_getN(1);
@$pb.TagNumber(2)
set index($core.List<$core.int> v) { $_setBytes(1, v); }
@$pb.TagNumber(2)
$core.bool hasIndex() => $_has(1);
@$pb.TagNumber(2)
void clearIndex() => clearField(2);
}
class DHTLog extends $pb.GeneratedMessage {
factory DHTLog() => create();
DHTLog._() : super();
factory DHTLog.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory DHTLog.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'DHTLog', package: const $pb.PackageName(_omitMessageNames ? '' : 'dht'), createEmptyInstance: create)
..pc<$0.TypedKey>(1, _omitFieldNames ? '' : 'keys', $pb.PbFieldType.PM, subBuilder: $0.TypedKey.create)
..aOM<$0.TypedKey>(2, _omitFieldNames ? '' : 'back', subBuilder: $0.TypedKey.create)
..p<$core.int>(3, _omitFieldNames ? '' : 'subkeyCounts', $pb.PbFieldType.KU3)
..a<$core.int>(4, _omitFieldNames ? '' : 'totalSubkeys', $pb.PbFieldType.OU3)
..hasRequiredFields = false
;
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
DHTLog clone() => DHTLog()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
DHTLog copyWith(void Function(DHTLog) updates) => super.copyWith((message) => updates(message as DHTLog)) as DHTLog;
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static DHTLog create() => DHTLog._();
DHTLog createEmptyInstance() => create();
static $pb.PbList<DHTLog> createRepeated() => $pb.PbList<DHTLog>();
@$core.pragma('dart2js:noInline')
static DHTLog getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<DHTLog>(create);
static DHTLog? _defaultInstance;
@$pb.TagNumber(1)
$core.List<$0.TypedKey> get keys => $_getList(0);
@$pb.TagNumber(2)
$0.TypedKey get back => $_getN(1);
@$pb.TagNumber(2)
set back($0.TypedKey v) { setField(2, v); }
@$pb.TagNumber(2)
$core.bool hasBack() => $_has(1);
@$pb.TagNumber(2)
void clearBack() => clearField(2);
@$pb.TagNumber(2)
$0.TypedKey ensureBack() => $_ensure(1);
@$pb.TagNumber(3)
$core.List<$core.int> get subkeyCounts => $_getList(2);
@$pb.TagNumber(4)
$core.int get totalSubkeys => $_getIZ(3);
@$pb.TagNumber(4)
set totalSubkeys($core.int v) { $_setUnsignedInt32(3, v); }
@$pb.TagNumber(4)
$core.bool hasTotalSubkeys() => $_has(3);
@$pb.TagNumber(4)
void clearTotalSubkeys() => clearField(4);
}
enum DataReference_Kind {
dhtData,
notSet
}
class DataReference extends $pb.GeneratedMessage {
factory DataReference() => create();
DataReference._() : super();
factory DataReference.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory DataReference.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
static const $core.Map<$core.int, DataReference_Kind> _DataReference_KindByTag = {
1 : DataReference_Kind.dhtData,
0 : DataReference_Kind.notSet
};
static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'DataReference', package: const $pb.PackageName(_omitMessageNames ? '' : 'dht'), createEmptyInstance: create)
..oo(0, [1])
..aOM<$0.TypedKey>(1, _omitFieldNames ? '' : 'dhtData', subBuilder: $0.TypedKey.create)
..hasRequiredFields = false
;
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
DataReference clone() => DataReference()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
DataReference copyWith(void Function(DataReference) updates) => super.copyWith((message) => updates(message as DataReference)) as DataReference;
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static DataReference create() => DataReference._();
DataReference createEmptyInstance() => create();
static $pb.PbList<DataReference> createRepeated() => $pb.PbList<DataReference>();
@$core.pragma('dart2js:noInline')
static DataReference getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<DataReference>(create);
static DataReference? _defaultInstance;
DataReference_Kind whichKind() => _DataReference_KindByTag[$_whichOneof(0)]!;
void clearKind() => clearField($_whichOneof(0));
@$pb.TagNumber(1)
$0.TypedKey get dhtData => $_getN(0);
@$pb.TagNumber(1)
set dhtData($0.TypedKey v) { setField(1, v); }
@$pb.TagNumber(1)
$core.bool hasDhtData() => $_has(0);
@$pb.TagNumber(1)
void clearDhtData() => clearField(1);
@$pb.TagNumber(1)
$0.TypedKey ensureDhtData() => $_ensure(0);
}
class OwnedDHTRecordPointer extends $pb.GeneratedMessage {
factory OwnedDHTRecordPointer() => create();
OwnedDHTRecordPointer._() : super();
factory OwnedDHTRecordPointer.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory OwnedDHTRecordPointer.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'OwnedDHTRecordPointer', package: const $pb.PackageName(_omitMessageNames ? '' : 'dht'), createEmptyInstance: create)
..aOM<$0.TypedKey>(1, _omitFieldNames ? '' : 'recordKey', subBuilder: $0.TypedKey.create)
..aOM<$0.KeyPair>(2, _omitFieldNames ? '' : 'owner', subBuilder: $0.KeyPair.create)
..hasRequiredFields = false
;
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
OwnedDHTRecordPointer clone() => OwnedDHTRecordPointer()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
OwnedDHTRecordPointer copyWith(void Function(OwnedDHTRecordPointer) updates) => super.copyWith((message) => updates(message as OwnedDHTRecordPointer)) as OwnedDHTRecordPointer;
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static OwnedDHTRecordPointer create() => OwnedDHTRecordPointer._();
OwnedDHTRecordPointer createEmptyInstance() => create();
static $pb.PbList<OwnedDHTRecordPointer> createRepeated() => $pb.PbList<OwnedDHTRecordPointer>();
@$core.pragma('dart2js:noInline')
static OwnedDHTRecordPointer getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<OwnedDHTRecordPointer>(create);
static OwnedDHTRecordPointer? _defaultInstance;
@$pb.TagNumber(1)
$0.TypedKey get recordKey => $_getN(0);
@$pb.TagNumber(1)
set recordKey($0.TypedKey v) { setField(1, v); }
@$pb.TagNumber(1)
$core.bool hasRecordKey() => $_has(0);
@$pb.TagNumber(1)
void clearRecordKey() => clearField(1);
@$pb.TagNumber(1)
$0.TypedKey ensureRecordKey() => $_ensure(0);
@$pb.TagNumber(2)
$0.KeyPair get owner => $_getN(1);
@$pb.TagNumber(2)
set owner($0.KeyPair v) { setField(2, v); }
@$pb.TagNumber(2)
$core.bool hasOwner() => $_has(1);
@$pb.TagNumber(2)
void clearOwner() => clearField(2);
@$pb.TagNumber(2)
$0.KeyPair ensureOwner() => $_ensure(1);
}
const _omitFieldNames = $core.bool.fromEnvironment('protobuf.omit_field_names');
const _omitMessageNames = $core.bool.fromEnvironment('protobuf.omit_message_names');
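// Illustrative round trip (not part of the generated output): the header
// messages above use the standard protobuf Dart API; the index bytes here
// are placeholder values only.
void exampleDhtShortArrayHeaderRoundTrip() {
  final header = DHTShortArray()..index = <$core.int>[2, 0, 1];
  final bytes = header.writeToBuffer();
  final decoded = DHTShortArray.fromBuffer(bytes);
  assert(decoded.index.length == header.index.length);
}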

View file

@@ -0,0 +1,11 @@
//
// Generated code. Do not modify.
// source: dht.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types
// ignore_for_file: constant_identifier_names, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import

View file

@@ -0,0 +1,93 @@
//
// Generated code. Do not modify.
// source: dht.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types
// ignore_for_file: constant_identifier_names, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import
import 'dart:convert' as $convert;
import 'dart:core' as $core;
import 'dart:typed_data' as $typed_data;
@$core.Deprecated('Use dHTDataDescriptor instead')
const DHTData$json = {
'1': 'DHTData',
'2': [
{'1': 'keys', '3': 1, '4': 3, '5': 11, '6': '.veilid.TypedKey', '10': 'keys'},
{'1': 'hash', '3': 2, '4': 1, '5': 11, '6': '.veilid.TypedKey', '10': 'hash'},
{'1': 'chunk', '3': 3, '4': 1, '5': 13, '10': 'chunk'},
{'1': 'size', '3': 4, '4': 1, '5': 13, '10': 'size'},
],
};
/// Descriptor for `DHTData`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List dHTDataDescriptor = $convert.base64Decode(
'CgdESFREYXRhEiQKBGtleXMYASADKAsyEC52ZWlsaWQuVHlwZWRLZXlSBGtleXMSJAoEaGFzaB'
'gCIAEoCzIQLnZlaWxpZC5UeXBlZEtleVIEaGFzaBIUCgVjaHVuaxgDIAEoDVIFY2h1bmsSEgoE'
'c2l6ZRgEIAEoDVIEc2l6ZQ==');
@$core.Deprecated('Use dHTShortArrayDescriptor instead')
const DHTShortArray$json = {
'1': 'DHTShortArray',
'2': [
{'1': 'keys', '3': 1, '4': 3, '5': 11, '6': '.veilid.TypedKey', '10': 'keys'},
{'1': 'index', '3': 2, '4': 1, '5': 12, '10': 'index'},
],
};
/// Descriptor for `DHTShortArray`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List dHTShortArrayDescriptor = $convert.base64Decode(
'Cg1ESFRTaG9ydEFycmF5EiQKBGtleXMYASADKAsyEC52ZWlsaWQuVHlwZWRLZXlSBGtleXMSFA'
'oFaW5kZXgYAiABKAxSBWluZGV4');
@$core.Deprecated('Use dHTLogDescriptor instead')
const DHTLog$json = {
'1': 'DHTLog',
'2': [
{'1': 'keys', '3': 1, '4': 3, '5': 11, '6': '.veilid.TypedKey', '10': 'keys'},
{'1': 'back', '3': 2, '4': 1, '5': 11, '6': '.veilid.TypedKey', '10': 'back'},
{'1': 'subkey_counts', '3': 3, '4': 3, '5': 13, '10': 'subkeyCounts'},
{'1': 'total_subkeys', '3': 4, '4': 1, '5': 13, '10': 'totalSubkeys'},
],
};
/// Descriptor for `DHTLog`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List dHTLogDescriptor = $convert.base64Decode(
'CgZESFRMb2cSJAoEa2V5cxgBIAMoCzIQLnZlaWxpZC5UeXBlZEtleVIEa2V5cxIkCgRiYWNrGA'
'IgASgLMhAudmVpbGlkLlR5cGVkS2V5UgRiYWNrEiMKDXN1YmtleV9jb3VudHMYAyADKA1SDHN1'
'YmtleUNvdW50cxIjCg10b3RhbF9zdWJrZXlzGAQgASgNUgx0b3RhbFN1YmtleXM=');
@$core.Deprecated('Use dataReferenceDescriptor instead')
const DataReference$json = {
'1': 'DataReference',
'2': [
{'1': 'dht_data', '3': 1, '4': 1, '5': 11, '6': '.veilid.TypedKey', '9': 0, '10': 'dhtData'},
],
'8': [
{'1': 'kind'},
],
};
/// Descriptor for `DataReference`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List dataReferenceDescriptor = $convert.base64Decode(
'Cg1EYXRhUmVmZXJlbmNlEi0KCGRodF9kYXRhGAEgASgLMhAudmVpbGlkLlR5cGVkS2V5SABSB2'
'RodERhdGFCBgoEa2luZA==');
@$core.Deprecated('Use ownedDHTRecordPointerDescriptor instead')
const OwnedDHTRecordPointer$json = {
'1': 'OwnedDHTRecordPointer',
'2': [
{'1': 'record_key', '3': 1, '4': 1, '5': 11, '6': '.veilid.TypedKey', '10': 'recordKey'},
{'1': 'owner', '3': 2, '4': 1, '5': 11, '6': '.veilid.KeyPair', '10': 'owner'},
],
};
/// Descriptor for `OwnedDHTRecordPointer`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List ownedDHTRecordPointerDescriptor = $convert.base64Decode(
'ChVPd25lZERIVFJlY29yZFBvaW50ZXISLwoKcmVjb3JkX2tleRgBIAEoCzIQLnZlaWxpZC5UeX'
'BlZEtleVIJcmVjb3JkS2V5EiUKBW93bmVyGAIgASgLMg8udmVpbGlkLktleVBhaXJSBW93bmVy');

View file

@@ -0,0 +1,14 @@
//
// Generated code. Do not modify.
// source: dht.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types
// ignore_for_file: constant_identifier_names
// ignore_for_file: deprecated_member_use_from_same_package, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import
export 'dht.pb.dart';

View file

@@ -0,0 +1,143 @@
import 'dart:typed_data';
import '../veilid_support.dart' as veilid;
import 'veilid.pb.dart' as proto;
export 'veilid.pb.dart';
export 'veilid.pbenum.dart';
export 'veilid.pbjson.dart';
export 'veilid.pbserver.dart';
/// CryptoKey protobuf marshaling
///
extension CryptoKeyProto on veilid.CryptoKey {
proto.CryptoKey toProto() {
final b = decode().buffer.asByteData();
final out = proto.CryptoKey()
..u0 = b.getUint32(0 * 4)
..u1 = b.getUint32(1 * 4)
..u2 = b.getUint32(2 * 4)
..u3 = b.getUint32(3 * 4)
..u4 = b.getUint32(4 * 4)
..u5 = b.getUint32(5 * 4)
..u6 = b.getUint32(6 * 4)
..u7 = b.getUint32(7 * 4);
return out;
}
static veilid.CryptoKey fromProto(proto.CryptoKey p) {
final b = ByteData(32)
..setUint32(0 * 4, p.u0)
..setUint32(1 * 4, p.u1)
..setUint32(2 * 4, p.u2)
..setUint32(3 * 4, p.u3)
..setUint32(4 * 4, p.u4)
..setUint32(5 * 4, p.u5)
..setUint32(6 * 4, p.u6)
..setUint32(7 * 4, p.u7);
return veilid.CryptoKey.fromBytes(Uint8List.view(b.buffer));
}
}
/// Signature protobuf marshaling
///
extension SignatureProto on veilid.Signature {
proto.Signature toProto() {
final b = decode().buffer.asByteData();
final out = proto.Signature()
..u0 = b.getUint32(0 * 4)
..u1 = b.getUint32(1 * 4)
..u2 = b.getUint32(2 * 4)
..u3 = b.getUint32(3 * 4)
..u4 = b.getUint32(4 * 4)
..u5 = b.getUint32(5 * 4)
..u6 = b.getUint32(6 * 4)
..u7 = b.getUint32(7 * 4)
..u8 = b.getUint32(8 * 4)
..u9 = b.getUint32(9 * 4)
..u10 = b.getUint32(10 * 4)
..u11 = b.getUint32(11 * 4)
..u12 = b.getUint32(12 * 4)
..u13 = b.getUint32(13 * 4)
..u14 = b.getUint32(14 * 4)
..u15 = b.getUint32(15 * 4);
return out;
}
static veilid.Signature fromProto(proto.Signature p) {
final b = ByteData(64)
..setUint32(0 * 4, p.u0)
..setUint32(1 * 4, p.u1)
..setUint32(2 * 4, p.u2)
..setUint32(3 * 4, p.u3)
..setUint32(4 * 4, p.u4)
..setUint32(5 * 4, p.u5)
..setUint32(6 * 4, p.u6)
..setUint32(7 * 4, p.u7)
..setUint32(8 * 4, p.u8)
..setUint32(9 * 4, p.u9)
..setUint32(10 * 4, p.u10)
..setUint32(11 * 4, p.u11)
..setUint32(12 * 4, p.u12)
..setUint32(13 * 4, p.u13)
..setUint32(14 * 4, p.u14)
..setUint32(15 * 4, p.u15);
return veilid.Signature.fromBytes(Uint8List.view(b.buffer));
}
}
/// Nonce protobuf marshaling
///
extension NonceProto on veilid.Nonce {
proto.Nonce toProto() {
final b = decode().buffer.asByteData();
final out = proto.Nonce()
..u0 = b.getUint32(0 * 4)
..u1 = b.getUint32(1 * 4)
..u2 = b.getUint32(2 * 4)
..u3 = b.getUint32(3 * 4)
..u4 = b.getUint32(4 * 4)
..u5 = b.getUint32(5 * 4);
return out;
}
static veilid.Nonce fromProto(proto.Nonce p) {
final b = ByteData(24)
..setUint32(0 * 4, p.u0)
..setUint32(1 * 4, p.u1)
..setUint32(2 * 4, p.u2)
..setUint32(3 * 4, p.u3)
..setUint32(4 * 4, p.u4)
..setUint32(5 * 4, p.u5);
return veilid.Nonce.fromBytes(Uint8List.view(b.buffer));
}
}
/// TypedKey protobuf marshaling
///
extension TypedKeyProto on veilid.TypedKey {
proto.TypedKey toProto() {
final out = proto.TypedKey()
..kind = kind
..value = value.toProto();
return out;
}
static veilid.TypedKey fromProto(proto.TypedKey p) =>
veilid.TypedKey(kind: p.kind, value: CryptoKeyProto.fromProto(p.value));
}
/// KeyPair protobuf marshaling
///
extension KeyPairProto on veilid.KeyPair {
proto.KeyPair toProto() {
final out = proto.KeyPair()
..key = key.toProto()
..secret = secret.toProto();
return out;
}
static veilid.KeyPair fromProto(proto.KeyPair p) => veilid.KeyPair(
key: CryptoKeyProto.fromProto(p.key),
secret: CryptoKeyProto.fromProto(p.secret));
}
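// Illustrative usage sketch (not part of the original file): round-tripping a
// TypedKey through its protobuf form using the extensions above. It assumes a
// veilid.TypedKey value `typedKey` obtained elsewhere (e.g. a DHT record key).
proto.TypedKey exampleMarshalTypedKey(veilid.TypedKey typedKey) {
  // veilid -> protobuf
  final asProto = typedKey.toProto();
  // protobuf -> veilid
  final roundTripped = TypedKeyProto.fromProto(asProto);
  // The FourCC kind survives the round trip unchanged
  assert(roundTripped.kind == typedKey.kind);
  return asProto;
}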

View file

@@ -0,0 +1,524 @@
//
// Generated code. Do not modify.
// source: veilid.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types
// ignore_for_file: constant_identifier_names, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import
import 'dart:core' as $core;
import 'package:protobuf/protobuf.dart' as $pb;
class CryptoKey extends $pb.GeneratedMessage {
factory CryptoKey() => create();
CryptoKey._() : super();
factory CryptoKey.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory CryptoKey.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'CryptoKey', package: const $pb.PackageName(_omitMessageNames ? '' : 'veilid'), createEmptyInstance: create)
..a<$core.int>(1, _omitFieldNames ? '' : 'u0', $pb.PbFieldType.OF3)
..a<$core.int>(2, _omitFieldNames ? '' : 'u1', $pb.PbFieldType.OF3)
..a<$core.int>(3, _omitFieldNames ? '' : 'u2', $pb.PbFieldType.OF3)
..a<$core.int>(4, _omitFieldNames ? '' : 'u3', $pb.PbFieldType.OF3)
..a<$core.int>(5, _omitFieldNames ? '' : 'u4', $pb.PbFieldType.OF3)
..a<$core.int>(6, _omitFieldNames ? '' : 'u5', $pb.PbFieldType.OF3)
..a<$core.int>(7, _omitFieldNames ? '' : 'u6', $pb.PbFieldType.OF3)
..a<$core.int>(8, _omitFieldNames ? '' : 'u7', $pb.PbFieldType.OF3)
..hasRequiredFields = false
;
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
CryptoKey clone() => CryptoKey()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
CryptoKey copyWith(void Function(CryptoKey) updates) => super.copyWith((message) => updates(message as CryptoKey)) as CryptoKey;
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static CryptoKey create() => CryptoKey._();
CryptoKey createEmptyInstance() => create();
static $pb.PbList<CryptoKey> createRepeated() => $pb.PbList<CryptoKey>();
@$core.pragma('dart2js:noInline')
static CryptoKey getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<CryptoKey>(create);
static CryptoKey? _defaultInstance;
@$pb.TagNumber(1)
$core.int get u0 => $_getIZ(0);
@$pb.TagNumber(1)
set u0($core.int v) { $_setUnsignedInt32(0, v); }
@$pb.TagNumber(1)
$core.bool hasU0() => $_has(0);
@$pb.TagNumber(1)
void clearU0() => clearField(1);
@$pb.TagNumber(2)
$core.int get u1 => $_getIZ(1);
@$pb.TagNumber(2)
set u1($core.int v) { $_setUnsignedInt32(1, v); }
@$pb.TagNumber(2)
$core.bool hasU1() => $_has(1);
@$pb.TagNumber(2)
void clearU1() => clearField(2);
@$pb.TagNumber(3)
$core.int get u2 => $_getIZ(2);
@$pb.TagNumber(3)
set u2($core.int v) { $_setUnsignedInt32(2, v); }
@$pb.TagNumber(3)
$core.bool hasU2() => $_has(2);
@$pb.TagNumber(3)
void clearU2() => clearField(3);
@$pb.TagNumber(4)
$core.int get u3 => $_getIZ(3);
@$pb.TagNumber(4)
set u3($core.int v) { $_setUnsignedInt32(3, v); }
@$pb.TagNumber(4)
$core.bool hasU3() => $_has(3);
@$pb.TagNumber(4)
void clearU3() => clearField(4);
@$pb.TagNumber(5)
$core.int get u4 => $_getIZ(4);
@$pb.TagNumber(5)
set u4($core.int v) { $_setUnsignedInt32(4, v); }
@$pb.TagNumber(5)
$core.bool hasU4() => $_has(4);
@$pb.TagNumber(5)
void clearU4() => clearField(5);
@$pb.TagNumber(6)
$core.int get u5 => $_getIZ(5);
@$pb.TagNumber(6)
set u5($core.int v) { $_setUnsignedInt32(5, v); }
@$pb.TagNumber(6)
$core.bool hasU5() => $_has(5);
@$pb.TagNumber(6)
void clearU5() => clearField(6);
@$pb.TagNumber(7)
$core.int get u6 => $_getIZ(6);
@$pb.TagNumber(7)
set u6($core.int v) { $_setUnsignedInt32(6, v); }
@$pb.TagNumber(7)
$core.bool hasU6() => $_has(6);
@$pb.TagNumber(7)
void clearU6() => clearField(7);
@$pb.TagNumber(8)
$core.int get u7 => $_getIZ(7);
@$pb.TagNumber(8)
set u7($core.int v) { $_setUnsignedInt32(7, v); }
@$pb.TagNumber(8)
$core.bool hasU7() => $_has(7);
@$pb.TagNumber(8)
void clearU7() => clearField(8);
}
class Signature extends $pb.GeneratedMessage {
factory Signature() => create();
Signature._() : super();
factory Signature.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory Signature.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'Signature', package: const $pb.PackageName(_omitMessageNames ? '' : 'veilid'), createEmptyInstance: create)
..a<$core.int>(1, _omitFieldNames ? '' : 'u0', $pb.PbFieldType.OF3)
..a<$core.int>(2, _omitFieldNames ? '' : 'u1', $pb.PbFieldType.OF3)
..a<$core.int>(3, _omitFieldNames ? '' : 'u2', $pb.PbFieldType.OF3)
..a<$core.int>(4, _omitFieldNames ? '' : 'u3', $pb.PbFieldType.OF3)
..a<$core.int>(5, _omitFieldNames ? '' : 'u4', $pb.PbFieldType.OF3)
..a<$core.int>(6, _omitFieldNames ? '' : 'u5', $pb.PbFieldType.OF3)
..a<$core.int>(7, _omitFieldNames ? '' : 'u6', $pb.PbFieldType.OF3)
..a<$core.int>(8, _omitFieldNames ? '' : 'u7', $pb.PbFieldType.OF3)
..a<$core.int>(9, _omitFieldNames ? '' : 'u8', $pb.PbFieldType.OF3)
..a<$core.int>(10, _omitFieldNames ? '' : 'u9', $pb.PbFieldType.OF3)
..a<$core.int>(11, _omitFieldNames ? '' : 'u10', $pb.PbFieldType.OF3)
..a<$core.int>(12, _omitFieldNames ? '' : 'u11', $pb.PbFieldType.OF3)
..a<$core.int>(13, _omitFieldNames ? '' : 'u12', $pb.PbFieldType.OF3)
..a<$core.int>(14, _omitFieldNames ? '' : 'u13', $pb.PbFieldType.OF3)
..a<$core.int>(15, _omitFieldNames ? '' : 'u14', $pb.PbFieldType.OF3)
..a<$core.int>(16, _omitFieldNames ? '' : 'u15', $pb.PbFieldType.OF3)
..hasRequiredFields = false
;
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
Signature clone() => Signature()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
Signature copyWith(void Function(Signature) updates) => super.copyWith((message) => updates(message as Signature)) as Signature;
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static Signature create() => Signature._();
Signature createEmptyInstance() => create();
static $pb.PbList<Signature> createRepeated() => $pb.PbList<Signature>();
@$core.pragma('dart2js:noInline')
static Signature getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<Signature>(create);
static Signature? _defaultInstance;
@$pb.TagNumber(1)
$core.int get u0 => $_getIZ(0);
@$pb.TagNumber(1)
set u0($core.int v) { $_setUnsignedInt32(0, v); }
@$pb.TagNumber(1)
$core.bool hasU0() => $_has(0);
@$pb.TagNumber(1)
void clearU0() => clearField(1);
@$pb.TagNumber(2)
$core.int get u1 => $_getIZ(1);
@$pb.TagNumber(2)
set u1($core.int v) { $_setUnsignedInt32(1, v); }
@$pb.TagNumber(2)
$core.bool hasU1() => $_has(1);
@$pb.TagNumber(2)
void clearU1() => clearField(2);
@$pb.TagNumber(3)
$core.int get u2 => $_getIZ(2);
@$pb.TagNumber(3)
set u2($core.int v) { $_setUnsignedInt32(2, v); }
@$pb.TagNumber(3)
$core.bool hasU2() => $_has(2);
@$pb.TagNumber(3)
void clearU2() => clearField(3);
@$pb.TagNumber(4)
$core.int get u3 => $_getIZ(3);
@$pb.TagNumber(4)
set u3($core.int v) { $_setUnsignedInt32(3, v); }
@$pb.TagNumber(4)
$core.bool hasU3() => $_has(3);
@$pb.TagNumber(4)
void clearU3() => clearField(4);
@$pb.TagNumber(5)
$core.int get u4 => $_getIZ(4);
@$pb.TagNumber(5)
set u4($core.int v) { $_setUnsignedInt32(4, v); }
@$pb.TagNumber(5)
$core.bool hasU4() => $_has(4);
@$pb.TagNumber(5)
void clearU4() => clearField(5);
@$pb.TagNumber(6)
$core.int get u5 => $_getIZ(5);
@$pb.TagNumber(6)
set u5($core.int v) { $_setUnsignedInt32(5, v); }
@$pb.TagNumber(6)
$core.bool hasU5() => $_has(5);
@$pb.TagNumber(6)
void clearU5() => clearField(6);
@$pb.TagNumber(7)
$core.int get u6 => $_getIZ(6);
@$pb.TagNumber(7)
set u6($core.int v) { $_setUnsignedInt32(6, v); }
@$pb.TagNumber(7)
$core.bool hasU6() => $_has(6);
@$pb.TagNumber(7)
void clearU6() => clearField(7);
@$pb.TagNumber(8)
$core.int get u7 => $_getIZ(7);
@$pb.TagNumber(8)
set u7($core.int v) { $_setUnsignedInt32(7, v); }
@$pb.TagNumber(8)
$core.bool hasU7() => $_has(7);
@$pb.TagNumber(8)
void clearU7() => clearField(8);
@$pb.TagNumber(9)
$core.int get u8 => $_getIZ(8);
@$pb.TagNumber(9)
set u8($core.int v) { $_setUnsignedInt32(8, v); }
@$pb.TagNumber(9)
$core.bool hasU8() => $_has(8);
@$pb.TagNumber(9)
void clearU8() => clearField(9);
@$pb.TagNumber(10)
$core.int get u9 => $_getIZ(9);
@$pb.TagNumber(10)
set u9($core.int v) { $_setUnsignedInt32(9, v); }
@$pb.TagNumber(10)
$core.bool hasU9() => $_has(9);
@$pb.TagNumber(10)
void clearU9() => clearField(10);
@$pb.TagNumber(11)
$core.int get u10 => $_getIZ(10);
@$pb.TagNumber(11)
set u10($core.int v) { $_setUnsignedInt32(10, v); }
@$pb.TagNumber(11)
$core.bool hasU10() => $_has(10);
@$pb.TagNumber(11)
void clearU10() => clearField(11);
@$pb.TagNumber(12)
$core.int get u11 => $_getIZ(11);
@$pb.TagNumber(12)
set u11($core.int v) { $_setUnsignedInt32(11, v); }
@$pb.TagNumber(12)
$core.bool hasU11() => $_has(11);
@$pb.TagNumber(12)
void clearU11() => clearField(12);
@$pb.TagNumber(13)
$core.int get u12 => $_getIZ(12);
@$pb.TagNumber(13)
set u12($core.int v) { $_setUnsignedInt32(12, v); }
@$pb.TagNumber(13)
$core.bool hasU12() => $_has(12);
@$pb.TagNumber(13)
void clearU12() => clearField(13);
@$pb.TagNumber(14)
$core.int get u13 => $_getIZ(13);
@$pb.TagNumber(14)
set u13($core.int v) { $_setUnsignedInt32(13, v); }
@$pb.TagNumber(14)
$core.bool hasU13() => $_has(13);
@$pb.TagNumber(14)
void clearU13() => clearField(14);
@$pb.TagNumber(15)
$core.int get u14 => $_getIZ(14);
@$pb.TagNumber(15)
set u14($core.int v) { $_setUnsignedInt32(14, v); }
@$pb.TagNumber(15)
$core.bool hasU14() => $_has(14);
@$pb.TagNumber(15)
void clearU14() => clearField(15);
@$pb.TagNumber(16)
$core.int get u15 => $_getIZ(15);
@$pb.TagNumber(16)
set u15($core.int v) { $_setUnsignedInt32(15, v); }
@$pb.TagNumber(16)
$core.bool hasU15() => $_has(15);
@$pb.TagNumber(16)
void clearU15() => clearField(16);
}
class Nonce extends $pb.GeneratedMessage {
factory Nonce() => create();
Nonce._() : super();
factory Nonce.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory Nonce.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'Nonce', package: const $pb.PackageName(_omitMessageNames ? '' : 'veilid'), createEmptyInstance: create)
..a<$core.int>(1, _omitFieldNames ? '' : 'u0', $pb.PbFieldType.OF3)
..a<$core.int>(2, _omitFieldNames ? '' : 'u1', $pb.PbFieldType.OF3)
..a<$core.int>(3, _omitFieldNames ? '' : 'u2', $pb.PbFieldType.OF3)
..a<$core.int>(4, _omitFieldNames ? '' : 'u3', $pb.PbFieldType.OF3)
..a<$core.int>(5, _omitFieldNames ? '' : 'u4', $pb.PbFieldType.OF3)
..a<$core.int>(6, _omitFieldNames ? '' : 'u5', $pb.PbFieldType.OF3)
..hasRequiredFields = false
;
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
Nonce clone() => Nonce()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
Nonce copyWith(void Function(Nonce) updates) => super.copyWith((message) => updates(message as Nonce)) as Nonce;
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static Nonce create() => Nonce._();
Nonce createEmptyInstance() => create();
static $pb.PbList<Nonce> createRepeated() => $pb.PbList<Nonce>();
@$core.pragma('dart2js:noInline')
static Nonce getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<Nonce>(create);
static Nonce? _defaultInstance;
@$pb.TagNumber(1)
$core.int get u0 => $_getIZ(0);
@$pb.TagNumber(1)
set u0($core.int v) { $_setUnsignedInt32(0, v); }
@$pb.TagNumber(1)
$core.bool hasU0() => $_has(0);
@$pb.TagNumber(1)
void clearU0() => clearField(1);
@$pb.TagNumber(2)
$core.int get u1 => $_getIZ(1);
@$pb.TagNumber(2)
set u1($core.int v) { $_setUnsignedInt32(1, v); }
@$pb.TagNumber(2)
$core.bool hasU1() => $_has(1);
@$pb.TagNumber(2)
void clearU1() => clearField(2);
@$pb.TagNumber(3)
$core.int get u2 => $_getIZ(2);
@$pb.TagNumber(3)
set u2($core.int v) { $_setUnsignedInt32(2, v); }
@$pb.TagNumber(3)
$core.bool hasU2() => $_has(2);
@$pb.TagNumber(3)
void clearU2() => clearField(3);
@$pb.TagNumber(4)
$core.int get u3 => $_getIZ(3);
@$pb.TagNumber(4)
set u3($core.int v) { $_setUnsignedInt32(3, v); }
@$pb.TagNumber(4)
$core.bool hasU3() => $_has(3);
@$pb.TagNumber(4)
void clearU3() => clearField(4);
@$pb.TagNumber(5)
$core.int get u4 => $_getIZ(4);
@$pb.TagNumber(5)
set u4($core.int v) { $_setUnsignedInt32(4, v); }
@$pb.TagNumber(5)
$core.bool hasU4() => $_has(4);
@$pb.TagNumber(5)
void clearU4() => clearField(5);
@$pb.TagNumber(6)
$core.int get u5 => $_getIZ(5);
@$pb.TagNumber(6)
set u5($core.int v) { $_setUnsignedInt32(5, v); }
@$pb.TagNumber(6)
$core.bool hasU5() => $_has(5);
@$pb.TagNumber(6)
void clearU5() => clearField(6);
}
class TypedKey extends $pb.GeneratedMessage {
factory TypedKey() => create();
TypedKey._() : super();
factory TypedKey.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory TypedKey.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'TypedKey', package: const $pb.PackageName(_omitMessageNames ? '' : 'veilid'), createEmptyInstance: create)
..a<$core.int>(1, _omitFieldNames ? '' : 'kind', $pb.PbFieldType.OF3)
..aOM<CryptoKey>(2, _omitFieldNames ? '' : 'value', subBuilder: CryptoKey.create)
..hasRequiredFields = false
;
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
TypedKey clone() => TypedKey()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
TypedKey copyWith(void Function(TypedKey) updates) => super.copyWith((message) => updates(message as TypedKey)) as TypedKey;
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static TypedKey create() => TypedKey._();
TypedKey createEmptyInstance() => create();
static $pb.PbList<TypedKey> createRepeated() => $pb.PbList<TypedKey>();
@$core.pragma('dart2js:noInline')
static TypedKey getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<TypedKey>(create);
static TypedKey? _defaultInstance;
@$pb.TagNumber(1)
$core.int get kind => $_getIZ(0);
@$pb.TagNumber(1)
set kind($core.int v) { $_setUnsignedInt32(0, v); }
@$pb.TagNumber(1)
$core.bool hasKind() => $_has(0);
@$pb.TagNumber(1)
void clearKind() => clearField(1);
@$pb.TagNumber(2)
CryptoKey get value => $_getN(1);
@$pb.TagNumber(2)
set value(CryptoKey v) { setField(2, v); }
@$pb.TagNumber(2)
$core.bool hasValue() => $_has(1);
@$pb.TagNumber(2)
void clearValue() => clearField(2);
@$pb.TagNumber(2)
CryptoKey ensureValue() => $_ensure(1);
}
class KeyPair extends $pb.GeneratedMessage {
factory KeyPair() => create();
KeyPair._() : super();
factory KeyPair.fromBuffer($core.List<$core.int> i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromBuffer(i, r);
factory KeyPair.fromJson($core.String i, [$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) => create()..mergeFromJson(i, r);
static final $pb.BuilderInfo _i = $pb.BuilderInfo(_omitMessageNames ? '' : 'KeyPair', package: const $pb.PackageName(_omitMessageNames ? '' : 'veilid'), createEmptyInstance: create)
..aOM<CryptoKey>(1, _omitFieldNames ? '' : 'key', subBuilder: CryptoKey.create)
..aOM<CryptoKey>(2, _omitFieldNames ? '' : 'secret', subBuilder: CryptoKey.create)
..hasRequiredFields = false
;
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
KeyPair clone() => KeyPair()..mergeFromMessage(this);
@$core.Deprecated(
'Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
KeyPair copyWith(void Function(KeyPair) updates) => super.copyWith((message) => updates(message as KeyPair)) as KeyPair;
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static KeyPair create() => KeyPair._();
KeyPair createEmptyInstance() => create();
static $pb.PbList<KeyPair> createRepeated() => $pb.PbList<KeyPair>();
@$core.pragma('dart2js:noInline')
static KeyPair getDefault() => _defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<KeyPair>(create);
static KeyPair? _defaultInstance;
@$pb.TagNumber(1)
CryptoKey get key => $_getN(0);
@$pb.TagNumber(1)
set key(CryptoKey v) { setField(1, v); }
@$pb.TagNumber(1)
$core.bool hasKey() => $_has(0);
@$pb.TagNumber(1)
void clearKey() => clearField(1);
@$pb.TagNumber(1)
CryptoKey ensureKey() => $_ensure(0);
@$pb.TagNumber(2)
CryptoKey get secret => $_getN(1);
@$pb.TagNumber(2)
set secret(CryptoKey v) { setField(2, v); }
@$pb.TagNumber(2)
$core.bool hasSecret() => $_has(1);
@$pb.TagNumber(2)
void clearSecret() => clearField(2);
@$pb.TagNumber(2)
CryptoKey ensureSecret() => $_ensure(1);
}
const _omitFieldNames = $core.bool.fromEnvironment('protobuf.omit_field_names');
const _omitMessageNames = $core.bool.fromEnvironment('protobuf.omit_message_names');

View file

@@ -0,0 +1,11 @@
//
// Generated code. Do not modify.
// source: veilid.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types
// ignore_for_file: constant_identifier_names, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import

View file

@@ -0,0 +1,114 @@
//
// Generated code. Do not modify.
// source: veilid.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types
// ignore_for_file: constant_identifier_names, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import
import 'dart:convert' as $convert;
import 'dart:core' as $core;
import 'dart:typed_data' as $typed_data;
@$core.Deprecated('Use cryptoKeyDescriptor instead')
const CryptoKey$json = {
'1': 'CryptoKey',
'2': [
{'1': 'u0', '3': 1, '4': 1, '5': 7, '10': 'u0'},
{'1': 'u1', '3': 2, '4': 1, '5': 7, '10': 'u1'},
{'1': 'u2', '3': 3, '4': 1, '5': 7, '10': 'u2'},
{'1': 'u3', '3': 4, '4': 1, '5': 7, '10': 'u3'},
{'1': 'u4', '3': 5, '4': 1, '5': 7, '10': 'u4'},
{'1': 'u5', '3': 6, '4': 1, '5': 7, '10': 'u5'},
{'1': 'u6', '3': 7, '4': 1, '5': 7, '10': 'u6'},
{'1': 'u7', '3': 8, '4': 1, '5': 7, '10': 'u7'},
],
};
/// Descriptor for `CryptoKey`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List cryptoKeyDescriptor = $convert.base64Decode(
'CglDcnlwdG9LZXkSDgoCdTAYASABKAdSAnUwEg4KAnUxGAIgASgHUgJ1MRIOCgJ1MhgDIAEoB1'
'ICdTISDgoCdTMYBCABKAdSAnUzEg4KAnU0GAUgASgHUgJ1NBIOCgJ1NRgGIAEoB1ICdTUSDgoC'
'dTYYByABKAdSAnU2Eg4KAnU3GAggASgHUgJ1Nw==');
@$core.Deprecated('Use signatureDescriptor instead')
const Signature$json = {
'1': 'Signature',
'2': [
{'1': 'u0', '3': 1, '4': 1, '5': 7, '10': 'u0'},
{'1': 'u1', '3': 2, '4': 1, '5': 7, '10': 'u1'},
{'1': 'u2', '3': 3, '4': 1, '5': 7, '10': 'u2'},
{'1': 'u3', '3': 4, '4': 1, '5': 7, '10': 'u3'},
{'1': 'u4', '3': 5, '4': 1, '5': 7, '10': 'u4'},
{'1': 'u5', '3': 6, '4': 1, '5': 7, '10': 'u5'},
{'1': 'u6', '3': 7, '4': 1, '5': 7, '10': 'u6'},
{'1': 'u7', '3': 8, '4': 1, '5': 7, '10': 'u7'},
{'1': 'u8', '3': 9, '4': 1, '5': 7, '10': 'u8'},
{'1': 'u9', '3': 10, '4': 1, '5': 7, '10': 'u9'},
{'1': 'u10', '3': 11, '4': 1, '5': 7, '10': 'u10'},
{'1': 'u11', '3': 12, '4': 1, '5': 7, '10': 'u11'},
{'1': 'u12', '3': 13, '4': 1, '5': 7, '10': 'u12'},
{'1': 'u13', '3': 14, '4': 1, '5': 7, '10': 'u13'},
{'1': 'u14', '3': 15, '4': 1, '5': 7, '10': 'u14'},
{'1': 'u15', '3': 16, '4': 1, '5': 7, '10': 'u15'},
],
};
/// Descriptor for `Signature`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List signatureDescriptor = $convert.base64Decode(
'CglTaWduYXR1cmUSDgoCdTAYASABKAdSAnUwEg4KAnUxGAIgASgHUgJ1MRIOCgJ1MhgDIAEoB1'
'ICdTISDgoCdTMYBCABKAdSAnUzEg4KAnU0GAUgASgHUgJ1NBIOCgJ1NRgGIAEoB1ICdTUSDgoC'
'dTYYByABKAdSAnU2Eg4KAnU3GAggASgHUgJ1NxIOCgJ1OBgJIAEoB1ICdTgSDgoCdTkYCiABKA'
'dSAnU5EhAKA3UxMBgLIAEoB1IDdTEwEhAKA3UxMRgMIAEoB1IDdTExEhAKA3UxMhgNIAEoB1ID'
'dTEyEhAKA3UxMxgOIAEoB1IDdTEzEhAKA3UxNBgPIAEoB1IDdTE0EhAKA3UxNRgQIAEoB1IDdT'
'E1');
@$core.Deprecated('Use nonceDescriptor instead')
const Nonce$json = {
'1': 'Nonce',
'2': [
{'1': 'u0', '3': 1, '4': 1, '5': 7, '10': 'u0'},
{'1': 'u1', '3': 2, '4': 1, '5': 7, '10': 'u1'},
{'1': 'u2', '3': 3, '4': 1, '5': 7, '10': 'u2'},
{'1': 'u3', '3': 4, '4': 1, '5': 7, '10': 'u3'},
{'1': 'u4', '3': 5, '4': 1, '5': 7, '10': 'u4'},
{'1': 'u5', '3': 6, '4': 1, '5': 7, '10': 'u5'},
],
};
/// Descriptor for `Nonce`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List nonceDescriptor = $convert.base64Decode(
'CgVOb25jZRIOCgJ1MBgBIAEoB1ICdTASDgoCdTEYAiABKAdSAnUxEg4KAnUyGAMgASgHUgJ1Mh'
'IOCgJ1MxgEIAEoB1ICdTMSDgoCdTQYBSABKAdSAnU0Eg4KAnU1GAYgASgHUgJ1NQ==');
@$core.Deprecated('Use typedKeyDescriptor instead')
const TypedKey$json = {
'1': 'TypedKey',
'2': [
{'1': 'kind', '3': 1, '4': 1, '5': 7, '10': 'kind'},
{'1': 'value', '3': 2, '4': 1, '5': 11, '6': '.veilid.CryptoKey', '10': 'value'},
],
};
/// Descriptor for `TypedKey`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List typedKeyDescriptor = $convert.base64Decode(
'CghUeXBlZEtleRISCgRraW5kGAEgASgHUgRraW5kEicKBXZhbHVlGAIgASgLMhEudmVpbGlkLk'
'NyeXB0b0tleVIFdmFsdWU=');
@$core.Deprecated('Use keyPairDescriptor instead')
const KeyPair$json = {
'1': 'KeyPair',
'2': [
{'1': 'key', '3': 1, '4': 1, '5': 11, '6': '.veilid.CryptoKey', '10': 'key'},
{'1': 'secret', '3': 2, '4': 1, '5': 11, '6': '.veilid.CryptoKey', '10': 'secret'},
],
};
/// Descriptor for `KeyPair`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List keyPairDescriptor = $convert.base64Decode(
'CgdLZXlQYWlyEiMKA2tleRgBIAEoCzIRLnZlaWxpZC5DcnlwdG9LZXlSA2tleRIpCgZzZWNyZX'
'QYAiABKAsyES52ZWlsaWQuQ3J5cHRvS2V5UgZzZWNyZXQ=');

View file

@@ -0,0 +1,14 @@
//
// Generated code. Do not modify.
// source: veilid.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides, camel_case_types
// ignore_for_file: constant_identifier_names
// ignore_for_file: deprecated_member_use_from_same_package, library_prefixes
// ignore_for_file: non_constant_identifier_names, prefer_final_fields
// ignore_for_file: unnecessary_import, unnecessary_this, unused_import
export 'veilid.pb.dart';

View file

@@ -0,0 +1,61 @@
syntax = "proto3";
package veilid;
// 32-byte value in bigendian format
message CryptoKey {
fixed32 u0 = 1;
fixed32 u1 = 2;
fixed32 u2 = 3;
fixed32 u3 = 4;
fixed32 u4 = 5;
fixed32 u5 = 6;
fixed32 u6 = 7;
fixed32 u7 = 8;
}
// 64-byte value in bigendian format
message Signature {
fixed32 u0 = 1;
fixed32 u1 = 2;
fixed32 u2 = 3;
fixed32 u3 = 4;
fixed32 u4 = 5;
fixed32 u5 = 6;
fixed32 u6 = 7;
fixed32 u7 = 8;
fixed32 u8 = 9;
fixed32 u9 = 10;
fixed32 u10 = 11;
fixed32 u11 = 12;
fixed32 u12 = 13;
fixed32 u13 = 14;
fixed32 u14 = 15;
fixed32 u15 = 16;
}
// 24-byte value in bigendian format
message Nonce {
fixed32 u0 = 1;
fixed32 u1 = 2;
fixed32 u2 = 3;
fixed32 u3 = 4;
fixed32 u4 = 5;
fixed32 u5 = 6;
}
// 36-byte typed crypto key
message TypedKey {
// CryptoKind FourCC in bigendian format
fixed32 kind = 1;
// Key value
CryptoKey value = 2;
}
// Key pair
message KeyPair {
// Public key
CryptoKey key = 1;
// Private key
CryptoKey secret = 2;
}

View file

@@ -0,0 +1,87 @@
import 'package:veilid/veilid.dart';
Map<String, dynamic> getDefaultVeilidPlatformConfig(
bool isWeb, String appName) {
if (isWeb) {
return const VeilidWASMConfig(
logging: VeilidWASMConfigLogging(
performance: VeilidWASMConfigLoggingPerformance(
enabled: true,
level: VeilidConfigLogLevel.debug,
logsInTimings: true,
logsInConsole: false),
api: VeilidWASMConfigLoggingApi(
enabled: true, level: VeilidConfigLogLevel.info)))
.toJson();
}
return VeilidFFIConfig(
logging: VeilidFFIConfigLogging(
terminal: const VeilidFFIConfigLoggingTerminal(
enabled: false,
level: VeilidConfigLogLevel.debug,
),
otlp: VeilidFFIConfigLoggingOtlp(
enabled: false,
level: VeilidConfigLogLevel.trace,
grpcEndpoint: '127.0.0.1:4317',
serviceName: appName),
api: const VeilidFFIConfigLoggingApi(
enabled: true, level: VeilidConfigLogLevel.info)))
.toJson();
}
Future<VeilidConfig> getVeilidConfig(bool isWeb, String appName) async {
var config = await getDefaultVeilidConfig(appName);
// ignore: do_not_use_environment
if (const String.fromEnvironment('DELETE_TABLE_STORE') == '1') {
config =
config.copyWith(tableStore: config.tableStore.copyWith(delete: true));
}
// ignore: do_not_use_environment
if (const String.fromEnvironment('DELETE_PROTECTED_STORE') == '1') {
config = config.copyWith(
protectedStore: config.protectedStore.copyWith(delete: true));
}
// ignore: do_not_use_environment
if (const String.fromEnvironment('DELETE_BLOCK_STORE') == '1') {
config =
config.copyWith(blockStore: config.blockStore.copyWith(delete: true));
}
// ignore: do_not_use_environment
const envNetwork = String.fromEnvironment('NETWORK');
if (envNetwork.isNotEmpty) {
final bootstrap = isWeb
? ['ws://bootstrap.$envNetwork.veilid.net:5150/ws']
: ['bootstrap.$envNetwork.veilid.net'];
config = config.copyWith(
network: config.network.copyWith(
routingTable:
config.network.routingTable.copyWith(bootstrap: bootstrap)));
}
// ignore: do_not_use_environment
const envBootstrap = String.fromEnvironment('BOOTSTRAP');
if (envBootstrap.isNotEmpty) {
final bootstrap = envBootstrap.split(',').map((e) => e.trim()).toList();
config = config.copyWith(
network: config.network.copyWith(
routingTable:
config.network.routingTable.copyWith(bootstrap: bootstrap)));
}
return config.copyWith(
capabilities:
// XXX: Remove DHTV and DHTW when we get background sync implemented
const VeilidConfigCapabilities(disable: ['DHTV', 'DHTW', 'TUNL']),
protectedStore: config.protectedStore.copyWith(allowInsecureFallback: true),
// network: config.network.copyWith(
// dht: config.network.dht.copyWith(
// getValueCount: 3,
// getValueFanout: 8,
// getValueTimeoutMs: 5000,
// setValueCount: 4,
// setValueFanout: 10,
// setValueTimeoutMs: 5000))
);
}
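// Illustrative usage sketch (not part of the original file). The
// String.fromEnvironment overrides above are compile-time defines, typically
// supplied on the command line, e.g.:
//   flutter run --dart-define=NETWORK=dev --dart-define=DELETE_TABLE_STORE=1
// 'ExampleApp' below is a placeholder application name.
Future<VeilidConfig> exampleLoadConfig({required bool isWeb}) async {
  // Platform-specific (FFI or WASM) logging configuration as a JSON map
  final platformConfigJson = getDefaultVeilidPlatformConfig(isWeb, 'ExampleApp');
  assert(platformConfigJson.isNotEmpty, 'platform config should not be empty');
  // Core VeilidConfig with the environment-driven overrides applied
  return getVeilidConfig(isWeb, 'ExampleApp');
}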

View file

@@ -0,0 +1,282 @@
import 'dart:typed_data';
import 'package:fast_immutable_collections/fast_immutable_collections.dart';
import 'package:freezed_annotation/freezed_annotation.dart';
import 'package:protobuf/protobuf.dart';
import 'package:veilid/veilid.dart';
import '../dht_support/dht_support.dart';
part 'identity.freezed.dart';
part 'identity.g.dart';
// AccountRecordInfo is the key and owner info for the account dht key that is
// stored in the identity key
@freezed
class AccountRecordInfo with _$AccountRecordInfo {
const factory AccountRecordInfo({
// Top level account keys and secrets
required OwnedDHTRecordPointer accountRecord,
}) = _AccountRecordInfo;
factory AccountRecordInfo.fromJson(dynamic json) =>
_$AccountRecordInfoFromJson(json as Map<String, dynamic>);
}
// Identity Key points to accounts associated with this identity
// The accountRecords field maps a bundle id or uuid to per-account record info
// DHT Schema: DFLT(1)
// DHT Key (Private): identityRecordKey
// DHT Owner Key: identityPublicKey
// DHT Secret: identitySecretKey (stored encrypted
// with unlock code in local table store)
@freezed
class Identity with _$Identity {
const factory Identity({
// Top level account keys and secrets
required IMap<String, ISet<AccountRecordInfo>> accountRecords,
}) = _Identity;
factory Identity.fromJson(dynamic json) =>
_$IdentityFromJson(json as Map<String, dynamic>);
}
// Identity Master key structure for created account
// Master key allows for regeneration of identity DHT record
// Bidirectional Master<->Identity signature allows for
// chain of identity ownership for account recovery process
//
// Backed by a DHT key at masterRecordKey, the secret is kept
// completely offline and only written to upon account recovery
//
// DHT Schema: DFLT(1)
// DHT Record Key (Public): masterRecordKey
// DHT Owner Key: masterPublicKey
// DHT Owner Secret: masterSecretKey (kept offline)
// Encryption: None
@freezed
class IdentityMaster with _$IdentityMaster {
const factory IdentityMaster(
{
// Private DHT record storing identity account mapping
required TypedKey identityRecordKey,
// Public key of identity
required PublicKey identityPublicKey,
// Public DHT record storing this structure for account recovery
required TypedKey masterRecordKey,
// Public key of master identity used to sign identity keys for recovery
required PublicKey masterPublicKey,
// Signature of identityRecordKey and identityPublicKey by masterPublicKey
required Signature identitySignature,
// Signature of masterRecordKey and masterPublicKey by identityPublicKey
required Signature masterSignature}) = _IdentityMaster;
factory IdentityMaster.fromJson(dynamic json) =>
_$IdentityMasterFromJson(json as Map<String, dynamic>);
}
extension IdentityMasterExtension on IdentityMaster {
/// Deletes a master identity and the identity record under it
Future<void> delete() async {
final pool = await DHTRecordPool.instance();
await (await pool.openRead(masterRecordKey)).delete();
}
KeyPair identityWriter(SecretKey secret) =>
KeyPair(key: identityPublicKey, secret: secret);
KeyPair masterWriter(SecretKey secret) =>
KeyPair(key: masterPublicKey, secret: secret);
TypedKey identityPublicTypedKey() =>
TypedKey(kind: identityRecordKey.kind, value: identityPublicKey);
Future<AccountRecordInfo> readAccountFromIdentity(
{required SharedSecret identitySecret,
required String accountKey}) async {
// Read the identity key to get the account keys
final pool = await DHTRecordPool.instance();
final identityRecordCrypto = await DHTRecordCryptoPrivate.fromSecret(
identityRecordKey.kind, identitySecret);
late final AccountRecordInfo accountRecordInfo;
await (await pool.openRead(identityRecordKey,
parent: masterRecordKey, crypto: identityRecordCrypto))
.scope((identityRec) async {
final identity = await identityRec.getJson(Identity.fromJson);
if (identity == null) {
// Identity could not be read or decrypted from DHT
throw StateError('identity could not be read');
}
final accountRecords = IMapOfSets.from(identity.accountRecords);
final vcAccounts = accountRecords.get(accountKey);
if (vcAccounts.length != 1) {
// No account, or multiple accounts somehow associated with identity
throw StateError('no single account record info');
}
accountRecordInfo = vcAccounts.first;
});
return accountRecordInfo;
}
/// Creates a new Account associated with the master identity and stores it in
/// the identity key.
Future<AccountRecordInfo> addAccountToIdentity<T extends GeneratedMessage>({
required SharedSecret identitySecret,
required String accountKey,
required Future<T> Function(TypedKey parent) createAccountCallback,
}) async {
final pool = await DHTRecordPool.instance();
/////// Add account with profile to DHT
// Open identity key for writing
return (await pool.openWrite(
identityRecordKey, identityWriter(identitySecret),
parent: masterRecordKey))
.scope((identityRec) async =>
// Create new account to insert into identity
(await pool.create(parent: identityRec.key))
.deleteScope((accountRec) async {
final account = await createAccountCallback(accountRec.key);
// Write account key
await accountRec.eventualWriteProtobuf(account);
// Update identity key to include account
final newAccountRecordInfo = AccountRecordInfo(
accountRecord: OwnedDHTRecordPointer(
recordKey: accountRec.key,
owner: accountRec.ownerKeyPair!));
await identityRec.eventualUpdateJson(Identity.fromJson,
(oldIdentity) async {
final oldAccountRecords =
IMapOfSets.from(oldIdentity.accountRecords);
// Only allow one account per identity for veilidchat
if (oldAccountRecords.get(accountKey).isNotEmpty) {
throw StateError('Only one account per key in identity');
}
final accountRecords = oldAccountRecords
.add(accountKey, newAccountRecordInfo)
.asIMap();
return oldIdentity.copyWith(accountRecords: accountRecords);
});
return newAccountRecordInfo;
}));
}
}
// Identity Master with secrets
// Not freezed because we never persist this class in its entirety
class IdentityMasterWithSecrets {
IdentityMasterWithSecrets._(
{required this.identityMaster,
required this.masterSecret,
required this.identitySecret});
IdentityMaster identityMaster;
SecretKey masterSecret;
SecretKey identitySecret;
/// Delete a master identity with secrets
Future<void> delete() async => identityMaster.delete();
/// Creates a new master identity and returns it with its secrets
static Future<IdentityMasterWithSecrets> create() async {
final pool = await DHTRecordPool.instance();
// IdentityMaster DHT record is public/unencrypted
return (await pool.create(crypto: const DHTRecordCryptoPublic()))
.deleteScope((masterRec) async =>
// Identity record is private
(await pool.create(parent: masterRec.key))
.scope((identityRec) async {
// Make IdentityMaster
final masterRecordKey = masterRec.key;
final masterOwner = masterRec.ownerKeyPair!;
final masterSigBuf = BytesBuilder()
..add(masterRecordKey.decode())
..add(masterOwner.key.decode());
final identityRecordKey = identityRec.key;
final identityOwner = identityRec.ownerKeyPair!;
final identitySigBuf = BytesBuilder()
..add(identityRecordKey.decode())
..add(identityOwner.key.decode());
assert(masterRecordKey.kind == identityRecordKey.kind,
'new master and identity should have same cryptosystem');
final crypto =
await pool.veilid.getCryptoSystem(masterRecordKey.kind);
final identitySignature = await crypto.signWithKeyPair(
masterOwner, identitySigBuf.toBytes());
final masterSignature = await crypto.signWithKeyPair(
identityOwner, masterSigBuf.toBytes());
final identityMaster = IdentityMaster(
identityRecordKey: identityRecordKey,
identityPublicKey: identityOwner.key,
masterRecordKey: masterRecordKey,
masterPublicKey: masterOwner.key,
identitySignature: identitySignature,
masterSignature: masterSignature);
// Write identity master to master dht key
await masterRec.eventualWriteJson(identityMaster);
// Make empty identity
const identity = Identity(accountRecords: IMapConst({}));
// Write empty identity to identity dht key
await identityRec.eventualWriteJson(identity);
return IdentityMasterWithSecrets._(
identityMaster: identityMaster,
masterSecret: masterOwner.secret,
identitySecret: identityOwner.secret);
}));
}
}
/// Opens an existing master identity and validates it
Future<IdentityMaster> openIdentityMaster(
{required TypedKey identityMasterRecordKey}) async {
final pool = await DHTRecordPool.instance();
// IdentityMaster DHT record is public/unencrypted
return (await pool.openRead(identityMasterRecordKey))
.deleteScope((masterRec) async {
final identityMaster =
(await masterRec.getJson(IdentityMaster.fromJson, forceRefresh: true))!;
// Validate IdentityMaster
final masterRecordKey = masterRec.key;
final masterOwnerKey = masterRec.owner;
final masterSigBuf = BytesBuilder()
..add(masterRecordKey.decode())
..add(masterOwnerKey.decode());
final masterSignature = identityMaster.masterSignature;
final identityRecordKey = identityMaster.identityRecordKey;
final identityOwnerKey = identityMaster.identityPublicKey;
final identitySigBuf = BytesBuilder()
..add(identityRecordKey.decode())
..add(identityOwnerKey.decode());
final identitySignature = identityMaster.identitySignature;
assert(masterRecordKey.kind == identityRecordKey.kind,
'new master and identity should have same cryptosystem');
final crypto = await pool.veilid.getCryptoSystem(masterRecordKey.kind);
await crypto.verify(
masterOwnerKey, identitySigBuf.toBytes(), identitySignature);
await crypto.verify(
identityOwnerKey, masterSigBuf.toBytes(), masterSignature);
return identityMaster;
});
}
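// Illustrative lifecycle sketch (not part of the original file), assuming
// Veilid is running and the DHTRecordPool has been initialized: create a new
// master identity, then re-open its public master record and re-validate the
// bidirectional signatures.
Future<void> exampleIdentityLifecycle() async {
  // Create a new master identity; the secrets are only held in memory here
  final withSecrets = await IdentityMasterWithSecrets.create();
  final masterRecordKey = withSecrets.identityMaster.masterRecordKey;
  // Later (or on another device), re-open and verify the identity master
  final identityMaster =
      await openIdentityMaster(identityMasterRecordKey: masterRecordKey);
  // The reopened structure describes the same identity record
  assert(identityMaster.identityRecordKey.toString() ==
      withSecrets.identityMaster.identityRecordKey.toString());
}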

View file

@@ -0,0 +1,579 @@
// coverage:ignore-file
// GENERATED CODE - DO NOT MODIFY BY HAND
// ignore_for_file: type=lint
// ignore_for_file: unused_element, deprecated_member_use, deprecated_member_use_from_same_package, use_function_type_syntax_for_parameters, unnecessary_const, avoid_init_to_null, invalid_override_different_default_values_named, prefer_expression_function_bodies, annotate_overrides, invalid_annotation_target, unnecessary_question_mark
part of 'identity.dart';
// **************************************************************************
// FreezedGenerator
// **************************************************************************
T _$identity<T>(T value) => value;
final _privateConstructorUsedError = UnsupportedError(
'It seems like you constructed your class using `MyClass._()`. This constructor is only meant to be used by freezed and you are not supposed to need it nor use it.\nPlease check the documentation here for more information: https://github.com/rrousselGit/freezed#custom-getters-and-methods');
AccountRecordInfo _$AccountRecordInfoFromJson(Map<String, dynamic> json) {
return _AccountRecordInfo.fromJson(json);
}
/// @nodoc
mixin _$AccountRecordInfo {
// Top level account keys and secrets
OwnedDHTRecordPointer get accountRecord => throw _privateConstructorUsedError;
Map<String, dynamic> toJson() => throw _privateConstructorUsedError;
@JsonKey(ignore: true)
$AccountRecordInfoCopyWith<AccountRecordInfo> get copyWith =>
throw _privateConstructorUsedError;
}
/// @nodoc
abstract class $AccountRecordInfoCopyWith<$Res> {
factory $AccountRecordInfoCopyWith(
AccountRecordInfo value, $Res Function(AccountRecordInfo) then) =
_$AccountRecordInfoCopyWithImpl<$Res, AccountRecordInfo>;
@useResult
$Res call({OwnedDHTRecordPointer accountRecord});
$OwnedDHTRecordPointerCopyWith<$Res> get accountRecord;
}
/// @nodoc
class _$AccountRecordInfoCopyWithImpl<$Res, $Val extends AccountRecordInfo>
implements $AccountRecordInfoCopyWith<$Res> {
_$AccountRecordInfoCopyWithImpl(this._value, this._then);
// ignore: unused_field
final $Val _value;
// ignore: unused_field
final $Res Function($Val) _then;
@pragma('vm:prefer-inline')
@override
$Res call({
Object? accountRecord = null,
}) {
return _then(_value.copyWith(
accountRecord: null == accountRecord
? _value.accountRecord
: accountRecord // ignore: cast_nullable_to_non_nullable
as OwnedDHTRecordPointer,
) as $Val);
}
@override
@pragma('vm:prefer-inline')
$OwnedDHTRecordPointerCopyWith<$Res> get accountRecord {
return $OwnedDHTRecordPointerCopyWith<$Res>(_value.accountRecord, (value) {
return _then(_value.copyWith(accountRecord: value) as $Val);
});
}
}
/// @nodoc
abstract class _$$AccountRecordInfoImplCopyWith<$Res>
implements $AccountRecordInfoCopyWith<$Res> {
factory _$$AccountRecordInfoImplCopyWith(_$AccountRecordInfoImpl value,
$Res Function(_$AccountRecordInfoImpl) then) =
__$$AccountRecordInfoImplCopyWithImpl<$Res>;
@override
@useResult
$Res call({OwnedDHTRecordPointer accountRecord});
@override
$OwnedDHTRecordPointerCopyWith<$Res> get accountRecord;
}
/// @nodoc
class __$$AccountRecordInfoImplCopyWithImpl<$Res>
extends _$AccountRecordInfoCopyWithImpl<$Res, _$AccountRecordInfoImpl>
implements _$$AccountRecordInfoImplCopyWith<$Res> {
__$$AccountRecordInfoImplCopyWithImpl(_$AccountRecordInfoImpl _value,
$Res Function(_$AccountRecordInfoImpl) _then)
: super(_value, _then);
@pragma('vm:prefer-inline')
@override
$Res call({
Object? accountRecord = null,
}) {
return _then(_$AccountRecordInfoImpl(
accountRecord: null == accountRecord
? _value.accountRecord
: accountRecord // ignore: cast_nullable_to_non_nullable
as OwnedDHTRecordPointer,
));
}
}
/// @nodoc
@JsonSerializable()
class _$AccountRecordInfoImpl implements _AccountRecordInfo {
const _$AccountRecordInfoImpl({required this.accountRecord});
factory _$AccountRecordInfoImpl.fromJson(Map<String, dynamic> json) =>
_$$AccountRecordInfoImplFromJson(json);
// Top level account keys and secrets
@override
final OwnedDHTRecordPointer accountRecord;
@override
String toString() {
return 'AccountRecordInfo(accountRecord: $accountRecord)';
}
@override
bool operator ==(Object other) {
return identical(this, other) ||
(other.runtimeType == runtimeType &&
other is _$AccountRecordInfoImpl &&
(identical(other.accountRecord, accountRecord) ||
other.accountRecord == accountRecord));
}
@JsonKey(ignore: true)
@override
int get hashCode => Object.hash(runtimeType, accountRecord);
@JsonKey(ignore: true)
@override
@pragma('vm:prefer-inline')
_$$AccountRecordInfoImplCopyWith<_$AccountRecordInfoImpl> get copyWith =>
__$$AccountRecordInfoImplCopyWithImpl<_$AccountRecordInfoImpl>(
this, _$identity);
@override
Map<String, dynamic> toJson() {
return _$$AccountRecordInfoImplToJson(
this,
);
}
}
abstract class _AccountRecordInfo implements AccountRecordInfo {
const factory _AccountRecordInfo(
{required final OwnedDHTRecordPointer accountRecord}) =
_$AccountRecordInfoImpl;
factory _AccountRecordInfo.fromJson(Map<String, dynamic> json) =
_$AccountRecordInfoImpl.fromJson;
@override // Top level account keys and secrets
OwnedDHTRecordPointer get accountRecord;
@override
@JsonKey(ignore: true)
_$$AccountRecordInfoImplCopyWith<_$AccountRecordInfoImpl> get copyWith =>
throw _privateConstructorUsedError;
}
Identity _$IdentityFromJson(Map<String, dynamic> json) {
return _Identity.fromJson(json);
}
/// @nodoc
mixin _$Identity {
// Top level account keys and secrets
IMap<String, ISet<AccountRecordInfo>> get accountRecords =>
throw _privateConstructorUsedError;
Map<String, dynamic> toJson() => throw _privateConstructorUsedError;
@JsonKey(ignore: true)
$IdentityCopyWith<Identity> get copyWith =>
throw _privateConstructorUsedError;
}
/// @nodoc
abstract class $IdentityCopyWith<$Res> {
factory $IdentityCopyWith(Identity value, $Res Function(Identity) then) =
_$IdentityCopyWithImpl<$Res, Identity>;
@useResult
$Res call({IMap<String, ISet<AccountRecordInfo>> accountRecords});
}
/// @nodoc
class _$IdentityCopyWithImpl<$Res, $Val extends Identity>
implements $IdentityCopyWith<$Res> {
_$IdentityCopyWithImpl(this._value, this._then);
// ignore: unused_field
final $Val _value;
// ignore: unused_field
final $Res Function($Val) _then;
@pragma('vm:prefer-inline')
@override
$Res call({
Object? accountRecords = null,
}) {
return _then(_value.copyWith(
accountRecords: null == accountRecords
? _value.accountRecords
: accountRecords // ignore: cast_nullable_to_non_nullable
as IMap<String, ISet<AccountRecordInfo>>,
) as $Val);
}
}
/// @nodoc
abstract class _$$IdentityImplCopyWith<$Res>
implements $IdentityCopyWith<$Res> {
factory _$$IdentityImplCopyWith(
_$IdentityImpl value, $Res Function(_$IdentityImpl) then) =
__$$IdentityImplCopyWithImpl<$Res>;
@override
@useResult
$Res call({IMap<String, ISet<AccountRecordInfo>> accountRecords});
}
/// @nodoc
class __$$IdentityImplCopyWithImpl<$Res>
extends _$IdentityCopyWithImpl<$Res, _$IdentityImpl>
implements _$$IdentityImplCopyWith<$Res> {
__$$IdentityImplCopyWithImpl(
_$IdentityImpl _value, $Res Function(_$IdentityImpl) _then)
: super(_value, _then);
@pragma('vm:prefer-inline')
@override
$Res call({
Object? accountRecords = null,
}) {
return _then(_$IdentityImpl(
accountRecords: null == accountRecords
? _value.accountRecords
: accountRecords // ignore: cast_nullable_to_non_nullable
as IMap<String, ISet<AccountRecordInfo>>,
));
}
}
/// @nodoc
@JsonSerializable()
class _$IdentityImpl implements _Identity {
const _$IdentityImpl({required this.accountRecords});
factory _$IdentityImpl.fromJson(Map<String, dynamic> json) =>
_$$IdentityImplFromJson(json);
// Top level account keys and secrets
@override
final IMap<String, ISet<AccountRecordInfo>> accountRecords;
@override
String toString() {
return 'Identity(accountRecords: $accountRecords)';
}
@override
bool operator ==(Object other) {
return identical(this, other) ||
(other.runtimeType == runtimeType &&
other is _$IdentityImpl &&
(identical(other.accountRecords, accountRecords) ||
other.accountRecords == accountRecords));
}
@JsonKey(ignore: true)
@override
int get hashCode => Object.hash(runtimeType, accountRecords);
@JsonKey(ignore: true)
@override
@pragma('vm:prefer-inline')
_$$IdentityImplCopyWith<_$IdentityImpl> get copyWith =>
__$$IdentityImplCopyWithImpl<_$IdentityImpl>(this, _$identity);
@override
Map<String, dynamic> toJson() {
return _$$IdentityImplToJson(
this,
);
}
}
abstract class _Identity implements Identity {
const factory _Identity(
{required final IMap<String, ISet<AccountRecordInfo>>
accountRecords}) = _$IdentityImpl;
factory _Identity.fromJson(Map<String, dynamic> json) =
_$IdentityImpl.fromJson;
@override // Top level account keys and secrets
IMap<String, ISet<AccountRecordInfo>> get accountRecords;
@override
@JsonKey(ignore: true)
_$$IdentityImplCopyWith<_$IdentityImpl> get copyWith =>
throw _privateConstructorUsedError;
}
IdentityMaster _$IdentityMasterFromJson(Map<String, dynamic> json) {
return _IdentityMaster.fromJson(json);
}
/// @nodoc
mixin _$IdentityMaster {
// Private DHT record storing identity account mapping
Typed<FixedEncodedString43> get identityRecordKey =>
throw _privateConstructorUsedError; // Public key of identity
FixedEncodedString43 get identityPublicKey =>
throw _privateConstructorUsedError; // Public DHT record storing this structure for account recovery
Typed<FixedEncodedString43> get masterRecordKey =>
throw _privateConstructorUsedError; // Public key of master identity used to sign identity keys for recovery
FixedEncodedString43 get masterPublicKey =>
throw _privateConstructorUsedError; // Signature of identityRecordKey and identityPublicKey by masterPublicKey
FixedEncodedString86 get identitySignature =>
throw _privateConstructorUsedError; // Signature of masterRecordKey and masterPublicKey by identityPublicKey
FixedEncodedString86 get masterSignature =>
throw _privateConstructorUsedError;
Map<String, dynamic> toJson() => throw _privateConstructorUsedError;
@JsonKey(ignore: true)
$IdentityMasterCopyWith<IdentityMaster> get copyWith =>
throw _privateConstructorUsedError;
}
/// @nodoc
abstract class $IdentityMasterCopyWith<$Res> {
factory $IdentityMasterCopyWith(
IdentityMaster value, $Res Function(IdentityMaster) then) =
_$IdentityMasterCopyWithImpl<$Res, IdentityMaster>;
@useResult
$Res call(
{Typed<FixedEncodedString43> identityRecordKey,
FixedEncodedString43 identityPublicKey,
Typed<FixedEncodedString43> masterRecordKey,
FixedEncodedString43 masterPublicKey,
FixedEncodedString86 identitySignature,
FixedEncodedString86 masterSignature});
}
/// @nodoc
class _$IdentityMasterCopyWithImpl<$Res, $Val extends IdentityMaster>
implements $IdentityMasterCopyWith<$Res> {
_$IdentityMasterCopyWithImpl(this._value, this._then);
// ignore: unused_field
final $Val _value;
// ignore: unused_field
final $Res Function($Val) _then;
@pragma('vm:prefer-inline')
@override
$Res call({
Object? identityRecordKey = null,
Object? identityPublicKey = null,
Object? masterRecordKey = null,
Object? masterPublicKey = null,
Object? identitySignature = null,
Object? masterSignature = null,
}) {
return _then(_value.copyWith(
identityRecordKey: null == identityRecordKey
? _value.identityRecordKey
: identityRecordKey // ignore: cast_nullable_to_non_nullable
as Typed<FixedEncodedString43>,
identityPublicKey: null == identityPublicKey
? _value.identityPublicKey
: identityPublicKey // ignore: cast_nullable_to_non_nullable
as FixedEncodedString43,
masterRecordKey: null == masterRecordKey
? _value.masterRecordKey
: masterRecordKey // ignore: cast_nullable_to_non_nullable
as Typed<FixedEncodedString43>,
masterPublicKey: null == masterPublicKey
? _value.masterPublicKey
: masterPublicKey // ignore: cast_nullable_to_non_nullable
as FixedEncodedString43,
identitySignature: null == identitySignature
? _value.identitySignature
: identitySignature // ignore: cast_nullable_to_non_nullable
as FixedEncodedString86,
masterSignature: null == masterSignature
? _value.masterSignature
: masterSignature // ignore: cast_nullable_to_non_nullable
as FixedEncodedString86,
) as $Val);
}
}
/// @nodoc
abstract class _$$IdentityMasterImplCopyWith<$Res>
implements $IdentityMasterCopyWith<$Res> {
factory _$$IdentityMasterImplCopyWith(_$IdentityMasterImpl value,
$Res Function(_$IdentityMasterImpl) then) =
__$$IdentityMasterImplCopyWithImpl<$Res>;
@override
@useResult
$Res call(
{Typed<FixedEncodedString43> identityRecordKey,
FixedEncodedString43 identityPublicKey,
Typed<FixedEncodedString43> masterRecordKey,
FixedEncodedString43 masterPublicKey,
FixedEncodedString86 identitySignature,
FixedEncodedString86 masterSignature});
}
/// @nodoc
class __$$IdentityMasterImplCopyWithImpl<$Res>
extends _$IdentityMasterCopyWithImpl<$Res, _$IdentityMasterImpl>
implements _$$IdentityMasterImplCopyWith<$Res> {
__$$IdentityMasterImplCopyWithImpl(
_$IdentityMasterImpl _value, $Res Function(_$IdentityMasterImpl) _then)
: super(_value, _then);
@pragma('vm:prefer-inline')
@override
$Res call({
Object? identityRecordKey = null,
Object? identityPublicKey = null,
Object? masterRecordKey = null,
Object? masterPublicKey = null,
Object? identitySignature = null,
Object? masterSignature = null,
}) {
return _then(_$IdentityMasterImpl(
identityRecordKey: null == identityRecordKey
? _value.identityRecordKey
: identityRecordKey // ignore: cast_nullable_to_non_nullable
as Typed<FixedEncodedString43>,
identityPublicKey: null == identityPublicKey
? _value.identityPublicKey
: identityPublicKey // ignore: cast_nullable_to_non_nullable
as FixedEncodedString43,
masterRecordKey: null == masterRecordKey
? _value.masterRecordKey
: masterRecordKey // ignore: cast_nullable_to_non_nullable
as Typed<FixedEncodedString43>,
masterPublicKey: null == masterPublicKey
? _value.masterPublicKey
: masterPublicKey // ignore: cast_nullable_to_non_nullable
as FixedEncodedString43,
identitySignature: null == identitySignature
? _value.identitySignature
: identitySignature // ignore: cast_nullable_to_non_nullable
as FixedEncodedString86,
masterSignature: null == masterSignature
? _value.masterSignature
: masterSignature // ignore: cast_nullable_to_non_nullable
as FixedEncodedString86,
));
}
}
/// @nodoc
@JsonSerializable()
class _$IdentityMasterImpl implements _IdentityMaster {
const _$IdentityMasterImpl(
{required this.identityRecordKey,
required this.identityPublicKey,
required this.masterRecordKey,
required this.masterPublicKey,
required this.identitySignature,
required this.masterSignature});
factory _$IdentityMasterImpl.fromJson(Map<String, dynamic> json) =>
_$$IdentityMasterImplFromJson(json);
// Private DHT record storing identity account mapping
@override
final Typed<FixedEncodedString43> identityRecordKey;
// Public key of identity
@override
final FixedEncodedString43 identityPublicKey;
// Public DHT record storing this structure for account recovery
@override
final Typed<FixedEncodedString43> masterRecordKey;
// Public key of master identity used to sign identity keys for recovery
@override
final FixedEncodedString43 masterPublicKey;
// Signature of identityRecordKey and identityPublicKey by masterPublicKey
@override
final FixedEncodedString86 identitySignature;
// Signature of masterRecordKey and masterPublicKey by identityPublicKey
@override
final FixedEncodedString86 masterSignature;
@override
String toString() {
return 'IdentityMaster(identityRecordKey: $identityRecordKey, identityPublicKey: $identityPublicKey, masterRecordKey: $masterRecordKey, masterPublicKey: $masterPublicKey, identitySignature: $identitySignature, masterSignature: $masterSignature)';
}
@override
bool operator ==(Object other) {
return identical(this, other) ||
(other.runtimeType == runtimeType &&
other is _$IdentityMasterImpl &&
(identical(other.identityRecordKey, identityRecordKey) ||
other.identityRecordKey == identityRecordKey) &&
(identical(other.identityPublicKey, identityPublicKey) ||
other.identityPublicKey == identityPublicKey) &&
(identical(other.masterRecordKey, masterRecordKey) ||
other.masterRecordKey == masterRecordKey) &&
(identical(other.masterPublicKey, masterPublicKey) ||
other.masterPublicKey == masterPublicKey) &&
(identical(other.identitySignature, identitySignature) ||
other.identitySignature == identitySignature) &&
(identical(other.masterSignature, masterSignature) ||
other.masterSignature == masterSignature));
}
@JsonKey(ignore: true)
@override
int get hashCode => Object.hash(
runtimeType,
identityRecordKey,
identityPublicKey,
masterRecordKey,
masterPublicKey,
identitySignature,
masterSignature);
@JsonKey(ignore: true)
@override
@pragma('vm:prefer-inline')
_$$IdentityMasterImplCopyWith<_$IdentityMasterImpl> get copyWith =>
__$$IdentityMasterImplCopyWithImpl<_$IdentityMasterImpl>(
this, _$identity);
@override
Map<String, dynamic> toJson() {
return _$$IdentityMasterImplToJson(
this,
);
}
}
abstract class _IdentityMaster implements IdentityMaster {
const factory _IdentityMaster(
{required final Typed<FixedEncodedString43> identityRecordKey,
required final FixedEncodedString43 identityPublicKey,
required final Typed<FixedEncodedString43> masterRecordKey,
required final FixedEncodedString43 masterPublicKey,
required final FixedEncodedString86 identitySignature,
required final FixedEncodedString86 masterSignature}) =
_$IdentityMasterImpl;
factory _IdentityMaster.fromJson(Map<String, dynamic> json) =
_$IdentityMasterImpl.fromJson;
@override // Private DHT record storing identity account mapping
Typed<FixedEncodedString43> get identityRecordKey;
@override // Public key of identity
FixedEncodedString43 get identityPublicKey;
@override // Public DHT record storing this structure for account recovery
Typed<FixedEncodedString43> get masterRecordKey;
@override // Public key of master identity used to sign identity keys for recovery
FixedEncodedString43 get masterPublicKey;
@override // Signature of identityRecordKey and identityPublicKey by masterPublicKey
FixedEncodedString86 get identitySignature;
@override // Signature of masterRecordKey and masterPublicKey by identityPublicKey
FixedEncodedString86 get masterSignature;
@override
@JsonKey(ignore: true)
_$$IdentityMasterImplCopyWith<_$IdentityMasterImpl> get copyWith =>
throw _privateConstructorUsedError;
}

View file

@@ -0,0 +1,63 @@
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'identity.dart';
// **************************************************************************
// JsonSerializableGenerator
// **************************************************************************
_$AccountRecordInfoImpl _$$AccountRecordInfoImplFromJson(
Map<String, dynamic> json) =>
_$AccountRecordInfoImpl(
accountRecord: OwnedDHTRecordPointer.fromJson(json['accountRecord']),
);
Map<String, dynamic> _$$AccountRecordInfoImplToJson(
_$AccountRecordInfoImpl instance) =>
<String, dynamic>{
'accountRecord': instance.accountRecord,
};
_$IdentityImpl _$$IdentityImplFromJson(Map<String, dynamic> json) =>
_$IdentityImpl(
accountRecords: IMap<String, ISet<AccountRecordInfo>>.fromJson(
json['accountRecords'] as Map<String, dynamic>,
(value) => value as String,
(value) => ISet<AccountRecordInfo>.fromJson(
value, (value) => AccountRecordInfo.fromJson(value))),
);
Map<String, dynamic> _$$IdentityImplToJson(_$IdentityImpl instance) =>
<String, dynamic>{
'accountRecords': instance.accountRecords.toJson(
(value) => value,
(value) => value.toJson(
(value) => value,
),
),
};
_$IdentityMasterImpl _$$IdentityMasterImplFromJson(Map<String, dynamic> json) =>
_$IdentityMasterImpl(
identityRecordKey:
Typed<FixedEncodedString43>.fromJson(json['identityRecordKey']),
identityPublicKey:
FixedEncodedString43.fromJson(json['identityPublicKey']),
masterRecordKey:
Typed<FixedEncodedString43>.fromJson(json['masterRecordKey']),
masterPublicKey: FixedEncodedString43.fromJson(json['masterPublicKey']),
identitySignature:
FixedEncodedString86.fromJson(json['identitySignature']),
masterSignature: FixedEncodedString86.fromJson(json['masterSignature']),
);
Map<String, dynamic> _$$IdentityMasterImplToJson(
_$IdentityMasterImpl instance) =>
<String, dynamic>{
'identityRecordKey': instance.identityRecordKey,
'identityPublicKey': instance.identityPublicKey,
'masterRecordKey': instance.masterRecordKey,
'masterPublicKey': instance.masterPublicKey,
'identitySignature': instance.identitySignature,
'masterSignature': instance.masterSignature,
};

View file

@@ -0,0 +1,28 @@
import 'dart:convert';
import 'dart:typed_data';
T jsonDecodeBytes<T>(T Function(dynamic) fromJson, Uint8List data) =>
fromJson(jsonDecode(utf8.decode(data)));
T? jsonDecodeOptBytes<T>(T Function(dynamic) fromJson, Uint8List? data) =>
(data == null) ? null : fromJson(jsonDecode(utf8.decode(data)));
Uint8List jsonEncodeBytes(Object? object,
{Object? Function(Object?)? toEncodable}) =>
Uint8List.fromList(
utf8.encode(jsonEncode(object, toEncodable: toEncodable)));
Future<Uint8List> jsonUpdateBytes<T>(T Function(dynamic) fromJson,
Uint8List oldBytes, Future<T> Function(T) update) async {
final oldObj = fromJson(jsonDecode(utf8.decode(oldBytes)));
final newObj = await update(oldObj);
return jsonEncodeBytes(newObj);
}
Future<Uint8List> Function(Uint8List) jsonUpdate<T>(
T Function(dynamic) fromJson, Future<T> Function(T) update) =>
(oldBytes) => jsonUpdateBytes(fromJson, oldBytes, update);
T Function(Object?) genericFromJson<T>(
T Function(Map<String, dynamic>) fromJsonMap) =>
(json) => fromJsonMap(json! as Map<String, dynamic>);
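// Usage sketch (not part of this commit): apply an async update to a JSON
// value held as encoded bytes. The map-shaped 'settings' record here is
// hypothetical; real callers typically pass a generated fromJson such as
// genericFromJson(Identity.fromJson).
Future<Uint8List> exampleTouchSettings(Uint8List encodedSettings) =>
    jsonUpdateBytes<Map<String, dynamic>>(
        (json) => json as Map<String, dynamic>,
        encodedSettings,
        (settings) async => {
              ...settings,
              'updatedAt': DateTime.now().toIso8601String(),
            });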

View file

@@ -0,0 +1,17 @@
import 'dart:typed_data';
import 'package:protobuf/protobuf.dart';
Future<Uint8List> protobufUpdateBytes<T extends GeneratedMessage>(
T Function(List<int>) fromBuffer,
Uint8List oldBytes,
Future<T> Function(T) update) async {
final oldObj = fromBuffer(oldBytes);
final newObj = await update(oldObj);
return Uint8List.fromList(newObj.writeToBuffer());
}
Future<Uint8List> Function(Uint8List)
protobufUpdate<T extends GeneratedMessage>(
T Function(List<int>) fromBuffer, Future<T> Function(T) update) =>
(oldBytes) => protobufUpdateBytes(fromBuffer, oldBytes, update);
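// Usage sketch (not part of this commit): bump the recorded total size in a
// DHTData header stored as protobuf bytes. Assumes the generated message
// from this package's dht.proto is imported as `proto` (hypothetical prefix).
Future<Uint8List> exampleSetDHTDataSize(Uint8List headerBytes, int newSize) =>
    protobufUpdateBytes(proto.DHTData.fromBuffer, headerBytes,
        (header) async => header..size = newSize);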

View file

@@ -0,0 +1,101 @@
import 'dart:async';
import 'package:veilid/veilid.dart';
Future<T> tableScope<T>(
String name, Future<T> Function(VeilidTableDB tdb) callback,
{int columnCount = 1}) async {
final tableDB = await Veilid.instance.openTableDB(name, columnCount);
try {
return await callback(tableDB);
} finally {
tableDB.close();
}
}
Future<T> transactionScope<T>(
VeilidTableDB tdb,
Future<T> Function(VeilidTableDBTransaction tdbt) callback,
) async {
final tdbt = tdb.transact();
try {
final ret = await callback(tdbt);
if (!tdbt.isDone()) {
await tdbt.commit();
}
return ret;
} finally {
if (!tdbt.isDone()) {
await tdbt.rollback();
}
}
}
abstract mixin class TableDBBacked<T> {
String tableName();
String tableKeyName();
T valueFromJson(Object? obj);
Object? valueToJson(T val);
/// Load things from storage
Future<T> load() async {
final obj = await tableScope(tableName(), (tdb) async {
final objJson = await tdb.loadStringJson(0, tableKeyName());
return valueFromJson(objJson);
});
return obj;
}
/// Store things to storage
Future<T> store(T obj) async {
await tableScope(tableName(), (tdb) async {
await tdb.storeStringJson(0, tableKeyName(), valueToJson(obj));
});
return obj;
}
}
class TableDBValue<T> extends TableDBBacked<T> {
TableDBValue({
required String tableName,
required String tableKeyName,
required T Function(Object? obj) valueFromJson,
required Object? Function(T obj) valueToJson,
}) : _tableName = tableName,
_valueFromJson = valueFromJson,
_valueToJson = valueToJson,
_tableKeyName = tableKeyName;
T? get value => _value;
T get requireValue => _value!;
Future<T> get() async {
final val = _value;
if (val != null) {
return val;
}
final loadedValue = await load();
return _value = loadedValue;
}
Future<void> set(T newVal) async {
_value = await store(newVal);
}
T? _value;
final String _tableName;
final String _tableKeyName;
final T Function(Object? obj) _valueFromJson;
final Object? Function(T obj) _valueToJson;
//////////////////////////////////////////////////////////////
  /// TableDBBacked
@override
String tableName() => _tableName;
@override
String tableKeyName() => _tableKeyName;
@override
T valueFromJson(Object? obj) => _valueFromJson(obj);
@override
Object? valueToJson(T val) => _valueToJson(val);
}
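// Usage sketch (not part of this commit): a table-backed value with
// hypothetical table and key names, loaded lazily and written back on set.
final exampleLaunchCount = TableDBValue<int>(
  tableName: 'example_app_state',
  tableKeyName: 'launch_count',
  valueFromJson: (obj) => (obj as int?) ?? 0,
  valueToJson: (val) => val,
);

Future<int> exampleRecordLaunch() async {
  final next = await exampleLaunchCount.get() + 1;
  await exampleLaunchCount.set(next);
  return next;
}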

View file

@@ -0,0 +1,80 @@
import 'package:loggy/loggy.dart';
import 'package:veilid/veilid.dart';
// Loggy tools
const LogLevel traceLevel = LogLevel('Trace', 1);
extension TraceLoggy on Loggy {
void trace(dynamic message, [Object? error, StackTrace? stackTrace]) =>
log(traceLevel, message, error, stackTrace);
}
VeilidConfigLogLevel convertToVeilidConfigLogLevel(LogLevel? level) {
if (level == null) {
return VeilidConfigLogLevel.off;
}
switch (level) {
case LogLevel.error:
return VeilidConfigLogLevel.error;
case LogLevel.warning:
return VeilidConfigLogLevel.warn;
case LogLevel.info:
return VeilidConfigLogLevel.info;
case LogLevel.debug:
return VeilidConfigLogLevel.debug;
case traceLevel:
return VeilidConfigLogLevel.trace;
}
return VeilidConfigLogLevel.off;
}
void setVeilidLogLevel(LogLevel? level) {
Veilid.instance.changeLogLevel('all', convertToVeilidConfigLogLevel(level));
}
class VeilidLoggy implements LoggyType {
@override
Loggy<VeilidLoggy> get loggy => Loggy<VeilidLoggy>('Veilid');
}
Loggy get _veilidLoggy => Loggy<VeilidLoggy>('Veilid');
Future<void> processLog(VeilidLog log) async {
StackTrace? stackTrace;
Object? error;
final backtrace = log.backtrace;
if (backtrace != null) {
stackTrace = StackTrace.fromString('$backtrace\n${StackTrace.current}');
error = 'embedded stack trace for ${log.logLevel} ${log.message}';
}
switch (log.logLevel) {
case VeilidLogLevel.error:
_veilidLoggy.error(log.message, error, stackTrace);
break;
case VeilidLogLevel.warn:
_veilidLoggy.warning(log.message, error, stackTrace);
break;
case VeilidLogLevel.info:
_veilidLoggy.info(log.message, error, stackTrace);
break;
case VeilidLogLevel.debug:
_veilidLoggy.debug(log.message, error, stackTrace);
break;
case VeilidLogLevel.trace:
_veilidLoggy.trace(log.message, error, stackTrace);
break;
}
}
void initVeilidLog(bool debugMode) {
// ignore: do_not_use_environment
const isTrace = String.fromEnvironment('LOG_TRACE') != '';
LogLevel logLevel;
if (isTrace) {
logLevel = traceLevel;
} else {
logLevel = debugMode ? LogLevel.debug : LogLevel.info;
}
setVeilidLogLevel(logLevel);
}
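// Usage sketch (not part of this commit): forward Veilid core logs into
// Loggy. Assumes Loggy itself has already been initialized elsewhere
// (Loggy.initLoggy) and that `updates` is the Stream<VeilidUpdate> returned
// by Veilid.instance.startupVeilidCore(...).
Future<void> exampleRouteVeilidLogs(Stream<VeilidUpdate> updates,
    {required bool debugMode}) async {
  initVeilidLog(debugMode);
  await for (final update in updates) {
    if (update is VeilidLog) {
      await processLog(update);
    }
  }
}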

View file

@@ -0,0 +1,14 @@
/// Dart Veilid Support Library
/// Common functionality for interfacing with Veilid
library veilid_support;
export 'package:veilid/veilid.dart';
export 'dht_support/dht_support.dart';
export 'src/config.dart';
export 'src/identity.dart';
export 'src/json_tools.dart';
export 'src/protobuf_tools.dart';
export 'src/table_db.dart';
export 'src/veilid_log.dart';

View file

@@ -0,0 +1,780 @@
# Generated by pub
# See https://dart.dev/tools/pub/glossary#lockfile
packages:
_fe_analyzer_shared:
dependency: transitive
description:
name: _fe_analyzer_shared
sha256: eb376e9acf6938204f90eb3b1f00b578640d3188b4c8a8ec054f9f479af8d051
url: "https://pub.dev"
source: hosted
version: "64.0.0"
analyzer:
dependency: transitive
description:
name: analyzer
sha256: "69f54f967773f6c26c7dcb13e93d7ccee8b17a641689da39e878d5cf13b06893"
url: "https://pub.dev"
source: hosted
version: "6.2.0"
args:
dependency: transitive
description:
name: args
sha256: eef6c46b622e0494a36c5a12d10d77fb4e855501a91c1b9ef9339326e58f0596
url: "https://pub.dev"
source: hosted
version: "2.4.2"
async:
dependency: transitive
description:
name: async
sha256: "947bfcf187f74dbc5e146c9eb9c0f10c9f8b30743e341481c1e2ed3ecc18c20c"
url: "https://pub.dev"
source: hosted
version: "2.11.0"
boolean_selector:
dependency: transitive
description:
name: boolean_selector
sha256: "6cfb5af12253eaf2b368f07bacc5a80d1301a071c73360d746b7f2e32d762c66"
url: "https://pub.dev"
source: hosted
version: "2.1.1"
build:
dependency: transitive
description:
name: build
sha256: "80184af8b6cb3e5c1c4ec6d8544d27711700bc3e6d2efad04238c7b5290889f0"
url: "https://pub.dev"
source: hosted
version: "2.4.1"
build_config:
dependency: transitive
description:
name: build_config
sha256: bf80fcfb46a29945b423bd9aad884590fb1dc69b330a4d4700cac476af1708d1
url: "https://pub.dev"
source: hosted
version: "1.1.1"
build_daemon:
dependency: transitive
description:
name: build_daemon
sha256: "0343061a33da9c5810b2d6cee51945127d8f4c060b7fbdd9d54917f0a3feaaa1"
url: "https://pub.dev"
source: hosted
version: "4.0.1"
build_resolvers:
dependency: transitive
description:
name: build_resolvers
sha256: "339086358431fa15d7eca8b6a36e5d783728cf025e559b834f4609a1fcfb7b0a"
url: "https://pub.dev"
source: hosted
version: "2.4.2"
build_runner:
dependency: "direct dev"
description:
name: build_runner
sha256: "67d591d602906ef9201caf93452495ad1812bea2074f04e25dbd7c133785821b"
url: "https://pub.dev"
source: hosted
version: "2.4.7"
build_runner_core:
dependency: transitive
description:
name: build_runner_core
sha256: c9e32d21dd6626b5c163d48b037ce906bbe428bc23ab77bcd77bb21e593b6185
url: "https://pub.dev"
source: hosted
version: "7.2.11"
built_collection:
dependency: transitive
description:
name: built_collection
sha256: "376e3dd27b51ea877c28d525560790aee2e6fbb5f20e2f85d5081027d94e2100"
url: "https://pub.dev"
source: hosted
version: "5.1.1"
built_value:
dependency: transitive
description:
name: built_value
sha256: c9aabae0718ec394e5bc3c7272e6bb0dc0b32201a08fe185ec1d8401d3e39309
url: "https://pub.dev"
source: hosted
version: "8.8.1"
change_case:
dependency: transitive
description:
name: change_case
sha256: f4e08feaa845e75e4f5ad2b0e15f24813d7ea6c27e7b78252f0c17f752cf1157
url: "https://pub.dev"
source: hosted
version: "1.1.0"
characters:
dependency: transitive
description:
name: characters
sha256: "04a925763edad70e8443c99234dc3328f442e811f1d8fd1a72f1c8ad0f69a605"
url: "https://pub.dev"
source: hosted
version: "1.3.0"
charcode:
dependency: transitive
description:
name: charcode
sha256: fb98c0f6d12c920a02ee2d998da788bca066ca5f148492b7085ee23372b12306
url: "https://pub.dev"
source: hosted
version: "1.3.1"
checked_yaml:
dependency: transitive
description:
name: checked_yaml
sha256: feb6bed21949061731a7a75fc5d2aa727cf160b91af9a3e464c5e3a32e28b5ff
url: "https://pub.dev"
source: hosted
version: "2.0.3"
code_builder:
dependency: transitive
description:
name: code_builder
sha256: feee43a5c05e7b3199bb375a86430b8ada1b04104f2923d0e03cc01ca87b6d84
url: "https://pub.dev"
source: hosted
version: "4.9.0"
collection:
dependency: transitive
description:
name: collection
sha256: ee67cb0715911d28db6bf4af1026078bd6f0128b07a5f66fb2ed94ec6783c09a
url: "https://pub.dev"
source: hosted
version: "1.18.0"
convert:
dependency: transitive
description:
name: convert
sha256: "0f08b14755d163f6e2134cb58222dd25ea2a2ee8a195e53983d57c075324d592"
url: "https://pub.dev"
source: hosted
version: "3.1.1"
coverage:
dependency: transitive
description:
name: coverage
sha256: "8acabb8306b57a409bf4c83522065672ee13179297a6bb0cb9ead73948df7c76"
url: "https://pub.dev"
source: hosted
version: "1.7.2"
crypto:
dependency: transitive
description:
name: crypto
sha256: ff625774173754681d66daaf4a448684fb04b78f902da9cb3d308c19cc5e8bab
url: "https://pub.dev"
source: hosted
version: "3.0.3"
dart_style:
dependency: transitive
description:
name: dart_style
sha256: "40ae61a5d43feea6d24bd22c0537a6629db858963b99b4bc1c3db80676f32368"
url: "https://pub.dev"
source: hosted
version: "2.3.4"
equatable:
dependency: transitive
description:
name: equatable
sha256: c2b87cb7756efdf69892005af546c56c0b5037f54d2a88269b4f347a505e3ca2
url: "https://pub.dev"
source: hosted
version: "2.0.5"
fast_immutable_collections:
dependency: "direct main"
description:
name: fast_immutable_collections
sha256: "3eb1d7495c70598964add20e10666003fad6e855b108fe684ebcbf8ad0c8e120"
url: "https://pub.dev"
source: hosted
version: "9.2.0"
ffi:
dependency: transitive
description:
name: ffi
sha256: "7bf0adc28a23d395f19f3f1eb21dd7cfd1dd9f8e1c50051c069122e6853bc878"
url: "https://pub.dev"
source: hosted
version: "2.1.0"
file:
dependency: transitive
description:
name: file
sha256: "5fc22d7c25582e38ad9a8515372cd9a93834027aacf1801cf01164dac0ffa08c"
url: "https://pub.dev"
source: hosted
version: "7.0.0"
file_utils:
dependency: transitive
description:
name: file_utils
sha256: d1e64389a22649095c8405c9e177272caf05139255931c9ff30d53b5c9bcaa34
url: "https://pub.dev"
source: hosted
version: "1.0.1"
fixnum:
dependency: transitive
description:
name: fixnum
sha256: "25517a4deb0c03aa0f32fd12db525856438902d9c16536311e76cdc57b31d7d1"
url: "https://pub.dev"
source: hosted
version: "1.1.0"
flutter:
dependency: transitive
description: flutter
source: sdk
version: "0.0.0"
flutter_web_plugins:
dependency: transitive
description: flutter
source: sdk
version: "0.0.0"
freezed:
dependency: "direct dev"
description:
name: freezed
sha256: "6c5031daae12c7072b3a87eff98983076434b4889ef2a44384d0cae3f82372ba"
url: "https://pub.dev"
source: hosted
version: "2.4.6"
freezed_annotation:
dependency: "direct main"
description:
name: freezed_annotation
sha256: c3fd9336eb55a38cc1bbd79ab17573113a8deccd0ecbbf926cca3c62803b5c2d
url: "https://pub.dev"
source: hosted
version: "2.4.1"
frontend_server_client:
dependency: transitive
description:
name: frontend_server_client
sha256: "408e3ca148b31c20282ad6f37ebfa6f4bdc8fede5b74bc2f08d9d92b55db3612"
url: "https://pub.dev"
source: hosted
version: "3.2.0"
glob:
dependency: transitive
description:
name: glob
sha256: "0e7014b3b7d4dac1ca4d6114f82bf1782ee86745b9b42a92c9289c23d8a0ab63"
url: "https://pub.dev"
source: hosted
version: "2.1.2"
globbing:
dependency: transitive
description:
name: globbing
sha256: "4f89cfaf6fa74c9c1740a96259da06bd45411ede56744e28017cc534a12b6e2d"
url: "https://pub.dev"
source: hosted
version: "1.0.0"
graphs:
dependency: transitive
description:
name: graphs
sha256: aedc5a15e78fc65a6e23bcd927f24c64dd995062bcd1ca6eda65a3cff92a4d19
url: "https://pub.dev"
source: hosted
version: "2.3.1"
http_multi_server:
dependency: transitive
description:
name: http_multi_server
sha256: "97486f20f9c2f7be8f514851703d0119c3596d14ea63227af6f7a481ef2b2f8b"
url: "https://pub.dev"
source: hosted
version: "3.2.1"
http_parser:
dependency: transitive
description:
name: http_parser
sha256: "2aa08ce0341cc9b354a498388e30986515406668dbcc4f7c950c3e715496693b"
url: "https://pub.dev"
source: hosted
version: "4.0.2"
io:
dependency: transitive
description:
name: io
sha256: "2ec25704aba361659e10e3e5f5d672068d332fc8ac516421d483a11e5cbd061e"
url: "https://pub.dev"
source: hosted
version: "1.0.4"
js:
dependency: transitive
description:
name: js
sha256: f2c445dce49627136094980615a031419f7f3eb393237e4ecd97ac15dea343f3
url: "https://pub.dev"
source: hosted
version: "0.6.7"
json_annotation:
dependency: "direct main"
description:
name: json_annotation
sha256: b10a7b2ff83d83c777edba3c6a0f97045ddadd56c944e1a23a3fdf43a1bf4467
url: "https://pub.dev"
source: hosted
version: "4.8.1"
json_serializable:
dependency: "direct dev"
description:
name: json_serializable
sha256: aa1f5a8912615733e0fdc7a02af03308933c93235bdc8d50d0b0c8a8ccb0b969
url: "https://pub.dev"
source: hosted
version: "6.7.1"
lint_hard:
dependency: "direct dev"
description:
name: lint_hard
sha256: "44d15ec309b1a8e1aff99069df9dcb1597f49d5f588f32811ca28fb7b38c32fe"
url: "https://pub.dev"
source: hosted
version: "4.0.0"
logging:
dependency: transitive
description:
name: logging
sha256: "623a88c9594aa774443aa3eb2d41807a48486b5613e67599fb4c41c0ad47c340"
url: "https://pub.dev"
source: hosted
version: "1.2.0"
loggy:
dependency: "direct main"
description:
name: loggy
sha256: "981e03162bbd3a5a843026f75f73d26e4a0d8aa035ae060456ca7b30dfd1e339"
url: "https://pub.dev"
source: hosted
version: "2.0.3"
matcher:
dependency: transitive
description:
name: matcher
sha256: d2323aa2060500f906aa31a895b4030b6da3ebdcc5619d14ce1aada65cd161cb
url: "https://pub.dev"
source: hosted
version: "0.12.16+1"
material_color_utilities:
dependency: transitive
description:
name: material_color_utilities
sha256: "9528f2f296073ff54cb9fee677df673ace1218163c3bc7628093e7eed5203d41"
url: "https://pub.dev"
source: hosted
version: "0.5.0"
meta:
dependency: transitive
description:
name: meta
sha256: a6e590c838b18133bb482a2745ad77c5bb7715fb0451209e1a7567d416678b8e
url: "https://pub.dev"
source: hosted
version: "1.10.0"
mime:
dependency: transitive
description:
name: mime
sha256: e4ff8e8564c03f255408decd16e7899da1733852a9110a58fe6d1b817684a63e
url: "https://pub.dev"
source: hosted
version: "1.0.4"
mutex:
dependency: "direct main"
description:
name: mutex
sha256: "8827da25de792088eb33e572115a5eb0d61d61a3c01acbc8bcbe76ed78f1a1f2"
url: "https://pub.dev"
source: hosted
version: "3.1.0"
node_preamble:
dependency: transitive
description:
name: node_preamble
sha256: "6e7eac89047ab8a8d26cf16127b5ed26de65209847630400f9aefd7cd5c730db"
url: "https://pub.dev"
source: hosted
version: "2.0.2"
package_config:
dependency: transitive
description:
name: package_config
sha256: "1c5b77ccc91e4823a5af61ee74e6b972db1ef98c2ff5a18d3161c982a55448bd"
url: "https://pub.dev"
source: hosted
version: "2.1.0"
path:
dependency: transitive
description:
name: path
sha256: "087ce49c3f0dc39180befefc60fdb4acd8f8620e5682fe2476afd0b3688bb4af"
url: "https://pub.dev"
source: hosted
version: "1.9.0"
path_provider:
dependency: transitive
description:
name: path_provider
sha256: a1aa8aaa2542a6bc57e381f132af822420216c80d4781f7aa085ca3229208aaa
url: "https://pub.dev"
source: hosted
version: "2.1.1"
path_provider_android:
dependency: transitive
description:
name: path_provider_android
sha256: "477184d672607c0a3bf68fbbf601805f92ef79c82b64b4d6eb318cbca4c48668"
url: "https://pub.dev"
source: hosted
version: "2.2.2"
path_provider_foundation:
dependency: transitive
description:
name: path_provider_foundation
sha256: "19314d595120f82aca0ba62787d58dde2cc6b5df7d2f0daf72489e38d1b57f2d"
url: "https://pub.dev"
source: hosted
version: "2.3.1"
path_provider_linux:
dependency: transitive
description:
name: path_provider_linux
sha256: f7a1fe3a634fe7734c8d3f2766ad746ae2a2884abe22e241a8b301bf5cac3279
url: "https://pub.dev"
source: hosted
version: "2.2.1"
path_provider_platform_interface:
dependency: transitive
description:
name: path_provider_platform_interface
sha256: "94b1e0dd80970c1ce43d5d4e050a9918fce4f4a775e6142424c30a29a363265c"
url: "https://pub.dev"
source: hosted
version: "2.1.1"
path_provider_windows:
dependency: transitive
description:
name: path_provider_windows
sha256: "8bc9f22eee8690981c22aa7fc602f5c85b497a6fb2ceb35ee5a5e5ed85ad8170"
url: "https://pub.dev"
source: hosted
version: "2.2.1"
platform:
dependency: transitive
description:
name: platform
sha256: "0a279f0707af40c890e80b1e9df8bb761694c074ba7e1d4ab1bc4b728e200b59"
url: "https://pub.dev"
source: hosted
version: "3.1.3"
plugin_platform_interface:
dependency: transitive
description:
name: plugin_platform_interface
sha256: f4f88d4a900933e7267e2b353594774fc0d07fb072b47eedcd5b54e1ea3269f8
url: "https://pub.dev"
source: hosted
version: "2.1.7"
pool:
dependency: transitive
description:
name: pool
sha256: "20fe868b6314b322ea036ba325e6fc0711a22948856475e2c2b6306e8ab39c2a"
url: "https://pub.dev"
source: hosted
version: "1.5.1"
protobuf:
dependency: "direct main"
description:
name: protobuf
sha256: "68645b24e0716782e58948f8467fd42a880f255096a821f9e7d0ec625b00c84d"
url: "https://pub.dev"
source: hosted
version: "3.1.0"
pub_semver:
dependency: transitive
description:
name: pub_semver
sha256: "40d3ab1bbd474c4c2328c91e3a7df8c6dd629b79ece4c4bd04bee496a224fb0c"
url: "https://pub.dev"
source: hosted
version: "2.1.4"
pubspec_parse:
dependency: transitive
description:
name: pubspec_parse
sha256: c63b2876e58e194e4b0828fcb080ad0e06d051cb607a6be51a9e084f47cb9367
url: "https://pub.dev"
source: hosted
version: "1.2.3"
shelf:
dependency: transitive
description:
name: shelf
sha256: ad29c505aee705f41a4d8963641f91ac4cee3c8fad5947e033390a7bd8180fa4
url: "https://pub.dev"
source: hosted
version: "1.4.1"
shelf_packages_handler:
dependency: transitive
description:
name: shelf_packages_handler
sha256: "89f967eca29607c933ba9571d838be31d67f53f6e4ee15147d5dc2934fee1b1e"
url: "https://pub.dev"
source: hosted
version: "3.0.2"
shelf_static:
dependency: transitive
description:
name: shelf_static
sha256: a41d3f53c4adf0f57480578c1d61d90342cd617de7fc8077b1304643c2d85c1e
url: "https://pub.dev"
source: hosted
version: "1.1.2"
shelf_web_socket:
dependency: transitive
description:
name: shelf_web_socket
sha256: "9ca081be41c60190ebcb4766b2486a7d50261db7bd0f5d9615f2d653637a84c1"
url: "https://pub.dev"
source: hosted
version: "1.0.4"
sky_engine:
dependency: transitive
description: flutter
source: sdk
version: "0.0.99"
source_gen:
dependency: transitive
description:
name: source_gen
sha256: "14658ba5f669685cd3d63701d01b31ea748310f7ab854e471962670abcf57832"
url: "https://pub.dev"
source: hosted
version: "1.5.0"
source_helper:
dependency: transitive
description:
name: source_helper
sha256: "6adebc0006c37dd63fe05bca0a929b99f06402fc95aa35bf36d67f5c06de01fd"
url: "https://pub.dev"
source: hosted
version: "1.3.4"
source_map_stack_trace:
dependency: transitive
description:
name: source_map_stack_trace
sha256: "84cf769ad83aa6bb61e0aa5a18e53aea683395f196a6f39c4c881fb90ed4f7ae"
url: "https://pub.dev"
source: hosted
version: "2.1.1"
source_maps:
dependency: transitive
description:
name: source_maps
sha256: "708b3f6b97248e5781f493b765c3337db11c5d2c81c3094f10904bfa8004c703"
url: "https://pub.dev"
source: hosted
version: "0.10.12"
source_span:
dependency: transitive
description:
name: source_span
sha256: "53e943d4206a5e30df338fd4c6e7a077e02254531b138a15aec3bd143c1a8b3c"
url: "https://pub.dev"
source: hosted
version: "1.10.0"
stack_trace:
dependency: transitive
description:
name: stack_trace
sha256: "73713990125a6d93122541237550ee3352a2d84baad52d375a4cad2eb9b7ce0b"
url: "https://pub.dev"
source: hosted
version: "1.11.1"
stream_channel:
dependency: transitive
description:
name: stream_channel
sha256: ba2aa5d8cc609d96bbb2899c28934f9e1af5cddbd60a827822ea467161eb54e7
url: "https://pub.dev"
source: hosted
version: "2.1.2"
stream_transform:
dependency: transitive
description:
name: stream_transform
sha256: "14a00e794c7c11aa145a170587321aedce29769c08d7f58b1d141da75e3b1c6f"
url: "https://pub.dev"
source: hosted
version: "2.1.0"
string_scanner:
dependency: transitive
description:
name: string_scanner
sha256: "556692adab6cfa87322a115640c11f13cb77b3f076ddcc5d6ae3c20242bedcde"
url: "https://pub.dev"
source: hosted
version: "1.2.0"
system_info2:
dependency: transitive
description:
name: system_info2
sha256: af2f948e3f31a3367a049932a8ad59faf0063ecf836a020d975b9f41566d8bc9
url: "https://pub.dev"
source: hosted
version: "3.0.2"
system_info_plus:
dependency: transitive
description:
name: system_info_plus
sha256: b915c811c6605b802f3988859bc2bb79c95f735762a75b5451741f7a2b949d1b
url: "https://pub.dev"
source: hosted
version: "0.0.5"
term_glyph:
dependency: transitive
description:
name: term_glyph
sha256: a29248a84fbb7c79282b40b8c72a1209db169a2e0542bce341da992fe1bc7e84
url: "https://pub.dev"
source: hosted
version: "1.2.1"
test:
dependency: "direct dev"
description:
name: test
sha256: "3d028996109ad5c253674c7f347822fb994a087614d6f353e6039704b4661ff2"
url: "https://pub.dev"
source: hosted
version: "1.25.0"
test_api:
dependency: transitive
description:
name: test_api
sha256: "9955ae474176f7ac8ee4e989dadfb411a58c30415bcfb648fa04b2b8a03afa7f"
url: "https://pub.dev"
source: hosted
version: "0.7.0"
test_core:
dependency: transitive
description:
name: test_core
sha256: "2bc4b4ecddd75309300d8096f781c0e3280ca1ef85beda558d33fcbedc2eead4"
url: "https://pub.dev"
source: hosted
version: "0.6.0"
timing:
dependency: transitive
description:
name: timing
sha256: "70a3b636575d4163c477e6de42f247a23b315ae20e86442bebe32d3cabf61c32"
url: "https://pub.dev"
source: hosted
version: "1.0.1"
typed_data:
dependency: transitive
description:
name: typed_data
sha256: facc8d6582f16042dd49f2463ff1bd6e2c9ef9f3d5da3d9b087e244a7b564b3c
url: "https://pub.dev"
source: hosted
version: "1.3.2"
vector_math:
dependency: transitive
description:
name: vector_math
sha256: "80b3257d1492ce4d091729e3a67a60407d227c27241d6927be0130c98e741803"
url: "https://pub.dev"
source: hosted
version: "2.1.4"
veilid:
dependency: "direct main"
description:
path: "../../../veilid/veilid-flutter"
relative: true
source: path
version: "0.2.5"
vm_service:
dependency: transitive
description:
name: vm_service
sha256: a2662fb1f114f4296cf3f5a50786a2d888268d7776cf681aa17d660ffa23b246
url: "https://pub.dev"
source: hosted
version: "14.0.0"
watcher:
dependency: transitive
description:
name: watcher
sha256: "3d2ad6751b3c16cf07c7fca317a1413b3f26530319181b37e3b9039b84fc01d8"
url: "https://pub.dev"
source: hosted
version: "1.1.0"
web:
dependency: transitive
description:
name: web
sha256: afe077240a270dcfd2aafe77602b4113645af95d0ad31128cc02bce5ac5d5152
url: "https://pub.dev"
source: hosted
version: "0.3.0"
web_socket_channel:
dependency: transitive
description:
name: web_socket_channel
sha256: d88238e5eac9a42bb43ca4e721edba3c08c6354d4a53063afaa568516217621b
url: "https://pub.dev"
source: hosted
version: "2.4.0"
webkit_inspection_protocol:
dependency: transitive
description:
name: webkit_inspection_protocol
sha256: "87d3f2333bb240704cd3f1c6b5b7acd8a10e7f0bc28c28dcf14e782014f4a572"
url: "https://pub.dev"
source: hosted
version: "1.2.1"
win32:
dependency: transitive
description:
name: win32
sha256: b0f37db61ba2f2e9b7a78a1caece0052564d1bc70668156cf3a29d676fe4e574
url: "https://pub.dev"
source: hosted
version: "5.1.1"
xdg_directories:
dependency: transitive
description:
name: xdg_directories
sha256: "589ada45ba9e39405c198fe34eb0f607cddb2108527e658136120892beac46d2"
url: "https://pub.dev"
source: hosted
version: "1.0.3"
yaml:
dependency: transitive
description:
name: yaml
sha256: "75769501ea3489fca56601ff33454fe45507ea3bfb014161abc3b43ae25989d5"
url: "https://pub.dev"
source: hosted
version: "3.1.2"
sdks:
dart: ">=3.2.0-194.0.dev <4.0.0"
flutter: ">=3.10.6"

View file

@@ -0,0 +1,26 @@
name: veilid_support
description: Veilid Support Library
publish_to: 'none'
version: 1.0.2+0
environment:
sdk: '>=3.0.5 <4.0.0'
flutter: ">=3.10.0"
dependencies:
fast_immutable_collections: ^9.1.5
freezed_annotation: ^2.2.0
json_annotation: ^4.8.1
loggy: ^2.0.3
mutex: ^3.1.0
protobuf: ^3.0.0
veilid:
# veilid: ^0.0.1
path: ../../../veilid/veilid-flutter
dev_dependencies:
build_runner: ^2.4.6
test: ^1.25.0
freezed: ^2.3.5
json_serializable: ^6.7.1
lint_hard: ^4.0.0