This commit is contained in:
Christien Rioux 2023-08-01 00:39:50 -04:00
parent 57c366ef91
commit c35056f687
39 changed files with 1382 additions and 662 deletions

View file

@@ -4,96 +4,70 @@ import 'dart:typed_data';
import 'package:protobuf/protobuf.dart';
import 'package:veilid/veilid.dart';
import '../tools/tools.dart';
import 'veilid_support.dart';
import '../../tools/tools.dart';
import '../veilid_support.dart';
class DHTRecord {
DHTRecord(
{required VeilidRoutingContext dhtctx,
{required VeilidRoutingContext routingContext,
required DHTRecordDescriptor recordDescriptor,
int defaultSubkey = 0,
KeyPair? writer,
this.crypto = const DHTRecordCryptoPublic()})
: _dhtctx = dhtctx,
DHTRecordCrypto crypto = const DHTRecordCryptoPublic()})
: _crypto = crypto,
_routingContext = routingContext,
_recordDescriptor = recordDescriptor,
_defaultSubkey = defaultSubkey,
_writer = writer,
_open = false,
_valid = true,
_subkeySeqCache = {};
final VeilidRoutingContext _dhtctx;
final VeilidRoutingContext _routingContext;
final DHTRecordDescriptor _recordDescriptor;
final int _defaultSubkey;
final KeyPair? _writer;
final Map<int, int> _subkeySeqCache;
DHTRecordCrypto crypto;
static Future<DHTRecord> create(VeilidRoutingContext dhtctx,
{DHTSchema schema = const DHTSchema.dflt(oCnt: 1),
int defaultSubkey = 0,
DHTRecordCrypto? crypto}) async {
final recordDescriptor = await dhtctx.createDHTRecord(schema);
final rec = DHTRecord(
dhtctx: dhtctx,
recordDescriptor: recordDescriptor,
defaultSubkey: defaultSubkey,
writer: recordDescriptor.ownerKeyPair(),
crypto: crypto ??
await DHTRecordCryptoPrivate.fromTypedKeyPair(
recordDescriptor.ownerTypedKeyPair()!));
return rec;
}
static Future<DHTRecord> openRead(
VeilidRoutingContext dhtctx, TypedKey recordKey,
{int defaultSubkey = 0, DHTRecordCrypto? crypto}) async {
final recordDescriptor = await dhtctx.openDHTRecord(recordKey, null);
final rec = DHTRecord(
dhtctx: dhtctx,
recordDescriptor: recordDescriptor,
defaultSubkey: defaultSubkey,
crypto: crypto ?? const DHTRecordCryptoPublic());
return rec;
}
static Future<DHTRecord> openWrite(
VeilidRoutingContext dhtctx,
TypedKey recordKey,
KeyPair writer, {
int defaultSubkey = 0,
DHTRecordCrypto? crypto,
}) async {
final recordDescriptor = await dhtctx.openDHTRecord(recordKey, writer);
final rec = DHTRecord(
dhtctx: dhtctx,
recordDescriptor: recordDescriptor,
defaultSubkey: defaultSubkey,
writer: writer,
crypto: crypto ??
await DHTRecordCryptoPrivate.fromTypedKeyPair(
TypedKeyPair.fromKeyPair(recordKey.kind, writer)));
return rec;
}
final DHTRecordCrypto _crypto;
bool _open;
bool _valid;
int subkeyOrDefault(int subkey) => (subkey == -1) ? _defaultSubkey : subkey;
VeilidRoutingContext get routingContext => _dhtctx;
VeilidRoutingContext get routingContext => _routingContext;
TypedKey get key => _recordDescriptor.key;
PublicKey get owner => _recordDescriptor.owner;
KeyPair? get ownerKeyPair => _recordDescriptor.ownerKeyPair();
DHTSchema get schema => _recordDescriptor.schema;
KeyPair? get writer => _writer;
OwnedDHTRecordPointer get ownedDHTRecordPointer =>
OwnedDHTRecordPointer(recordKey: key, owner: ownerKeyPair!);
Future<void> close() async {
await _dhtctx.closeDHTRecord(_recordDescriptor.key);
if (!_valid) {
throw StateError('already deleted');
}
if (!_open) {
return;
}
final pool = await DHTRecordPool.instance();
await _routingContext.closeDHTRecord(_recordDescriptor.key);
pool.recordClosed(this);
_open = false;
}
Future<void> delete() async {
await _dhtctx.deleteDHTRecord(_recordDescriptor.key);
if (!_valid) {
throw StateError('already deleted');
}
if (_open) {
await close();
}
final pool = await DHTRecordPool.instance();
await pool.deleteDeep(key);
_valid = false;
}
Future<T> scope<T>(Future<T> Function(DHTRecord) scopeFunction) async {
Future<T> scope<T>(FutureOr<T> Function(DHTRecord) scopeFunction) async {
try {
return await scopeFunction(this);
} finally {
@@ -101,7 +75,8 @@ class DHTRecord {
}
}
Future<T> deleteScope<T>(Future<T> Function(DHTRecord) scopeFunction) async {
Future<T> deleteScope<T>(
FutureOr<T> Function(DHTRecord) scopeFunction) async {
try {
final out = await scopeFunction(this);
await close();
@@ -117,8 +92,8 @@ class DHTRecord {
bool forceRefresh = false,
bool onlyUpdates = false}) async {
subkey = subkeyOrDefault(subkey);
final valueData =
await _dhtctx.getDHTValue(_recordDescriptor.key, subkey, forceRefresh);
final valueData = await _routingContext.getDHTValue(
_recordDescriptor.key, subkey, forceRefresh);
if (valueData == null) {
return null;
}
@@ -126,7 +101,7 @@ class DHTRecord {
if (lastSeq != null && valueData.seq <= lastSeq) {
return null;
}
final out = crypto.decrypt(valueData.data, subkey);
final out = _crypto.decrypt(valueData.data, subkey);
_subkeySeqCache[subkey] = valueData.seq;
return out;
}
@@ -159,11 +134,11 @@ class DHTRecord {
Future<Uint8List?> tryWriteBytes(Uint8List newValue,
{int subkey = -1}) async {
subkey = subkeyOrDefault(subkey);
newValue = await crypto.encrypt(newValue, subkey);
newValue = await _crypto.encrypt(newValue, subkey);
// Set the new data if possible
final valueData =
await _dhtctx.setDHTValue(_recordDescriptor.key, subkey, newValue);
final valueData = await _routingContext.setDHTValue(
_recordDescriptor.key, subkey, newValue);
if (valueData == null) {
return null;
}
@@ -172,13 +147,13 @@ class DHTRecord {
Future<void> eventualWriteBytes(Uint8List newValue, {int subkey = -1}) async {
subkey = subkeyOrDefault(subkey);
newValue = await crypto.encrypt(newValue, subkey);
newValue = await _crypto.encrypt(newValue, subkey);
ValueData? valueData;
do {
// Set the new data
valueData =
await _dhtctx.setDHTValue(_recordDescriptor.key, subkey, newValue);
valueData = await _routingContext.setDHTValue(
_recordDescriptor.key, subkey, newValue);
// Repeat if newer data on the network was found
} while (valueData != null);
@@ -191,7 +166,7 @@ class DHTRecord {
// Get existing identity key, do not allow force refresh here
// because if we need a refresh the setDHTValue will fail anyway
var valueData =
await _dhtctx.getDHTValue(_recordDescriptor.key, subkey, false);
await _routingContext.getDHTValue(_recordDescriptor.key, subkey, false);
// Ensure it exists already
if (valueData == null) {
throw const FormatException('value does not exist');
@@ -201,13 +176,13 @@ class DHTRecord {
_subkeySeqCache[subkey] = valueData!.seq;
// Update the data
final oldData = await crypto.decrypt(valueData.data, subkey);
final oldData = await _crypto.decrypt(valueData.data, subkey);
final updatedData = await update(oldData);
final newData = await crypto.encrypt(updatedData, subkey);
final newData = await _crypto.encrypt(updatedData, subkey);
// Set it back
valueData =
await _dhtctx.setDHTValue(_recordDescriptor.key, subkey, newData);
valueData = await _routingContext.setDHTValue(
_recordDescriptor.key, subkey, newData);
// Repeat if newer data on the network was found
} while (valueData != null);
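For orientation, a minimal usage sketch of the refactored DHTRecord API above. The pool-based create(), eventualWriteBytes(), close() and the open/deleted guards are taken from this diff; the import path, the utf8 payload, and the exact name of the read accessor (get, whose signature is only partially visible in these hunks) are assumptions.

import 'dart:convert';
import 'dart:typed_data';

import 'veilid_support/veilid_support.dart'; // path assumed; umbrella library seen in this diff

Future<void> dhtRecordSketch() async {
  // Records are now allocated through the pool instead of DHTRecord.create
  final pool = await DHTRecordPool.instance();
  final record = await pool.create();
  try {
    // Encrypts with the record's DHTRecordCrypto and writes subkey 0,
    // retrying while newer data is found on the network
    await record.eventualWriteBytes(
        Uint8List.fromList(utf8.encode('hello veilid')));

    // Reads subkey 0 back; returns null when the network value is not newer
    // than the locally cached sequence number for that subkey
    final data = await record.get(forceRefresh: true);
    if (data != null) {
      print(utf8.decode(data));
    }
  } finally {
    // close() is now guarded: it throws if the record was already deleted
    // and returns without error if the record is no longer open
    await record.close();
  }
}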

View file

@@ -3,7 +3,7 @@ import 'dart:typed_data';
import 'package:veilid/veilid.dart';
import 'veilid_init.dart';
import '../veilid_init.dart';
abstract class DHTRecordCrypto {
FutureOr<Uint8List> encrypt(Uint8List data, int subkey);
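DHTRecordCrypto is the small per-subkey encrypt/decrypt interface that DHTRecord now stores as _crypto. Below is a sketch of a custom implementation, assuming the interface has only the two members used elsewhere in this diff (decrypt's signature is inferred from the _crypto.decrypt(valueData.data, subkey) calls in dht_record.dart):

import 'dart:async';
import 'dart:typed_data';

// Pass-through crypto, e.g. for tests; presumably equivalent in behaviour to
// DHTRecordCryptoPublic (assumption).
class DHTRecordCryptoNone implements DHTRecordCrypto {
  const DHTRecordCryptoNone();

  @override
  FutureOr<Uint8List> encrypt(Uint8List data, int subkey) => data;

  @override
  FutureOr<Uint8List> decrypt(Uint8List data, int subkey) => data;
}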

View file

@@ -0,0 +1,272 @@
import 'package:fast_immutable_collections/fast_immutable_collections.dart';
import 'package:freezed_annotation/freezed_annotation.dart';
import '../veilid_support.dart';
part 'dht_record_pool.freezed.dart';
part 'dht_record_pool.g.dart';
/// Record pool that manages DHTRecords and allows for tagged deletion
@freezed
class DHTRecordPoolAllocations with _$DHTRecordPoolAllocations {
const factory DHTRecordPoolAllocations({
required IMap<TypedKey, ISet<TypedKey>> childrenByParent,
required IMap<TypedKey, TypedKey> parentByChild,
}) = _DHTRecordPoolAllocations;
factory DHTRecordPoolAllocations.fromJson(dynamic json) =>
_$DHTRecordPoolAllocationsFromJson(json as Map<String, dynamic>);
}
/// Pointer to an owned record, with key, owner key and owner secret
/// Ensure that these are only serialized encrypted
@freezed
class OwnedDHTRecordPointer with _$OwnedDHTRecordPointer {
const factory OwnedDHTRecordPointer({
required TypedKey recordKey,
required KeyPair owner,
}) = _OwnedDHTRecordPointer;
factory OwnedDHTRecordPointer.fromJson(dynamic json) =>
_$OwnedDHTRecordPointerFromJson(json as Map<String, dynamic>);
}
class DHTRecordPool with AsyncTableDBBacked<DHTRecordPoolAllocations> {
DHTRecordPool._(Veilid veilid, VeilidRoutingContext routingContext)
: _state = DHTRecordPoolAllocations(
childrenByParent: IMap(), parentByChild: IMap()),
_opened = <TypedKey, DHTRecord>{},
_routingContext = routingContext,
_veilid = veilid;
// Persistent DHT record list
DHTRecordPoolAllocations _state;
// Which DHT records are currently open
final Map<TypedKey, DHTRecord> _opened;
// Default routing context to use for new keys
final VeilidRoutingContext _routingContext;
// Convenience accessor
final Veilid _veilid;
static DHTRecordPool? _singleton;
//////////////////////////////////////////////////////////////
/// AsyncTableDBBacked
@override
String tableName() => 'dht_record_pool';
@override
String tableKeyName() => 'pool_allocations';
@override
DHTRecordPoolAllocations valueFromJson(Object? obj) => obj != null
? DHTRecordPoolAllocations.fromJson(obj)
: DHTRecordPoolAllocations(
childrenByParent: IMap(), parentByChild: IMap());
@override
Object? valueToJson(DHTRecordPoolAllocations val) => val.toJson();
//////////////////////////////////////////////////////////////
static Future<DHTRecordPool> instance() async {
if (_singleton == null) {
final veilid = await eventualVeilid.future;
final routingContext = (await veilid.routingContext())
.withPrivacy()
.withSequencing(Sequencing.preferOrdered);
final globalPool = DHTRecordPool._(veilid, routingContext);
globalPool._state = await globalPool.load();
_singleton = globalPool;
}
return _singleton!;
}
Veilid get veilid => _veilid;
void _recordOpened(DHTRecord record) {
assert(!_opened.containsKey(record.key), 'record already opened');
_opened[record.key] = record;
}
void recordClosed(DHTRecord record) {
assert(_opened.containsKey(record.key), 'record already closed');
_opened.remove(record.key);
}
Future<void> deleteDeep(TypedKey parent) async {
// Collect all dependencies
final allDeps = <TypedKey>[];
final currentDeps = [parent];
while (currentDeps.isNotEmpty) {
final nextDep = currentDeps.removeLast();
// Remove this child from its parent
_removeDependency(nextDep);
// Ensure all records are closed before delete
assert(!_opened.containsKey(nextDep), 'should not delete opened record');
allDeps.add(nextDep);
final childDeps = _state.childrenByParent[nextDep]?.toList() ?? [];
currentDeps.addAll(childDeps);
}
// Delete all records
final allFutures = <Future<void>>[];
for (final dep in allDeps) {
allFutures.add(_routingContext.deleteDHTRecord(dep));
}
await Future.wait(allFutures);
}
void _addDependency(TypedKey parent, TypedKey child) {
final childrenOfParent =
_state.childrenByParent[parent] ?? ISet<TypedKey>();
if (childrenOfParent.contains(child)) {
throw StateError('Dependency added twice: $parent -> $child');
}
if (_state.parentByChild.containsKey(child)) {
throw StateError('Child has two parents: $child <- $parent');
}
if (_state.childrenByParent.containsKey(child)) {
// dependencies should be opened after their parents
throw StateError('Child is not a leaf: $child');
}
_state = _state.copyWith(
childrenByParent:
_state.childrenByParent.add(parent, childrenOfParent.add(child)),
parentByChild: _state.parentByChild.add(child, parent));
}
void _removeDependency(TypedKey child) {
final parent = _state.parentByChild[child];
if (parent == null) {
return;
}
final children = _state.childrenByParent[parent]!.remove(child);
if (children.isEmpty) {
_state = _state.copyWith(
childrenByParent: _state.childrenByParent.remove(parent),
parentByChild: _state.parentByChild.remove(child));
} else {
_state = _state.copyWith(
childrenByParent: _state.childrenByParent.add(parent, children),
parentByChild: _state.parentByChild.remove(child));
}
}
///////////////////////////////////////////////////////////////////////
/// Create a root DHTRecord that has no dependent records
Future<DHTRecord> create(
{VeilidRoutingContext? routingContext,
TypedKey? parent,
DHTSchema schema = const DHTSchema.dflt(oCnt: 1),
int defaultSubkey = 0,
DHTRecordCrypto? crypto}) async {
final dhtctx = routingContext ?? _routingContext;
final recordDescriptor = await dhtctx.createDHTRecord(schema);
final rec = DHTRecord(
routingContext: dhtctx,
recordDescriptor: recordDescriptor,
defaultSubkey: defaultSubkey,
writer: recordDescriptor.ownerKeyPair(),
crypto: crypto ??
await DHTRecordCryptoPrivate.fromTypedKeyPair(
recordDescriptor.ownerTypedKeyPair()!));
if (parent != null) {
_addDependency(parent, rec.key);
}
_recordOpened(rec);
return rec;
}
/// Open a DHTRecord readonly
Future<DHTRecord> openRead(TypedKey recordKey,
{VeilidRoutingContext? routingContext,
TypedKey? parent,
int defaultSubkey = 0,
DHTRecordCrypto? crypto}) async {
// If we are opening a key that already exists
// make sure we are using the same parent if one was specified
final existingParent = _state.parentByChild[recordKey];
assert(existingParent == parent, 'wrong parent for opened key');
// Open from the veilid api
final dhtctx = routingContext ?? _routingContext;
final recordDescriptor = await dhtctx.openDHTRecord(recordKey, null);
final rec = DHTRecord(
routingContext: dhtctx,
recordDescriptor: recordDescriptor,
defaultSubkey: defaultSubkey,
crypto: crypto ?? const DHTRecordCryptoPublic());
// Register the dependency if specified
if (parent != null) {
_addDependency(parent, rec.key);
}
_recordOpened(rec);
return rec;
}
/// Open a DHTRecord writable
Future<DHTRecord> openWrite(
TypedKey recordKey,
KeyPair writer, {
VeilidRoutingContext? routingContext,
TypedKey? parent,
int defaultSubkey = 0,
DHTRecordCrypto? crypto,
}) async {
// If we are opening a key that already exists
// make sure we are using the same parent if one was specified
final existingParent = _state.parentByChild[recordKey];
assert(existingParent == parent, 'wrong parent for opened key');
// Open from the veilid api
final dhtctx = routingContext ?? _routingContext;
final recordDescriptor = await dhtctx.openDHTRecord(recordKey, writer);
final rec = DHTRecord(
routingContext: dhtctx,
recordDescriptor: recordDescriptor,
defaultSubkey: defaultSubkey,
writer: writer,
crypto: crypto ??
await DHTRecordCryptoPrivate.fromTypedKeyPair(
TypedKeyPair.fromKeyPair(recordKey.kind, writer)));
// Register the dependency if specified
if (parent != null) {
_addDependency(parent, rec.key);
}
_recordOpened(rec);
return rec;
}
/// Open a DHTRecord owned
/// This is the same as writable but uses an OwnedDHTRecordPointer
/// for convenience and uses symmetric encryption on the key
/// This is primarily used for backing up private content onto the DHT
/// and synchronizing it between devices. Because it is 'owned', the correct
/// parent must be specified.
Future<DHTRecord> openOwned(
OwnedDHTRecordPointer ownedDHTRecordPointer, {
required TypedKey parent,
VeilidRoutingContext? routingContext,
int defaultSubkey = 0,
DHTRecordCrypto? crypto,
}) =>
openWrite(
ownedDHTRecordPointer.recordKey,
ownedDHTRecordPointer.owner,
routingContext: routingContext,
parent: parent,
defaultSubkey: defaultSubkey,
crypto: crypto,
);
}
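A sketch of the parent/child bookkeeping this pool introduces: records created with a parent are tracked in childrenByParent/parentByChild, must be closed before deletion, and are removed transitively by deleteDeep. All method names come from this file; the wrapping function is illustrative.

Future<void> poolDependencySketch() async {
  final pool = await DHTRecordPool.instance();

  // A root record, then a child registered under it via _addDependency
  final parentRec = await pool.create();
  final childRec = await pool.create(parent: parentRec.key);

  // ... use both records ...

  // Close the child first: deleteDeep asserts that nothing it is about to
  // delete is still open
  await childRec.close();

  // delete() closes the parent if needed and then calls deleteDeep, which
  // walks childrenByParent and removes the whole subtree of records
  await parentRec.delete();
}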

View file

@@ -0,0 +1,357 @@
// coverage:ignore-file
// GENERATED CODE - DO NOT MODIFY BY HAND
// ignore_for_file: type=lint
// ignore_for_file: unused_element, deprecated_member_use, deprecated_member_use_from_same_package, use_function_type_syntax_for_parameters, unnecessary_const, avoid_init_to_null, invalid_override_different_default_values_named, prefer_expression_function_bodies, annotate_overrides, invalid_annotation_target, unnecessary_question_mark
part of 'dht_record_pool.dart';
// **************************************************************************
// FreezedGenerator
// **************************************************************************
T _$identity<T>(T value) => value;
final _privateConstructorUsedError = UnsupportedError(
'It seems like you constructed your class using `MyClass._()`. This constructor is only meant to be used by freezed and you are not supposed to need it nor use it.\nPlease check the documentation here for more information: https://github.com/rrousselGit/freezed#custom-getters-and-methods');
DHTRecordPoolAllocations _$DHTRecordPoolAllocationsFromJson(
Map<String, dynamic> json) {
return _DHTRecordPoolAllocations.fromJson(json);
}
/// @nodoc
mixin _$DHTRecordPoolAllocations {
IMap<Typed<FixedEncodedString43>, ISet<Typed<FixedEncodedString43>>>
get childrenByParent => throw _privateConstructorUsedError;
IMap<Typed<FixedEncodedString43>, Typed<FixedEncodedString43>>
get parentByChild => throw _privateConstructorUsedError;
Map<String, dynamic> toJson() => throw _privateConstructorUsedError;
@JsonKey(ignore: true)
$DHTRecordPoolAllocationsCopyWith<DHTRecordPoolAllocations> get copyWith =>
throw _privateConstructorUsedError;
}
/// @nodoc
abstract class $DHTRecordPoolAllocationsCopyWith<$Res> {
factory $DHTRecordPoolAllocationsCopyWith(DHTRecordPoolAllocations value,
$Res Function(DHTRecordPoolAllocations) then) =
_$DHTRecordPoolAllocationsCopyWithImpl<$Res, DHTRecordPoolAllocations>;
@useResult
$Res call(
{IMap<Typed<FixedEncodedString43>, ISet<Typed<FixedEncodedString43>>>
childrenByParent,
IMap<Typed<FixedEncodedString43>, Typed<FixedEncodedString43>>
parentByChild});
}
/// @nodoc
class _$DHTRecordPoolAllocationsCopyWithImpl<$Res,
$Val extends DHTRecordPoolAllocations>
implements $DHTRecordPoolAllocationsCopyWith<$Res> {
_$DHTRecordPoolAllocationsCopyWithImpl(this._value, this._then);
// ignore: unused_field
final $Val _value;
// ignore: unused_field
final $Res Function($Val) _then;
@pragma('vm:prefer-inline')
@override
$Res call({
Object? childrenByParent = null,
Object? parentByChild = null,
}) {
return _then(_value.copyWith(
childrenByParent: null == childrenByParent
? _value.childrenByParent
: childrenByParent // ignore: cast_nullable_to_non_nullable
as IMap<Typed<FixedEncodedString43>,
ISet<Typed<FixedEncodedString43>>>,
parentByChild: null == parentByChild
? _value.parentByChild
: parentByChild // ignore: cast_nullable_to_non_nullable
as IMap<Typed<FixedEncodedString43>, Typed<FixedEncodedString43>>,
) as $Val);
}
}
/// @nodoc
abstract class _$$_DHTRecordPoolAllocationsCopyWith<$Res>
implements $DHTRecordPoolAllocationsCopyWith<$Res> {
factory _$$_DHTRecordPoolAllocationsCopyWith(
_$_DHTRecordPoolAllocations value,
$Res Function(_$_DHTRecordPoolAllocations) then) =
__$$_DHTRecordPoolAllocationsCopyWithImpl<$Res>;
@override
@useResult
$Res call(
{IMap<Typed<FixedEncodedString43>, ISet<Typed<FixedEncodedString43>>>
childrenByParent,
IMap<Typed<FixedEncodedString43>, Typed<FixedEncodedString43>>
parentByChild});
}
/// @nodoc
class __$$_DHTRecordPoolAllocationsCopyWithImpl<$Res>
extends _$DHTRecordPoolAllocationsCopyWithImpl<$Res,
_$_DHTRecordPoolAllocations>
implements _$$_DHTRecordPoolAllocationsCopyWith<$Res> {
__$$_DHTRecordPoolAllocationsCopyWithImpl(_$_DHTRecordPoolAllocations _value,
$Res Function(_$_DHTRecordPoolAllocations) _then)
: super(_value, _then);
@pragma('vm:prefer-inline')
@override
$Res call({
Object? childrenByParent = null,
Object? parentByChild = null,
}) {
return _then(_$_DHTRecordPoolAllocations(
childrenByParent: null == childrenByParent
? _value.childrenByParent
: childrenByParent // ignore: cast_nullable_to_non_nullable
as IMap<Typed<FixedEncodedString43>,
ISet<Typed<FixedEncodedString43>>>,
parentByChild: null == parentByChild
? _value.parentByChild
: parentByChild // ignore: cast_nullable_to_non_nullable
as IMap<Typed<FixedEncodedString43>, Typed<FixedEncodedString43>>,
));
}
}
/// @nodoc
@JsonSerializable()
class _$_DHTRecordPoolAllocations implements _DHTRecordPoolAllocations {
const _$_DHTRecordPoolAllocations(
{required this.childrenByParent, required this.parentByChild});
factory _$_DHTRecordPoolAllocations.fromJson(Map<String, dynamic> json) =>
_$$_DHTRecordPoolAllocationsFromJson(json);
@override
final IMap<Typed<FixedEncodedString43>, ISet<Typed<FixedEncodedString43>>>
childrenByParent;
@override
final IMap<Typed<FixedEncodedString43>, Typed<FixedEncodedString43>>
parentByChild;
@override
String toString() {
return 'DHTRecordPoolAllocations(childrenByParent: $childrenByParent, parentByChild: $parentByChild)';
}
@override
bool operator ==(dynamic other) {
return identical(this, other) ||
(other.runtimeType == runtimeType &&
other is _$_DHTRecordPoolAllocations &&
(identical(other.childrenByParent, childrenByParent) ||
other.childrenByParent == childrenByParent) &&
(identical(other.parentByChild, parentByChild) ||
other.parentByChild == parentByChild));
}
@JsonKey(ignore: true)
@override
int get hashCode => Object.hash(runtimeType, childrenByParent, parentByChild);
@JsonKey(ignore: true)
@override
@pragma('vm:prefer-inline')
_$$_DHTRecordPoolAllocationsCopyWith<_$_DHTRecordPoolAllocations>
get copyWith => __$$_DHTRecordPoolAllocationsCopyWithImpl<
_$_DHTRecordPoolAllocations>(this, _$identity);
@override
Map<String, dynamic> toJson() {
return _$$_DHTRecordPoolAllocationsToJson(
this,
);
}
}
abstract class _DHTRecordPoolAllocations implements DHTRecordPoolAllocations {
const factory _DHTRecordPoolAllocations(
{required final IMap<Typed<FixedEncodedString43>,
ISet<Typed<FixedEncodedString43>>>
childrenByParent,
required final IMap<Typed<FixedEncodedString43>,
Typed<FixedEncodedString43>>
parentByChild}) = _$_DHTRecordPoolAllocations;
factory _DHTRecordPoolAllocations.fromJson(Map<String, dynamic> json) =
_$_DHTRecordPoolAllocations.fromJson;
@override
IMap<Typed<FixedEncodedString43>, ISet<Typed<FixedEncodedString43>>>
get childrenByParent;
@override
IMap<Typed<FixedEncodedString43>, Typed<FixedEncodedString43>>
get parentByChild;
@override
@JsonKey(ignore: true)
_$$_DHTRecordPoolAllocationsCopyWith<_$_DHTRecordPoolAllocations>
get copyWith => throw _privateConstructorUsedError;
}
OwnedDHTRecordPointer _$OwnedDHTRecordPointerFromJson(
Map<String, dynamic> json) {
return _OwnedDHTRecordPointer.fromJson(json);
}
/// @nodoc
mixin _$OwnedDHTRecordPointer {
Typed<FixedEncodedString43> get recordKey =>
throw _privateConstructorUsedError;
KeyPair get owner => throw _privateConstructorUsedError;
Map<String, dynamic> toJson() => throw _privateConstructorUsedError;
@JsonKey(ignore: true)
$OwnedDHTRecordPointerCopyWith<OwnedDHTRecordPointer> get copyWith =>
throw _privateConstructorUsedError;
}
/// @nodoc
abstract class $OwnedDHTRecordPointerCopyWith<$Res> {
factory $OwnedDHTRecordPointerCopyWith(OwnedDHTRecordPointer value,
$Res Function(OwnedDHTRecordPointer) then) =
_$OwnedDHTRecordPointerCopyWithImpl<$Res, OwnedDHTRecordPointer>;
@useResult
$Res call({Typed<FixedEncodedString43> recordKey, KeyPair owner});
}
/// @nodoc
class _$OwnedDHTRecordPointerCopyWithImpl<$Res,
$Val extends OwnedDHTRecordPointer>
implements $OwnedDHTRecordPointerCopyWith<$Res> {
_$OwnedDHTRecordPointerCopyWithImpl(this._value, this._then);
// ignore: unused_field
final $Val _value;
// ignore: unused_field
final $Res Function($Val) _then;
@pragma('vm:prefer-inline')
@override
$Res call({
Object? recordKey = null,
Object? owner = null,
}) {
return _then(_value.copyWith(
recordKey: null == recordKey
? _value.recordKey
: recordKey // ignore: cast_nullable_to_non_nullable
as Typed<FixedEncodedString43>,
owner: null == owner
? _value.owner
: owner // ignore: cast_nullable_to_non_nullable
as KeyPair,
) as $Val);
}
}
/// @nodoc
abstract class _$$_OwnedDHTRecordPointerCopyWith<$Res>
implements $OwnedDHTRecordPointerCopyWith<$Res> {
factory _$$_OwnedDHTRecordPointerCopyWith(_$_OwnedDHTRecordPointer value,
$Res Function(_$_OwnedDHTRecordPointer) then) =
__$$_OwnedDHTRecordPointerCopyWithImpl<$Res>;
@override
@useResult
$Res call({Typed<FixedEncodedString43> recordKey, KeyPair owner});
}
/// @nodoc
class __$$_OwnedDHTRecordPointerCopyWithImpl<$Res>
extends _$OwnedDHTRecordPointerCopyWithImpl<$Res, _$_OwnedDHTRecordPointer>
implements _$$_OwnedDHTRecordPointerCopyWith<$Res> {
__$$_OwnedDHTRecordPointerCopyWithImpl(_$_OwnedDHTRecordPointer _value,
$Res Function(_$_OwnedDHTRecordPointer) _then)
: super(_value, _then);
@pragma('vm:prefer-inline')
@override
$Res call({
Object? recordKey = null,
Object? owner = null,
}) {
return _then(_$_OwnedDHTRecordPointer(
recordKey: null == recordKey
? _value.recordKey
: recordKey // ignore: cast_nullable_to_non_nullable
as Typed<FixedEncodedString43>,
owner: null == owner
? _value.owner
: owner // ignore: cast_nullable_to_non_nullable
as KeyPair,
));
}
}
/// @nodoc
@JsonSerializable()
class _$_OwnedDHTRecordPointer implements _OwnedDHTRecordPointer {
const _$_OwnedDHTRecordPointer(
{required this.recordKey, required this.owner});
factory _$_OwnedDHTRecordPointer.fromJson(Map<String, dynamic> json) =>
_$$_OwnedDHTRecordPointerFromJson(json);
@override
final Typed<FixedEncodedString43> recordKey;
@override
final KeyPair owner;
@override
String toString() {
return 'OwnedDHTRecordPointer(recordKey: $recordKey, owner: $owner)';
}
@override
bool operator ==(dynamic other) {
return identical(this, other) ||
(other.runtimeType == runtimeType &&
other is _$_OwnedDHTRecordPointer &&
(identical(other.recordKey, recordKey) ||
other.recordKey == recordKey) &&
(identical(other.owner, owner) || other.owner == owner));
}
@JsonKey(ignore: true)
@override
int get hashCode => Object.hash(runtimeType, recordKey, owner);
@JsonKey(ignore: true)
@override
@pragma('vm:prefer-inline')
_$$_OwnedDHTRecordPointerCopyWith<_$_OwnedDHTRecordPointer> get copyWith =>
__$$_OwnedDHTRecordPointerCopyWithImpl<_$_OwnedDHTRecordPointer>(
this, _$identity);
@override
Map<String, dynamic> toJson() {
return _$$_OwnedDHTRecordPointerToJson(
this,
);
}
}
abstract class _OwnedDHTRecordPointer implements OwnedDHTRecordPointer {
const factory _OwnedDHTRecordPointer(
{required final Typed<FixedEncodedString43> recordKey,
required final KeyPair owner}) = _$_OwnedDHTRecordPointer;
factory _OwnedDHTRecordPointer.fromJson(Map<String, dynamic> json) =
_$_OwnedDHTRecordPointer.fromJson;
@override
Typed<FixedEncodedString43> get recordKey;
@override
KeyPair get owner;
@override
@JsonKey(ignore: true)
_$$_OwnedDHTRecordPointerCopyWith<_$_OwnedDHTRecordPointer> get copyWith =>
throw _privateConstructorUsedError;
}

View file

@@ -0,0 +1,52 @@
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'dht_record_pool.dart';
// **************************************************************************
// JsonSerializableGenerator
// **************************************************************************
_$_DHTRecordPoolAllocations _$$_DHTRecordPoolAllocationsFromJson(
Map<String, dynamic> json) =>
_$_DHTRecordPoolAllocations(
childrenByParent: IMap<Typed<FixedEncodedString43>,
ISet<Typed<FixedEncodedString43>>>.fromJson(
json['children_by_parent'] as Map<String, dynamic>,
(value) => Typed<FixedEncodedString43>.fromJson(value),
(value) => ISet<Typed<FixedEncodedString43>>.fromJson(
value, (value) => Typed<FixedEncodedString43>.fromJson(value))),
parentByChild: IMap<Typed<FixedEncodedString43>,
Typed<FixedEncodedString43>>.fromJson(
json['parent_by_child'] as Map<String, dynamic>,
(value) => Typed<FixedEncodedString43>.fromJson(value),
(value) => Typed<FixedEncodedString43>.fromJson(value)),
);
Map<String, dynamic> _$$_DHTRecordPoolAllocationsToJson(
_$_DHTRecordPoolAllocations instance) =>
<String, dynamic>{
'children_by_parent': instance.childrenByParent.toJson(
(value) => value.toJson(),
(value) => value.toJson(
(value) => value.toJson(),
),
),
'parent_by_child': instance.parentByChild.toJson(
(value) => value.toJson(),
(value) => value.toJson(),
),
};
_$_OwnedDHTRecordPointer _$$_OwnedDHTRecordPointerFromJson(
Map<String, dynamic> json) =>
_$_OwnedDHTRecordPointer(
recordKey: Typed<FixedEncodedString43>.fromJson(json['record_key']),
owner: KeyPair.fromJson(json['owner']),
);
Map<String, dynamic> _$$_OwnedDHTRecordPointerToJson(
_$_OwnedDHTRecordPointer instance) =>
<String, dynamic>{
'record_key': instance.recordKey.toJson(),
'owner': instance.owner.toJson(),
};

View file

@@ -1,11 +1,11 @@
import 'dart:async';
import 'dart:typed_data';
import 'package:protobuf/protobuf.dart';
import 'package:veilid/veilid.dart';
import '../entities/proto.dart' as proto;
import '../tools/tools.dart';
import 'veilid_support.dart';
import '../../entities/proto.dart' as proto;
import '../../tools/tools.dart';
import '../veilid_support.dart';
class _DHTShortArrayCache {
_DHTShortArrayCache()
@@ -23,11 +23,11 @@ class _DHTShortArrayCache {
}
class DHTShortArray {
DHTShortArray({required DHTRecord dhtRecord})
: _headRecord = dhtRecord,
DHTShortArray._({required DHTRecord headRecord})
: _headRecord = headRecord,
_head = _DHTShortArrayCache() {
late final int stride;
switch (dhtRecord.schema) {
switch (headRecord.schema) {
case DHTSchemaDFLT(oCnt: final oCnt):
stride = oCnt - 1;
if (stride <= 0) {
@@ -49,13 +49,21 @@ class DHTShortArray {
// Cached representation refreshed from head record
_DHTShortArrayCache _head;
static Future<DHTShortArray> create(VeilidRoutingContext dhtctx, int stride,
{DHTRecordCrypto? crypto}) async {
static Future<DHTShortArray> create(
{int stride = maxElements,
VeilidRoutingContext? routingContext,
TypedKey? parent,
DHTRecordCrypto? crypto}) async {
assert(stride <= maxElements, 'stride too long');
final dhtRecord = await DHTRecord.create(dhtctx,
schema: DHTSchema.dflt(oCnt: stride + 1), crypto: crypto);
final pool = await DHTRecordPool.instance();
final dhtRecord = await pool.create(
parent: parent,
routingContext: routingContext,
schema: DHTSchema.dflt(oCnt: stride + 1),
crypto: crypto);
try {
final dhtShortArray = DHTShortArray(dhtRecord: dhtRecord);
final dhtShortArray = DHTShortArray._(headRecord: dhtRecord);
return dhtShortArray;
} on Exception catch (_) {
await dhtRecord.delete();
@@ -63,13 +71,16 @@ class DHTShortArray {
}
}
static Future<DHTShortArray> openRead(
VeilidRoutingContext dhtctx, TypedKey dhtRecordKey,
{DHTRecordCrypto? crypto}) async {
final dhtRecord =
await DHTRecord.openRead(dhtctx, dhtRecordKey, crypto: crypto);
static Future<DHTShortArray> openRead(TypedKey headRecordKey,
{VeilidRoutingContext? routingContext,
TypedKey? parent,
DHTRecordCrypto? crypto}) async {
final pool = await DHTRecordPool.instance();
final dhtRecord = await pool.openRead(headRecordKey,
parent: parent, routingContext: routingContext, crypto: crypto);
try {
final dhtShortArray = DHTShortArray(dhtRecord: dhtRecord);
final dhtShortArray = DHTShortArray._(headRecord: dhtRecord);
await dhtShortArray._refreshHead();
return dhtShortArray;
} on Exception catch (_) {
@@ -79,15 +90,17 @@ class DHTShortArray {
}
static Future<DHTShortArray> openWrite(
VeilidRoutingContext dhtctx,
TypedKey dhtRecordKey,
TypedKey headRecordKey,
KeyPair writer, {
VeilidRoutingContext? routingContext,
TypedKey? parent,
DHTRecordCrypto? crypto,
}) async {
final dhtRecord =
await DHTRecord.openWrite(dhtctx, dhtRecordKey, writer, crypto: crypto);
final pool = await DHTRecordPool.instance();
final dhtRecord = await pool.openWrite(headRecordKey, writer,
parent: parent, routingContext: routingContext, crypto: crypto);
try {
final dhtShortArray = DHTShortArray(dhtRecord: dhtRecord);
final dhtShortArray = DHTShortArray._(headRecord: dhtRecord);
await dhtShortArray._refreshHead();
return dhtShortArray;
} on Exception catch (_) {
@@ -96,6 +109,22 @@ class DHTShortArray {
}
}
static Future<DHTShortArray> openOwned(
OwnedDHTRecordPointer ownedDHTRecordPointer, {
required TypedKey parent,
VeilidRoutingContext? routingContext,
DHTRecordCrypto? crypto,
}) =>
openWrite(
ownedDHTRecordPointer.recordKey,
ownedDHTRecordPointer.owner,
routingContext: routingContext,
parent: parent,
crypto: crypto,
);
DHTRecord get record => _headRecord;
////////////////////////////////////////////////////////////////
/// Serialize and write out the current head record, possibly updating it
@@ -151,11 +180,21 @@ class DHTShortArray {
/// Open a linked record for reading or writing, same as the head record
Future<DHTRecord> _openLinkedRecord(TypedKey recordKey) async {
final pool = await DHTRecordPool.instance();
final writer = _headRecord.writer;
return (writer != null)
? await DHTRecord.openWrite(
_headRecord.routingContext, recordKey, writer)
: await DHTRecord.openRead(_headRecord.routingContext, recordKey);
? await pool.openWrite(
recordKey,
writer,
parent: _headRecord.key,
routingContext: _headRecord.routingContext,
)
: await pool.openRead(
recordKey,
parent: _headRecord.key,
routingContext: _headRecord.routingContext,
);
}
/// Validate a new head record
@@ -242,7 +281,7 @@ class DHTShortArray {
await Future.wait(futures);
}
Future<T> scope<T>(Future<T> Function(DHTShortArray) scopeFunction) async {
Future<T> scope<T>(FutureOr<T> Function(DHTShortArray) scopeFunction) async {
try {
return await scopeFunction(this);
} finally {
@@ -251,7 +290,7 @@ class DHTShortArray {
}
Future<T> deleteScope<T>(
Future<T> Function(DHTShortArray) scopeFunction) async {
FutureOr<T> Function(DHTShortArray) scopeFunction) async {
try {
final out = await scopeFunction(this);
await close();
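A sketch of how the new pool-aware DHTShortArray constructors compose with OwnedDHTRecordPointer. The constructor names and parameters are from this diff; the stride value, the account record used as parent, and the two-device framing are illustrative assumptions.

// Device A: create the array under a local parent record and hand back the
// owned pointer, which the caller is expected to persist encrypted.
Future<OwnedDHTRecordPointer> publishShortArray(DHTRecord accountRecord) async {
  final shortArray =
      await DHTShortArray.create(stride: 32, parent: accountRecord.key);
  final ownedPointer = shortArray.record.ownedDHTRecordPointer;
  await shortArray.close();
  return ownedPointer;
}

// Device B: reopen the same array writable from the synced pointer. The
// parent is required because openOwned registers the dependency with the
// local pool on this device.
Future<DHTShortArray> openSyncedShortArray(
        OwnedDHTRecordPointer ownedPointer, DHTRecord accountRecord) =>
    DHTShortArray.openOwned(ownedPointer, parent: accountRecord.key);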

View file

@@ -0,0 +1,4 @@
export 'dht_record.dart';
export 'dht_record_crypto.dart';
export 'dht_record_pool.dart';
export 'dht_short_array.dart';

View file

@@ -3,7 +3,6 @@
import 'dart:typed_data';
import 'package:fast_immutable_collections/fast_immutable_collections.dart';
import 'package:veilid/veilid.dart';
import '../entities/identity.dart';
import 'veilid_support.dart';
@@ -11,7 +10,7 @@ import 'veilid_support.dart';
// Identity Master with secrets
// Not freezed because we never persist this class in its entirety
class IdentityMasterWithSecrets {
IdentityMasterWithSecrets(
IdentityMasterWithSecrets._(
{required this.identityMaster,
required this.masterSecret,
required this.identitySecret});
@@ -19,28 +18,15 @@ class IdentityMasterWithSecrets {
SecretKey masterSecret;
SecretKey identitySecret;
Future<void> delete() async {
final veilid = await eventualVeilid.future;
final dhtctx = (await veilid.routingContext())
.withPrivacy()
.withSequencing(Sequencing.ensureOrdered);
await dhtctx.deleteDHTRecord(identityMaster.masterRecordKey);
await dhtctx.deleteDHTRecord(identityMaster.identityRecordKey);
}
}
/// Creates a new master identity and returns it with its secrets
static Future<IdentityMasterWithSecrets> create() async {
final pool = await DHTRecordPool.instance();
/// Creates a new master identity and returns it with its secrets
Future<IdentityMasterWithSecrets> newIdentityMaster() async {
final veilid = await eventualVeilid.future;
final dhtctx = (await veilid.routingContext())
.withPrivacy()
.withSequencing(Sequencing.ensureOrdered);
// IdentityMaster DHT record is public/unencrypted
return (await DHTRecord.create(dhtctx, crypto: const DHTRecordCryptoPublic()))
.deleteScope((masterRec) async {
// Identity record is private
return (await DHTRecord.create(dhtctx)).deleteScope((identityRec) async {
// IdentityMaster DHT record is public/unencrypted
return (await pool.create(crypto: const DHTRecordCryptoPublic()))
.deleteScope((masterRec) async {
// Identity record is private
final identityRec = await pool.create(parent: masterRec.key);
// Make IdentityMaster
final masterRecordKey = masterRec.key;
final masterOwner = masterRec.ownerKeyPair!;
@@ -56,7 +42,7 @@ Future<IdentityMasterWithSecrets> newIdentityMaster() async {
assert(masterRecordKey.kind == identityRecordKey.kind,
'new master and identity should have same cryptosystem');
final crypto = await veilid.getCryptoSystem(masterRecordKey.kind);
final crypto = await pool.veilid.getCryptoSystem(masterRecordKey.kind);
final identitySignature =
await crypto.signWithKeyPair(masterOwner, identitySigBuf.toBytes());
@@ -80,10 +66,16 @@ Future<IdentityMasterWithSecrets> newIdentityMaster() async {
// Write empty identity to identity dht key
await identityRec.eventualWriteJson(identity);
return IdentityMasterWithSecrets(
return IdentityMasterWithSecrets._(
identityMaster: identityMaster,
masterSecret: masterOwner.secret,
identitySecret: identityOwner.secret);
});
});
}
/// Deletes this master identity and the identity record created under it
Future<void> delete() async {
final pool = await DHTRecordPool.instance();
await pool.deleteDeep(identityMaster.masterRecordKey);
}
}
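A sketch of the lifecycle this refactor gives IdentityMasterWithSecrets: the constructor is private, so callers go through the static create(), and cleanup now runs through the pool's deep delete instead of ad-hoc routing-context calls. Only create() and delete() come from the diff; the wrapping function and the note about persisting secrets are assumptions.

Future<void> identityLifecycleSketch() async {
  // Creates the public master record and, as its child, the private identity
  // record, writing the initial (empty) identity into it
  final identityMasterWithSecrets = await IdentityMasterWithSecrets.create();

  // masterSecret and identitySecret are only held in memory here; persisting
  // them (encrypted) is left to the caller (assumption about intended usage)

  // Tears everything down: deleteDeep removes the master record and the
  // identity record registered under it
  await identityMasterWithSecrets.delete();
}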

View file

@@ -3,12 +3,11 @@ import 'dart:async';
import 'package:veilid/veilid.dart';
import '../log/log.dart';
import '../providers/providers.dart';
import '../providers/connection_state.dart';
import 'config.dart';
import 'veilid_log.dart';
class Processor {
Processor();
String _veilidVersion = '';
bool _startedUp = false;

View file

@@ -1,14 +1,11 @@
import 'dart:async';
import 'package:flutter/foundation.dart';
import 'package:riverpod_annotation/riverpod_annotation.dart';
import 'package:veilid/veilid.dart';
import 'processor.dart';
import 'veilid_log.dart';
part 'veilid_init.g.dart';
Future<String> getVeilidVersion() async {
String veilidVersion;
try {
@@ -72,8 +69,3 @@ Future<void> initializeVeilid() async {
// Share the initialized veilid instance to the rest of the app
eventualVeilid.complete(Veilid.instance);
}
// Expose the Veilid instance as a FutureProvider
@riverpod
FutureOr<Veilid> veilidInstance(VeilidInstanceRef ref) async =>
await eventualVeilid.future;

View file

@@ -1,24 +0,0 @@
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'veilid_init.dart';
// **************************************************************************
// RiverpodGenerator
// **************************************************************************
String _$veilidInstanceHash() => r'cca5cf288bafc4a051a1713e285f4c1d3ef4b680';
/// See also [veilidInstance].
@ProviderFor(veilidInstance)
final veilidInstanceProvider = AutoDisposeFutureProvider<Veilid>.internal(
veilidInstance,
name: r'veilidInstanceProvider',
debugGetCreateSourceHash: const bool.fromEnvironment('dart.vm.product')
? null
: _$veilidInstanceHash,
dependencies: null,
allTransitiveDependencies: null,
);
typedef VeilidInstanceRef = AutoDisposeFutureProviderRef<Veilid>;
// ignore_for_file: unnecessary_raw_strings, subtype_of_sealed_class, invalid_use_of_internal_member, do_not_use_environment, prefer_const_constructors, public_member_api_docs, avoid_private_typedef_functions

View file

@@ -1,6 +1,7 @@
export 'package:veilid/veilid.dart';
export 'config.dart';
export 'dht_record.dart';
export 'dht_record_crypto.dart';
export 'dht_support/dht_support.dart';
export 'identity_master.dart';
export 'processor.dart';
export 'table_db.dart';