debugging work

This commit is contained in:
Christien Rioux 2025-03-22 21:43:37 -04:00
parent 739df7c427
commit d6b1c20906
71 changed files with 4155 additions and 3616 deletions

View file

@ -1,5 +1,6 @@
import '../../proto/dht.pb.dart' as dhtproto;
import '../../proto/proto.dart' as veilidproto;
import '../../src/dynamic_debug.dart';
import '../dht_support.dart';
export '../../proto/dht.pb.dart';
@ -23,3 +24,44 @@ extension ProtoOwnedDHTRecordPointer on dhtproto.OwnedDHTRecordPointer {
/// Converts this protobuf pointer into the veilid-native
/// [OwnedDHTRecordPointer] by converting each field via its own toVeilid().
OwnedDHTRecordPointer toVeilid() => OwnedDHTRecordPointer(
    recordKey: recordKey.toVeilid(), owner: owner.toVeilid());
}
/// Hooks the veilid DHT protobuf message types into [DynamicDebug].
///
/// Installs a converter that renders each known dhtproto message as a plain
/// map of its fields (tagged with an r'$runtimeType' entry); any other value
/// is passed through unchanged.
void registerVeilidDHTProtoToDebug() {
  dynamic protoToDebug(dynamic value) => switch (value) {
        dhtproto.OwnedDHTRecordPointer p => {
            r'$runtimeType': p.runtimeType,
            'recordKey': p.recordKey,
            'owner': p.owner,
          },
        dhtproto.DHTData d => {
            r'$runtimeType': d.runtimeType,
            'keys': d.keys,
            'hash': d.hash,
            'chunk': d.chunk,
            'size': d.size
          },
        dhtproto.DHTLog l => {
            r'$runtimeType': l.runtimeType,
            'head': l.head,
            'tail': l.tail,
            'stride': l.stride,
          },
        dhtproto.DHTShortArray a => {
            r'$runtimeType': a.runtimeType,
            'keys': a.keys,
            'index': a.index,
            'seqs': a.seqs,
          },
        _ => value,
      };

  DynamicDebug.registerToDebug(protoToDebug);
}

View file

@ -31,6 +31,14 @@ class DHTLogStateData<T> extends Equatable {
// Equatable identity: every rendered field participates in ==/hashCode.
@override
List<Object?> get props => [length, window, windowTail, windowSize, follow];
// Debug/log representation; the window list is rendered through
// DynamicDebug.toDebug rather than its default toString.
@override
String toString() => 'DHTLogStateData('
'length: $length, '
'windowTail: $windowTail, '
'windowSize: $windowSize, '
'follow: $follow, '
'window: ${DynamicDebug.toDebug(window)})';
}
typedef DHTLogState<T> = AsyncValue<DHTLogStateData<T>>;

View file

@ -126,10 +126,7 @@ class _DHTLogSpine {
// Deletes the underlying spine record, serialized behind the spine mutex.
Future<bool> delete() async => _spineMutex.protect(_spineRecord.delete);
/// Runs [closure] with exclusive access to this spine.
///
/// All spine operations are serialized behind [_spineMutex]. (The scraped
/// diff left both the old block-bodied lambda and the new expression body in
/// place; this keeps only the added expression-body form.)
Future<T> operate<T>(Future<T> Function(_DHTLogSpine) closure) async =>
    _spineMutex.protect(() async => closure(this));
Future<T> operateAppend<T>(Future<T> Function(_DHTLogSpine) closure) async =>
_spineMutex.protect(() async {

View file

@ -511,7 +511,7 @@ class DHTRecord implements DHTDeleteable<DHTRecord> {
key,
subkeys: [ValueSubkeyRange.single(subkey)],
);
return rr.localSeqs.firstOrNull ?? 0xFFFFFFFF;
return rr.localSeqs.firstOrNull ?? emptySeq;
}
void _addValueChange(
@ -566,4 +566,6 @@ class DHTRecord implements DHTDeleteable<DHTRecord> {
int _openCount;
StreamController<DHTRecordWatchChange>? _watchController;
_WatchState? _watchState;
static const int emptySeq = 0xFFFFFFFF;
}

View file

@ -9,6 +9,7 @@ import 'package:freezed_annotation/freezed_annotation.dart';
import 'package:protobuf/protobuf.dart';
import '../../../../veilid_support.dart';
import 'extensions.dart';
export 'package:fast_immutable_collections/fast_immutable_collections.dart'
show Output;
@ -32,7 +33,7 @@ typedef DHTRecordPoolLogger = void Function(String message);
/// Record pool that managed DHTRecords and allows for tagged deletion
/// String versions of keys due to IMap<> json unsupported in key
@freezed
class DHTRecordPoolAllocations with _$DHTRecordPoolAllocations {
sealed class DHTRecordPoolAllocations with _$DHTRecordPoolAllocations {
const factory DHTRecordPoolAllocations({
@Default(IMapConst<String, ISet<TypedKey>>({}))
IMap<String, ISet<TypedKey>> childrenByParent,
@ -49,7 +50,7 @@ class DHTRecordPoolAllocations with _$DHTRecordPoolAllocations {
/// Pointer to an owned record, with key, owner key and owner secret
/// Ensure that these are only serialized encrypted
@freezed
class OwnedDHTRecordPointer with _$OwnedDHTRecordPointer {
sealed class OwnedDHTRecordPointer with _$OwnedDHTRecordPointer {
const factory OwnedDHTRecordPointer({
required TypedKey recordKey,
required KeyPair owner,
@ -843,8 +844,12 @@ class DHTRecordPool with TableDBBackedJson<DHTRecordPoolAllocations> {
log('Timeout in watch cancel for key=$openedRecordKey');
} on VeilidAPIException catch (e) {
// Failed to cancel DHT watch, try again next tick
log('Exception in watch cancel for key=$openedRecordKey: $e');
log('VeilidAPIException in watch cancel for key=$openedRecordKey: $e');
} catch (e) {
log('Unhandled exception in watch cancel for key=$openedRecordKey: $e');
rethrow;
}
return;
}
@ -887,7 +892,10 @@ class DHTRecordPool with TableDBBackedJson<DHTRecordPoolAllocations> {
log('Timeout in watch update for key=$openedRecordKey');
} on VeilidAPIException catch (e) {
// Failed to update DHT watch, try again next tick
log('Exception in watch update for key=$openedRecordKey: $e');
log('VeilidAPIException in watch update for key=$openedRecordKey: $e');
} catch (e) {
log('Unhandled exception in watch update for key=$openedRecordKey: $e');
rethrow;
}
// If we still need a state update after this then do a poll instead
@ -904,28 +912,29 @@ class DHTRecordPool with TableDBBackedJson<DHTRecordPoolAllocations> {
singleFuture((this, _sfPollWatch, openedRecordKey), () async {
final dhtctx = openedRecordInfo.shared.defaultRoutingContext;
// Get single subkey to poll
// XXX: veilid api limits this for now until everyone supports
// inspectDHTRecord
final pollSubkey = unionWatchState.subkeys?.firstSubkey;
if (pollSubkey == null) {
return;
final currentReport = await dhtctx.inspectDHTRecord(openedRecordKey,
subkeys: unionWatchState.subkeys, scope: DHTReportScope.syncGet);
final fsc = currentReport.firstSeqChange;
if (fsc == null) {
return null;
}
final pollSubkeys = [ValueSubkeyRange.single(pollSubkey)];
final newerSubkeys = currentReport.newerSubkeys;
final currentReport =
await dhtctx.inspectDHTRecord(openedRecordKey, subkeys: pollSubkeys);
final currentSeq = currentReport.localSeqs.firstOrNull ?? -1;
final valueData = await dhtctx.getDHTValue(openedRecordKey, pollSubkey,
final valueData = await dhtctx.getDHTValue(openedRecordKey, fsc.subkey,
forceRefresh: true);
if (valueData == null) {
return;
}
if (valueData.seq > currentSeq) {
if (valueData.seq < fsc.newSeq) {
log('inspect returned a newer seq than get: ${valueData.seq} < $fsc');
}
if (valueData.seq > fsc.oldSeq && valueData.seq != DHTRecord.emptySeq) {
processRemoteValueChange(VeilidUpdateValueChange(
key: openedRecordKey,
subkeys: pollSubkeys,
subkeys: newerSubkeys,
count: 0xFFFFFFFF,
value: valueData));
}

View file

@ -1,3 +1,4 @@
// dart format width=80
// coverage:ignore-file
// GENERATED CODE - DO NOT MODIFY BY HAND
// ignore_for_file: type=lint
@ -9,183 +10,32 @@ part of 'dht_record_pool.dart';
// FreezedGenerator
// **************************************************************************
// dart format off
/// Returns its argument unchanged; passed to the generated copyWith
/// implementations as the terminal continuation.
T _$identity<T>(T value) {
  return value;
}
// Sentinel error thrown by freezed-generated stubs when a hand-declared
// private constructor is used to reach generated members directly.
final _privateConstructorUsedError = UnsupportedError(
'It seems like you constructed your class using `MyClass._()`. This constructor is only meant to be used by freezed and you are not supposed to need it nor use it.\nPlease check the documentation here for more information: https://github.com/rrousselGit/freezed#adding-getters-and-methods-to-our-models');
/// Deserializes a [DHTRecordPoolAllocations] from its JSON map form by
/// delegating to the generated implementation class.
DHTRecordPoolAllocations _$DHTRecordPoolAllocationsFromJson(
    Map<String, dynamic> json) {
  return _DHTRecordPoolAllocations.fromJson(json);
}
/// @nodoc
mixin _$DHTRecordPoolAllocations {
IMap<String, ISet<Typed<FixedEncodedString43>>> get childrenByParent =>
throw _privateConstructorUsedError;
IMap<String, Typed<FixedEncodedString43>> get parentByChild =>
throw _privateConstructorUsedError;
ISet<Typed<FixedEncodedString43>> get rootRecords =>
throw _privateConstructorUsedError;
IMap<String, String> get debugNames => throw _privateConstructorUsedError;
/// Serializes this DHTRecordPoolAllocations to a JSON map.
Map<String, dynamic> toJson() => throw _privateConstructorUsedError;
IMap<String, ISet<TypedKey>> get childrenByParent;
IMap<String, TypedKey> get parentByChild;
ISet<TypedKey> get rootRecords;
IMap<String, String> get debugNames;
/// Create a copy of DHTRecordPoolAllocations
/// with the given fields replaced by the non-null parameter values.
@JsonKey(includeFromJson: false, includeToJson: false)
@pragma('vm:prefer-inline')
$DHTRecordPoolAllocationsCopyWith<DHTRecordPoolAllocations> get copyWith =>
throw _privateConstructorUsedError;
}
_$DHTRecordPoolAllocationsCopyWithImpl<DHTRecordPoolAllocations>(
this as DHTRecordPoolAllocations, _$identity);
/// @nodoc
abstract class $DHTRecordPoolAllocationsCopyWith<$Res> {
factory $DHTRecordPoolAllocationsCopyWith(DHTRecordPoolAllocations value,
$Res Function(DHTRecordPoolAllocations) then) =
_$DHTRecordPoolAllocationsCopyWithImpl<$Res, DHTRecordPoolAllocations>;
@useResult
$Res call(
{IMap<String, ISet<Typed<FixedEncodedString43>>> childrenByParent,
IMap<String, Typed<FixedEncodedString43>> parentByChild,
ISet<Typed<FixedEncodedString43>> rootRecords,
IMap<String, String> debugNames});
}
/// @nodoc
class _$DHTRecordPoolAllocationsCopyWithImpl<$Res,
$Val extends DHTRecordPoolAllocations>
implements $DHTRecordPoolAllocationsCopyWith<$Res> {
_$DHTRecordPoolAllocationsCopyWithImpl(this._value, this._then);
// ignore: unused_field
final $Val _value;
// ignore: unused_field
final $Res Function($Val) _then;
/// Create a copy of DHTRecordPoolAllocations
/// with the given fields replaced by the non-null parameter values.
@pragma('vm:prefer-inline')
@override
$Res call({
Object? childrenByParent = null,
Object? parentByChild = null,
Object? rootRecords = null,
Object? debugNames = null,
}) {
return _then(_value.copyWith(
childrenByParent: null == childrenByParent
? _value.childrenByParent
: childrenByParent // ignore: cast_nullable_to_non_nullable
as IMap<String, ISet<Typed<FixedEncodedString43>>>,
parentByChild: null == parentByChild
? _value.parentByChild
: parentByChild // ignore: cast_nullable_to_non_nullable
as IMap<String, Typed<FixedEncodedString43>>,
rootRecords: null == rootRecords
? _value.rootRecords
: rootRecords // ignore: cast_nullable_to_non_nullable
as ISet<Typed<FixedEncodedString43>>,
debugNames: null == debugNames
? _value.debugNames
: debugNames // ignore: cast_nullable_to_non_nullable
as IMap<String, String>,
) as $Val);
}
}
/// @nodoc
abstract class _$$DHTRecordPoolAllocationsImplCopyWith<$Res>
implements $DHTRecordPoolAllocationsCopyWith<$Res> {
factory _$$DHTRecordPoolAllocationsImplCopyWith(
_$DHTRecordPoolAllocationsImpl value,
$Res Function(_$DHTRecordPoolAllocationsImpl) then) =
__$$DHTRecordPoolAllocationsImplCopyWithImpl<$Res>;
@override
@useResult
$Res call(
{IMap<String, ISet<Typed<FixedEncodedString43>>> childrenByParent,
IMap<String, Typed<FixedEncodedString43>> parentByChild,
ISet<Typed<FixedEncodedString43>> rootRecords,
IMap<String, String> debugNames});
}
/// @nodoc
class __$$DHTRecordPoolAllocationsImplCopyWithImpl<$Res>
extends _$DHTRecordPoolAllocationsCopyWithImpl<$Res,
_$DHTRecordPoolAllocationsImpl>
implements _$$DHTRecordPoolAllocationsImplCopyWith<$Res> {
__$$DHTRecordPoolAllocationsImplCopyWithImpl(
_$DHTRecordPoolAllocationsImpl _value,
$Res Function(_$DHTRecordPoolAllocationsImpl) _then)
: super(_value, _then);
/// Create a copy of DHTRecordPoolAllocations
/// with the given fields replaced by the non-null parameter values.
@pragma('vm:prefer-inline')
@override
$Res call({
Object? childrenByParent = null,
Object? parentByChild = null,
Object? rootRecords = null,
Object? debugNames = null,
}) {
return _then(_$DHTRecordPoolAllocationsImpl(
childrenByParent: null == childrenByParent
? _value.childrenByParent
: childrenByParent // ignore: cast_nullable_to_non_nullable
as IMap<String, ISet<Typed<FixedEncodedString43>>>,
parentByChild: null == parentByChild
? _value.parentByChild
: parentByChild // ignore: cast_nullable_to_non_nullable
as IMap<String, Typed<FixedEncodedString43>>,
rootRecords: null == rootRecords
? _value.rootRecords
: rootRecords // ignore: cast_nullable_to_non_nullable
as ISet<Typed<FixedEncodedString43>>,
debugNames: null == debugNames
? _value.debugNames
: debugNames // ignore: cast_nullable_to_non_nullable
as IMap<String, String>,
));
}
}
/// @nodoc
@JsonSerializable()
class _$DHTRecordPoolAllocationsImpl implements _DHTRecordPoolAllocations {
const _$DHTRecordPoolAllocationsImpl(
{this.childrenByParent = const IMapConst<String, ISet<TypedKey>>({}),
this.parentByChild = const IMapConst<String, TypedKey>({}),
this.rootRecords = const ISetConst<TypedKey>({}),
this.debugNames = const IMapConst<String, String>({})});
factory _$DHTRecordPoolAllocationsImpl.fromJson(Map<String, dynamic> json) =>
_$$DHTRecordPoolAllocationsImplFromJson(json);
@override
@JsonKey()
final IMap<String, ISet<Typed<FixedEncodedString43>>> childrenByParent;
@override
@JsonKey()
final IMap<String, Typed<FixedEncodedString43>> parentByChild;
@override
@JsonKey()
final ISet<Typed<FixedEncodedString43>> rootRecords;
@override
@JsonKey()
final IMap<String, String> debugNames;
@override
String toString() {
return 'DHTRecordPoolAllocations(childrenByParent: $childrenByParent, parentByChild: $parentByChild, rootRecords: $rootRecords, debugNames: $debugNames)';
}
/// Serializes this DHTRecordPoolAllocations to a JSON map.
Map<String, dynamic> toJson();
@override
bool operator ==(Object other) {
return identical(this, other) ||
(other.runtimeType == runtimeType &&
other is _$DHTRecordPoolAllocationsImpl &&
other is DHTRecordPoolAllocations &&
(identical(other.childrenByParent, childrenByParent) ||
other.childrenByParent == childrenByParent) &&
(identical(other.parentByChild, parentByChild) ||
@ -201,178 +51,205 @@ class _$DHTRecordPoolAllocationsImpl implements _DHTRecordPoolAllocations {
int get hashCode => Object.hash(runtimeType, childrenByParent, parentByChild,
const DeepCollectionEquality().hash(rootRecords), debugNames);
/// Create a copy of DHTRecordPoolAllocations
/// with the given fields replaced by the non-null parameter values.
@JsonKey(includeFromJson: false, includeToJson: false)
@override
@pragma('vm:prefer-inline')
_$$DHTRecordPoolAllocationsImplCopyWith<_$DHTRecordPoolAllocationsImpl>
get copyWith => __$$DHTRecordPoolAllocationsImplCopyWithImpl<
_$DHTRecordPoolAllocationsImpl>(this, _$identity);
@override
Map<String, dynamic> toJson() {
return _$$DHTRecordPoolAllocationsImplToJson(
this,
);
}
}
abstract class _DHTRecordPoolAllocations implements DHTRecordPoolAllocations {
const factory _DHTRecordPoolAllocations(
{final IMap<String, ISet<Typed<FixedEncodedString43>>> childrenByParent,
final IMap<String, Typed<FixedEncodedString43>> parentByChild,
final ISet<Typed<FixedEncodedString43>> rootRecords,
final IMap<String, String> debugNames}) = _$DHTRecordPoolAllocationsImpl;
factory _DHTRecordPoolAllocations.fromJson(Map<String, dynamic> json) =
_$DHTRecordPoolAllocationsImpl.fromJson;
@override
IMap<String, ISet<Typed<FixedEncodedString43>>> get childrenByParent;
@override
IMap<String, Typed<FixedEncodedString43>> get parentByChild;
@override
ISet<Typed<FixedEncodedString43>> get rootRecords;
@override
IMap<String, String> get debugNames;
/// Create a copy of DHTRecordPoolAllocations
/// with the given fields replaced by the non-null parameter values.
@override
@JsonKey(includeFromJson: false, includeToJson: false)
_$$DHTRecordPoolAllocationsImplCopyWith<_$DHTRecordPoolAllocationsImpl>
get copyWith => throw _privateConstructorUsedError;
}
OwnedDHTRecordPointer _$OwnedDHTRecordPointerFromJson(
Map<String, dynamic> json) {
return _OwnedDHTRecordPointer.fromJson(json);
}
/// @nodoc
mixin _$OwnedDHTRecordPointer {
Typed<FixedEncodedString43> get recordKey =>
throw _privateConstructorUsedError;
KeyPair get owner => throw _privateConstructorUsedError;
/// Serializes this OwnedDHTRecordPointer to a JSON map.
Map<String, dynamic> toJson() => throw _privateConstructorUsedError;
/// Create a copy of OwnedDHTRecordPointer
/// with the given fields replaced by the non-null parameter values.
@JsonKey(includeFromJson: false, includeToJson: false)
$OwnedDHTRecordPointerCopyWith<OwnedDHTRecordPointer> get copyWith =>
throw _privateConstructorUsedError;
}
/// @nodoc
abstract class $OwnedDHTRecordPointerCopyWith<$Res> {
factory $OwnedDHTRecordPointerCopyWith(OwnedDHTRecordPointer value,
$Res Function(OwnedDHTRecordPointer) then) =
_$OwnedDHTRecordPointerCopyWithImpl<$Res, OwnedDHTRecordPointer>;
@useResult
$Res call({Typed<FixedEncodedString43> recordKey, KeyPair owner});
}
/// @nodoc
class _$OwnedDHTRecordPointerCopyWithImpl<$Res,
$Val extends OwnedDHTRecordPointer>
implements $OwnedDHTRecordPointerCopyWith<$Res> {
_$OwnedDHTRecordPointerCopyWithImpl(this._value, this._then);
// ignore: unused_field
final $Val _value;
// ignore: unused_field
final $Res Function($Val) _then;
/// Create a copy of OwnedDHTRecordPointer
/// with the given fields replaced by the non-null parameter values.
@pragma('vm:prefer-inline')
@override
$Res call({
Object? recordKey = null,
Object? owner = null,
}) {
return _then(_value.copyWith(
recordKey: null == recordKey
? _value.recordKey
: recordKey // ignore: cast_nullable_to_non_nullable
as Typed<FixedEncodedString43>,
owner: null == owner
? _value.owner
: owner // ignore: cast_nullable_to_non_nullable
as KeyPair,
) as $Val);
String toString() {
return 'DHTRecordPoolAllocations(childrenByParent: $childrenByParent, parentByChild: $parentByChild, rootRecords: $rootRecords, debugNames: $debugNames)';
}
}
/// @nodoc
abstract class _$$OwnedDHTRecordPointerImplCopyWith<$Res>
implements $OwnedDHTRecordPointerCopyWith<$Res> {
factory _$$OwnedDHTRecordPointerImplCopyWith(
_$OwnedDHTRecordPointerImpl value,
$Res Function(_$OwnedDHTRecordPointerImpl) then) =
__$$OwnedDHTRecordPointerImplCopyWithImpl<$Res>;
@override
abstract mixin class $DHTRecordPoolAllocationsCopyWith<$Res> {
factory $DHTRecordPoolAllocationsCopyWith(DHTRecordPoolAllocations value,
$Res Function(DHTRecordPoolAllocations) _then) =
_$DHTRecordPoolAllocationsCopyWithImpl;
@useResult
$Res call({Typed<FixedEncodedString43> recordKey, KeyPair owner});
$Res call(
{IMap<String, ISet<Typed<FixedEncodedString43>>> childrenByParent,
IMap<String, Typed<FixedEncodedString43>> parentByChild,
ISet<Typed<FixedEncodedString43>> rootRecords,
IMap<String, String> debugNames});
}
/// @nodoc
class __$$OwnedDHTRecordPointerImplCopyWithImpl<$Res>
extends _$OwnedDHTRecordPointerCopyWithImpl<$Res,
_$OwnedDHTRecordPointerImpl>
implements _$$OwnedDHTRecordPointerImplCopyWith<$Res> {
__$$OwnedDHTRecordPointerImplCopyWithImpl(_$OwnedDHTRecordPointerImpl _value,
$Res Function(_$OwnedDHTRecordPointerImpl) _then)
: super(_value, _then);
class _$DHTRecordPoolAllocationsCopyWithImpl<$Res>
implements $DHTRecordPoolAllocationsCopyWith<$Res> {
_$DHTRecordPoolAllocationsCopyWithImpl(this._self, this._then);
/// Create a copy of OwnedDHTRecordPointer
final DHTRecordPoolAllocations _self;
final $Res Function(DHTRecordPoolAllocations) _then;
/// Create a copy of DHTRecordPoolAllocations
/// with the given fields replaced by the non-null parameter values.
@pragma('vm:prefer-inline')
@override
$Res call({
Object? recordKey = null,
Object? owner = null,
Object? childrenByParent = null,
Object? parentByChild = null,
Object? rootRecords = null,
Object? debugNames = null,
}) {
return _then(_$OwnedDHTRecordPointerImpl(
recordKey: null == recordKey
? _value.recordKey
: recordKey // ignore: cast_nullable_to_non_nullable
as Typed<FixedEncodedString43>,
owner: null == owner
? _value.owner
: owner // ignore: cast_nullable_to_non_nullable
as KeyPair,
return _then(_self.copyWith(
childrenByParent: null == childrenByParent
? _self.childrenByParent!
: childrenByParent // ignore: cast_nullable_to_non_nullable
as IMap<String, ISet<Typed<FixedEncodedString43>>>,
parentByChild: null == parentByChild
? _self.parentByChild!
: parentByChild // ignore: cast_nullable_to_non_nullable
as IMap<String, Typed<FixedEncodedString43>>,
rootRecords: null == rootRecords
? _self.rootRecords!
: rootRecords // ignore: cast_nullable_to_non_nullable
as ISet<Typed<FixedEncodedString43>>,
debugNames: null == debugNames
? _self.debugNames
: debugNames // ignore: cast_nullable_to_non_nullable
as IMap<String, String>,
));
}
}
/// @nodoc
@JsonSerializable()
class _$OwnedDHTRecordPointerImpl implements _OwnedDHTRecordPointer {
const _$OwnedDHTRecordPointerImpl(
{required this.recordKey, required this.owner});
factory _$OwnedDHTRecordPointerImpl.fromJson(Map<String, dynamic> json) =>
_$$OwnedDHTRecordPointerImplFromJson(json);
class _DHTRecordPoolAllocations implements DHTRecordPoolAllocations {
const _DHTRecordPoolAllocations(
{this.childrenByParent = const IMapConst<String, ISet<TypedKey>>({}),
this.parentByChild = const IMapConst<String, TypedKey>({}),
this.rootRecords = const ISetConst<TypedKey>({}),
this.debugNames = const IMapConst<String, String>({})});
factory _DHTRecordPoolAllocations.fromJson(Map<String, dynamic> json) =>
_$DHTRecordPoolAllocationsFromJson(json);
@override
final Typed<FixedEncodedString43> recordKey;
@JsonKey()
final IMap<String, ISet<Typed<FixedEncodedString43>>> childrenByParent;
@override
final KeyPair owner;
@JsonKey()
final IMap<String, Typed<FixedEncodedString43>> parentByChild;
@override
@JsonKey()
final ISet<Typed<FixedEncodedString43>> rootRecords;
@override
@JsonKey()
final IMap<String, String> debugNames;
/// Create a copy of DHTRecordPoolAllocations
/// with the given fields replaced by the non-null parameter values.
@override
@JsonKey(includeFromJson: false, includeToJson: false)
@pragma('vm:prefer-inline')
_$DHTRecordPoolAllocationsCopyWith<_DHTRecordPoolAllocations> get copyWith =>
__$DHTRecordPoolAllocationsCopyWithImpl<_DHTRecordPoolAllocations>(
this, _$identity);
@override
String toString() {
return 'OwnedDHTRecordPointer(recordKey: $recordKey, owner: $owner)';
Map<String, dynamic> toJson() {
return _$DHTRecordPoolAllocationsToJson(
this,
);
}
@override
bool operator ==(Object other) {
return identical(this, other) ||
(other.runtimeType == runtimeType &&
other is _$OwnedDHTRecordPointerImpl &&
other is _DHTRecordPoolAllocations &&
(identical(other.childrenByParent, childrenByParent) ||
other.childrenByParent == childrenByParent) &&
(identical(other.parentByChild, parentByChild) ||
other.parentByChild == parentByChild) &&
const DeepCollectionEquality()
.equals(other.rootRecords, rootRecords) &&
(identical(other.debugNames, debugNames) ||
other.debugNames == debugNames));
}
@JsonKey(includeFromJson: false, includeToJson: false)
@override
int get hashCode => Object.hash(runtimeType, childrenByParent, parentByChild,
const DeepCollectionEquality().hash(rootRecords), debugNames);
@override
String toString() {
return 'DHTRecordPoolAllocations(childrenByParent: $childrenByParent, parentByChild: $parentByChild, rootRecords: $rootRecords, debugNames: $debugNames)';
}
}
/// @nodoc
abstract mixin class _$DHTRecordPoolAllocationsCopyWith<$Res>
implements $DHTRecordPoolAllocationsCopyWith<$Res> {
factory _$DHTRecordPoolAllocationsCopyWith(_DHTRecordPoolAllocations value,
$Res Function(_DHTRecordPoolAllocations) _then) =
__$DHTRecordPoolAllocationsCopyWithImpl;
@override
@useResult
$Res call(
{IMap<String, ISet<Typed<FixedEncodedString43>>> childrenByParent,
IMap<String, Typed<FixedEncodedString43>> parentByChild,
ISet<Typed<FixedEncodedString43>> rootRecords,
IMap<String, String> debugNames});
}
/// @nodoc
class __$DHTRecordPoolAllocationsCopyWithImpl<$Res>
implements _$DHTRecordPoolAllocationsCopyWith<$Res> {
__$DHTRecordPoolAllocationsCopyWithImpl(this._self, this._then);
final _DHTRecordPoolAllocations _self;
final $Res Function(_DHTRecordPoolAllocations) _then;
/// Create a copy of DHTRecordPoolAllocations
/// with the given fields replaced by the non-null parameter values.
@override
@pragma('vm:prefer-inline')
$Res call({
Object? childrenByParent = null,
Object? parentByChild = null,
Object? rootRecords = null,
Object? debugNames = null,
}) {
return _then(_DHTRecordPoolAllocations(
childrenByParent: null == childrenByParent
? _self.childrenByParent
: childrenByParent // ignore: cast_nullable_to_non_nullable
as IMap<String, ISet<Typed<FixedEncodedString43>>>,
parentByChild: null == parentByChild
? _self.parentByChild
: parentByChild // ignore: cast_nullable_to_non_nullable
as IMap<String, Typed<FixedEncodedString43>>,
rootRecords: null == rootRecords
? _self.rootRecords
: rootRecords // ignore: cast_nullable_to_non_nullable
as ISet<Typed<FixedEncodedString43>>,
debugNames: null == debugNames
? _self.debugNames
: debugNames // ignore: cast_nullable_to_non_nullable
as IMap<String, String>,
));
}
}
/// @nodoc
mixin _$OwnedDHTRecordPointer {
TypedKey get recordKey;
KeyPair get owner;
/// Create a copy of OwnedDHTRecordPointer
/// with the given fields replaced by the non-null parameter values.
@JsonKey(includeFromJson: false, includeToJson: false)
@pragma('vm:prefer-inline')
$OwnedDHTRecordPointerCopyWith<OwnedDHTRecordPointer> get copyWith =>
_$OwnedDHTRecordPointerCopyWithImpl<OwnedDHTRecordPointer>(
this as OwnedDHTRecordPointer, _$identity);
/// Serializes this OwnedDHTRecordPointer to a JSON map.
Map<String, dynamic> toJson();
@override
bool operator ==(Object other) {
return identical(this, other) ||
(other.runtimeType == runtimeType &&
other is OwnedDHTRecordPointer &&
(identical(other.recordKey, recordKey) ||
other.recordKey == recordKey) &&
(identical(other.owner, owner) || other.owner == owner));
@ -382,40 +259,136 @@ class _$OwnedDHTRecordPointerImpl implements _OwnedDHTRecordPointer {
@override
int get hashCode => Object.hash(runtimeType, recordKey, owner);
/// Create a copy of OwnedDHTRecordPointer
/// with the given fields replaced by the non-null parameter values.
@JsonKey(includeFromJson: false, includeToJson: false)
@override
@pragma('vm:prefer-inline')
_$$OwnedDHTRecordPointerImplCopyWith<_$OwnedDHTRecordPointerImpl>
get copyWith => __$$OwnedDHTRecordPointerImplCopyWithImpl<
_$OwnedDHTRecordPointerImpl>(this, _$identity);
@override
Map<String, dynamic> toJson() {
return _$$OwnedDHTRecordPointerImplToJson(
this,
);
String toString() {
return 'OwnedDHTRecordPointer(recordKey: $recordKey, owner: $owner)';
}
}
abstract class _OwnedDHTRecordPointer implements OwnedDHTRecordPointer {
const factory _OwnedDHTRecordPointer(
{required final Typed<FixedEncodedString43> recordKey,
required final KeyPair owner}) = _$OwnedDHTRecordPointerImpl;
/// @nodoc
abstract mixin class $OwnedDHTRecordPointerCopyWith<$Res> {
factory $OwnedDHTRecordPointerCopyWith(OwnedDHTRecordPointer value,
$Res Function(OwnedDHTRecordPointer) _then) =
_$OwnedDHTRecordPointerCopyWithImpl;
@useResult
$Res call({Typed<FixedEncodedString43> recordKey, KeyPair owner});
}
factory _OwnedDHTRecordPointer.fromJson(Map<String, dynamic> json) =
_$OwnedDHTRecordPointerImpl.fromJson;
/// @nodoc
class _$OwnedDHTRecordPointerCopyWithImpl<$Res>
implements $OwnedDHTRecordPointerCopyWith<$Res> {
_$OwnedDHTRecordPointerCopyWithImpl(this._self, this._then);
final OwnedDHTRecordPointer _self;
final $Res Function(OwnedDHTRecordPointer) _then;
/// Create a copy of OwnedDHTRecordPointer
/// with the given fields replaced by the non-null parameter values.
@pragma('vm:prefer-inline')
@override
$Res call({
Object? recordKey = null,
Object? owner = null,
}) {
return _then(_self.copyWith(
recordKey: null == recordKey
? _self.recordKey!
: recordKey // ignore: cast_nullable_to_non_nullable
as Typed<FixedEncodedString43>,
owner: null == owner
? _self.owner
: owner // ignore: cast_nullable_to_non_nullable
as KeyPair,
));
}
}
/// @nodoc
@JsonSerializable()
class _OwnedDHTRecordPointer implements OwnedDHTRecordPointer {
const _OwnedDHTRecordPointer({required this.recordKey, required this.owner});
factory _OwnedDHTRecordPointer.fromJson(Map<String, dynamic> json) =>
_$OwnedDHTRecordPointerFromJson(json);
@override
Typed<FixedEncodedString43> get recordKey;
final Typed<FixedEncodedString43> recordKey;
@override
KeyPair get owner;
final KeyPair owner;
/// Create a copy of OwnedDHTRecordPointer
/// with the given fields replaced by the non-null parameter values.
@override
@JsonKey(includeFromJson: false, includeToJson: false)
_$$OwnedDHTRecordPointerImplCopyWith<_$OwnedDHTRecordPointerImpl>
get copyWith => throw _privateConstructorUsedError;
@pragma('vm:prefer-inline')
_$OwnedDHTRecordPointerCopyWith<_OwnedDHTRecordPointer> get copyWith =>
__$OwnedDHTRecordPointerCopyWithImpl<_OwnedDHTRecordPointer>(
this, _$identity);
@override
Map<String, dynamic> toJson() {
return _$OwnedDHTRecordPointerToJson(
this,
);
}
@override
bool operator ==(Object other) {
return identical(this, other) ||
(other.runtimeType == runtimeType &&
other is _OwnedDHTRecordPointer &&
(identical(other.recordKey, recordKey) ||
other.recordKey == recordKey) &&
(identical(other.owner, owner) || other.owner == owner));
}
@JsonKey(includeFromJson: false, includeToJson: false)
@override
int get hashCode => Object.hash(runtimeType, recordKey, owner);
@override
String toString() {
return 'OwnedDHTRecordPointer(recordKey: $recordKey, owner: $owner)';
}
}
/// @nodoc
abstract mixin class _$OwnedDHTRecordPointerCopyWith<$Res>
implements $OwnedDHTRecordPointerCopyWith<$Res> {
factory _$OwnedDHTRecordPointerCopyWith(_OwnedDHTRecordPointer value,
$Res Function(_OwnedDHTRecordPointer) _then) =
__$OwnedDHTRecordPointerCopyWithImpl;
@override
@useResult
$Res call({Typed<FixedEncodedString43> recordKey, KeyPair owner});
}
/// @nodoc
class __$OwnedDHTRecordPointerCopyWithImpl<$Res>
implements _$OwnedDHTRecordPointerCopyWith<$Res> {
__$OwnedDHTRecordPointerCopyWithImpl(this._self, this._then);
final _OwnedDHTRecordPointer _self;
final $Res Function(_OwnedDHTRecordPointer) _then;
/// Create a copy of OwnedDHTRecordPointer
/// with the given fields replaced by the non-null parameter values.
@override
@pragma('vm:prefer-inline')
$Res call({
Object? recordKey = null,
Object? owner = null,
}) {
return _then(_OwnedDHTRecordPointer(
recordKey: null == recordKey
? _self.recordKey
: recordKey // ignore: cast_nullable_to_non_nullable
as Typed<FixedEncodedString43>,
owner: null == owner
? _self.owner
: owner // ignore: cast_nullable_to_non_nullable
as KeyPair,
));
}
}
// dart format on

View file

@ -6,9 +6,9 @@ part of 'dht_record_pool.dart';
// JsonSerializableGenerator
// **************************************************************************
_$DHTRecordPoolAllocationsImpl _$$DHTRecordPoolAllocationsImplFromJson(
_DHTRecordPoolAllocations _$DHTRecordPoolAllocationsFromJson(
Map<String, dynamic> json) =>
_$DHTRecordPoolAllocationsImpl(
_DHTRecordPoolAllocations(
childrenByParent: json['children_by_parent'] == null
? const IMapConst<String, ISet<TypedKey>>({})
: IMap<String, ISet<Typed<FixedEncodedString43>>>.fromJson(
@ -34,8 +34,8 @@ _$DHTRecordPoolAllocationsImpl _$$DHTRecordPoolAllocationsImplFromJson(
(value) => value as String),
);
Map<String, dynamic> _$$DHTRecordPoolAllocationsImplToJson(
_$DHTRecordPoolAllocationsImpl instance) =>
Map<String, dynamic> _$DHTRecordPoolAllocationsToJson(
_DHTRecordPoolAllocations instance) =>
<String, dynamic>{
'children_by_parent': instance.childrenByParent.toJson(
(value) => value,
@ -56,15 +56,15 @@ Map<String, dynamic> _$$DHTRecordPoolAllocationsImplToJson(
),
};
_$OwnedDHTRecordPointerImpl _$$OwnedDHTRecordPointerImplFromJson(
_OwnedDHTRecordPointer _$OwnedDHTRecordPointerFromJson(
Map<String, dynamic> json) =>
_$OwnedDHTRecordPointerImpl(
_OwnedDHTRecordPointer(
recordKey: Typed<FixedEncodedString43>.fromJson(json['record_key']),
owner: KeyPair.fromJson(json['owner']),
);
Map<String, dynamic> _$$OwnedDHTRecordPointerImplToJson(
_$OwnedDHTRecordPointerImpl instance) =>
Map<String, dynamic> _$OwnedDHTRecordPointerToJson(
_OwnedDHTRecordPointer instance) =>
<String, dynamic>{
'record_key': instance.recordKey.toJson(),
'owner': instance.owner.toJson(),

View file

@ -0,0 +1,57 @@
import 'package:veilid/veilid.dart';
import 'dht_record_pool.dart';
/// A detected sequence-number change for a single subkey of a DHT record.
class DHTSeqChange {
  const DHTSeqChange(this.subkey, this.oldSeq, this.newSeq);
  // Subkey index the change applies to.
  final int subkey;
  // Sequence number previously known locally.
  final int oldSeq;
  // Newer sequence number observed on the network.
  final int newSeq;
}
/// Inspection helpers for [DHTRecordReport].
///
/// The report carries parallel sequence arrays: walking [subkeys] in order,
/// flattened subkey i corresponds to localSeqs[i] / networkSeqs[i]
/// (assumes the arrays cover every subkey in the ranges — TODO confirm
/// against the veilid API contract).
extension DHTReportReportExt on DHTRecordReport {
  /// Subkeys whose network sequence number is newer than the local one,
  /// coalesced into contiguous [ValueSubkeyRange]s.
  ///
  /// Returns an empty list when the report has no sequence data.
  List<ValueSubkeyRange> get newerSubkeys {
    if (networkSeqs.isEmpty || localSeqs.isEmpty || subkeys.isEmpty) {
      return [];
    }
    final currentSubkeys = <ValueSubkeyRange>[];
    // i walks the flattened seq arrays in lockstep with the subkey ranges.
    var i = 0;
    for (final skr in subkeys) {
      for (var sk = skr.low; sk <= skr.high; sk++) {
        // "Newer" means strictly greater, and the network copy must exist
        // (DHTRecord.emptySeq marks an absent value).
        if (networkSeqs[i] > localSeqs[i] &&
            networkSeqs[i] != DHTRecord.emptySeq) {
          if (currentSubkeys.isNotEmpty &&
              currentSubkeys.last.high == (sk - 1)) {
            // Adjacent to the previous newer subkey: extend that range.
            currentSubkeys.add(ValueSubkeyRange(
                low: currentSubkeys.removeLast().low, high: sk));
          } else {
            // Start a new single-subkey range.
            currentSubkeys.add(ValueSubkeyRange.single(sk));
          }
        }
        i++;
      }
    }
    return currentSubkeys;
  }

  /// The first subkey (in subkey order) whose network sequence number is
  /// newer than the local one, or null if nothing changed.
  DHTSeqChange? get firstSeqChange {
    if (networkSeqs.isEmpty || localSeqs.isEmpty || subkeys.isEmpty) {
      return null;
    }
    var i = 0;
    for (final skr in subkeys) {
      for (var sk = skr.low; sk <= skr.high; sk++) {
        if (networkSeqs[i] > localSeqs[i] &&
            networkSeqs[i] != DHTRecord.emptySeq) {
          return DHTSeqChange(sk, localSeqs[i], networkSeqs[i]);
        }
        i++;
      }
    }
    return null;
  }
}

View file

@ -3,27 +3,14 @@ import 'dart:async';
import 'package:async_tools/async_tools.dart';
import 'package:bloc/bloc.dart';
import 'package:bloc_advanced_tools/bloc_advanced_tools.dart';
import 'package:equatable/equatable.dart';
import 'package:fast_immutable_collections/fast_immutable_collections.dart';
import 'package:meta/meta.dart';
import '../../../veilid_support.dart';
@immutable
class DHTShortArrayElementState<T> extends Equatable {
const DHTShortArrayElementState(
{required this.value, required this.isOffline});
final T value;
final bool isOffline;
typedef DHTShortArrayState<T> = AsyncValue<IList<OnlineElementState<T>>>;
typedef DHTShortArrayCubitState<T> = BlocBusyState<DHTShortArrayState<T>>;
@override
List<Object?> get props => [value, isOffline];
}
typedef DHTShortArrayState<T> = AsyncValue<IList<DHTShortArrayElementState<T>>>;
typedef DHTShortArrayBusyState<T> = BlocBusyState<DHTShortArrayState<T>>;
class DHTShortArrayCubit<T> extends Cubit<DHTShortArrayBusyState<T>>
class DHTShortArrayCubit<T> extends Cubit<DHTShortArrayCubitState<T>>
with BlocBusyWrapper<DHTShortArrayState<T>>, RefreshableCubit {
DHTShortArrayCubit({
required Future<DHTShortArray> Function() open,
@ -46,7 +33,7 @@ class DHTShortArrayCubit<T> extends Cubit<DHTShortArrayBusyState<T>>
}
} on Exception catch (e, st) {
addError(e, st);
emit(DHTShortArrayBusyState<T>(AsyncValue.error(e, st)));
emit(DHTShortArrayCubitState<T>(AsyncValue.error(e, st)));
return;
}
@ -83,7 +70,7 @@ class DHTShortArrayCubit<T> extends Cubit<DHTShortArrayBusyState<T>>
// Get the items
final allItems = (await reader.getRange(0, forceRefresh: forceRefresh))
?.indexed
.map((x) => DHTShortArrayElementState(
.map((x) => OnlineElementState(
value: _decodeElement(x.$2),
isOffline: offlinePositions?.contains(x.$1) ?? false))
.toIList();

View file

@ -333,7 +333,7 @@ class _DHTShortArrayHead {
}
Future<DHTShortArrayHeadLookup> lookupIndex(int idx, bool allowCreate) async {
final seq = idx < _seqs.length ? _seqs[idx] : 0xFFFFFFFF;
final seq = idx < _seqs.length ? _seqs[idx] : DHTRecord.emptySeq;
final recordNumber = idx ~/ _stride;
final record = await _getOrCreateLinkedRecord(recordNumber, allowCreate);
final recordSubkey = (idx % _stride) + ((recordNumber == 0) ? 1 : 0);
@ -427,14 +427,14 @@ class _DHTShortArrayHead {
// If our local sequence number is unknown or hasnt been written yet
// then a normal DHT operation is going to pull from the network anyway
if (_localSeqs.length < idx || _localSeqs[idx] == 0xFFFFFFFF) {
if (_localSeqs.length < idx || _localSeqs[idx] == DHTRecord.emptySeq) {
return false;
}
// If the remote sequence number record is unknown or hasnt been written
// at this index yet, then we also do not refresh at this time as it
// is the first time the index is being written to
if (_seqs.length < idx || _seqs[idx] == 0xFFFFFFFF) {
if (_seqs.length < idx || _seqs[idx] == DHTRecord.emptySeq) {
return false;
}
@ -448,12 +448,12 @@ class _DHTShortArrayHead {
final idx = _index[pos];
while (_localSeqs.length <= idx) {
_localSeqs.add(0xFFFFFFFF);
_localSeqs.add(DHTRecord.emptySeq);
}
_localSeqs[idx] = newSeq;
if (write) {
while (_seqs.length <= idx) {
_seqs.add(0xFFFFFFFF);
_seqs.add(DHTRecord.emptySeq);
}
_seqs[idx] = newSeq;
}

View file

@ -122,7 +122,7 @@ class _DHTShortArrayWrite extends _DHTShortArrayRead
final outSeqNum = Output<int>();
final result = lookup.seq == 0xFFFFFFFF
final result = lookup.seq == DHTRecord.emptySeq
? null
: await lookup.record.get(subkey: lookup.recordSubkey);
@ -151,7 +151,7 @@ class _DHTShortArrayWrite extends _DHTShortArrayRead
final lookup = await _head.lookupPosition(pos, true);
final outSeqNumRead = Output<int>();
final oldValue = lookup.seq == 0xFFFFFFFF
final oldValue = lookup.seq == DHTRecord.emptySeq
? null
: await lookup.record
.get(subkey: lookup.recordSubkey, outSeqNum: outSeqNumRead);