Mirror of https://gitlab.com/veilid/veilidchat.git (synced 2024-10-01 06:55:46 -04:00)

fix eventual consistency

This commit is contained in:
parent 2c3d4dce93
commit b5612e5dd8
@@ -292,7 +292,7 @@ class SingleContactMessagesCubit extends Cubit<SingleContactMessagesState> {
 }

 await _sentMessagesCubit!.operateAppendEventual((writer) =>
-writer.tryAddAll(messages.map((m) => m.writeToBuffer()).toList()));
+writer.addAll(messages.map((m) => m.writeToBuffer()).toList()));
 }

 // Produce a state for this cubit from the input cubits and queues
@@ -84,10 +84,7 @@ class ChatListCubit extends DHTShortArrayCubit<proto.Chat>
 ..remoteConversationRecordKey = remoteConversationRecordKey.toProto();

 // Add chat
-final added = await writer.tryAdd(chat.writeToBuffer());
-if (!added) {
-throw Exception('Failed to add chat');
-}
+await writer.add(chat.writeToBuffer());
 });
 }

@@ -160,9 +160,7 @@ class ContactInvitationListCubit
 // Add ContactInvitationRecord to account's list
 // if this fails, don't keep retrying, user can try again later
 await operateWrite((writer) async {
-if (await writer.tryAdd(cinvrec.writeToBuffer()) == false) {
-throw Exception('Failed to add contact invitation record');
-}
+await writer.add(cinvrec.writeToBuffer());
 });
 });
 });
@@ -54,9 +54,7 @@ class ContactListCubit extends DHTShortArrayCubit<proto.Contact> {
 // Add Contact to account's list
 // if this fails, don't keep retrying, user can try again later
 await operateWrite((writer) async {
-if (!await writer.tryAdd(contact.writeToBuffer())) {
-throw Exception('Failed to add contact record');
-}
+await writer.add(contact.writeToBuffer());
 });
 }

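The three cubit hunks above are the caller side of the same API migration: instead of checking a bool from tryAdd and raising a hand-rolled Exception, callers now simply await add and let the container throw on failure. A condensed before/after sketch of that pattern (the contact value is a placeholder; only the method names come from the diff):

// Before: tryAdd() reported a lost race by returning false.
await operateWrite((writer) async {
  if (!await writer.tryAdd(contact.writeToBuffer())) {
    throw Exception('Failed to add contact record');
  }
});

// After: add() either succeeds or throws (for example DHTExceptionTryAgain),
// so the closure no longer needs its own failure handling.
await operateWrite((writer) async {
  await writer.add(contact.writeToBuffer());
});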
@@ -36,161 +36,161 @@ void main() {
 setUpAll(veilidFixture.attach);
 tearDownAll(veilidFixture.detach);

-group('TableDB Tests', () {
-group('TableDBArray Tests', () {
-// test('create/delete TableDBArray', testTableDBArrayCreateDelete);
+// group('TableDB Tests', () {
+// group('TableDBArray Tests', () {
+// // test('create/delete TableDBArray', testTableDBArrayCreateDelete);

-group('TableDBArray Add/Get Tests', () {
-for (final params in [
-//
-(99, 3, 15),
-(100, 4, 16),
-(101, 5, 17),
-//
-(511, 3, 127),
-(512, 4, 128),
-(513, 5, 129),
-//
-(4095, 3, 1023),
-(4096, 4, 1024),
-(4097, 5, 1025),
-//
-(65535, 3, 16383),
-(65536, 4, 16384),
-(65537, 5, 16385),
-]) {
-final count = params.$1;
-final singles = params.$2;
-final batchSize = params.$3;
+// group('TableDBArray Add/Get Tests', () {
+// for (final params in [
+// //
+// (99, 3, 15),
+// (100, 4, 16),
+// (101, 5, 17),
+// //
+// (511, 3, 127),
+// (512, 4, 128),
+// (513, 5, 129),
+// //
+// (4095, 3, 1023),
+// (4096, 4, 1024),
+// (4097, 5, 1025),
+// //
+// (65535, 3, 16383),
+// (65536, 4, 16384),
+// (65537, 5, 16385),
+// ]) {
+// final count = params.$1;
+// final singles = params.$2;
+// final batchSize = params.$3;

-test(
-timeout: const Timeout(Duration(seconds: 480)),
-'add/remove TableDBArray count = $count batchSize=$batchSize',
-makeTestTableDBArrayAddGetClear(
-count: count,
-singles: singles,
-batchSize: batchSize,
-crypto: const VeilidCryptoPublic()),
-);
-}
-});
+// test(
+// timeout: const Timeout(Duration(seconds: 480)),
+// 'add/remove TableDBArray count = $count batchSize=$batchSize',
+// makeTestTableDBArrayAddGetClear(
+// count: count,
+// singles: singles,
+// batchSize: batchSize,
+// crypto: const VeilidCryptoPublic()),
+// );
+// }
+// });

-group('TableDBArray Insert Tests', () {
-for (final params in [
-//
-(99, 3, 15),
-(100, 4, 16),
-(101, 5, 17),
-//
-(511, 3, 127),
-(512, 4, 128),
-(513, 5, 129),
-//
-(4095, 3, 1023),
-(4096, 4, 1024),
-(4097, 5, 1025),
-//
-(65535, 3, 16383),
-(65536, 4, 16384),
-(65537, 5, 16385),
-]) {
-final count = params.$1;
-final singles = params.$2;
-final batchSize = params.$3;
+// group('TableDBArray Insert Tests', () {
+// for (final params in [
+// //
+// (99, 3, 15),
+// (100, 4, 16),
+// (101, 5, 17),
+// //
+// (511, 3, 127),
+// (512, 4, 128),
+// (513, 5, 129),
+// //
+// (4095, 3, 1023),
+// (4096, 4, 1024),
+// (4097, 5, 1025),
+// //
+// (65535, 3, 16383),
+// (65536, 4, 16384),
+// (65537, 5, 16385),
+// ]) {
+// final count = params.$1;
+// final singles = params.$2;
+// final batchSize = params.$3;

-test(
-timeout: const Timeout(Duration(seconds: 480)),
-'insert TableDBArray count=$count singles=$singles batchSize=$batchSize',
-makeTestTableDBArrayInsert(
-count: count,
-singles: singles,
-batchSize: batchSize,
-crypto: const VeilidCryptoPublic()),
-);
-}
-});
+// test(
+// timeout: const Timeout(Duration(seconds: 480)),
+// 'insert TableDBArray count=$count singles=$singles batchSize=$batchSize',
+// makeTestTableDBArrayInsert(
+// count: count,
+// singles: singles,
+// batchSize: batchSize,
+// crypto: const VeilidCryptoPublic()),
+// );
+// }
+// });

-group('TableDBArray Remove Tests', () {
-for (final params in [
-//
-(99, 3, 15),
-(100, 4, 16),
-(101, 5, 17),
-//
-(511, 3, 127),
-(512, 4, 128),
-(513, 5, 129),
-//
-(4095, 3, 1023),
-(4096, 4, 1024),
-(4097, 5, 1025),
-//
-(16383, 3, 4095),
-(16384, 4, 4096),
-(16385, 5, 4097),
-]) {
-final count = params.$1;
-final singles = params.$2;
-final batchSize = params.$3;
+// group('TableDBArray Remove Tests', () {
+// for (final params in [
+// //
+// (99, 3, 15),
+// (100, 4, 16),
+// (101, 5, 17),
+// //
+// (511, 3, 127),
+// (512, 4, 128),
+// (513, 5, 129),
+// //
+// (4095, 3, 1023),
+// (4096, 4, 1024),
+// (4097, 5, 1025),
+// //
+// (16383, 3, 4095),
+// (16384, 4, 4096),
+// (16385, 5, 4097),
+// ]) {
+// final count = params.$1;
+// final singles = params.$2;
+// final batchSize = params.$3;

-test(
-timeout: const Timeout(Duration(seconds: 480)),
-'remove TableDBArray count=$count singles=$singles batchSize=$batchSize',
-makeTestTableDBArrayRemove(
-count: count,
-singles: singles,
-batchSize: batchSize,
-crypto: const VeilidCryptoPublic()),
-);
-}
-});
-});
-});
-
-// group('DHT Support Tests', () {
-// setUpAll(updateProcessorFixture.setUp);
-// setUpAll(tickerFixture.setUp);
-// tearDownAll(tickerFixture.tearDown);
-// tearDownAll(updateProcessorFixture.tearDown);
-
-// test('create pool', testDHTRecordPoolCreate);
-
-// group('DHTRecordPool Tests', () {
-// setUpAll(dhtRecordPoolFixture.setUp);
-// tearDownAll(dhtRecordPoolFixture.tearDown);
-
-// test('create/delete record', testDHTRecordCreateDelete);
-// test('record scopes', testDHTRecordScopes);
-// test('create/delete deep record', testDHTRecordDeepCreateDelete);
-// });
-
-// group('DHTShortArray Tests', () {
-// setUpAll(dhtRecordPoolFixture.setUp);
-// tearDownAll(dhtRecordPoolFixture.tearDown);
-
-// for (final stride in [256, 16 /*64, 32, 16, 8, 4, 2, 1 */]) {
-// test('create shortarray stride=$stride',
-// makeTestDHTShortArrayCreateDelete(stride: stride));
-// test('add shortarray stride=$stride',
-// makeTestDHTShortArrayAdd(stride: stride));
-// }
-// });
-
-// group('DHTLog Tests', () {
-// setUpAll(dhtRecordPoolFixture.setUp);
-// tearDownAll(dhtRecordPoolFixture.tearDown);
-
-// for (final stride in [256, 16 /*64, 32, 16, 8, 4, 2, 1 */]) {
-// test('create log stride=$stride',
-// makeTestDHTLogCreateDelete(stride: stride));
-// test(
-// timeout: const Timeout(Duration(seconds: 480)),
-// 'add/truncate log stride=$stride',
-// makeTestDHTLogAddTruncate(stride: stride),
-// );
-// }
+// test(
+// timeout: const Timeout(Duration(seconds: 480)),
+// 'remove TableDBArray count=$count singles=$singles batchSize=$batchSize',
+// makeTestTableDBArrayRemove(
+// count: count,
+// singles: singles,
+// batchSize: batchSize,
+// crypto: const VeilidCryptoPublic()),
+// );
+// }
+// });
+// });
+// });
+
+group('DHT Support Tests', () {
+setUpAll(updateProcessorFixture.setUp);
+setUpAll(tickerFixture.setUp);
+tearDownAll(tickerFixture.tearDown);
+tearDownAll(updateProcessorFixture.tearDown);
+
+test('create pool', testDHTRecordPoolCreate);
+
+group('DHTRecordPool Tests', () {
+setUpAll(dhtRecordPoolFixture.setUp);
+tearDownAll(dhtRecordPoolFixture.tearDown);
+
+test('create/delete record', testDHTRecordCreateDelete);
+test('record scopes', testDHTRecordScopes);
+test('create/delete deep record', testDHTRecordDeepCreateDelete);
+});
+
+group('DHTShortArray Tests', () {
+setUpAll(dhtRecordPoolFixture.setUp);
+tearDownAll(dhtRecordPoolFixture.tearDown);
+
+for (final stride in [256, 16 /*64, 32, 16, 8, 4, 2, 1 */]) {
+test('create shortarray stride=$stride',
+makeTestDHTShortArrayCreateDelete(stride: stride));
+test('add shortarray stride=$stride',
+makeTestDHTShortArrayAdd(stride: stride));
+}
+});
+
+group('DHTLog Tests', () {
+setUpAll(dhtRecordPoolFixture.setUp);
+tearDownAll(dhtRecordPoolFixture.tearDown);
+
+for (final stride in [256, 16 /*64, 32, 16, 8, 4, 2, 1 */]) {
+test('create log stride=$stride',
+makeTestDHTLogCreateDelete(stride: stride));
+test(
+timeout: const Timeout(Duration(seconds: 480)),
+'add/truncate log stride=$stride',
+makeTestDHTLogAddTruncate(stride: stride),
+);
+}
+});
+});
 });
 });
 }
@@ -64,8 +64,7 @@ Future<void> Function() makeTestDHTLogAddTruncate({required int stride}) =>
 const chunk = 25;
 for (var n = 0; n < dataset.length; n += chunk) {
 print('$n-${n + chunk - 1} ');
-final success = await w.tryAddAll(dataset.sublist(n, n + chunk));
-expect(success, isTrue);
+await w.addAll(dataset.sublist(n, n + chunk));
 }
 });
 expect(res, isNull);
@@ -64,7 +64,7 @@ Future<void> Function() makeTestDHTShortArrayAdd({required int stride}) =>
 for (var n = 4; n < 8; n++) {
 await arr.operateWriteEventual((w) async {
 print('$n ');
-return w.tryAdd(dataset[n]);
+await w.add(dataset[n]);
 });
 }
 }
@@ -73,8 +73,7 @@ Future<void> Function() makeTestDHTShortArrayAdd({required int stride}) =>
 {
 await arr.operateWriteEventual((w) async {
 print('${dataset.length ~/ 2}-${dataset.length}');
-return w
-.tryAddAll(dataset.sublist(dataset.length ~/ 2, dataset.length));
+await w.addAll(dataset.sublist(dataset.length ~/ 2, dataset.length));
 });
 }

@@ -83,7 +82,7 @@ Future<void> Function() makeTestDHTShortArrayAdd({required int stride}) =>
 for (var n = 0; n < 4; n++) {
 await arr.operateWriteEventual((w) async {
 print('$n ');
-return w.tryInsert(n, dataset[n]);
+await w.insert(n, dataset[n]);
 });
 }
 }
@@ -92,7 +91,7 @@ Future<void> Function() makeTestDHTShortArrayAdd({required int stride}) =>
 {
 await arr.operateWriteEventual((w) async {
 print('8-${dataset.length ~/ 2}');
-return w.tryInsertAll(8, dataset.sublist(8, dataset.length ~/ 2));
+await w.insertAll(8, dataset.sublist(8, dataset.length ~/ 2));
 });
 }

@@ -111,7 +110,6 @@ Future<void> Function() makeTestDHTShortArrayAdd({required int stride}) =>
 {
 await arr.operateWriteEventual((w) async {
 await w.clear();
-return true;
 });
 }

@@ -37,10 +37,10 @@ packages:
 dependency: "direct dev"
 description:
 name: async_tools
-sha256: e783ac6ed5645c86da34240389bb3a000fc5e3ae6589c6a482eb24ece7217681
+sha256: "72590010ed6c6f5cbd5d40e33834abc08a43da6a73ac3c3645517d53899b8684"
 url: "https://pub.dev"
 source: hosted
-version: "0.1.1"
+version: "0.1.2"
 bloc:
 dependency: transitive
 description:
@@ -53,10 +53,10 @@ packages:
 dependency: transitive
 description:
 name: bloc_advanced_tools
-sha256: "09f8a121d950575f1f2980c8b10df46b2ac6c72c8cbe48cc145871e5882ed430"
+sha256: "0cf9b3a73a67addfe22ec3f97a1ac240f6ad53870d6b21a980260f390d7901cd"
 url: "https://pub.dev"
 source: hosted
-version: "0.1.1"
+version: "0.1.2"
 boolean_selector:
 dependency: transitive
 description:
@@ -14,7 +14,7 @@ dependencies:
 path: ../

 dev_dependencies:
-async_tools: ^0.1.1
+async_tools: ^0.1.2
 integration_test:
 sdk: flutter
 lint_hard: ^4.0.0
@@ -242,11 +242,11 @@ class DHTLog implements DHTDeleteable<DHTLog> {
 /// Runs a closure allowing append/truncate access to the log
 /// Will execute the closure multiple times if a consistent write to the DHT
 /// is not achieved. Timeout if specified will be thrown as a
-/// TimeoutException. The closure should return true if its changes also
-/// succeeded, returning false will trigger another eventual consistency
-/// attempt.
-Future<void> operateAppendEventual(
-Future<bool> Function(DHTLogWriteOperations) closure,
+/// TimeoutException. The closure should return a value if its changes also
+/// succeeded, and throw DHTExceptionTryAgain to trigger another
+/// eventual consistency pass.
+Future<T> operateAppendEventual<T>(
+Future<T> Function(DHTLogWriteOperations) closure,
 {Duration? timeout}) async {
 if (!isOpen) {
 throw StateError('log is not open"');
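With the generic signature above, operateAppendEventual now hands the closure's result back to the caller, and a retry is requested by throwing DHTExceptionTryAgain instead of returning false. A minimal usage sketch, assuming an open DHTLog in a variable named log and a list of payload byte buffers (both hypothetical):

final appended = await log.operateAppendEventual<int>((writer) async {
  // addAll() throws DHTExceptionTryAgain if the segment write loses a race,
  // which causes operateAppendEventual to re-run this closure.
  await writer.addAll(payloads);
  return payloads.length; // arbitrary value returned through the generic type
}, timeout: const Duration(seconds: 30));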
@@ -210,8 +210,8 @@ class DHTLogCubit<T> extends Cubit<DHTLogBusyState<T>>
 return _log.operateAppend(closure);
 }

-Future<void> operateAppendEventual(
-Future<bool> Function(DHTLogWriteOperations) closure,
+Future<R> operateAppendEventual<R>(
+Future<R> Function(DHTLogWriteOperations) closure,
 {Duration? timeout}) async {
 await _initWait();
 return _log.operateAppendEventual(closure, timeout: timeout);
@@ -152,17 +152,16 @@ class _DHTLogSpine {
 }
 });

-Future<void> operateAppendEventual(
-Future<bool> Function(_DHTLogSpine) closure,
+Future<T> operateAppendEventual<T>(Future<T> Function(_DHTLogSpine) closure,
 {Duration? timeout}) async {
 final timeoutTs = timeout == null
 ? null
 : Veilid.instance.now().offset(TimestampDuration.fromDuration(timeout));

-await _spineMutex.protect(() async {
+return _spineMutex.protect(() async {
 late int oldHead;
 late int oldTail;
-
+late T out;
 try {
 // Iterate until we have a successful element and head write
 do {
@@ -180,17 +179,19 @@ class _DHTLogSpine {
 }
 }
 try {
-if (await closure(this)) {
-break;
-}
+out = await closure(this);
+break;
+} on DHTExceptionTryAgain {
+//
+// Failed to write in closure resets state
+_head = oldHead;
+_tail = oldTail;
 } on Exception {
 // Failed to write in closure resets state
 _head = oldHead;
 _tail = oldTail;
 rethrow;
 }
-// Failed to write in closure resets state
-_head = oldHead;
-_tail = oldTail;
 }

 // Try to do the head write
 } while (!await writeSpineHead(old: (oldHead, oldTail)));
 } on Exception {
@@ -199,6 +200,8 @@ class _DHTLogSpine {
 _tail = oldTail;
 rethrow;
 }
+
+return out;
 });
 }

@@ -17,12 +17,22 @@ class _DHTLogWrite extends _DHTLogRead implements DHTLogWriteOperations {
 }
 final lookup = await _spine.lookupPosition(pos);
 if (lookup == null) {
-throw StateError("can't lookup position in write to dht log");
+throw DHTExceptionInvalidData();
 }

 // Write item to the segment
-return lookup.scope((sa) => sa.operateWrite((write) async =>
-write.tryWriteItem(lookup.pos, newValue, output: output)));
+try {
+await lookup.scope((sa) => sa.operateWrite((write) async {
+final success =
+await write.tryWriteItem(lookup.pos, newValue, output: output);
+if (!success) {
+throw DHTExceptionTryAgain();
+}
+}));
+} on DHTExceptionTryAgain {
+return false;
+}
+return true;
 }

 @override
@@ -35,40 +45,47 @@ class _DHTLogWrite extends _DHTLogRead implements DHTLogWriteOperations {
 }
 final aLookup = await _spine.lookupPosition(aPos);
 if (aLookup == null) {
-throw StateError("can't lookup position a in swap of dht log");
+throw DHTExceptionInvalidData();
 }
 final bLookup = await _spine.lookupPosition(bPos);
 if (bLookup == null) {
 await aLookup.close();
-throw StateError("can't lookup position b in swap of dht log");
+throw DHTExceptionInvalidData();
 }

 // Swap items in the segments
 if (aLookup.shortArray == bLookup.shortArray) {
 await bLookup.close();
-await aLookup.scope((sa) => sa.operateWriteEventual((aWrite) async {
-await aWrite.swap(aLookup.pos, bLookup.pos);
-return true;
-}));
+return aLookup.scope((sa) => sa.operateWriteEventual(
+(aWrite) async => aWrite.swap(aLookup.pos, bLookup.pos)));
 } else {
 final bItem = Output<Uint8List>();
-await aLookup.scope(
+return aLookup.scope(
 (sa) => bLookup.scope((sb) => sa.operateWriteEventual((aWrite) async {
 if (bItem.value == null) {
 final aItem = await aWrite.get(aLookup.pos);
 if (aItem == null) {
-throw StateError("can't get item for position a in swap");
+throw DHTExceptionInvalidData();
 }
-await sb.operateWriteEventual((bWrite) async =>
-bWrite.tryWriteItem(bLookup.pos, aItem, output: bItem));
+await sb.operateWriteEventual((bWrite) async {
+final success = await bWrite
+.tryWriteItem(bLookup.pos, aItem, output: bItem);
+if (!success) {
+throw DHTExceptionTryAgain();
+}
+});
 }
-return aWrite.tryWriteItem(aLookup.pos, bItem.value!);
+final success =
+await aWrite.tryWriteItem(aLookup.pos, bItem.value!);
+if (!success) {
+throw DHTExceptionTryAgain();
+}
 })));
 }
 }

 @override
-Future<bool> tryAdd(Uint8List value) async {
+Future<void> add(Uint8List value) async {
 // Allocate empty index at the end of the list
 final insertPos = _spine.length;
 _spine.allocateTail(1);
@@ -78,26 +95,20 @@ class _DHTLogWrite extends _DHTLogRead implements DHTLogWriteOperations {
 }

 // Write item to the segment
-return lookup.scope((sa) async {
-try {
-return sa.operateWrite((write) async {
+return lookup.scope((sa) async => sa.operateWrite((write) async {
 // If this a new segment, then clear it in case we have wrapped around
 if (lookup.pos == 0) {
 await write.clear();
 } else if (lookup.pos != write.length) {
 // We should always be appending at the length
-throw StateError('appending should be at the end');
+throw DHTExceptionInvalidData();
 }
-return write.tryAdd(value);
-});
-} on DHTExceptionTryAgain {
-return false;
-}
-});
+return write.add(value);
+}));
 }

 @override
-Future<bool> tryAddAll(List<Uint8List> values) async {
+Future<void> addAll(List<Uint8List> values) async {
 // Allocate empty index at the end of the list
 final insertPos = _spine.length;
 _spine.allocateTail(values.length);
@@ -111,31 +122,26 @@ class _DHTLogWrite extends _DHTLogRead implements DHTLogWriteOperations {

 final lookup = await _spine.lookupPosition(insertPos + valueIdx);
 if (lookup == null) {
-throw StateError("can't write to dht log");
+throw DHTExceptionInvalidData();
 }

 final sacount = min(remaining, DHTShortArray.maxElements - lookup.pos);
 final sublistValues = values.sublist(valueIdx, valueIdx + sacount);

 dws.add(() async {
-final ok = await lookup.scope((sa) async {
-try {
-return sa.operateWrite((write) async {
-// If this a new segment, then clear it in
-// case we have wrapped around
-if (lookup.pos == 0) {
-await write.clear();
-} else if (lookup.pos != write.length) {
-// We should always be appending at the length
-throw StateError('appending should be at the end');
-}
-return write.tryAddAll(sublistValues);
-});
-} on DHTExceptionTryAgain {
-return false;
-}
-});
-if (!ok) {
+try {
+await lookup.scope((sa) async => sa.operateWrite((write) async {
+// If this a new segment, then clear it in
+// case we have wrapped around
+if (lookup.pos == 0) {
+await write.clear();
+} else if (lookup.pos != write.length) {
+// We should always be appending at the length
+throw DHTExceptionInvalidData();
+}
+return write.addAll(sublistValues);
+}));
+} on DHTExceptionTryAgain {
 success = false;
 }
 });
@@ -145,7 +151,9 @@ class _DHTLogWrite extends _DHTLogRead implements DHTLogWriteOperations {

 await dws();

-return success;
+if (!success) {
+throw DHTExceptionTryAgain();
+}
 }

 @override
@@ -1,5 +1,7 @@
 part of 'dht_record_pool.dart';

+const _sfListen = 'listen';
+
 @immutable
 class DHTRecordWatchChange extends Equatable {
 const DHTRecordWatchChange(
@@ -79,6 +81,7 @@ class DHTRecord implements DHTDeleteable<DHTRecord> {
 return false;
 }

+await serialFuturePause((this, _sfListen));
 await _watchController?.close();
 _watchController = null;
 await DHTRecordPool.instance._recordClosed(this);
@@ -445,7 +448,8 @@ class DHTRecord implements DHTDeleteable<DHTRecord> {
 if (change.local && !localChanges) {
 return;
 }
-Future.delayed(Duration.zero, () async {
+
+serialFuture((this, _sfListen), () async {
 final Uint8List? data;
 if (change.local) {
 // local changes are not encrypted
@@ -232,11 +232,11 @@ class DHTShortArray implements DHTDeleteable<DHTShortArray> {
 /// Runs a closure allowing read-write access to the shortarray
 /// Will execute the closure multiple times if a consistent write to the DHT
 /// is not achieved. Timeout if specified will be thrown as a
-/// TimeoutException. The closure should return true if its changes also
-/// succeeded, returning false will trigger another eventual consistency
-/// attempt.
-Future<void> operateWriteEventual(
-Future<bool> Function(DHTShortArrayWriteOperations) closure,
+/// TimeoutException. The closure should return a value if its changes also
+/// succeeded, and throw DHTExceptionTryAgain to trigger another
+/// eventual consistency pass.
+Future<T> operateWriteEventual<T>(
+Future<T> Function(DHTShortArrayWriteOperations) closure,
 {Duration? timeout}) async {
 if (!isOpen) {
 throw StateError('short array is not open"');
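DHTShortArray.operateWriteEventual follows the same convention. A hedged sketch of a closure that rewrites one element and explicitly requests another consistency pass when a newer value is found (shortArray, pos and newValue are illustrative placeholders):

final outItem = Output<Uint8List>();
await shortArray.operateWriteEventual((w) async {
  // tryWriteItem() still reports a lost race with a bool; translating that
  // into DHTExceptionTryAgain is what restarts the eventual-consistency loop.
  final success = await w.tryWriteItem(pos, newValue, output: outItem);
  if (!success) {
    throw DHTExceptionTryAgain();
  }
});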
@@ -111,8 +111,8 @@ class DHTShortArrayCubit<T> extends Cubit<DHTShortArrayBusyState<T>>
 return _shortArray.operateWrite(closure);
 }

-Future<void> operateWriteEventual(
-Future<bool> Function(DHTShortArrayWriteOperations) closure,
+Future<R> operateWriteEventual<R>(
+Future<R> Function(DHTShortArrayWriteOperations) closure,
 {Duration? timeout}) async {
 await _initWait();
 return _shortArray.operateWriteEventual(closure, timeout: timeout);
@@ -107,19 +107,20 @@ class _DHTShortArrayHead {
 }
 });

-Future<void> operateWriteEventual(
-Future<bool> Function(_DHTShortArrayHead) closure,
+Future<T> operateWriteEventual<T>(
+Future<T> Function(_DHTShortArrayHead) closure,
 {Duration? timeout}) async {
 final timeoutTs = timeout == null
 ? null
 : Veilid.instance.now().offset(TimestampDuration.fromDuration(timeout));

-await _headMutex.protect(() async {
+return _headMutex.protect(() async {
 late List<DHTRecord> oldLinkedRecords;
 late List<int> oldIndex;
 late List<int> oldFree;
 late List<int> oldSeqs;

+late T out;
 try {
 // Iterate until we have a successful element and head write

@@ -140,20 +141,23 @@ class _DHTShortArrayHead {
 }
 }
 try {
-if (await closure(this)) {
-break;
-}
+out = await closure(this);
+break;
+} on DHTExceptionTryAgain {
+//
+// Failed to write in closure resets state
+_linkedRecords = List.of(oldLinkedRecords);
+_index = List.of(oldIndex);
+_free = List.of(oldFree);
+_seqs = List.of(oldSeqs);
 } on Exception {
 // Failed to write in closure resets state
 _linkedRecords = List.of(oldLinkedRecords);
 _index = List.of(oldIndex);
 _free = List.of(oldFree);
 _seqs = List.of(oldSeqs);
 rethrow;
 }

-// Failed to write in closure resets state
-_linkedRecords = List.of(oldLinkedRecords);
-_index = List.of(oldIndex);
-_free = List.of(oldFree);
-_seqs = List.of(oldSeqs);
 }

 // Try to do the head write
 } while (!await _writeHead());

@@ -167,6 +171,7 @@ class _DHTShortArrayHead {

 rethrow;
 }
+return out;
 });
 }

@@ -16,14 +16,14 @@ class _DHTShortArrayWrite extends _DHTShortArrayRead
 _DHTShortArrayWrite._(super.head) : super._();

 @override
-Future<bool> tryAdd(Uint8List value) => tryInsert(_head.length, value);
+Future<void> add(Uint8List value) => insert(_head.length, value);

 @override
-Future<bool> tryAddAll(List<Uint8List> values) =>
-tryInsertAll(_head.length, values);
+Future<void> addAll(List<Uint8List> values) =>
+insertAll(_head.length, values);

 @override
-Future<bool> tryInsert(int pos, Uint8List value) async {
+Future<void> insert(int pos, Uint8List value) async {
 if (pos < 0 || pos > _head.length) {
 throw IndexError.withLength(pos, _head.length);
 }
@@ -39,11 +39,13 @@ class _DHTShortArrayWrite extends _DHTShortArrayRead
 _head.freeIndex(pos);
 }
 }
-return true;
+if (!success) {
+throw DHTExceptionTryAgain();
+}
 }

 @override
-Future<bool> tryInsertAll(int pos, List<Uint8List> values) async {
+Future<void> insertAll(int pos, List<Uint8List> values) async {
 if (pos < 0 || pos > _head.length) {
 throw IndexError.withLength(pos, _head.length);
 }
@@ -94,8 +96,9 @@ class _DHTShortArrayWrite extends _DHTShortArrayRead
 }
 }
 }

-return success;
+if (!success) {
+throw DHTExceptionTryAgain();
+}
 }

 @override
@@ -8,34 +8,34 @@ import '../../../veilid_support.dart';
 // Add
 abstract class DHTAdd {
 /// Try to add an item to the DHT container.
-/// Return true if the element was successfully added, and false if the state
-/// changed before the element could be added or a newer value was found on
-/// the network.
+/// Return if the element was successfully added,
+/// Throws DHTExceptionTryAgain if the state changed before the element could
+/// be added or a newer value was found on the network.
 /// Throws a StateError if the container exceeds its maximum size.
-Future<bool> tryAdd(Uint8List value);
+Future<void> add(Uint8List value);

 /// Try to add a list of items to the DHT container.
-/// Return true if the elements were successfully added, and false if the
-/// state changed before the element could be added or a newer value was found
-/// on the network.
+/// Return if the elements were successfully added.
+/// Throws DHTExceptionTryAgain if the state changed before the elements could
+/// be added or a newer value was found on the network.
 /// Throws a StateError if the container exceeds its maximum size.
-Future<bool> tryAddAll(List<Uint8List> values);
+Future<void> addAll(List<Uint8List> values);
 }

 extension DHTAddExt on DHTAdd {
 /// Convenience function:
 /// Like tryAddItem but also encodes the input value as JSON and parses the
 /// returned element as JSON
-Future<bool> tryAddJson<T>(
+Future<void> addJson<T>(
 T newValue,
 ) =>
-tryAdd(jsonEncodeBytes(newValue));
+add(jsonEncodeBytes(newValue));

 /// Convenience function:
 /// Like tryAddItem but also encodes the input value as a protobuf object
 /// and parses the returned element as a protobuf object
-Future<bool> tryAddProtobuf<T extends GeneratedMessage>(
+Future<void> addProtobuf<T extends GeneratedMessage>(
 T newValue,
 ) =>
-tryAdd(newValue.writeToBuffer());
+add(newValue.writeToBuffer());
 }
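The DHTAddExt extension keeps the JSON and protobuf conveniences on the new throwing path. An illustrative call that mirrors the cubit changes earlier in this commit (the chat message value and the surrounding operateWrite closure are placeholders):

await operateWrite((writer) async {
  // addProtobuf() encodes with writeToBuffer() and delegates to add(), so a
  // failed append now surfaces as DHTExceptionTryAgain rather than false.
  await writer.addProtobuf(chat);
});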
@@ -8,22 +8,22 @@ import '../../../veilid_support.dart';
 // Insert/Remove interface
 abstract class DHTInsertRemove {
 /// Try to insert an item as position 'pos' of the DHT container.
-/// Return true if the element was successfully inserted, and false if the
-/// state changed before the element could be inserted or a newer value was
-/// found on the network.
+/// Return if the element was successfully inserted
+/// Throws DHTExceptionTryAgain if the state changed before the element could
+/// be inserted or a newer value was found on the network.
 /// Throws an IndexError if the position removed exceeds the length of
 /// the container.
 /// Throws a StateError if the container exceeds its maximum size.
-Future<bool> tryInsert(int pos, Uint8List value);
+Future<void> insert(int pos, Uint8List value);

 /// Try to insert items at position 'pos' of the DHT container.
-/// Return true if the elements were successfully inserted, and false if the
-/// state changed before the elements could be inserted or a newer value was
-/// found on the network.
+/// Return if the elements were successfully inserted
+/// Throws DHTExceptionTryAgain if the state changed before the elements could
+/// be inserted or a newer value was found on the network.
 /// Throws an IndexError if the position removed exceeds the length of
 /// the container.
 /// Throws a StateError if the container exceeds its maximum size.
-Future<bool> tryInsertAll(int pos, List<Uint8List> values);
+Future<void> insertAll(int pos, List<Uint8List> values);

 /// Remove an item at position 'pos' in the DHT container.
 /// If the remove was successful this returns:
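The insert side carries the same contract plus a range check. A small sketch, assuming a hypothetical shortArray and itemBytes value:

await shortArray.operateWriteEventual((w) async {
  // insert() throws IndexError.withLength for a position outside 0..length,
  // and DHTExceptionTryAgain when the write needs another consistency pass.
  await w.insert(0, itemBytes);
});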
@@ -7,9 +7,9 @@ environment:
 sdk: '>=3.2.0 <4.0.0'

 dependencies:
-async_tools: ^0.1.1
+async_tools: ^0.1.2
 bloc: ^8.1.4
-bloc_advanced_tools: ^0.1.1
+bloc_advanced_tools: ^0.1.2
 charcode: ^1.3.1
 collection: ^1.18.0
 equatable: ^2.0.5
@@ -24,11 +24,11 @@ dependencies:
 # veilid: ^0.0.1
 path: ../../../veilid/veilid-flutter

-dependency_overrides:
-async_tools:
-path: ../../../dart_async_tools
-bloc_advanced_tools:
-path: ../../../bloc_advanced_tools
+# dependency_overrides:
+# async_tools:
+# path: ../../../dart_async_tools
+# bloc_advanced_tools:
+# path: ../../../bloc_advanced_tools

 dev_dependencies:
 build_runner: ^2.4.10
@@ -61,10 +61,10 @@ packages:
 dependency: "direct main"
 description:
 name: async_tools
-sha256: e783ac6ed5645c86da34240389bb3a000fc5e3ae6589c6a482eb24ece7217681
+sha256: "72590010ed6c6f5cbd5d40e33834abc08a43da6a73ac3c3645517d53899b8684"
 url: "https://pub.dev"
 source: hosted
-version: "0.1.1"
+version: "0.1.2"
 awesome_extensions:
 dependency: "direct main"
 description:
@@ -11,7 +11,7 @@ dependencies:
 animated_theme_switcher: ^2.0.10
 ansicolor: ^2.0.2
 archive: ^3.6.1
-async_tools: ^0.1.1
+async_tools: ^0.1.2
 awesome_extensions: ^2.0.16
 badges: ^3.1.2
 basic_utils: ^5.7.0