# Routing context veilid tests
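#
# These tests exercise DHT record create/open/close/delete, get/set, watch,
# and inspect operations through a veilid-server routing context. They rely
# on the fixtures and helpers from this test package (api_connection,
# api_connector); tests that connect via api_connector skip themselves when
# no veilid-server is reachable.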

import veilid
import pytest
import asyncio
import json
import time
import os

from . import *
from .api import VeilidTestConnectionError, api_connector

##################################################################
# A well-formed but nonexistent DHT key (32 blank key bytes) used to exercise error paths
BOGUS_KEY = veilid.TypedKey.from_value(
    veilid.CryptoKind.CRYPTO_KIND_VLD0, veilid.PublicKey.from_bytes(b" " * 32))


@pytest.mark.asyncio
async def test_get_dht_value_unopened(api_connection: veilid.VeilidAPI):
    rc = await api_connection.new_routing_context()
    async with rc:
        with pytest.raises(veilid.VeilidAPIError):
            out = await rc.get_dht_value(BOGUS_KEY, veilid.ValueSubkey(0), False)


@pytest.mark.asyncio
async def test_open_dht_record_nonexistent_no_writer(api_connection: veilid.VeilidAPI):
    rc = await api_connection.new_routing_context()
    async with rc:
        with pytest.raises(veilid.VeilidAPIError):
            out = await rc.open_dht_record(BOGUS_KEY, None)


@pytest.mark.asyncio
async def test_close_dht_record_nonexistent(api_connection: veilid.VeilidAPI):
    rc = await api_connection.new_routing_context()
    async with rc:
        with pytest.raises(veilid.VeilidAPIError):
            await rc.close_dht_record(BOGUS_KEY)


@pytest.mark.asyncio
async def test_delete_dht_record_nonexistent(api_connection: veilid.VeilidAPI):
    rc = await api_connection.new_routing_context()
    async with rc:
        with pytest.raises(veilid.VeilidAPIError):
            await rc.delete_dht_record(BOGUS_KEY)


@pytest.mark.asyncio
async def test_create_delete_dht_record_simple(api_connection: veilid.VeilidAPI):
    rc = await api_connection.new_routing_context()
    async with rc:
        rec = await rc.create_dht_record(
            veilid.DHTSchema.dflt(1), veilid.CryptoKind.CRYPTO_KIND_VLD0
        )
        await rc.close_dht_record(rec.key)
        await rc.delete_dht_record(rec.key)


@pytest.mark.asyncio
async def test_get_dht_value_nonexistent(api_connection: veilid.VeilidAPI):
    rc = await api_connection.new_routing_context()
    async with rc:
        rec = await rc.create_dht_record(veilid.DHTSchema.dflt(1))
        # A subkey that has never been written should read back as None
        assert await rc.get_dht_value(rec.key, 0, False) == None
        await rc.close_dht_record(rec.key)
        await rc.delete_dht_record(rec.key)


@pytest.mark.asyncio
async def test_set_get_dht_value(api_connection: veilid.VeilidAPI):
    rc = await api_connection.new_routing_context()
    async with rc:
        rec = await rc.create_dht_record(veilid.DHTSchema.dflt(2))

        vd = await rc.set_dht_value(rec.key, 0, b"BLAH BLAH BLAH")
        assert vd == None

        vd2 = await rc.get_dht_value(rec.key, 0, False)
        assert vd2 != None

        # Force a refresh from the network; the value should still be there
        vd3 = await rc.get_dht_value(rec.key, 0, True)
        assert vd3 != None

        # Subkey 1 was never written, so it should be empty
        vd4 = await rc.get_dht_value(rec.key, 1, False)
        assert vd4 == None

        print(f"vd2: {vd2.__dict__}")
        print(f"vd3: {vd3.__dict__}")

        # The local copy and the refreshed copy should match
        assert vd2 == vd3

        await rc.close_dht_record(rec.key)
        await rc.delete_dht_record(rec.key)


@pytest.mark.asyncio
async def test_open_writer_dht_value(api_connection: veilid.VeilidAPI):
    rc = await api_connection.new_routing_context()
    async with rc:
        rec = await rc.create_dht_record(veilid.DHTSchema.dflt(2))
        key = rec.key
        owner = rec.owner
        secret = rec.owner_secret
        print(f"key:{key}")

        cs = await api_connection.get_crypto_system(rec.key.kind())
        async with cs:
            assert await cs.validate_key_pair(owner, secret)
            other_keypair = await cs.generate_key_pair()

        va = b"Qwertyuiop Asdfghjkl Zxcvbnm"
        vb = b"1234567890"
        vc = b"!@#$%^&*()"

        # Test subkey writes
        vdtemp = await rc.set_dht_value(key, 1, va)
        assert vdtemp == None

        vdtemp = await rc.get_dht_value(key, 1, False)
        assert vdtemp.data == va
        assert vdtemp.seq == 0
        assert vdtemp.writer == owner

        vdtemp = await rc.get_dht_value(key, 0, False)
        assert vdtemp == None

        vdtemp = await rc.set_dht_value(key, 0, vb)
        assert vdtemp == None

        vdtemp = await rc.get_dht_value(key, 0, True)
        assert vdtemp.data == vb

        vdtemp = await rc.get_dht_value(key, 1, True)
        assert vdtemp.data == va

        # Setting an equal value should not trigger a sequence number update
        vdtemp = await rc.set_dht_value(key, 1, va)
        assert vdtemp == None

        # Setting a different value should trigger a sequence number update
        vdtemp = await rc.set_dht_value(key, 1, vb)
        assert vdtemp == None

        # Now that we have initialized some subkeys and verified they stored
        # correctly, delete the record locally, reopen it, and check that we
        # can still write with the same writer key
        await rc.close_dht_record(key)
        await rc.delete_dht_record(key)

        rec = await rc.open_dht_record(key, veilid.KeyPair.from_parts(owner, secret))
        assert rec != None
        assert rec.key == key
        assert rec.owner == owner
        assert rec.owner_secret == secret
        assert rec.schema.kind == veilid.DHTSchemaKind.DFLT
        assert rec.schema.o_cnt == 2

        # Verify subkey 1 can be set before it has been read, but since a newer
        # value is available online, the set returns that newer value
        vdtemp = await rc.set_dht_value(key, 1, vc)
        assert vdtemp != None
        assert vdtemp.data == vb
        assert vdtemp.seq == 1
        assert vdtemp.writer == owner

        # Verify subkey 1 can be set a second time; the update takes because
        # our sequence number is now newer
        vdtemp = await rc.set_dht_value(key, 1, vc)
        assert vdtemp == None

        # Verify the network got the subkey update with a refresh check
        vdtemp = await rc.get_dht_value(key, 1, True)
        assert vdtemp != None
        assert vdtemp.data == vc
        assert vdtemp.seq == 2
        assert vdtemp.writer == owner

        # Delete the record locally, then reopen it with a different writer key
        # (writes should fail)
        await rc.close_dht_record(key)
        await rc.delete_dht_record(key)

        rec = await rc.open_dht_record(key, other_keypair)
        assert rec != None
        assert rec.key == key
        assert rec.owner == owner
        assert rec.owner_secret == None
        assert rec.schema.kind == veilid.DHTSchemaKind.DFLT
        assert rec.schema.o_cnt == 2

        # Verify subkey 1 can NOT be set because we have the wrong writer
        with pytest.raises(veilid.VeilidAPIError):
            vdtemp = await rc.set_dht_value(key, 1, va)

        # Verify subkey 0 can NOT be set because we have the wrong writer
        with pytest.raises(veilid.VeilidAPIError):
            vdtemp = await rc.set_dht_value(key, 0, va)

        # Verify subkey 0 CAN be set when we override the writer with the
        # correct keypair
        vdtemp = await rc.set_dht_value(key, 0, va, veilid.KeyPair.from_parts(owner, secret))
        assert vdtemp == None

        # Clean up
        await rc.close_dht_record(key)
        await rc.delete_dht_record(key)


@pytest.mark.asyncio
async def test_watch_dht_values():
    value_change_queue: asyncio.Queue[veilid.VeilidUpdate] = asyncio.Queue()

    async def value_change_update_callback(update: veilid.VeilidUpdate):
        if update.kind == veilid.VeilidUpdateKind.VALUE_CHANGE:
            await value_change_queue.put(update)

    try:
        api = await api_connector(value_change_update_callback)
    except VeilidTestConnectionError:
        pytest.skip("Unable to connect to veilid-server.")
        return

    # Make two routing contexts, one with safety enabled and one without, so
    # we can pretend to be a different node and receive the watch updates.
    # Normally updates would not be sent if the set comes from the same
    # target as the watch's target.
    rcWatch = await api.new_routing_context()
    rcSet = await (await api.new_routing_context()).with_safety(
        veilid.SafetySelection.unsafe(veilid.Sequencing.ENSURE_ORDERED)
    )
    async with rcWatch, rcSet:
        # Make a DHT record
        rec = await rcWatch.create_dht_record(veilid.DHTSchema.dflt(10))

        # Set some subkey we care about
        vd = await rcWatch.set_dht_value(rec.key, 3, b"BLAH BLAH BLAH")
        assert vd == None

        # Make a watch on that subkey
        ts = await rcWatch.watch_dht_values(rec.key, [], 0, 0xFFFFFFFF)
        assert ts != 0

        # Reopen without closing to change routing context and not lose the watch
        rec = await rcSet.open_dht_record(rec.key, rec.owner_key_pair())

        # Now set the subkey and trigger an update
        vd = await rcSet.set_dht_value(rec.key, 3, b"BLAH")
        assert vd == None

        # We should NOT get an update because the update is the same as our local copy
        update = None
        try:
            update = await asyncio.wait_for(value_change_queue.get(), timeout=5)
        except asyncio.TimeoutError:
            pass
        assert update == None

        # Now set multiple subkeys and trigger an update
        vd = await asyncio.gather(
            *[
                rcSet.set_dht_value(rec.key, 3, b"BLAH BLAH"),
                rcSet.set_dht_value(rec.key, 4, b"BZORT"),
            ]
        )
        assert vd == [None, None]

        # Wait for the update
        upd = await asyncio.wait_for(value_change_queue.get(), timeout=5)

        # Verify the update came back, but we don't get a new value because
        # the sequence number is the same
        assert upd.detail.key == rec.key
        assert upd.detail.count == 0xFFFFFFFD
        assert upd.detail.subkeys == [(3, 4)]
        assert upd.detail.value == None

        # Reopen without closing to change routing context and not lose the watch
        rec = await rcWatch.open_dht_record(rec.key, rec.owner_key_pair())

        # Cancel some subkeys we don't care about
        still_active = await rcWatch.cancel_dht_watch(rec.key, [(0, 2)])
        assert still_active == True

        # Reopen without closing to change routing context and not lose the watch
        rec = await rcSet.open_dht_record(rec.key, rec.owner_key_pair())

        # Now set multiple subkeys and trigger an update
        vd = await asyncio.gather(
            *[
                rcSet.set_dht_value(rec.key, 3, b"BLAH BLAH BLAH"),
                rcSet.set_dht_value(rec.key, 5, b"BZORT BZORT"),
            ]
        )
        assert vd == [None, None]

        # Wait for the update
        upd = await asyncio.wait_for(value_change_queue.get(), timeout=5)

        # Verify the update came back, but we don't get a new value because
        # the sequence number is the same
        assert upd.detail.key == rec.key
        assert upd.detail.count == 0xFFFFFFFC
        assert upd.detail.subkeys == [(3, 3), (5, 5)]
        assert upd.detail.value == None

        # Reopen without closing to change routing context and not lose the watch
        rec = await rcWatch.open_dht_record(rec.key, rec.owner_key_pair())

        # Now cancel the rest of the watch
        still_active = await rcWatch.cancel_dht_watch(rec.key, [(3, 9)])
        assert still_active == False

        # Reopen without closing to change routing context and not lose the watch
        rec = await rcSet.open_dht_record(rec.key, rec.owner_key_pair())

        # Now set multiple subkeys
        vd = await asyncio.gather(
            *[
                rcSet.set_dht_value(rec.key, 3, b"BLAH BLAH BLAH BLAH"),
                rcSet.set_dht_value(rec.key, 5, b"BZORT BZORT BZORT"),
            ]
        )
        assert vd == [None, None]

        # Now we should NOT get an update because the watch was cancelled
        update = None
        try:
            update = await asyncio.wait_for(value_change_queue.get(), timeout=5)
        except asyncio.TimeoutError:
            pass
        assert update == None

        # Clean up
        await rcSet.close_dht_record(rec.key)
        await rcSet.delete_dht_record(rec.key)


@pytest.mark.asyncio
async def test_inspect_dht_record(api_connection: veilid.VeilidAPI):
    rc = await api_connection.new_routing_context()
    async with rc:
        rec = await rc.create_dht_record(veilid.DHTSchema.dflt(2))

        vd = await rc.set_dht_value(rec.key, 0, b"BLAH BLAH BLAH")
        assert vd == None

        # LOCAL scope reports only what this node has stored
        rr = await rc.inspect_dht_record(rec.key, [], veilid.DHTReportScope.LOCAL)
        print(f"rr: {rr.__dict__}")
        assert rr.subkeys == [[0, 1]]
        assert rr.local_seqs == [0, 0xFFFFFFFF]
        assert rr.network_seqs == []

        # SYNC_GET scope also reports the sequence numbers seen on the network
        rr2 = await rc.inspect_dht_record(rec.key, [], veilid.DHTReportScope.SYNC_GET)
        print(f"rr2: {rr2.__dict__}")
        assert rr2.subkeys == [[0, 1]]
        assert rr2.local_seqs == [0, 0xFFFFFFFF]
        assert rr2.network_seqs == [0, 0xFFFFFFFF]

        await rc.close_dht_record(rec.key)
        await rc.delete_dht_record(rec.key)


@pytest.mark.skipif(os.getenv("INTEGRATION") != "1", reason="integration test requires two servers running")
@pytest.mark.asyncio
async def test_dht_integration_writer_reader():
    async def null_update_callback(update: veilid.VeilidUpdate):
        pass

    try:
        api0 = await api_connector(null_update_callback, 0)
    except VeilidTestConnectionError:
        pytest.skip("Unable to connect to veilid-server 0.")
        return

    try:
        api1 = await api_connector(null_update_callback, 1)
    except VeilidTestConnectionError:
        pytest.skip("Unable to connect to veilid-server 1.")
        return

    async with api0, api1:
        # Purge local and remote record stores to ensure we start fresh
        await api0.debug("record purge local")
        await api0.debug("record purge remote")
        await api1.debug("record purge local")
        await api1.debug("record purge remote")

        # Make routing contexts
        rc0 = await api0.new_routing_context()
        rc1 = await api1.new_routing_context()
        async with rc0, rc1:
            COUNT = 100
            TEST_DATA = b"test data"

            # Write DHT records on server 0
            records = []
            schema = veilid.DHTSchema.dflt(1)
            print(f'writing {COUNT} records')
            for n in range(COUNT):
                desc = await rc0.create_dht_record(schema)
                records.append(desc)

                await rc0.set_dht_value(desc.key, 0, TEST_DATA)

                print(f' {n}')

            # Wait until every subkey has been flushed to the network before
            # closing each record
            print('syncing records to the network')
            for desc0 in records:
                while True:
                    rr = await rc0.inspect_dht_record(desc0.key, [])
                    if len(rr.offline_subkeys) == 0:
                        await rc0.close_dht_record(desc0.key)
                        break
                    # Poll without blocking the event loop
                    await asyncio.sleep(0.1)

            # Read the DHT records back on server 1
            print(f'reading {COUNT} records')
            n = 0
            for desc0 in records:
                desc1 = await rc1.open_dht_record(desc0.key)
                vd1 = await rc1.get_dht_value(desc1.key, 0)
                assert vd1.data == TEST_DATA
                await rc1.close_dht_record(desc1.key)

                print(f' {n}')
                n += 1


@pytest.mark.asyncio
async def test_dht_write_read_local():
    async def null_update_callback(update: veilid.VeilidUpdate):
        pass

    try:
        api0 = await api_connector(null_update_callback, 0)
    except VeilidTestConnectionError:
        pytest.skip("Unable to connect to veilid-server 0.")
        return

    async with api0:
        # Purge local and remote record stores to ensure we start fresh
        await api0.debug("record purge local")
        await api0.debug("record purge remote")

        # Make a routing context
        rc0 = await api0.new_routing_context()
        async with rc0:
            COUNT = 500
            TEST_DATA = b"ABCD" * 1024
            TEST_DATA2 = b"ABCD" * 4096

            # Write DHT records on server 0
            records = []
            schema = veilid.DHTSchema.dflt(2)
            print(f'writing {COUNT} records')
            for n in range(COUNT):
                desc = await rc0.create_dht_record(schema)
                records.append(desc)

                await rc0.set_dht_value(desc.key, 0, TEST_DATA)
                await rc0.set_dht_value(desc.key, 1, TEST_DATA2)

                print(f' {n}')

            # Wait until every subkey has been flushed to the network before
            # closing each record
            print('syncing records to the network')
            for desc0 in records:
                while True:
                    rr = await rc0.inspect_dht_record(desc0.key, [])
                    if len(rr.offline_subkeys) == 0:
                        await rc0.close_dht_record(desc0.key)
                        break
                    # Poll without blocking the event loop
                    await asyncio.sleep(0.1)

            # Read the DHT records back on server 0
            print(f'reading {COUNT} records')
            n = 0
            for desc0 in records:
                desc1 = await rc0.open_dht_record(desc0.key)

                vd0 = await rc0.get_dht_value(desc1.key, 0)
                assert vd0.data == TEST_DATA

                vd1 = await rc0.get_dht_value(desc1.key, 1)
                assert vd1.data == TEST_DATA2
                await rc0.close_dht_record(desc1.key)

                print(f' {n}')
                n += 1