mirror of
https://github.com/monero-project/monero.git
synced 2025-01-08 08:47:51 -05:00
Fix grow_tree, restructure it, and clean the approach
The initial impl didn't capture the following edge case: - Tree has 3 (or more) layers + 1 leaf layer - Leaf layer last chunk IS full - Layer 0 last chunk is NOT full - Layer 1 last chunk is NOT full - Layer 2 last chunk is NOT full In this case, when updating layer 1, we need to use layer 0's old last hash to update layer 1's old last hash. Same for Layer 2. The solution is to use logic that checks the *prev* layer when updating a layer to determine if the old last hash from the prev layer is needed. This commit restructures the grow_tree impl to account for this and simplifies the approach as follows: 1. Read the tree to get num leaf tuples + last hashes in each layer 2. Get the tree extension using the above values + new leaf tuples 2a. Prior to updating the leaf layer, call the function get_update_leaf_layer_metadata. This function uses existing totals in the leaf layer, the new total of leaf tuples, and tree params to calculate how the layer after the leaf layer should be updated. 2b. For each subsequent layer, call the function get_update_layer_metadata. This function uses the existing totals in the *prev* layer, the new total of children in the *prev* layer, and tree params to calculate how the layer should be updated. 3. Grow the tree using the tree extension. This approach isolates update logic and actual hashing into neat structured functions, rather than mix the two. This makes the code easier to follow without needing to keep so much in your head at one time.
This commit is contained in:
parent
8287ba6f78
commit
36f1e1965f
@ -1312,11 +1312,14 @@ void BlockchainLMDB::grow_tree(const fcmp::curve_trees::CurveTreesV1 &curve_tree
|
||||
|
||||
CURSOR(leaves)
|
||||
|
||||
// Read every layer's last chunk data
|
||||
const auto last_chunks = this->get_tree_last_chunks(curve_trees);
|
||||
// Get the number of leaf tuples that exist in the tree
|
||||
const std::size_t old_n_leaf_tuples = this->get_num_leaf_tuples();
|
||||
|
||||
// Using the last chunk data and new leaves, get a struct we can use to extend the tree
|
||||
const auto tree_extension = curve_trees.get_tree_extension(last_chunks, new_leaves);
|
||||
// Read every layer's last hashes
|
||||
const auto last_hashes = this->get_tree_last_hashes();
|
||||
|
||||
// Use the number of leaf tuples and the existing last hashes to get a struct we can use to extend the tree
|
||||
const auto tree_extension = curve_trees.get_tree_extension(old_n_leaf_tuples, last_hashes, new_leaves);
|
||||
|
||||
// Insert the leaves
|
||||
// TODO: grow_leaves
|
||||
@ -1354,15 +1357,10 @@ void BlockchainLMDB::grow_tree(const fcmp::curve_trees::CurveTreesV1 &curve_tree
|
||||
throw0(DB_ERROR(("Growing odd c2 layer, expected even layer idx for c1: "
|
||||
+ std::to_string(layer_idx)).c_str()));
|
||||
|
||||
const auto *c2_last_chunk_ptr = (c2_idx >= last_chunks.c2_last_chunks.size())
|
||||
? nullptr
|
||||
: &last_chunks.c2_last_chunks[c2_idx];
|
||||
|
||||
this->grow_layer<fcmp::curve_trees::Selene>(curve_trees.m_c2,
|
||||
c2_extensions,
|
||||
c2_idx,
|
||||
layer_idx,
|
||||
c2_last_chunk_ptr);
|
||||
layer_idx);
|
||||
|
||||
++c2_idx;
|
||||
}
|
||||
@ -1372,15 +1370,10 @@ void BlockchainLMDB::grow_tree(const fcmp::curve_trees::CurveTreesV1 &curve_tree
|
||||
throw0(DB_ERROR(("Growing even c1 layer, expected odd layer idx for c2: "
|
||||
+ std::to_string(layer_idx)).c_str()));
|
||||
|
||||
const auto *c1_last_chunk_ptr = (c1_idx >= last_chunks.c1_last_chunks.size())
|
||||
? nullptr
|
||||
: &last_chunks.c1_last_chunks[c1_idx];
|
||||
|
||||
this->grow_layer<fcmp::curve_trees::Helios>(curve_trees.m_c1,
|
||||
c1_extensions,
|
||||
c1_idx,
|
||||
layer_idx,
|
||||
c1_last_chunk_ptr);
|
||||
layer_idx);
|
||||
|
||||
++c1_idx;
|
||||
}
|
||||
@ -1393,8 +1386,7 @@ template<typename C>
|
||||
void BlockchainLMDB::grow_layer(const C &curve,
|
||||
const std::vector<fcmp::curve_trees::LayerExtension<C>> &layer_extensions,
|
||||
const std::size_t ext_idx,
|
||||
const std::size_t layer_idx,
|
||||
const fcmp::curve_trees::LastChunkData<C> *last_chunk_ptr)
|
||||
const std::size_t layer_idx)
|
||||
{
|
||||
LOG_PRINT_L3("BlockchainLMDB::" << __func__);
|
||||
check_open();
|
||||
@ -1407,12 +1399,11 @@ void BlockchainLMDB::grow_layer(const C &curve,
|
||||
|
||||
CHECK_AND_ASSERT_THROW_MES(!ext.hashes.empty(), "empty layer extension");
|
||||
|
||||
// TODO: make sure last_chunk_ptr->next_start_child_chunk_idx lines up
|
||||
// TODO: make sure ext.start_idx lines up with the end of the layer
|
||||
|
||||
MDB_val_copy<std::size_t> k(layer_idx);
|
||||
|
||||
const bool update_last_parent = last_chunk_ptr != nullptr && last_chunk_ptr->update_last_parent;
|
||||
if (update_last_parent)
|
||||
if (ext.update_existing_last_hash)
|
||||
{
|
||||
// We updated the last hash, so update it
|
||||
layer_val<C> lv;
|
||||
@ -1421,14 +1412,14 @@ void BlockchainLMDB::grow_layer(const C &curve,
|
||||
MDB_val_set(v, lv);
|
||||
|
||||
// We expect to overwrite the existing hash
|
||||
// TODO: make sure the hash already exists
|
||||
// TODO: make sure the hash already exists and is the existing last hash
|
||||
int result = mdb_cursor_put(m_cur_layers, &k, &v, 0);
|
||||
if (result != MDB_SUCCESS)
|
||||
throw0(DB_ERROR(lmdb_error("Failed to update chunk hash: ", result).c_str()));
|
||||
}
|
||||
|
||||
// Now add all the new hashes found in the extension
|
||||
for (std::size_t i = update_last_parent ? 1 : 0; i < ext.hashes.size(); ++i)
|
||||
for (std::size_t i = ext.update_existing_last_hash ? 1 : 0; i < ext.hashes.size(); ++i)
|
||||
{
|
||||
layer_val<C> lv;
|
||||
lv.child_chunk_idx = i + ext.start_idx;
|
||||
@ -1444,62 +1435,46 @@ void BlockchainLMDB::grow_layer(const C &curve,
|
||||
}
|
||||
}
|
||||
|
||||
template<typename C>
|
||||
static fcmp::curve_trees::LastChunkData<C> get_last_child_layer_chunk(const bool update_last_parent,
|
||||
const std::size_t parent_layer_size,
|
||||
const typename C::Point &last_parent,
|
||||
const typename C::Scalar &last_child)
|
||||
{
|
||||
if (update_last_parent)
|
||||
CHECK_AND_ASSERT_THROW_MES(parent_layer_size > 0, "empty parent layer");
|
||||
|
||||
// If updating last parent, the next start will be the last parent's index, else we start at the tip
|
||||
const std::size_t next_start_child_chunk_index = update_last_parent
|
||||
? (parent_layer_size - 1)
|
||||
: parent_layer_size;
|
||||
|
||||
return fcmp::curve_trees::LastChunkData<C>{
|
||||
.next_start_child_chunk_index = next_start_child_chunk_index,
|
||||
.last_parent = last_parent,
|
||||
.update_last_parent = update_last_parent,
|
||||
.last_child = last_child
|
||||
};
|
||||
}
|
||||
|
||||
fcmp::curve_trees::CurveTreesV1::LastChunks BlockchainLMDB::get_tree_last_chunks(
|
||||
const fcmp::curve_trees::CurveTreesV1 &curve_trees) const
|
||||
std::size_t BlockchainLMDB::get_num_leaf_tuples() const
|
||||
{
|
||||
LOG_PRINT_L3("BlockchainLMDB::" << __func__);
|
||||
check_open();
|
||||
|
||||
TXN_PREFIX_RDONLY();
|
||||
RCURSOR(leaves)
|
||||
RCURSOR(layers)
|
||||
|
||||
fcmp::curve_trees::CurveTreesV1::LastChunks last_chunks;
|
||||
fcmp::curve_trees::CurveTreesV1::LastHashes last_hashes;
|
||||
|
||||
// Get the number of leaf tuples in the tree
|
||||
std::uint64_t n_leaf_tuples = 0;
|
||||
|
||||
// Get the number of leaves in the tree
|
||||
std::uint64_t num_leaf_tuples = 0;
|
||||
{
|
||||
MDB_val k, v;
|
||||
int result = mdb_cursor_get(m_cur_leaves, &k, &v, MDB_LAST);
|
||||
if (result == MDB_NOTFOUND)
|
||||
num_leaf_tuples = 0;
|
||||
n_leaf_tuples = 0;
|
||||
else if (result == MDB_SUCCESS)
|
||||
num_leaf_tuples = (1 + (*(const std::size_t*)k.mv_data)) * fcmp::curve_trees::CurveTreesV1::LEAF_TUPLE_SIZE;
|
||||
n_leaf_tuples = (1 + (*(const std::size_t*)k.mv_data));
|
||||
else
|
||||
throw0(DB_ERROR(lmdb_error("Failed to get last leaf: ", result).c_str()));
|
||||
}
|
||||
last_chunks.next_start_leaf_index = num_leaf_tuples;
|
||||
|
||||
MDEBUG(num_leaf_tuples << " total leaf tuples in the tree");
|
||||
TXN_POSTFIX_RDONLY();
|
||||
|
||||
// Now set the last chunk data from each layer
|
||||
auto &c1_last_chunks_out = last_chunks.c1_last_chunks;
|
||||
auto &c2_last_chunks_out = last_chunks.c2_last_chunks;
|
||||
return n_leaf_tuples;
|
||||
}
|
||||
|
||||
// Check if we'll need to update the last parent in each layer
|
||||
const bool update_last_parent = (num_leaf_tuples % curve_trees.m_leaf_layer_chunk_width) > 0;
|
||||
fcmp::curve_trees::CurveTreesV1::LastHashes BlockchainLMDB::get_tree_last_hashes() const
|
||||
{
|
||||
LOG_PRINT_L3("BlockchainLMDB::" << __func__);
|
||||
check_open();
|
||||
|
||||
TXN_PREFIX_RDONLY();
|
||||
RCURSOR(layers)
|
||||
|
||||
fcmp::curve_trees::CurveTreesV1::LastHashes last_hashes;
|
||||
auto &c1_last_hashes = last_hashes.c1_last_hashes;
|
||||
auto &c2_last_hashes = last_hashes.c2_last_hashes;
|
||||
|
||||
// Traverse the tree layer-by-layer starting at the layer closest to leaf layer
|
||||
std::size_t layer_idx = 0;
|
||||
@ -1522,57 +1497,18 @@ fcmp::curve_trees::CurveTreesV1::LastChunks BlockchainLMDB::get_tree_last_chunks
|
||||
if (result != MDB_SUCCESS)
|
||||
throw0(DB_ERROR(lmdb_error("Failed to get last record in layer: ", result).c_str()));
|
||||
|
||||
// First push the last leaf chunk data into c2 chunks
|
||||
if (layer_idx == 0)
|
||||
{
|
||||
const auto *lv = (layer_val<fcmp::curve_trees::Selene> *)v.mv_data;
|
||||
MDEBUG("Selene, layer_idx: " << layer_idx << " , lv->child_chunk_idx: " << lv->child_chunk_idx);
|
||||
|
||||
auto last_leaf_chunk = get_last_child_layer_chunk<fcmp::curve_trees::Selene>(
|
||||
/*update_last_parent*/ update_last_parent,
|
||||
/*parent_layer_size */ lv->child_chunk_idx + 1,
|
||||
/*last_parent */ lv->child_chunk_hash,
|
||||
// Since the leaf layer is append-only, we'll never need access to the last child
|
||||
/*last_child */ curve_trees.m_c2.zero_scalar());
|
||||
|
||||
c2_last_chunks_out.push_back(std::move(last_leaf_chunk));
|
||||
|
||||
++layer_idx;
|
||||
continue;
|
||||
}
|
||||
|
||||
// Then push last chunk data from subsequent layers, alternating c1 -> c2 -> c1 -> ...
|
||||
// TODO: template below if statement
|
||||
const bool use_c2 = (layer_idx % 2) == 0;
|
||||
if (use_c2)
|
||||
{
|
||||
const auto *lv = (layer_val<fcmp::curve_trees::Selene> *)v.mv_data;
|
||||
MDEBUG("Selene, layer_idx: " << layer_idx << " , lv->child_chunk_idx: " << lv->child_chunk_idx);
|
||||
|
||||
const auto &last_child = curve_trees.m_c1.point_to_cycle_scalar(c1_last_chunks_out.back().last_parent);
|
||||
|
||||
auto last_parent_chunk = get_last_child_layer_chunk<fcmp::curve_trees::Selene>(
|
||||
update_last_parent,
|
||||
lv->child_chunk_idx + 1,
|
||||
lv->child_chunk_hash,
|
||||
last_child);
|
||||
|
||||
c2_last_chunks_out.push_back(std::move(last_parent_chunk));
|
||||
c2_last_hashes.emplace_back(std::move(lv->child_chunk_hash));
|
||||
}
|
||||
else
|
||||
{
|
||||
const auto *lv = (layer_val<fcmp::curve_trees::Helios> *)v.mv_data;
|
||||
MDEBUG("Helios, layer_idx: " << layer_idx << " , lv->child_chunk_idx: " << lv->child_chunk_idx);
|
||||
|
||||
const auto &last_child = curve_trees.m_c2.point_to_cycle_scalar(c2_last_chunks_out.back().last_parent);
|
||||
|
||||
auto last_parent_chunk = get_last_child_layer_chunk<fcmp::curve_trees::Helios>(
|
||||
update_last_parent,
|
||||
lv->child_chunk_idx + 1,
|
||||
lv->child_chunk_hash,
|
||||
last_child);
|
||||
|
||||
c1_last_chunks_out.push_back(std::move(last_parent_chunk));
|
||||
c1_last_hashes.emplace_back(std::move(lv->child_chunk_hash));
|
||||
}
|
||||
|
||||
++layer_idx;
|
||||
@ -1580,7 +1516,7 @@ fcmp::curve_trees::CurveTreesV1::LastChunks BlockchainLMDB::get_tree_last_chunks
|
||||
|
||||
TXN_POSTFIX_RDONLY();
|
||||
|
||||
return last_chunks;
|
||||
return last_hashes;
|
||||
}
|
||||
|
||||
bool BlockchainLMDB::audit_tree(const fcmp::curve_trees::CurveTreesV1 &curve_trees) const
|
||||
|
@ -416,11 +416,11 @@ private:
|
||||
void grow_layer(const C &curve,
|
||||
const std::vector<fcmp::curve_trees::LayerExtension<C>> &layer_extensions,
|
||||
const std::size_t c_idx,
|
||||
const std::size_t layer_idx,
|
||||
const fcmp::curve_trees::LastChunkData<C> *last_chunk_data);
|
||||
const std::size_t layer_idx);
|
||||
|
||||
fcmp::curve_trees::CurveTreesV1::LastChunks get_tree_last_chunks(
|
||||
const fcmp::curve_trees::CurveTreesV1 &curve_trees) const;
|
||||
std::size_t get_num_leaf_tuples() const;
|
||||
|
||||
fcmp::curve_trees::CurveTreesV1::LastHashes get_tree_last_hashes() const;
|
||||
|
||||
template<typename C_CHILD, typename C_PARENT>
|
||||
bool audit_layer(const C_CHILD &c_child,
|
||||
|
@ -57,124 +57,109 @@ template Selene::Point get_new_parent<Selene>(const Selene &curve, const typenam
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
// Static functions
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
// Hash the first chunk of the children now being added to a layer
|
||||
template<typename C>
|
||||
static typename C::Point get_first_parent(const C &curve,
|
||||
const typename C::Chunk &new_children,
|
||||
const std::size_t chunk_width,
|
||||
const LastChunkData<C> *last_chunk_ptr,
|
||||
const std::size_t offset)
|
||||
{
|
||||
// If no last chunk exists, we can get a new parent
|
||||
if (last_chunk_ptr == nullptr)
|
||||
return get_new_parent<C>(curve, new_children);
|
||||
|
||||
typename C::Scalar prior_child_after_offset;
|
||||
if (last_chunk_ptr->update_last_parent)
|
||||
{
|
||||
// If the last parent has an updated child in it, then we need to get the delta to the old child
|
||||
prior_child_after_offset = last_chunk_ptr->last_child;
|
||||
}
|
||||
else if (offset > 0)
|
||||
{
|
||||
// If we're not updating the last parent hash and offset is non-zero, then we must be adding new children
|
||||
// to the existing last chunk. New children means no prior child after offset exists, use zero scalar
|
||||
prior_child_after_offset = curve.zero_scalar();
|
||||
}
|
||||
else
|
||||
{
|
||||
// If we're not updating the last parent and the last chunk is already full, we can get a new parent
|
||||
return get_new_parent<C>(curve, new_children);
|
||||
}
|
||||
|
||||
MDEBUG("Updating existing hash: " << curve.to_string(last_chunk_ptr->last_parent) << " , offset: " << offset
|
||||
<< ", prior_child_after_offset: " << curve.to_string(prior_child_after_offset));
|
||||
|
||||
return curve.hash_grow(
|
||||
last_chunk_ptr->last_parent,
|
||||
offset,
|
||||
prior_child_after_offset,
|
||||
new_children
|
||||
);
|
||||
};
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
// After hashing a layer of children points, convert those children x-coordinates into their respective cycle
|
||||
// scalars, and prepare them to be hashed for the next layer
|
||||
template<typename C_CHILD, typename C_PARENT>
|
||||
static std::size_t next_child_scalars_from_children(const C_CHILD &c_child,
|
||||
const bool updating_root_layer,
|
||||
const LastChunkData<C_CHILD> *last_child_chunk_ptr,
|
||||
const LayerExtension<C_CHILD> &children,
|
||||
std::vector<typename C_PARENT::Scalar> &child_scalars_out)
|
||||
static std::vector<typename C_PARENT::Scalar> next_child_scalars_from_children(const C_CHILD &c_child,
|
||||
const typename C_CHILD::Point *last_root,
|
||||
const LayerExtension<C_CHILD> &children)
|
||||
{
|
||||
child_scalars_out.clear();
|
||||
std::vector<typename C_PARENT::Scalar> child_scalars_out;
|
||||
child_scalars_out.reserve(1 + children.hashes.size());
|
||||
|
||||
std::uint64_t next_child_start_index = children.start_idx;
|
||||
|
||||
// If we're creating a *new* root at the existing root layer, we may need to include the *existing* root when
|
||||
// hashing the *existing* root layer
|
||||
if (updating_root_layer)
|
||||
if (last_root != nullptr)
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(last_child_chunk_ptr != nullptr, "last child chunk does not exist at root");
|
||||
|
||||
// If the children don't already include the existing root, then we need to include it to be hashed
|
||||
// - the children would include the existing root already if the existing root was updated in the child
|
||||
// layer (the start_idx would be 0)
|
||||
if (next_child_start_index > 0)
|
||||
if (children.start_idx > 0)
|
||||
{
|
||||
MDEBUG("Updating root layer and including the existing root in next children");
|
||||
child_scalars_out.emplace_back(c_child.point_to_cycle_scalar(last_child_chunk_ptr->last_parent));
|
||||
--next_child_start_index;
|
||||
child_scalars_out.emplace_back(c_child.point_to_cycle_scalar(*last_root));
|
||||
}
|
||||
}
|
||||
|
||||
// Convert child points to scalars
|
||||
tower_cycle::extend_scalars_from_cycle_points<C_CHILD, C_PARENT>(c_child, children.hashes, child_scalars_out);
|
||||
|
||||
return next_child_start_index;
|
||||
return child_scalars_out;
|
||||
};
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
// Hash chunks of a layer of new children, outputting the next layer's parents
|
||||
template<typename C>
|
||||
static void hash_layer(const C &curve,
|
||||
const LastChunkData<C> *last_chunk_ptr,
|
||||
const std::vector<typename C::Scalar> &child_scalars,
|
||||
const std::size_t child_start_idx,
|
||||
const std::size_t chunk_width,
|
||||
LayerExtension<C> &parents_out)
|
||||
static LayerExtension<C> hash_children_chunks(const C &curve,
|
||||
const typename C::Scalar *old_last_child,
|
||||
const typename C::Point *old_last_parent,
|
||||
const std::size_t start_offset,
|
||||
const std::size_t next_parent_start_index,
|
||||
const std::vector<typename C::Scalar> &new_child_scalars,
|
||||
const std::size_t chunk_width)
|
||||
{
|
||||
parents_out.start_idx = (last_chunk_ptr == nullptr) ? 0 : last_chunk_ptr->next_start_child_chunk_index;
|
||||
parents_out.hashes.clear();
|
||||
LayerExtension<C> parents_out;
|
||||
parents_out.start_idx = next_parent_start_index;
|
||||
parents_out.update_existing_last_hash = old_last_parent != nullptr;
|
||||
parents_out.hashes.reserve(1 + (new_child_scalars.size() / chunk_width));
|
||||
|
||||
CHECK_AND_ASSERT_THROW_MES(!child_scalars.empty(), "empty child scalars");
|
||||
|
||||
const std::size_t offset = child_start_idx % chunk_width;
|
||||
CHECK_AND_ASSERT_THROW_MES(!new_child_scalars.empty(), "empty child scalars");
|
||||
CHECK_AND_ASSERT_THROW_MES(chunk_width > start_offset, "start_offset must be smaller than chunk_width");
|
||||
|
||||
// See how many children we need to fill up the existing last chunk
|
||||
std::size_t chunk_size = std::min(child_scalars.size(), chunk_width - offset);
|
||||
std::size_t chunk_size = std::min(new_child_scalars.size(), chunk_width - start_offset);
|
||||
|
||||
MDEBUG("Starting chunk_size: " << chunk_size << " , num child scalars: " << child_scalars.size()
|
||||
<< " , offset: " << offset);
|
||||
MDEBUG("First chunk_size: " << chunk_size << " , num new child scalars: " << new_child_scalars.size()
|
||||
<< " , start_offset: " << start_offset << " , parent layer start idx: " << parents_out.start_idx);
|
||||
|
||||
// Hash the first chunk
|
||||
// TODO: separate function
|
||||
{
|
||||
// Prepare to hash
|
||||
const auto &existing_hash = old_last_parent != nullptr
|
||||
? *old_last_parent
|
||||
: curve.m_hash_init_point;
|
||||
|
||||
const auto &prior_child_after_offset = old_last_child != nullptr
|
||||
? *old_last_child
|
||||
: curve.zero_scalar();
|
||||
|
||||
const auto chunk_start = new_child_scalars.data();
|
||||
const typename C::Chunk chunk{chunk_start, chunk_size};
|
||||
|
||||
MDEBUG("existing_hash: " << curve.to_string(existing_hash) << " , start_offset: " << start_offset
|
||||
<< " , prior_child_after_offset: " << curve.to_string(prior_child_after_offset));
|
||||
|
||||
for (std::size_t i = 0; i < chunk_size; ++i)
|
||||
MDEBUG("Hashing child " << curve.to_string(chunk_start[i]));
|
||||
|
||||
// Do the hash
|
||||
auto chunk_hash = curve.hash_grow(
|
||||
existing_hash,
|
||||
start_offset,
|
||||
prior_child_after_offset,
|
||||
chunk
|
||||
);
|
||||
|
||||
MDEBUG("Child chunk_start_idx " << 0 << " result: " << curve.to_string(chunk_hash)
|
||||
<< " , chunk_size: " << chunk_size);
|
||||
|
||||
// We've got our hash
|
||||
parents_out.hashes.emplace_back(std::move(chunk_hash));
|
||||
}
|
||||
|
||||
// Hash chunks of child scalars to create the parent hashes
|
||||
std::size_t chunk_start_idx = 0;
|
||||
while (chunk_start_idx < child_scalars.size())
|
||||
std::size_t chunk_start_idx = chunk_size;
|
||||
while (chunk_start_idx < new_child_scalars.size())
|
||||
{
|
||||
const auto chunk_start = child_scalars.data() + chunk_start_idx;
|
||||
chunk_size = std::min(chunk_width, new_child_scalars.size() - chunk_start_idx);
|
||||
|
||||
const auto chunk_start = new_child_scalars.data() + chunk_start_idx;
|
||||
const typename C::Chunk chunk{chunk_start, chunk_size};
|
||||
|
||||
for (std::size_t i = 0; i < chunk_size; ++i)
|
||||
MDEBUG("Hashing child " << curve.to_string(chunk_start[i]));
|
||||
|
||||
// Hash the chunk of children
|
||||
typename C::Point chunk_hash = chunk_start_idx == 0
|
||||
? get_first_parent<C>(curve,
|
||||
chunk,
|
||||
chunk_width,
|
||||
last_chunk_ptr,
|
||||
offset)
|
||||
: get_new_parent<C>(curve, chunk);
|
||||
auto chunk_hash = get_new_parent(curve, chunk);
|
||||
|
||||
MDEBUG("Child chunk_start_idx " << chunk_start_idx << " result: " << curve.to_string(chunk_hash)
|
||||
<< " , chunk_size: " << chunk_size);
|
||||
@ -185,16 +170,247 @@ static void hash_layer(const C &curve,
|
||||
// Advance to the next chunk
|
||||
chunk_start_idx += chunk_size;
|
||||
|
||||
// Prepare for next loop if there should be one
|
||||
if (chunk_start_idx == child_scalars.size())
|
||||
break;
|
||||
|
||||
// Fill a complete chunk, or add the remaining new children to the last chunk
|
||||
CHECK_AND_ASSERT_THROW_MES(chunk_start_idx < child_scalars.size(), "unexpected chunk start idx");
|
||||
chunk_size = std::min(chunk_width, child_scalars.size() - chunk_start_idx);
|
||||
CHECK_AND_ASSERT_THROW_MES(chunk_start_idx <= new_child_scalars.size(), "unexpected chunk start idx");
|
||||
}
|
||||
|
||||
return parents_out;
|
||||
};
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
static UpdateLayerMetadata get_update_layer_metadata(const std::size_t old_total_children,
|
||||
const std::size_t new_total_children,
|
||||
const std::size_t parent_chunk_width,
|
||||
const bool last_child_will_change)
|
||||
{
|
||||
// 1. Check pre-conditions on total number of children
|
||||
// - If there's only 1 old child, it must be the old root, and we must be setting a new parent layer after old root
|
||||
const bool setting_next_layer_after_old_root = old_total_children == 1;
|
||||
if (setting_next_layer_after_old_root)
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(new_total_children > old_total_children,
|
||||
"new_total_children must be > old_total_children when setting next layer after old root");
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(new_total_children >= old_total_children,
|
||||
"new_total_children must be >= old_total_children");
|
||||
}
|
||||
|
||||
// 2. Calculate old and new total number of parents using totals for children
|
||||
// If there's only 1 child, then it must be the old root and thus it would have no old parents
|
||||
const std::size_t old_total_parents = old_total_children > 1
|
||||
? (1 + ((old_total_children - 1) / parent_chunk_width))
|
||||
: 0;
|
||||
const std::size_t new_total_parents = 1 + ((new_total_children - 1) / parent_chunk_width);
|
||||
|
||||
// 3. Check pre-conditions on total number of parents
|
||||
CHECK_AND_ASSERT_THROW_MES(new_total_parents >= old_total_parents,
|
||||
"new_total_parents must be >= old_total_parents");
|
||||
CHECK_AND_ASSERT_THROW_MES(new_total_parents < new_total_children,
|
||||
"new_total_parents must be < new_total_children");
|
||||
|
||||
if (setting_next_layer_after_old_root)
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(old_total_parents == 0,
|
||||
"old_total_parents expected to be 0 when setting next layer after old root");
|
||||
}
|
||||
|
||||
// 4. Set the current offset in the last chunk
|
||||
// - Note: this value starts at the last child in the last chunk, but it might need to be decremented by 1 if we're
|
||||
// changing that last child
|
||||
std::size_t offset = old_total_parents > 0
|
||||
? (old_total_children % parent_chunk_width)
|
||||
: 0;
|
||||
|
||||
// 5. Check if the last chunk is full (keep in mind it's also possible it's empty)
|
||||
const bool last_chunk_is_full = offset == 0;
|
||||
|
||||
// 6. When the last child changes, we'll need to use its old value to update the parent
|
||||
// - We only care if the child has a parent, otherwise we won't need the child's old value to update the parent
|
||||
// (since there is no parent to update)
|
||||
const bool need_old_last_child = old_total_parents > 0 && last_child_will_change;
|
||||
|
||||
// 7. If we're changing the last child, we need to subtract the offset by 1 to account for that child
|
||||
if (need_old_last_child)
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(old_total_children > 0, "no old children but last child is supposed to change");
|
||||
|
||||
// If the chunk is full, must subtract the chunk width by 1
|
||||
offset = offset == 0 ? (parent_chunk_width - 1) : (offset - 1);
|
||||
}
|
||||
|
||||
// 8. When the last parent changes, we'll need to use its old value to update itself
|
||||
const bool adding_members_to_existing_last_chunk = old_total_parents > 0 && !last_chunk_is_full
|
||||
&& new_total_children > old_total_children;
|
||||
const bool need_old_last_parent = need_old_last_child || adding_members_to_existing_last_chunk;
|
||||
|
||||
// 9. Set the next parent's start index
|
||||
std::size_t next_parent_start_index = old_total_parents;
|
||||
if (need_old_last_parent)
|
||||
{
|
||||
// If we're updating the last parent, we need to bring the starting parent index back 1
|
||||
CHECK_AND_ASSERT_THROW_MES(old_total_parents > 0, "no old parents but last parent is supposed to change1");
|
||||
--next_parent_start_index;
|
||||
}
|
||||
|
||||
// Done
|
||||
MDEBUG("parent_chunk_width: " << parent_chunk_width
|
||||
<< " , old_total_children: " << old_total_children
|
||||
<< " , new_total_children: " << new_total_children
|
||||
<< " , old_total_parents: " << old_total_parents
|
||||
<< " , new_total_parents: " << new_total_parents
|
||||
<< " , setting_next_layer_after_old_root: " << setting_next_layer_after_old_root
|
||||
<< " , need_old_last_child: " << need_old_last_child
|
||||
<< " , need_old_last_parent: " << need_old_last_parent
|
||||
<< " , start_offset: " << offset
|
||||
<< " , next_parent_start_index: " << next_parent_start_index);
|
||||
|
||||
return UpdateLayerMetadata{
|
||||
.parent_chunk_width = parent_chunk_width,
|
||||
.old_total_children = old_total_children,
|
||||
.new_total_children = new_total_children,
|
||||
.old_total_parents = old_total_parents,
|
||||
.new_total_parents = new_total_parents,
|
||||
.setting_next_layer_after_old_root = setting_next_layer_after_old_root,
|
||||
.need_old_last_child = need_old_last_child,
|
||||
.need_old_last_parent = need_old_last_parent,
|
||||
.start_offset = offset,
|
||||
.next_parent_start_index = next_parent_start_index,
|
||||
};
|
||||
|
||||
};
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
static UpdateLayerMetadata get_update_leaf_layer_metadata(const std::size_t old_n_leaf_tuples,
|
||||
const std::size_t new_n_leaf_tuples,
|
||||
const std::size_t leaf_tuple_size,
|
||||
const std::size_t leaf_layer_chunk_width)
|
||||
{
|
||||
// TODO: comments
|
||||
|
||||
// The leaf layer can never be the root layer
|
||||
const bool setting_next_layer_after_old_root = false;
|
||||
|
||||
const std::size_t old_total_children = old_n_leaf_tuples * leaf_tuple_size;
|
||||
const std::size_t new_total_children = (old_n_leaf_tuples + new_n_leaf_tuples) * leaf_tuple_size;
|
||||
|
||||
const std::size_t old_total_parents = old_total_children > 0
|
||||
? (1 + ((old_total_children - 1) / leaf_layer_chunk_width))
|
||||
: 0;
|
||||
const std::size_t new_total_parents = 1 + ((new_total_children - 1) / leaf_layer_chunk_width);
|
||||
|
||||
CHECK_AND_ASSERT_THROW_MES(new_total_children >= old_total_children,
|
||||
"new_total_children must be >= old_total_children");
|
||||
CHECK_AND_ASSERT_THROW_MES(new_total_parents >= old_total_parents,
|
||||
"new_total_parents must be >= old_total_parents");
|
||||
|
||||
// Since leaf layer is append-only, no leaf can ever change and we'll never need an old leaf
|
||||
const bool need_old_last_child = false;
|
||||
|
||||
const std::size_t offset = old_total_children % leaf_layer_chunk_width;
|
||||
|
||||
const bool last_chunk_is_full = offset == 0;
|
||||
const bool adding_members_to_existing_last_chunk = old_total_parents > 0 && !last_chunk_is_full
|
||||
&& new_total_children > old_total_children;
|
||||
const bool need_old_last_parent = adding_members_to_existing_last_chunk;
|
||||
|
||||
std::size_t next_parent_start_index = old_total_parents;
|
||||
if (need_old_last_parent)
|
||||
{
|
||||
// If we're updating the last parent, we need to bring the starting parent index back 1
|
||||
CHECK_AND_ASSERT_THROW_MES(old_total_parents > 0, "no old parents but last parent is supposed to change2");
|
||||
--next_parent_start_index;
|
||||
}
|
||||
|
||||
MDEBUG("parent_chunk_width: " << leaf_layer_chunk_width
|
||||
<< " , old_total_children: " << old_total_children
|
||||
<< " , new_total_children: " << new_total_children
|
||||
<< " , old_total_parents: " << old_total_parents
|
||||
<< " , new_total_parents: " << new_total_parents
|
||||
<< " , setting_next_layer_after_old_root: " << setting_next_layer_after_old_root
|
||||
<< " , need_old_last_child: " << need_old_last_child
|
||||
<< " , need_old_last_parent: " << need_old_last_parent
|
||||
<< " , start_offset: " << offset
|
||||
<< " , next_parent_start_index: " << next_parent_start_index);
|
||||
|
||||
return UpdateLayerMetadata{
|
||||
.parent_chunk_width = leaf_layer_chunk_width,
|
||||
.old_total_children = old_total_children,
|
||||
.new_total_children = new_total_children,
|
||||
.old_total_parents = old_total_parents,
|
||||
.new_total_parents = new_total_parents,
|
||||
.setting_next_layer_after_old_root = setting_next_layer_after_old_root,
|
||||
.need_old_last_child = need_old_last_child,
|
||||
.need_old_last_parent = need_old_last_parent,
|
||||
.start_offset = offset,
|
||||
.next_parent_start_index = next_parent_start_index,
|
||||
};
|
||||
};
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
// Helper function used to get the next layer extension used to grow the next layer in the tree
|
||||
// - for example, if we just grew the parent layer after the leaf layer, the "next layer" would be the grandparent
|
||||
// layer of the leaf layer
|
||||
template<typename C_CHILD, typename C_PARENT>
|
||||
static LayerExtension<C_PARENT> get_next_layer_extension(const C_CHILD &c_child,
|
||||
const C_PARENT &c_parent,
|
||||
const UpdateLayerMetadata &update_layer_metadata,
|
||||
const std::vector<typename C_CHILD::Point> &child_last_hashes,
|
||||
const std::vector<typename C_PARENT::Point> &parent_last_hashes,
|
||||
const std::vector<LayerExtension<C_CHILD>> child_layer_extensions,
|
||||
const std::size_t last_updated_child_idx,
|
||||
const std::size_t last_updated_parent_idx)
|
||||
{
|
||||
// TODO: comments
|
||||
const auto *child_last_hash = (last_updated_child_idx >= child_last_hashes.size())
|
||||
? nullptr
|
||||
: &child_last_hashes[last_updated_child_idx];
|
||||
|
||||
const auto *parent_last_hash = (last_updated_parent_idx >= parent_last_hashes.size())
|
||||
? nullptr
|
||||
: &parent_last_hashes[last_updated_parent_idx];
|
||||
|
||||
// Pre-conditions
|
||||
CHECK_AND_ASSERT_THROW_MES(last_updated_child_idx < child_layer_extensions.size(), "missing child layer");
|
||||
const auto &child_extension = child_layer_extensions[last_updated_child_idx];
|
||||
|
||||
if (update_layer_metadata.setting_next_layer_after_old_root)
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES((last_updated_child_idx + 1) == child_last_hashes.size(),
|
||||
"unexpected last updated child idx");
|
||||
CHECK_AND_ASSERT_THROW_MES(child_last_hash != nullptr, "missing last child when setting layer after old root");
|
||||
}
|
||||
|
||||
const auto child_scalars = next_child_scalars_from_children<C_CHILD, C_PARENT>(c_child,
|
||||
update_layer_metadata.setting_next_layer_after_old_root ? child_last_hash : nullptr,
|
||||
child_extension);
|
||||
|
||||
if (update_layer_metadata.need_old_last_parent)
|
||||
CHECK_AND_ASSERT_THROW_MES(parent_last_hash != nullptr, "missing last parent");
|
||||
|
||||
typename C_PARENT::Scalar last_child_scalar;
|
||||
if (update_layer_metadata.need_old_last_child)
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(child_last_hash != nullptr, "missing last child");
|
||||
last_child_scalar = c_child.point_to_cycle_scalar(*child_last_hash);
|
||||
}
|
||||
|
||||
// Do the hashing
|
||||
LayerExtension<C_PARENT> layer_extension = hash_children_chunks(
|
||||
c_parent,
|
||||
update_layer_metadata.need_old_last_child ? &last_child_scalar : nullptr,
|
||||
update_layer_metadata.need_old_last_parent ? parent_last_hash : nullptr,
|
||||
update_layer_metadata.start_offset,
|
||||
update_layer_metadata.next_parent_start_index,
|
||||
child_scalars,
|
||||
update_layer_metadata.parent_chunk_width
|
||||
);
|
||||
|
||||
CHECK_AND_ASSERT_THROW_MES((layer_extension.start_idx + layer_extension.hashes.size()) ==
|
||||
update_layer_metadata.new_total_parents,
|
||||
"unexpected num parents extended");
|
||||
|
||||
return layer_extension;
|
||||
}
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
// CurveTrees public member functions
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
@ -214,8 +430,69 @@ CurveTrees<Helios, Selene>::LeafTuple CurveTrees<Helios, Selene>::output_to_leaf
|
||||
};
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
template<typename C1, typename C2>
|
||||
UpdateLayerMetadata CurveTrees<C1, C2>::set_next_layer_extension(
|
||||
const UpdateLayerMetadata &prev_layer_metadata,
|
||||
const bool parent_is_c1,
|
||||
const LastHashes &last_hashes,
|
||||
std::size_t &c1_last_idx_inout,
|
||||
std::size_t &c2_last_idx_inout,
|
||||
TreeExtension &tree_extension_inout) const
|
||||
{
|
||||
const auto &c1_last_hashes = last_hashes.c1_last_hashes;
|
||||
const auto &c2_last_hashes = last_hashes.c2_last_hashes;
|
||||
|
||||
auto &c1_layer_extensions_out = tree_extension_inout.c1_layer_extensions;
|
||||
auto &c2_layer_extensions_out = tree_extension_inout.c2_layer_extensions;
|
||||
|
||||
const std::size_t parent_chunk_width = parent_is_c1 ? m_c1_width : m_c2_width;
|
||||
|
||||
const auto update_layer_metadata = get_update_layer_metadata(
|
||||
prev_layer_metadata.old_total_parents,
|
||||
prev_layer_metadata.new_total_parents,
|
||||
parent_chunk_width,
|
||||
prev_layer_metadata.need_old_last_parent
|
||||
);
|
||||
|
||||
if (parent_is_c1)
|
||||
{
|
||||
auto c1_layer_extension = get_next_layer_extension<C2, C1>(
|
||||
m_c2,
|
||||
m_c1,
|
||||
update_layer_metadata,
|
||||
c2_last_hashes,
|
||||
c1_last_hashes,
|
||||
c2_layer_extensions_out,
|
||||
c2_last_idx_inout,
|
||||
c1_last_idx_inout
|
||||
);
|
||||
|
||||
c1_layer_extensions_out.emplace_back(std::move(c1_layer_extension));
|
||||
++c2_last_idx_inout;
|
||||
}
|
||||
else
|
||||
{
|
||||
auto c2_layer_extension = get_next_layer_extension<C1, C2>(
|
||||
m_c1,
|
||||
m_c2,
|
||||
update_layer_metadata,
|
||||
c1_last_hashes,
|
||||
c2_last_hashes,
|
||||
c1_layer_extensions_out,
|
||||
c1_last_idx_inout,
|
||||
c2_last_idx_inout
|
||||
);
|
||||
|
||||
c2_layer_extensions_out.emplace_back(std::move(c2_layer_extension));
|
||||
++c1_last_idx_inout;
|
||||
}
|
||||
|
||||
return update_layer_metadata;
|
||||
};
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
template<typename C1, typename C2>
|
||||
typename CurveTrees<C1, C2>::TreeExtension CurveTrees<C1, C2>::get_tree_extension(
|
||||
const LastChunks &existing_last_chunks,
|
||||
const std::size_t old_n_leaf_tuples,
|
||||
const LastHashes &existing_last_hashes,
|
||||
const std::vector<LeafTuple> &new_leaf_tuples) const
|
||||
{
|
||||
TreeExtension tree_extension;
|
||||
@ -223,10 +500,13 @@ typename CurveTrees<C1, C2>::TreeExtension CurveTrees<C1, C2>::get_tree_extensio
|
||||
if (new_leaf_tuples.empty())
|
||||
return tree_extension;
|
||||
|
||||
const auto &c1_last_chunks = existing_last_chunks.c1_last_chunks;
|
||||
const auto &c2_last_chunks = existing_last_chunks.c2_last_chunks;
|
||||
auto update_layer_metadata = get_update_leaf_layer_metadata(
|
||||
old_n_leaf_tuples,
|
||||
new_leaf_tuples.size(),
|
||||
LEAF_TUPLE_SIZE,
|
||||
m_leaf_layer_chunk_width);
|
||||
|
||||
tree_extension.leaves.start_idx = existing_last_chunks.next_start_leaf_index;
|
||||
tree_extension.leaves.start_idx = update_layer_metadata.old_total_children;
|
||||
|
||||
// Copy the leaves
|
||||
// TODO: don't copy here
|
||||
@ -240,109 +520,53 @@ typename CurveTrees<C1, C2>::TreeExtension CurveTrees<C1, C2>::get_tree_extensio
|
||||
});
|
||||
}
|
||||
|
||||
auto &c1_layer_extensions_out = tree_extension.c1_layer_extensions;
|
||||
auto &c2_layer_extensions_out = tree_extension.c2_layer_extensions;
|
||||
|
||||
const std::vector<typename C2::Scalar> flattened_leaves = this->flatten_leaves(new_leaf_tuples);
|
||||
if (update_layer_metadata.need_old_last_parent)
|
||||
CHECK_AND_ASSERT_THROW_MES(!existing_last_hashes.c2_last_hashes.empty(), "missing last c2 parent");
|
||||
|
||||
// Hash the leaf layer
|
||||
LayerExtension<C2> leaf_parents;
|
||||
hash_layer(m_c2,
|
||||
c2_last_chunks.empty() ? nullptr : &c2_last_chunks[0],
|
||||
flattened_leaves,
|
||||
tree_extension.leaves.start_idx,
|
||||
m_leaf_layer_chunk_width,
|
||||
leaf_parents);
|
||||
auto leaf_parents = hash_children_chunks(m_c2,
|
||||
nullptr, // We never need the old last child from leaf layer because the leaf layer is always append-only
|
||||
update_layer_metadata.need_old_last_parent ? &existing_last_hashes.c2_last_hashes[0] : nullptr,
|
||||
update_layer_metadata.start_offset,
|
||||
update_layer_metadata.next_parent_start_index,
|
||||
this->flatten_leaves(new_leaf_tuples),
|
||||
m_leaf_layer_chunk_width
|
||||
);
|
||||
|
||||
c2_layer_extensions_out.emplace_back(std::move(leaf_parents));
|
||||
CHECK_AND_ASSERT_THROW_MES(
|
||||
(leaf_parents.start_idx + leaf_parents.hashes.size()) == update_layer_metadata.new_total_parents,
|
||||
"unexpected num leaf parents extended");
|
||||
|
||||
// Check if we just added the root
|
||||
if (c2_layer_extensions_out.back().hashes.size() == 1 && c2_layer_extensions_out.back().start_idx == 0)
|
||||
return tree_extension;
|
||||
|
||||
const std::size_t next_root_layer_idx = c1_last_chunks.size() + c2_last_chunks.size();
|
||||
tree_extension.c2_layer_extensions.emplace_back(std::move(leaf_parents));
|
||||
|
||||
// Alternate between hashing c2 children, c1 children, c2, c1, ...
|
||||
bool parent_is_c1 = true;
|
||||
|
||||
std::size_t c1_last_idx = 0;
|
||||
std::size_t c2_last_idx = 0;
|
||||
// TODO: calculate max number of layers it should take to add all leaves (existing leaves + new leaves)
|
||||
while (true)
|
||||
while (update_layer_metadata.new_total_parents > 1)
|
||||
{
|
||||
const std::size_t updating_layer_idx = 1 + c1_last_idx + c2_last_idx;
|
||||
const std::size_t updating_root_layer = updating_layer_idx == next_root_layer_idx;
|
||||
MDEBUG("Getting extension for layer " << (c1_last_idx + c2_last_idx + 1));
|
||||
|
||||
const auto *c1_last_chunk_ptr = (c1_last_idx >= c1_last_chunks.size())
|
||||
? nullptr
|
||||
: &c1_last_chunks[c1_last_idx];
|
||||
const std::size_t new_total_children = update_layer_metadata.new_total_parents;
|
||||
|
||||
const auto *c2_last_chunk_ptr = (c2_last_idx >= c2_last_chunks.size())
|
||||
? nullptr
|
||||
: &c2_last_chunks[c2_last_idx];
|
||||
update_layer_metadata = this->set_next_layer_extension(
|
||||
update_layer_metadata,
|
||||
parent_is_c1,
|
||||
existing_last_hashes,
|
||||
c1_last_idx,
|
||||
c2_last_idx,
|
||||
tree_extension
|
||||
);
|
||||
|
||||
// TODO: templated function
|
||||
if (parent_is_c1)
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(c2_last_idx < c2_layer_extensions_out.size(), "missing c2 layer");
|
||||
|
||||
const auto &c2_child_extension = c2_layer_extensions_out[c2_last_idx];
|
||||
|
||||
std::vector<typename C1::Scalar> c1_child_scalars;
|
||||
const std::size_t next_child_start_idx = next_child_scalars_from_children<C2, C1>(m_c2,
|
||||
updating_root_layer,
|
||||
c2_last_chunk_ptr,
|
||||
c2_child_extension,
|
||||
c1_child_scalars);
|
||||
|
||||
LayerExtension<C1> c1_layer_extension;
|
||||
hash_layer<C1>(m_c1,
|
||||
c1_last_chunk_ptr,
|
||||
c1_child_scalars,
|
||||
next_child_start_idx,
|
||||
m_c1_width,
|
||||
c1_layer_extension);
|
||||
|
||||
c1_layer_extensions_out.emplace_back(std::move(c1_layer_extension));
|
||||
|
||||
// Check if we just added the root
|
||||
if (c1_layer_extensions_out.back().hashes.size() == 1 && c1_layer_extensions_out.back().start_idx == 0)
|
||||
return tree_extension;
|
||||
|
||||
++c2_last_idx;
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(c1_last_idx < c1_layer_extensions_out.size(), "missing c1 layer");
|
||||
|
||||
const auto &c1_child_extension = c1_layer_extensions_out[c1_last_idx];
|
||||
|
||||
std::vector<typename C2::Scalar> c2_child_scalars;
|
||||
const std::size_t next_child_start_idx = next_child_scalars_from_children<C1, C2>(m_c1,
|
||||
updating_root_layer,
|
||||
c1_last_chunk_ptr,
|
||||
c1_child_extension,
|
||||
c2_child_scalars);
|
||||
|
||||
LayerExtension<C2> c2_layer_extension;
|
||||
hash_layer<C2>(m_c2,
|
||||
c2_last_chunk_ptr,
|
||||
c2_child_scalars,
|
||||
next_child_start_idx,
|
||||
m_c2_width,
|
||||
c2_layer_extension);
|
||||
|
||||
c2_layer_extensions_out.emplace_back(std::move(c2_layer_extension));
|
||||
|
||||
// Check if we just added the root
|
||||
if (c2_layer_extensions_out.back().hashes.size() == 1 && c2_layer_extensions_out.back().start_idx == 0)
|
||||
return tree_extension;
|
||||
|
||||
++c1_last_idx;
|
||||
}
|
||||
// Sanity check to make sure we're making progress to exit the while loop
|
||||
CHECK_AND_ASSERT_THROW_MES(update_layer_metadata.new_total_parents < new_total_children,
|
||||
"expect fewer parents than children in every layer");
|
||||
|
||||
parent_is_c1 = !parent_is_c1;
|
||||
}
|
||||
|
||||
return tree_extension;
|
||||
};
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
|
@ -51,30 +51,37 @@ template<typename C>
|
||||
struct LayerExtension final
|
||||
{
|
||||
std::size_t start_idx{0};
|
||||
bool update_existing_last_hash;
|
||||
std::vector<typename C::Point> hashes;
|
||||
};
|
||||
|
||||
// Useful data from the last chunk in a layer
|
||||
template<typename C>
|
||||
struct LastChunkData final
|
||||
// Useful metadata for updating a layer
|
||||
struct UpdateLayerMetadata final
|
||||
{
|
||||
// The next starting index in the layer (referencing the "next" child chunk)
|
||||
const std::size_t next_start_child_chunk_index;
|
||||
// The existing hash of the last chunk of child scalars
|
||||
// - Used to grow the existing last chunk in the layer
|
||||
// - Only must be set if the existing last chunk isn't full
|
||||
const typename C::Point last_parent;
|
||||
// Whether or not the existing last parent in the layer needs to be updated
|
||||
// - True if the last leaf layer chunk is not yet full
|
||||
// - If true, next_start_child_chunk_index == existing layer size
|
||||
// - If false, next_start_child_chunk_index == (existing layer size - 1), since updating existing last parent
|
||||
const bool update_last_parent;
|
||||
// The last child in the last chunk (and therefore the last child in the child layer)
|
||||
// - Used to get the delta from the existing last child to the new last child
|
||||
// - Only needs to be set if update_last_parent is true
|
||||
// - Since the leaf layer is append-only, the layer above leaf layer does not actually need this value since the
|
||||
// last leaf will never change (and therefore, we'll never need the delta to a prior leaf)
|
||||
const typename C::Scalar last_child;
|
||||
// The max chunk width of children used to hash into a parent
|
||||
std::size_t parent_chunk_width;
|
||||
|
||||
// Total children refers to the total number of elements in a layer
|
||||
std::size_t old_total_children;
|
||||
std::size_t new_total_children;
|
||||
|
||||
// Total parents refers to the total number of hashes of chunks of children
|
||||
std::size_t old_total_parents;
|
||||
std::size_t new_total_parents;
|
||||
|
||||
// When updating the tree, we use this boolean to know when we'll need to use the tree's existing old root in order
|
||||
// to set a new layer after that root
|
||||
// - We'll need to be sure the old root gets hashed when setting the next layer
|
||||
bool setting_next_layer_after_old_root;
|
||||
// When the last child in the child layer changes, we'll need to use its old value to update its parent hash
|
||||
bool need_old_last_child;
|
||||
// When the last parent in the layer changes, we'll need to use its old value to update itself
|
||||
bool need_old_last_parent;
|
||||
|
||||
// The first chunk that needs to be updated's first child's offset within that chunk
|
||||
std::size_t start_offset;
|
||||
// The parent's starting index in the layer
|
||||
std::size_t next_parent_start_index;
|
||||
};
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
@ -130,14 +137,13 @@ public:
|
||||
std::vector<LayerExtension<C2>> c2_layer_extensions;
|
||||
};
|
||||
|
||||
// Last chunk data from each layer in the tree
|
||||
// Last hashes from each layer in the tree
|
||||
// - layers alternate between C1 and C2
|
||||
// - c2_last_chunks[0] is first layer after leaves, then c1_last_chunks[0], then c2_last_chunks[1], etc
|
||||
struct LastChunks final
|
||||
// - c2_last_hashes[0] refers to the layer after leaves, then c1_last_hashes[0], then c2_last_hashes[1], etc
|
||||
struct LastHashes final
|
||||
{
|
||||
std::size_t next_start_leaf_index{0};
|
||||
std::vector<LastChunkData<C1>> c1_last_chunks;
|
||||
std::vector<LastChunkData<C2>> c2_last_chunks;
|
||||
std::vector<typename C1::Point> c1_last_hashes;
|
||||
std::vector<typename C2::Point> c2_last_hashes;
|
||||
};
|
||||
|
||||
//member functions
|
||||
@ -145,14 +151,27 @@ public:
|
||||
// Convert cryptonote output pub key and commitment to a leaf tuple for the curve trees tree
|
||||
LeafTuple output_to_leaf_tuple(const crypto::public_key &O, const crypto::public_key &C) const;
|
||||
|
||||
// Take in the existing last chunks of each layer in the tree, as well as new leaves to add to the tree,
|
||||
// and return a tree extension struct that can be used to extend a global tree
|
||||
TreeExtension get_tree_extension(const LastChunks &existing_last_chunks,
|
||||
// Take in the existing number of leaf tuples and the existing last hashes of each layer in the tree, as well as new
|
||||
// leaves to add to the tree, and return a tree extension struct that can be used to extend a global tree
|
||||
TreeExtension get_tree_extension(const std::size_t old_n_leaf_tuples,
|
||||
const LastHashes &existing_last_hashes,
|
||||
const std::vector<LeafTuple> &new_leaf_tuples) const;
|
||||
|
||||
// Flatten leaves [(O.x, I.x, C.x),(O.x, I.x, C.x),...] -> [scalar,scalar,scalar,scalar,scalar,scalar,...]
|
||||
std::vector<typename C2::Scalar> flatten_leaves(const std::vector<LeafTuple> &leaves) const;
|
||||
|
||||
private:
|
||||
// Helper function used to set the next layer extension used to grow the next layer in the tree
|
||||
// - for example, if we just grew the parent layer after the leaf layer, the "next layer" would be the grandparent
|
||||
// layer of the leaf layer
|
||||
UpdateLayerMetadata set_next_layer_extension(
|
||||
const UpdateLayerMetadata &prev_layer_metadata,
|
||||
const bool parent_is_c1,
|
||||
const LastHashes &last_hashes,
|
||||
std::size_t &c1_last_idx_inout,
|
||||
std::size_t &c2_last_idx_inout,
|
||||
TreeExtension &tree_extension_inout) const;
|
||||
|
||||
//public member variables
|
||||
public:
|
||||
// The curve interfaces
|
||||
|
@ -40,28 +40,6 @@
|
||||
// CurveTreesGlobalTree helpers
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
template<typename C>
|
||||
static fcmp::curve_trees::LastChunkData<C> get_last_child_layer_chunk(const bool update_last_parent,
|
||||
const std::size_t parent_layer_size,
|
||||
const typename C::Point &last_parent,
|
||||
const typename C::Scalar &last_child)
|
||||
{
|
||||
if (update_last_parent)
|
||||
CHECK_AND_ASSERT_THROW_MES(parent_layer_size > 0, "empty parent layer");
|
||||
|
||||
// If updating last parent, the next start will be the last parent's index, else we start at the tip
|
||||
const std::size_t next_start_child_chunk_index = update_last_parent
|
||||
? (parent_layer_size - 1)
|
||||
: parent_layer_size;
|
||||
|
||||
return fcmp::curve_trees::LastChunkData<C>{
|
||||
.next_start_child_chunk_index = next_start_child_chunk_index,
|
||||
.last_parent = last_parent,
|
||||
.update_last_parent = update_last_parent,
|
||||
.last_child = last_child
|
||||
};
|
||||
}
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
template<typename C>
|
||||
static bool validate_layer(const C &curve,
|
||||
const CurveTreesGlobalTree::Layer<C> &parents,
|
||||
const std::vector<typename C::Scalar> &child_scalars,
|
||||
@ -102,9 +80,17 @@ static bool validate_layer(const C &curve,
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
// CurveTreesGlobalTree implementations
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
CurveTreesV1::LastChunks CurveTreesGlobalTree::get_last_chunks()
|
||||
std::size_t CurveTreesGlobalTree::get_num_leaf_tuples() const
|
||||
{
|
||||
const auto &leaves = m_tree.leaves;
|
||||
return m_tree.leaves.size();
|
||||
}
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
CurveTreesV1::LastHashes CurveTreesGlobalTree::get_last_hashes() const
|
||||
{
|
||||
CurveTreesV1::LastHashes last_hashes_out;
|
||||
auto &c1_last_hashes_out = last_hashes_out.c1_last_hashes;
|
||||
auto &c2_last_hashes_out = last_hashes_out.c2_last_hashes;
|
||||
|
||||
const auto &c1_layers = m_tree.c1_layers;
|
||||
const auto &c2_layers = m_tree.c2_layers;
|
||||
|
||||
@ -112,95 +98,37 @@ CurveTreesV1::LastChunks CurveTreesGlobalTree::get_last_chunks()
|
||||
CHECK_AND_ASSERT_THROW_MES(c2_layers.size() == c1_layers.size() || c2_layers.size() == (c1_layers.size() + 1),
|
||||
"unexpected number of curve layers");
|
||||
|
||||
CurveTreesV1::LastChunks last_chunks;
|
||||
|
||||
// Since leaf layer is append-only, we know the next start will be right after all existing leaf tuple
|
||||
const std::size_t num_leaf_tuples = leaves.size() * CurveTreesV1::LEAF_TUPLE_SIZE;
|
||||
last_chunks.next_start_leaf_index = num_leaf_tuples;
|
||||
c1_last_hashes_out.reserve(c1_layers.size());
|
||||
c2_last_hashes_out.reserve(c2_layers.size());
|
||||
|
||||
if (c2_layers.empty())
|
||||
return last_chunks;
|
||||
return last_hashes_out;
|
||||
|
||||
auto &c1_last_chunks_out = last_chunks.c1_last_chunks;
|
||||
auto &c2_last_chunks_out = last_chunks.c2_last_chunks;
|
||||
|
||||
c1_last_chunks_out.reserve(c1_layers.size());
|
||||
c2_last_chunks_out.reserve(c2_layers.size());
|
||||
|
||||
// First push the last leaf chunk data into c2 chunks
|
||||
const bool update_last_parent = (num_leaf_tuples % m_curve_trees.m_leaf_layer_chunk_width) > 0;
|
||||
auto last_leaf_chunk = get_last_child_layer_chunk<Selene>(
|
||||
/*update_last_parent*/ update_last_parent,
|
||||
/*parent_layer_size */ c2_layers[0].size(),
|
||||
/*last_parent */ c2_layers[0].back(),
|
||||
// Since the leaf layer is append-only, we'll never need access to the last child
|
||||
/*last_child */ m_curve_trees.m_c2.zero_scalar());
|
||||
|
||||
c2_last_chunks_out.push_back(std::move(last_leaf_chunk));
|
||||
|
||||
// If there are no c1 layers, we're done
|
||||
if (c1_layers.empty())
|
||||
return last_chunks;
|
||||
|
||||
// Next parents will be c1
|
||||
bool parent_is_c1 = true;
|
||||
// Next parents will be c2
|
||||
bool use_c2 = true;
|
||||
|
||||
// Then get last chunks up until the root
|
||||
std::size_t c1_idx = 0;
|
||||
std::size_t c2_idx = 0;
|
||||
while (c1_last_chunks_out.size() < c1_layers.size() || c2_last_chunks_out.size() < c2_layers.size())
|
||||
while (c1_last_hashes_out.size() < c1_layers.size() || c2_last_hashes_out.size() < c2_layers.size())
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(c1_layers.size() > c1_idx, "missing c1 layer");
|
||||
CHECK_AND_ASSERT_THROW_MES(c2_layers.size() > c2_idx, "missing c2 layer");
|
||||
|
||||
// TODO: template the below if statement into another function
|
||||
if (parent_is_c1)
|
||||
if (use_c2)
|
||||
{
|
||||
const Layer<Selene> &child_layer = c2_layers[c2_idx];
|
||||
CHECK_AND_ASSERT_THROW_MES(!child_layer.empty(), "child layer is empty");
|
||||
|
||||
const Layer<Helios> &parent_layer = c1_layers[c1_idx];
|
||||
CHECK_AND_ASSERT_THROW_MES(!parent_layer.empty(), "parent layer is empty");
|
||||
|
||||
const auto &last_child = m_curve_trees.m_c2.point_to_cycle_scalar(child_layer.back());
|
||||
|
||||
auto last_parent_chunk = get_last_child_layer_chunk<Helios>(update_last_parent,
|
||||
parent_layer.size(),
|
||||
parent_layer.back(),
|
||||
last_child);
|
||||
|
||||
c1_last_chunks_out.push_back(std::move(last_parent_chunk));
|
||||
|
||||
CHECK_AND_ASSERT_THROW_MES(c2_layers.size() > c2_idx, "missing c2 layer");
|
||||
c2_last_hashes_out.push_back(c2_layers[c2_idx].back());
|
||||
++c2_idx;
|
||||
}
|
||||
else
|
||||
{
|
||||
const Layer<Helios> &child_layer = c1_layers[c1_idx];
|
||||
CHECK_AND_ASSERT_THROW_MES(!child_layer.empty(), "child layer is empty");
|
||||
|
||||
const Layer<Selene> &parent_layer = c2_layers[c2_idx];
|
||||
CHECK_AND_ASSERT_THROW_MES(!parent_layer.empty(), "parent layer is empty");
|
||||
|
||||
const auto &last_child = m_curve_trees.m_c1.point_to_cycle_scalar(child_layer.back());
|
||||
|
||||
auto last_parent_chunk = get_last_child_layer_chunk<Selene>(update_last_parent,
|
||||
parent_layer.size(),
|
||||
parent_layer.back(),
|
||||
last_child);
|
||||
|
||||
c2_last_chunks_out.push_back(std::move(last_parent_chunk));
|
||||
|
||||
CHECK_AND_ASSERT_THROW_MES(c1_layers.size() > c1_idx, "missing c1 layer");
|
||||
c1_last_hashes_out.push_back(c1_layers[c1_idx].back());
|
||||
++c1_idx;
|
||||
}
|
||||
|
||||
// Alternate curves every iteration
|
||||
parent_is_c1 = !parent_is_c1;
|
||||
use_c2 = !use_c2;
|
||||
}
|
||||
|
||||
CHECK_AND_ASSERT_THROW_MES(c1_last_chunks_out.size() == c1_layers.size(), "unexpected c1 last chunks");
|
||||
CHECK_AND_ASSERT_THROW_MES(c2_last_chunks_out.size() == c2_layers.size(), "unexpected c2 last chunks");
|
||||
|
||||
return last_chunks;
|
||||
return last_hashes_out;
|
||||
}
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
void CurveTreesGlobalTree::extend_tree(const CurveTreesV1::TreeExtension &tree_extension)
|
||||
@ -250,7 +178,14 @@ void CurveTreesGlobalTree::extend_tree(const CurveTreesV1::TreeExtension &tree_e
|
||||
|
||||
// We updated the last hash
|
||||
if (started_at_tip)
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(c2_ext.update_existing_last_hash, "expect to be updating last hash");
|
||||
c2_inout.back() = c2_ext.hashes.front();
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(!c2_ext.update_existing_last_hash, "unexpected last hash update");
|
||||
}
|
||||
|
||||
for (std::size_t i = started_at_tip ? 1 : 0; i < c2_ext.hashes.size(); ++i)
|
||||
c2_inout.emplace_back(c2_ext.hashes[i]);
|
||||
@ -276,7 +211,14 @@ void CurveTreesGlobalTree::extend_tree(const CurveTreesV1::TreeExtension &tree_e
|
||||
|
||||
// We updated the last hash
|
||||
if (started_at_tip)
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(c1_ext.update_existing_last_hash, "expect to be updating last hash");
|
||||
c1_inout.back() = c1_ext.hashes.front();
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(!c1_ext.update_existing_last_hash, "unexpected last hash update");
|
||||
}
|
||||
|
||||
for (std::size_t i = started_at_tip ? 1 : 0; i < c1_ext.hashes.size(); ++i)
|
||||
c1_inout.emplace_back(c1_ext.hashes[i]);
|
||||
@ -803,6 +745,8 @@ void CurveTreesGlobalTree::trim_tree(const std::size_t new_num_leaves)
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
bool CurveTreesGlobalTree::audit_tree()
|
||||
{
|
||||
MDEBUG("Auditing global tree");
|
||||
|
||||
const auto &leaves = m_tree.leaves;
|
||||
const auto &c1_layers = m_tree.c1_layers;
|
||||
const auto &c2_layers = m_tree.c2_layers;
|
||||
@ -894,42 +838,33 @@ bool CurveTreesGlobalTree::audit_tree()
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
// Logging helpers
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
void CurveTreesGlobalTree::log_last_chunks(const CurveTreesV1::LastChunks &last_chunks)
|
||||
void CurveTreesGlobalTree::log_last_hashes(const CurveTreesV1::LastHashes &last_hashes)
|
||||
{
|
||||
const auto &c1_last_chunks = last_chunks.c1_last_chunks;
|
||||
const auto &c2_last_chunks = last_chunks.c2_last_chunks;
|
||||
const auto &c1_last_hashes = last_hashes.c1_last_hashes;
|
||||
const auto &c2_last_hashes = last_hashes.c2_last_hashes;
|
||||
|
||||
MDEBUG("Total of " << c1_last_chunks.size() << " Helios last chunks and "
|
||||
<< c2_last_chunks.size() << " Selene last chunks");
|
||||
MDEBUG("Total of " << c1_last_hashes.size() << " Helios layers and " << c2_last_hashes.size() << " Selene layers");
|
||||
|
||||
bool use_c2 = true;
|
||||
std::size_t c1_idx = 0;
|
||||
std::size_t c2_idx = 0;
|
||||
for (std::size_t i = 0; i < (c1_last_chunks.size() + c2_last_chunks.size()); ++i)
|
||||
for (std::size_t i = 0; i < (c1_last_hashes.size() + c2_last_hashes.size()); ++i)
|
||||
{
|
||||
if (use_c2)
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(c2_idx < c2_last_chunks.size(), "unexpected c2 layer");
|
||||
CHECK_AND_ASSERT_THROW_MES(c2_idx < c2_last_hashes.size(), "unexpected c2 layer");
|
||||
|
||||
const fcmp::curve_trees::LastChunkData<Selene> &last_chunk = c2_last_chunks[c2_idx];
|
||||
|
||||
MDEBUG("next_start_child_chunk_index: " << last_chunk.next_start_child_chunk_index
|
||||
<< " , last_parent: " << m_curve_trees.m_c2.to_string(last_chunk.last_parent)
|
||||
<< " , update_last_parent: " << last_chunk.update_last_parent
|
||||
<< " , last_child: " << m_curve_trees.m_c2.to_string(last_chunk.last_child));
|
||||
const auto &last_hash = c2_last_hashes[c2_idx];
|
||||
MDEBUG("c2_idx: " << c2_idx << " , last_hash: " << m_curve_trees.m_c2.to_string(last_hash));
|
||||
|
||||
++c2_idx;
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(c1_idx < c1_last_chunks.size(), "unexpected c1 layer");
|
||||
CHECK_AND_ASSERT_THROW_MES(c1_idx < c1_last_hashes.size(), "unexpected c1 layer");
|
||||
|
||||
const fcmp::curve_trees::LastChunkData<Helios> &last_chunk = c1_last_chunks[c1_idx];
|
||||
|
||||
MDEBUG("next_start_child_chunk_index: " << last_chunk.next_start_child_chunk_index
|
||||
<< " , last_parent: " << m_curve_trees.m_c1.to_string(last_chunk.last_parent)
|
||||
<< " , update_last_parent: " << last_chunk.update_last_parent
|
||||
<< " , last_child: " << m_curve_trees.m_c1.to_string(last_chunk.last_child));
|
||||
const auto &last_hash = c1_last_hashes[c1_idx];
|
||||
MDEBUG("c1_idx: " << c1_idx << " , last_hash: " << m_curve_trees.m_c1.to_string(last_hash));
|
||||
|
||||
++c1_idx;
|
||||
}
|
||||
@ -1074,14 +1009,16 @@ static bool grow_tree(CurveTreesV1 &curve_trees,
|
||||
CurveTreesGlobalTree &global_tree,
|
||||
const std::size_t num_leaves)
|
||||
{
|
||||
// Get the last chunk from each layer in the tree; empty if tree is empty
|
||||
const auto last_chunks = global_tree.get_last_chunks();
|
||||
// Do initial tree reads
|
||||
const std::size_t old_n_leaf_tuples = global_tree.get_num_leaf_tuples();
|
||||
const CurveTreesV1::LastHashes last_hashes = global_tree.get_last_hashes();
|
||||
|
||||
global_tree.log_last_chunks(last_chunks);
|
||||
global_tree.log_last_hashes(last_hashes);
|
||||
|
||||
// Get a tree extension object to the existing tree using randomly generated leaves
|
||||
// - The tree extension includes all elements we'll need to add to the existing tree when adding the new leaves
|
||||
const auto tree_extension = curve_trees.get_tree_extension(last_chunks,
|
||||
const auto tree_extension = curve_trees.get_tree_extension(old_n_leaf_tuples,
|
||||
last_hashes,
|
||||
generate_random_leaves(curve_trees, num_leaves));
|
||||
|
||||
global_tree.log_tree_extension(tree_extension);
|
||||
@ -1179,54 +1116,45 @@ TEST(curve_trees, grow_tree)
|
||||
Helios helios;
|
||||
Selene selene;
|
||||
|
||||
LOG_PRINT_L1("Test grow tree with helios chunk width " << HELIOS_CHUNK_WIDTH
|
||||
<< ", selene chunk width " << SELENE_CHUNK_WIDTH);
|
||||
// Constant for how deep we want the tree
|
||||
const std::size_t TEST_N_LAYERS = 4;
|
||||
|
||||
// Use lower values for chunk width than prod so that we can quickly test a many-layer deep tree
|
||||
const std::size_t helios_chunk_width = 3;
|
||||
const std::size_t selene_chunk_width = 2;
|
||||
|
||||
static_assert(helios_chunk_width > 1, "helios width must be > 1");
|
||||
static_assert(selene_chunk_width > 1, "selene width must be > 1");
|
||||
|
||||
LOG_PRINT_L1("Test grow tree with helios chunk width " << helios_chunk_width
|
||||
<< ", selene chunk width " << selene_chunk_width);
|
||||
|
||||
// Number of leaves for which x number of layers is required
|
||||
std::size_t leaves_needed_for_n_layers = selene_chunk_width;
|
||||
for (std::size_t i = 1; i < TEST_N_LAYERS; ++i)
|
||||
{
|
||||
const std::size_t width = i % 2 == 0 ? selene_chunk_width : helios_chunk_width;
|
||||
leaves_needed_for_n_layers *= width;
|
||||
}
|
||||
|
||||
auto curve_trees = CurveTreesV1(
|
||||
helios,
|
||||
selene,
|
||||
HELIOS_CHUNK_WIDTH,
|
||||
SELENE_CHUNK_WIDTH);
|
||||
helios_chunk_width,
|
||||
selene_chunk_width);
|
||||
|
||||
unit_test::BlockchainLMDBTest test_db;
|
||||
|
||||
static_assert(HELIOS_CHUNK_WIDTH > 1, "helios width must be > 1");
|
||||
static_assert(SELENE_CHUNK_WIDTH > 1, "selene width must be > 1");
|
||||
// Increment to test for off-by-1
|
||||
++leaves_needed_for_n_layers;
|
||||
|
||||
// Number of leaves for which x number of layers is required
|
||||
const std::size_t NEED_1_LAYER = SELENE_CHUNK_WIDTH;
|
||||
const std::size_t NEED_2_LAYERS = NEED_1_LAYER * HELIOS_CHUNK_WIDTH;
|
||||
const std::size_t NEED_3_LAYERS = NEED_2_LAYERS * SELENE_CHUNK_WIDTH;
|
||||
|
||||
const std::vector<std::size_t> N_LEAVES{
|
||||
// Basic tests
|
||||
1,
|
||||
2,
|
||||
|
||||
// Test with number of leaves {-1,0,+1} relative to chunk width boundaries
|
||||
NEED_1_LAYER-1,
|
||||
NEED_1_LAYER,
|
||||
NEED_1_LAYER+1,
|
||||
|
||||
NEED_2_LAYERS-1,
|
||||
NEED_2_LAYERS,
|
||||
NEED_2_LAYERS+1,
|
||||
|
||||
NEED_3_LAYERS,
|
||||
};
|
||||
|
||||
for (const std::size_t init_leaves : N_LEAVES)
|
||||
// First initialize the tree with init_leaves
|
||||
for (std::size_t init_leaves = 1; init_leaves < leaves_needed_for_n_layers; ++init_leaves)
|
||||
{
|
||||
// TODO: init tree once, then extend a copy of that tree
|
||||
|
||||
for (const std::size_t ext_leaves : N_LEAVES)
|
||||
// Then extend the tree with ext_leaves
|
||||
for (std::size_t ext_leaves = 1; (init_leaves + ext_leaves) < leaves_needed_for_n_layers; ++ext_leaves)
|
||||
{
|
||||
// Only test 3rd layer once because it's a huge test
|
||||
if (init_leaves > 1 && ext_leaves == NEED_3_LAYERS)
|
||||
continue;
|
||||
if (ext_leaves > 1 && init_leaves == NEED_3_LAYERS)
|
||||
continue;
|
||||
|
||||
ASSERT_TRUE(grow_tree_in_memory(init_leaves, ext_leaves, curve_trees));
|
||||
ASSERT_TRUE(grow_tree_db(init_leaves, ext_leaves, curve_trees, test_db));
|
||||
}
|
||||
|
@ -65,8 +65,11 @@ public:
|
||||
|
||||
//public member functions
|
||||
public:
|
||||
// Read the in-memory tree and get data from last chunks from each layer
|
||||
CurveTreesV1::LastChunks get_last_chunks();
|
||||
// Read the in-memory tree and get the number of leaf tuples
|
||||
std::size_t get_num_leaf_tuples() const;
|
||||
|
||||
// Read the in-memory tree and get the last hashes from each layer in the tree
|
||||
CurveTreesV1::LastHashes get_last_hashes() const;
|
||||
|
||||
// Use the tree extension to extend the in-memory tree
|
||||
void extend_tree(const CurveTreesV1::TreeExtension &tree_extension);
|
||||
@ -78,7 +81,7 @@ public:
|
||||
bool audit_tree();
|
||||
|
||||
// logging helpers
|
||||
void log_last_chunks(const CurveTreesV1::LastChunks &last_chunks);
|
||||
void log_last_hashes(const CurveTreesV1::LastHashes &last_hashes);
|
||||
void log_tree_extension(const CurveTreesV1::TreeExtension &tree_extension);
|
||||
void log_tree();
|
||||
|
||||
|
Loading…
Reference in New Issue
Block a user