trim_tree db impl + db test + some housekeeping

This commit is contained in:
j-berman 2024-07-09 21:02:21 -07:00
parent 42fd22c4ee
commit f50ad5baac
7 changed files with 495 additions and 104 deletions

View file

@ -1769,8 +1769,10 @@ public:
virtual void grow_tree(const fcmp::curve_trees::CurveTreesV1 &curve_trees,
const std::vector<fcmp::curve_trees::CurveTreesV1::LeafTuple> &new_leaves) = 0;
virtual void trim_tree(const fcmp::curve_trees::CurveTreesV1 &curve_trees, const std::size_t trim_n_leaf_tuples) = 0;
// TODO: description
virtual bool audit_tree(const fcmp::curve_trees::CurveTreesV1 &curve_trees) const = 0;
virtual bool audit_tree(const fcmp::curve_trees::CurveTreesV1 &curve_trees, const std::size_t expected_n_leaf_tuples) const = 0;
//
// Hard fork related storage

View file

@ -1326,7 +1326,7 @@ void BlockchainLMDB::grow_tree(const fcmp::curve_trees::CurveTreesV1 &curve_tree
const auto &leaves = tree_extension.leaves;
for (std::size_t i = 0; i < leaves.tuples.size(); ++i)
{
MDB_val_copy<std::size_t> k(i + leaves.start_idx);
MDB_val_copy<std::size_t> k(i + leaves.start_leaf_tuple_idx);
MDB_val_set(v, leaves.tuples[i]);
// TODO: according to the docs, MDB_APPENDDUP isn't supposed to perform any key comparisons to maximize efficiency.
@ -1435,6 +1435,189 @@ void BlockchainLMDB::grow_layer(const C &curve,
}
}
// Trim trim_n_leaf_tuples leaf tuples off the end of the tree: delete the trailing leaves,
// apply each layer reduction (delete trailing elems + re-write the new last hash), then prune
// any layer records left dangling past the new root.
void BlockchainLMDB::trim_tree(const fcmp::curve_trees::CurveTreesV1 &curve_trees, const std::size_t trim_n_leaf_tuples)
{
  // TODO: block_wtxn_start like pop_block, then call BlockchainDB::trim_tree
  LOG_PRINT_L3("BlockchainLMDB::" << __func__);
  check_open();

  mdb_txn_cursors *m_cursors = &m_wcursors;

  CURSOR(leaves)
  CURSOR(layers)

  CHECK_AND_ASSERT_THROW_MES(trim_n_leaf_tuples > 0, "must be trimming some leaves");

  const std::size_t old_n_leaf_tuples = this->get_num_leaf_tuples();
  // NOTE(review): `>` (not `>=`) also forbids trimming the tree down to empty — confirm intended
  CHECK_AND_ASSERT_THROW_MES(old_n_leaf_tuples > trim_n_leaf_tuples, "cannot trim more leaves than exist");

  const auto trim_instructions = curve_trees.get_trim_instructions(old_n_leaf_tuples, trim_n_leaf_tuples);

  // Do initial tree reads
  const auto last_chunk_children_to_trim = this->get_last_chunk_children_to_trim(curve_trees, trim_instructions);
  const auto last_hashes_to_trim = this->get_last_hashes_to_trim(trim_instructions);

  // Get the new hashes, wrapped in a simple struct we can use to trim the tree
  const auto tree_reduction = curve_trees.get_tree_reduction(
    trim_instructions,
    last_chunk_children_to_trim,
    last_hashes_to_trim);

  // Use tree reduction to trim tree
  CHECK_AND_ASSERT_THROW_MES((tree_reduction.new_total_leaf_tuples + trim_n_leaf_tuples) == old_n_leaf_tuples,
    "unexpected new total leaves");

  MDEBUG("Trimming " << trim_n_leaf_tuples << " leaf tuples");

  // Trim the leaves, deleting from the back of the leaf layer (highest tuple idx first)
  // TODO: trim_leaves
  for (std::size_t i = 0; i < trim_n_leaf_tuples; ++i)
  {
    std::size_t last_leaf_tuple_idx = (old_n_leaf_tuples - 1 - i);

    MDB_val_copy<std::size_t> k(last_leaf_tuple_idx);
    MDB_val v;
    int result = mdb_cursor_get(m_cur_leaves, &k, &v, MDB_SET);
    if (result == MDB_NOTFOUND)
      throw0(DB_ERROR("leaf not found")); // TODO: specific error type instead of DB_ERROR
    if (result != MDB_SUCCESS)
      throw0(DB_ERROR(lmdb_error("Failed to get leaf: ", result).c_str()));

    result = mdb_cursor_del(m_cur_leaves, 0);
    if (result != MDB_SUCCESS)
      throw0(DB_ERROR(lmdb_error("Error removing leaf: ", result).c_str()));

    MDEBUG("Successfully removed leaf at last_leaf_tuple_idx: " << last_leaf_tuple_idx);
  }

  // Trim the layers, alternating c2 (layer 0, closest to leaves) and c1 reductions
  // TODO: trim_layers
  const auto &c2_layer_reductions = tree_reduction.c2_layer_reductions;
  const auto &c1_layer_reductions = tree_reduction.c1_layer_reductions;

  CHECK_AND_ASSERT_THROW_MES(!c2_layer_reductions.empty(), "empty c2 layer reductions");

  bool use_c2 = true;
  std::size_t c2_idx = 0;
  std::size_t c1_idx = 0;
  for (std::size_t i = 0; i < (c2_layer_reductions.size() + c1_layer_reductions.size()); ++i)
  {
    if (use_c2)
    {
      CHECK_AND_ASSERT_THROW_MES(c2_idx < c2_layer_reductions.size(), "unexpected c2 layer reduction");
      const auto &c2_reduction = c2_layer_reductions[c2_idx];
      trim_layer(c2_reduction, i);
      ++c2_idx;
    }
    else
    {
      CHECK_AND_ASSERT_THROW_MES(c1_idx < c1_layer_reductions.size(), "unexpected c1 layer reduction");
      const auto &c1_reduction = c1_layer_reductions[c1_idx];
      trim_layer(c1_reduction, i);
      ++c1_idx;
    }

    use_c2 = !use_c2;
  }

  // Trim any remaining layers in layers after the root
  // TODO: trim_leftovers_after_root
  const std::size_t expected_root_idx = c2_layer_reductions.size() + c1_layer_reductions.size() - 1;
  while (1)
  {
    MDB_val k, v;
    int result = mdb_cursor_get(m_cur_layers, &k, &v, MDB_LAST);
    if (result != MDB_SUCCESS)
      throw0(DB_ERROR(lmdb_error("Failed to get last elem: ", result).c_str()));

    const std::size_t last_layer_idx = *(std::size_t *)k.mv_data;
    if (last_layer_idx > expected_root_idx)
    {
      // Delete all elements in layers after the root
      result = mdb_cursor_del(m_cur_layers, MDB_NODUPDATA);
      // Fix: the delete result was previously discarded; a failed delete would
      // spin in this loop forever re-reading the same last elem
      if (result != MDB_SUCCESS)
        throw0(DB_ERROR(lmdb_error("Error removing elems after root: ", result).c_str()));
    }
    else if (last_layer_idx < expected_root_idx)
    {
      throw0(DB_ERROR("Encountered unexpected last elem in tree before the root"));
    }
    else // last_layer_idx == expected_root_idx
    {
      // We've trimmed all layers past the root, we're done
      break;
    }
  }
}
// Trim the layer at layer_idx down to layer_reduction.new_total_parents elems (deleting from the
// back), and overwrite the layer's new last hash in place when the reduction requires it.
template<typename C>
void BlockchainLMDB::trim_layer(const fcmp::curve_trees::LayerReduction<C> &layer_reduction,
  const std::size_t layer_idx)
{
  LOG_PRINT_L3("BlockchainLMDB::" << __func__);
  check_open();

  mdb_txn_cursors *m_cursors = &m_wcursors;

  CURSOR(layers)

  MDB_val_copy<std::size_t> k(layer_idx);

  // Get the number of existing elements in the layer
  // TODO: get_num_elems_in_layer
  std::size_t old_n_elems_in_layer = 0;
  {
    // Get the first record in a layer so we can then get the last record
    MDB_val v;
    int result = mdb_cursor_get(m_cur_layers, &k, &v, MDB_SET);
    if (result != MDB_SUCCESS)
      throw0(DB_ERROR(lmdb_error("Failed to get first record in layer: ", result).c_str()));

    // TODO: why can't I just use MDB_LAST_DUP once and get the last record?
    result = mdb_cursor_get(m_cur_layers, &k, &v, MDB_LAST_DUP);
    if (result != MDB_SUCCESS)
      throw0(DB_ERROR(lmdb_error("Failed to get last elem: ", result).c_str()));

    // The last elem carries the highest child_chunk_idx in the layer, so count = idx + 1
    const auto *lv = (layer_val<C> *)v.mv_data;
    old_n_elems_in_layer = (1 + lv->child_chunk_idx);
  }

  CHECK_AND_ASSERT_THROW_MES(old_n_elems_in_layer >= layer_reduction.new_total_parents,
    "unexpected old n elems in layer");
  const std::size_t trim_n_elems_in_layer = old_n_elems_in_layer - layer_reduction.new_total_parents;

  // Delete the elements, starting from the back of the layer
  for (std::size_t i = 0; i < trim_n_elems_in_layer; ++i)
  {
    std::size_t last_elem_idx = (old_n_elems_in_layer - 1 - i);
    MDB_val_set(v, last_elem_idx);

    int result = mdb_cursor_get(m_cur_layers, &k, &v, MDB_GET_BOTH);
    if (result == MDB_NOTFOUND)
      // Fix: message previously said "leaf not found" (copy-paste from leaf trimming);
      // this function deletes layer elems. TODO: specific error type instead of DB_ERROR
      throw0(DB_ERROR("layer elem not found"));
    if (result != MDB_SUCCESS)
      throw0(DB_ERROR(lmdb_error("Failed to get elem: ", result).c_str()));

    result = mdb_cursor_del(m_cur_layers, 0);
    if (result != MDB_SUCCESS)
      throw0(DB_ERROR(lmdb_error("Error removing elem: ", result).c_str()));

    MDEBUG("Successfully removed elem at layer_idx: " << layer_idx << " , last_elem_idx: " << last_elem_idx);
  }

  // Update the last element if needed
  if (layer_reduction.update_existing_last_hash)
  {
    layer_val<C> lv;
    lv.child_chunk_idx  = layer_reduction.new_total_parents - 1;
    lv.child_chunk_hash = layer_reduction.new_last_hash;
    MDB_val_set(v, lv);

    // We expect to overwrite the existing hash
    // TODO: make sure the hash already exists and is the existing last hash
    int result = mdb_cursor_put(m_cur_layers, &k, &v, 0);
    if (result != MDB_SUCCESS)
      throw0(DB_ERROR(lmdb_error("Failed to update chunk hash: ", result).c_str()));
  }
}
std::size_t BlockchainLMDB::get_num_leaf_tuples() const
{
LOG_PRINT_L3("BlockchainLMDB::" << __func__);
@ -1443,8 +1626,6 @@ std::size_t BlockchainLMDB::get_num_leaf_tuples() const
TXN_PREFIX_RDONLY();
RCURSOR(leaves)
fcmp::curve_trees::CurveTreesV1::LastHashes last_hashes;
// Get the number of leaf tuples in the tree
std::uint64_t n_leaf_tuples = 0;
@ -1519,7 +1700,171 @@ fcmp::curve_trees::CurveTreesV1::LastHashes BlockchainLMDB::get_tree_last_hashes
return last_hashes;
}
bool BlockchainLMDB::audit_tree(const fcmp::curve_trees::CurveTreesV1 &curve_trees) const
// Read from the db the children in each layer's last chunk that the trim needs: either the
// children being trimmed away, or the children remaining in a shrunken last chunk that must be
// re-hashed. Layer 0 of the output is built from the leaves table; subsequent entries from the
// layers table, converting each parent hash into the next curve's scalar type.
fcmp::curve_trees::CurveTreesV1::LastChunkChildrenToTrim BlockchainLMDB::get_last_chunk_children_to_trim(
  const fcmp::curve_trees::CurveTreesV1 &curve_trees,
  const std::vector<fcmp::curve_trees::TrimLayerInstructions> &trim_instructions) const
{
  LOG_PRINT_L3("BlockchainLMDB::" << __func__);
  check_open();

  TXN_PREFIX_RDONLY();
  // Fix: this function reads through m_cur_leaves below, but only the layers cursor
  // was previously declared — the leaves read cursor was never initialized/renewed
  RCURSOR(leaves)
  RCURSOR(layers)

  fcmp::curve_trees::CurveTreesV1::LastChunkChildrenToTrim last_chunk_children_to_trim;

  auto &c1_last_children_out = last_chunk_children_to_trim.c1_children;
  auto &c2_last_children_out = last_chunk_children_to_trim.c2_children;

  // Get the leaves to trim
  // TODO: separate function for leaves
  {
    CHECK_AND_ASSERT_THROW_MES(!trim_instructions.empty(), "no instructions");
    const auto &trim_leaf_layer_instructions = trim_instructions[0];

    std::vector<fcmp::curve_trees::Selene::Scalar> leaves_to_trim;

    if (trim_leaf_layer_instructions.need_last_chunk_children_to_trim ||
      (trim_leaf_layer_instructions.need_last_chunk_remaining_children && trim_leaf_layer_instructions.new_offset > 0))
    {
      std::size_t idx = trim_leaf_layer_instructions.start_trim_idx;
      CHECK_AND_ASSERT_THROW_MES(idx % fcmp::curve_trees::CurveTreesV1::LEAF_TUPLE_SIZE == 0,
        "expected divisble by leaf tuple size");

      // Leaf records are keyed by tuple idx, so convert from the flattened child idx
      const std::size_t leaf_tuple_idx = idx / fcmp::curve_trees::CurveTreesV1::LEAF_TUPLE_SIZE;

      MDB_val_copy<std::size_t> k(leaf_tuple_idx);
      MDB_cursor_op leaf_op = MDB_SET;
      do
      {
        MDB_val v;
        int result = mdb_cursor_get(m_cur_leaves, &k, &v, leaf_op);
        leaf_op = MDB_NEXT;
        if (result == MDB_NOTFOUND)
          throw0(DB_ERROR("leaf not found")); // TODO: specific error type instead of DB_ERROR
        if (result != MDB_SUCCESS)
          throw0(DB_ERROR(lmdb_error("Failed to get leaf: ", result).c_str()));

        // Each leaf tuple contributes its three scalar members as children
        const auto leaf = *(fcmp::curve_trees::CurveTreesV1::LeafTuple *)v.mv_data;

        leaves_to_trim.push_back(leaf.O_x);
        leaves_to_trim.push_back(leaf.I_x);
        leaves_to_trim.push_back(leaf.C_x);

        idx += fcmp::curve_trees::CurveTreesV1::LEAF_TUPLE_SIZE;
      }
      while (idx < trim_leaf_layer_instructions.end_trim_idx);
    }

    c2_last_children_out.emplace_back(std::move(leaves_to_trim));
  }

  // Traverse the tree layer-by-layer starting at the layer closest to leaf layer, getting children to trim
  // TODO: separate function for layers
  bool parent_is_c1 = true;
  for (std::size_t i = 1; i < trim_instructions.size(); ++i)
  {
    const auto &trim_layer_instructions = trim_instructions[i];

    std::vector<fcmp::curve_trees::Helios::Scalar> c1_children;
    std::vector<fcmp::curve_trees::Selene::Scalar> c2_children;

    if (trim_layer_instructions.need_last_chunk_children_to_trim ||
      (trim_layer_instructions.need_last_chunk_remaining_children && trim_layer_instructions.new_offset > 0))
    {
      // trim_instructions[i] describes layer i's parents; its children live at layer (i - 1)
      const std::size_t layer_idx = (i - 1);
      std::size_t idx = trim_layer_instructions.start_trim_idx;

      MDB_val_set(k, layer_idx);
      MDB_val_set(v, idx);
      MDB_cursor_op op = MDB_GET_BOTH;
      do
      {
        MDEBUG("Getting child to trim at layer_idx: " << layer_idx << " , idx: " << idx);

        int result = mdb_cursor_get(m_cur_layers, &k, &v, op);
        op = MDB_NEXT_DUP;
        if (result == MDB_NOTFOUND)
          throw0(DB_ERROR("layer elem not found")); // TODO: specific error type instead of DB_ERROR
        if (result != MDB_SUCCESS)
          throw0(DB_ERROR(lmdb_error("Failed to get layer elem: ", result).c_str()));

        // Convert the child hash into the parent curve's scalar type
        if (parent_is_c1)
        {
          const auto *lv = (layer_val<fcmp::curve_trees::Selene> *)v.mv_data;
          auto child_scalar = curve_trees.m_c2.point_to_cycle_scalar(lv->child_chunk_hash);
          c1_children.emplace_back(std::move(child_scalar));
        }
        else
        {
          const auto *lv = (layer_val<fcmp::curve_trees::Helios> *)v.mv_data;
          auto child_scalar = curve_trees.m_c1.point_to_cycle_scalar(lv->child_chunk_hash);
          c2_children.emplace_back(std::move(child_scalar));
        }

        ++idx;
      }
      while (idx < trim_layer_instructions.end_trim_idx);
    }

    // Push even when empty so output layer indexing stays aligned with trim_instructions
    if (parent_is_c1)
      c1_last_children_out.emplace_back(std::move(c1_children));
    else
      c2_last_children_out.emplace_back(std::move(c2_children));

    parent_is_c1 = !parent_is_c1;
  }

  TXN_POSTFIX_RDONLY();

  return last_chunk_children_to_trim;
}
// Collect each layer's hash at its post-trim last position (new_total_parents - 1), walking from
// the layer closest to the leaves up to the root. Even layer indexes hold Selene (c2) hashes,
// odd indexes hold Helios (c1) hashes.
fcmp::curve_trees::CurveTreesV1::LastHashes BlockchainLMDB::get_last_hashes_to_trim(
  const std::vector<fcmp::curve_trees::TrimLayerInstructions> &trim_instructions) const
{
  LOG_PRINT_L3("BlockchainLMDB::" << __func__);
  check_open();

  TXN_PREFIX_RDONLY();
  RCURSOR(layers)

  fcmp::curve_trees::CurveTreesV1::LastHashes last_hashes_out;

  for (std::size_t layer_idx = 0; layer_idx < trim_instructions.size(); ++layer_idx)
  {
    const auto &instructions = trim_instructions[layer_idx];

    // The hash we need is the one that will be the layer's last elem after trimming
    const std::size_t new_last_idx = instructions.new_total_parents - 1;

    MDB_val_copy<std::size_t> k(layer_idx);
    MDB_val_set(v, new_last_idx);

    const int result = mdb_cursor_get(m_cur_layers, &k, &v, MDB_GET_BOTH);
    if (result == MDB_NOTFOUND)
      throw0(DB_ERROR("layer elem not found")); // TODO: specific error type instead of DB_ERROR
    if (result != MDB_SUCCESS)
      throw0(DB_ERROR(lmdb_error("Failed to get layer elem: ", result).c_str()));

    const bool layer_is_c2 = ((layer_idx % 2) == 0);
    if (layer_is_c2)
    {
      const auto *lv = (layer_val<fcmp::curve_trees::Selene> *)v.mv_data;
      last_hashes_out.c2_last_hashes.push_back(lv->child_chunk_hash);
    }
    else
    {
      const auto *lv = (layer_val<fcmp::curve_trees::Helios> *)v.mv_data;
      last_hashes_out.c1_last_hashes.push_back(lv->child_chunk_hash);
    }
  }

  TXN_POSTFIX_RDONLY();

  return last_hashes_out;
}
bool BlockchainLMDB::audit_tree(const fcmp::curve_trees::CurveTreesV1 &curve_trees,
const std::size_t expected_n_leaf_tuples) const
{
LOG_PRINT_L3("BlockchainLMDB::" << __func__);
check_open();
@ -1528,6 +1873,9 @@ bool BlockchainLMDB::audit_tree(const fcmp::curve_trees::CurveTreesV1 &curve_tre
RCURSOR(leaves)
RCURSOR(layers)
const std::size_t actual_n_leaf_tuples = this->get_num_leaf_tuples();
CHECK_AND_ASSERT_MES(actual_n_leaf_tuples == expected_n_leaf_tuples, false, "unexpected num leaf tuples");
// Check chunks of leaves hash into first layer as expected
std::size_t layer_idx = 0;
std::size_t child_chunk_idx = 0;
@ -1696,7 +2044,7 @@ bool BlockchainLMDB::audit_layer(const C_CHILD &c_child,
// No more children, expect to be done auditing layer and ready to move up a layer
if (result != MDB_NOTFOUND)
throw0(DB_ERROR(lmdb_error("unexpected parent result at parent_layer_idx " + std::to_string(parent_layer_idx)
+ " , child_chunk_idx " + std::to_string(child_chunk_idx), result).c_str()));
+ " , child_chunk_idx " + std::to_string(child_chunk_idx) + " : ", result).c_str()));
MDEBUG("Finished auditing layer " << layer_idx);
TXN_POSTFIX_RDONLY();
@ -1704,8 +2052,12 @@ bool BlockchainLMDB::audit_layer(const C_CHILD &c_child,
}
// End condition B: check if finished auditing the tree
if (child_chunk_idx == 0 && child_chunk.size() == 1 && result == MDB_NOTFOUND)
if (child_chunk_idx == 0 && child_chunk.size() == 1)
{
if (result != MDB_NOTFOUND)
throw0(DB_ERROR(lmdb_error("unexpected parent of root at parent_layer_idx " + std::to_string(parent_layer_idx)
+ " , child_chunk_idx " + std::to_string(child_chunk_idx) + " : ", result).c_str()));
MDEBUG("Encountered root at layer_idx " << layer_idx);
TXN_POSTFIX_RDONLY();
return true;

View file

@ -367,7 +367,10 @@ public:
virtual void grow_tree(const fcmp::curve_trees::CurveTreesV1 &curve_trees,
const std::vector<fcmp::curve_trees::CurveTreesV1::LeafTuple> &new_leaves);
virtual bool audit_tree(const fcmp::curve_trees::CurveTreesV1 &curve_trees) const;
virtual void trim_tree(const fcmp::curve_trees::CurveTreesV1 &curve_trees, const std::size_t trim_n_leaf_tuples);
virtual bool audit_tree(const fcmp::curve_trees::CurveTreesV1 &curve_trees,
const std::size_t expected_n_leaf_tuples) const;
private:
void do_resize(uint64_t size_increase=0);
@ -418,10 +421,20 @@ private:
const std::size_t c_idx,
const std::size_t layer_idx);
template<typename C>
void trim_layer(const fcmp::curve_trees::LayerReduction<C> &layer_reduction, const std::size_t layer_idx);
std::size_t get_num_leaf_tuples() const;
fcmp::curve_trees::CurveTreesV1::LastHashes get_tree_last_hashes() const;
fcmp::curve_trees::CurveTreesV1::LastChunkChildrenToTrim get_last_chunk_children_to_trim(
const fcmp::curve_trees::CurveTreesV1 &curve_trees,
const std::vector<fcmp::curve_trees::TrimLayerInstructions> &trim_instructions) const;
fcmp::curve_trees::CurveTreesV1::LastHashes get_last_hashes_to_trim(
const std::vector<fcmp::curve_trees::TrimLayerInstructions> &trim_instructions) const;
template<typename C_CHILD, typename C_PARENT>
bool audit_layer(const C_CHILD &c_child,
const C_PARENT &c_parent,

View file

@ -118,7 +118,8 @@ public:
virtual void remove_spent_key(const crypto::key_image& k_image) override {}
virtual void grow_tree(const fcmp::curve_trees::CurveTreesV1 &curve_trees,
const std::vector<fcmp::curve_trees::CurveTreesV1::LeafTuple> &new_leaves) override {};
virtual bool audit_tree(const fcmp::curve_trees::CurveTreesV1 &curve_trees) const override { return false; };
virtual void trim_tree(const fcmp::curve_trees::CurveTreesV1 &curve_trees, const std::size_t trim_n_leaf_tuples) override {};
virtual bool audit_tree(const fcmp::curve_trees::CurveTreesV1 &curve_trees, const std::size_t expected_n_leaf_tuples) const override { return false; };
virtual bool for_all_key_images(std::function<bool(const crypto::key_image&)>) const override { return true; }
virtual bool for_blocks_range(const uint64_t&, const uint64_t&, std::function<bool(uint64_t, const crypto::hash&, const cryptonote::block&)>) const override { return true; }

View file

@ -478,6 +478,22 @@ static TrimLayerInstructions get_trim_layer_instructions(
hash_offset = 0;
}
std::size_t start_trim_idx = 0;
std::size_t end_trim_idx = 0;
if (need_last_chunk_children_to_trim)
{
const std::size_t chunk_boundary_start = (new_total_parents - 1) * parent_chunk_width;
const std::size_t chunk_boundary_end = chunk_boundary_start + parent_chunk_width;
start_trim_idx = chunk_boundary_start + new_offset;
end_trim_idx = std::min(chunk_boundary_end, old_total_children);
}
else if (need_last_chunk_remaining_children && new_offset > 0)
{
start_trim_idx = new_total_children - new_offset;
end_trim_idx = new_total_children;
}
MDEBUG("parent_chunk_width: " << parent_chunk_width
<< " , old_total_children: " << old_total_children
<< " , new_total_children: " << new_total_children
@ -489,7 +505,9 @@ static TrimLayerInstructions get_trim_layer_instructions(
<< " , need_new_last_child: " << last_child_will_change
<< " , update_existing_last_hash: " << update_existing_last_hash
<< " , new_offset: " << new_offset
<< " , hash_offset: " << hash_offset);
<< " , hash_offset: " << hash_offset
<< " , start_trim_idx: " << start_trim_idx
<< " , end_trim_idx: " << end_trim_idx);
return TrimLayerInstructions{
.parent_chunk_width = parent_chunk_width,
@ -504,6 +522,8 @@ static TrimLayerInstructions get_trim_layer_instructions(
.update_existing_last_hash = update_existing_last_hash,
.new_offset = new_offset,
.hash_offset = hash_offset,
.start_trim_idx = start_trim_idx,
.end_trim_idx = end_trim_idx,
};
}
//----------------------------------------------------------------------------------------------------------------------
@ -545,9 +565,9 @@ static typename fcmp::curve_trees::LayerReduction<C_PARENT> get_next_layer_reduc
{
CHECK_AND_ASSERT_THROW_MES(child_layer_idx > 0, "child index cannot be 0 here");
CHECK_AND_ASSERT_THROW_MES(child_reductions.size() == child_layer_idx, "unexpected child layer idx");
const std::size_t last_child_layer_idx = child_layer_idx - 1;
const typename C_CHILD::Point &new_last_child = child_reductions.back().new_last_hash;
CHECK_AND_ASSERT_THROW_MES(child_reductions.back().update_existing_last_hash, "expected new last child");
const typename C_CHILD::Point &new_last_child = child_reductions.back().new_last_hash;
new_last_child_scalar = c_child.point_to_cycle_scalar(new_last_child);
if (trim_layer_instructions.need_last_chunk_remaining_children)
@ -557,6 +577,7 @@ static typename fcmp::curve_trees::LayerReduction<C_PARENT> get_next_layer_reduc
else if (!trim_layer_instructions.need_last_chunk_children_to_trim)
{
// TODO: cleaner conditional for this case
const std::size_t last_child_layer_idx = child_layer_idx - 1;
CHECK_AND_ASSERT_THROW_MES(child_last_hashes.size() > last_child_layer_idx, "missing last child hash");
const typename C_CHILD::Point &old_last_child = child_last_hashes[last_child_layer_idx];
@ -583,8 +604,8 @@ static typename fcmp::curve_trees::LayerReduction<C_PARENT> get_next_layer_reduc
else
{
MDEBUG("hash_trim: existing_hash: " << c_parent.to_string(existing_hash)
<< " , hash_offset: " << trim_layer_instructions.hash_offset
<< " , new_last_child_scalar: " << c_parent.to_string(new_last_child_scalar));
<< " , hash_offset: " << trim_layer_instructions.hash_offset
<< " , child_to_grow_back: " << c_parent.to_string(new_last_child_scalar));
layer_reduction_out.new_last_hash = c_parent.hash_trim(
existing_hash,
@ -650,7 +671,7 @@ typename CurveTrees<C1, C2>::TreeExtension CurveTrees<C1, C2>::get_tree_extensio
LEAF_TUPLE_SIZE,
m_leaf_layer_chunk_width);
tree_extension.leaves.start_idx = grow_layer_instructions.old_total_children;
tree_extension.leaves.start_leaf_tuple_idx = grow_layer_instructions.old_total_children / LEAF_TUPLE_SIZE;
// Copy the leaves
// TODO: don't copy here
@ -764,8 +785,12 @@ typename CurveTrees<C1, C2>::TreeReduction CurveTrees<C1, C2>::get_tree_reductio
const LastChunkChildrenToTrim &children_to_trim,
const LastHashes &last_hashes) const
{
TreeReduction tree_reduction_out;
tree_reduction_out.new_total_leaves = trim_instructions[0].new_total_children;
TreeReduction tree_reduction_out;
CHECK_AND_ASSERT_THROW_MES((trim_instructions[0].new_total_children % LEAF_TUPLE_SIZE) == 0,
"unexpected new total leaves");
const std::size_t new_total_leaf_tuples = trim_instructions[0].new_total_children / LEAF_TUPLE_SIZE;
tree_reduction_out.new_total_leaf_tuples = new_total_leaf_tuples;
bool use_c2 = true;
std::size_t c1_idx = 0;

View file

@ -116,6 +116,9 @@ struct TrimLayerInstructions final
std::size_t new_offset;
std::size_t hash_offset;
std::size_t start_trim_idx;
std::size_t end_trim_idx;
};
//----------------------------------------------------------------------------------------------------------------------
@ -156,8 +159,8 @@ public:
// Contiguous leaves in the tree, starting a specified start_idx in the leaf layer
struct Leaves final
{
// Starting index in the leaf layer
std::size_t start_idx{0};
// Starting leaf tuple index in the leaf layer
std::size_t start_leaf_tuple_idx{0};
// Contiguous leaves in a tree that start at the start_idx
std::vector<LeafTuple> tuples;
};
@ -177,7 +180,7 @@ public:
// - c2_layer_reductions[0] is first layer after leaves, then c1_layer_reductions[0], c2_layer_reductions[1], etc
struct TreeReduction final
{
std::size_t new_total_leaves;
std::size_t new_total_leaf_tuples;
std::vector<LayerReduction<C1>> c1_layer_reductions;
std::vector<LayerReduction<C2>> c2_layer_reductions;
};

View file

@ -79,42 +79,18 @@ static bool validate_layer(const C &curve,
//----------------------------------------------------------------------------------------------------------------------
template<typename C_CHILD, typename C_PARENT>
static std::vector<typename C_PARENT::Scalar> get_last_chunk_children_to_trim(const C_CHILD &c_child,
const fcmp::curve_trees::TrimLayerInstructions &trim_instructions,
const CurveTreesGlobalTree::Layer<C_CHILD> &child_layer)
const CurveTreesGlobalTree::Layer<C_CHILD> &child_layer,
const bool need_last_chunk_children_to_trim,
const bool need_last_chunk_remaining_children,
const std::size_t new_offset,
const std::size_t start_trim_idx,
const std::size_t end_trim_idx)
{
std::vector<typename C_PARENT::Scalar> children_to_trim_out;
const std::size_t new_total_children = trim_instructions.new_total_children;
const std::size_t old_total_children = trim_instructions.old_total_children;
const std::size_t new_total_parents = trim_instructions.new_total_parents;
const std::size_t parent_chunk_width = trim_instructions.parent_chunk_width;
const std::size_t new_offset = trim_instructions.new_offset;
CHECK_AND_ASSERT_THROW_MES(new_total_children > 0, "expected some new children");
CHECK_AND_ASSERT_THROW_MES(new_total_children >= new_offset, "expected more children than offset");
CHECK_AND_ASSERT_THROW_MES(new_total_parents > 0, "expected some new parents");
if (trim_instructions.need_last_chunk_children_to_trim)
if (need_last_chunk_children_to_trim || (need_last_chunk_remaining_children && new_offset > 0))
{
std::size_t idx = ((new_total_parents - 1) * parent_chunk_width) + new_offset;
MDEBUG("Start trim from idx: " << idx);
do
{
// TODO: consolidate do while inner logic with below
CHECK_AND_ASSERT_THROW_MES(child_layer.size() > idx, "idx too high");
const auto &child_point = child_layer[idx];
auto child_scalar = c_child.point_to_cycle_scalar(child_point);
children_to_trim_out.push_back(std::move(child_scalar));
++idx;
}
while ((idx < old_total_children) && (idx % parent_chunk_width != 0));
}
else if (trim_instructions.need_last_chunk_remaining_children && new_offset > 0)
{
std::size_t idx = new_total_children - new_offset;
MDEBUG("Start grow remaining from idx: " << idx);
std::size_t idx = start_trim_idx;
MDEBUG("Start trim from idx: " << idx << " , ending trim at: " << end_trim_idx);
do
{
CHECK_AND_ASSERT_THROW_MES(child_layer.size() > idx, "idx too high");
@ -125,7 +101,7 @@ static std::vector<typename C_PARENT::Scalar> get_last_chunk_children_to_trim(co
++idx;
}
while ((idx < new_total_children) && (idx % parent_chunk_width != 0));
while (idx < end_trim_idx);
}
return children_to_trim_out;
@ -188,8 +164,7 @@ CurveTreesV1::LastHashes CurveTreesGlobalTree::get_last_hashes() const
void CurveTreesGlobalTree::extend_tree(const CurveTreesV1::TreeExtension &tree_extension)
{
// Add the leaves
const std::size_t init_num_leaves = m_tree.leaves.size() * m_curve_trees.LEAF_TUPLE_SIZE;
CHECK_AND_ASSERT_THROW_MES(init_num_leaves == tree_extension.leaves.start_idx,
CHECK_AND_ASSERT_THROW_MES(m_tree.leaves.size() == tree_extension.leaves.start_leaf_tuple_idx,
"unexpected leaf start idx");
m_tree.leaves.reserve(m_tree.leaves.size() + tree_extension.leaves.tuples.size());
@ -287,12 +262,9 @@ void CurveTreesGlobalTree::extend_tree(const CurveTreesV1::TreeExtension &tree_e
void CurveTreesGlobalTree::reduce_tree(const CurveTreesV1::TreeReduction &tree_reduction)
{
// Trim the leaves
const std::size_t init_num_leaves = m_tree.leaves.size() * m_curve_trees.LEAF_TUPLE_SIZE;
CHECK_AND_ASSERT_THROW_MES(init_num_leaves > tree_reduction.new_total_leaves, "expected fewer new total leaves");
CHECK_AND_ASSERT_THROW_MES((tree_reduction.new_total_leaves % m_curve_trees.LEAF_TUPLE_SIZE) == 0,
"unexpected new total leaves");
const std::size_t new_total_leaf_tuples = tree_reduction.new_total_leaves / m_curve_trees.LEAF_TUPLE_SIZE;
while (m_tree.leaves.size() > new_total_leaf_tuples)
CHECK_AND_ASSERT_THROW_MES(m_tree.leaves.size() > tree_reduction.new_total_leaf_tuples,
"expected fewer new total leaves");
while (m_tree.leaves.size() > tree_reduction.new_total_leaf_tuples)
m_tree.leaves.pop_back();
// Trim the layers
@ -372,23 +344,15 @@ CurveTreesV1::LastChunkChildrenToTrim CurveTreesGlobalTree::get_all_last_chunk_c
CHECK_AND_ASSERT_THROW_MES(!trim_instructions.empty(), "no instructions");
const auto &trim_leaf_layer_instructions = trim_instructions[0];
const std::size_t new_total_children = trim_leaf_layer_instructions.new_total_children;
const std::size_t old_total_children = trim_leaf_layer_instructions.old_total_children;
const std::size_t new_total_parents = trim_leaf_layer_instructions.new_total_parents;
const std::size_t parent_chunk_width = trim_leaf_layer_instructions.parent_chunk_width;
const std::size_t new_offset = trim_leaf_layer_instructions.new_offset;
CHECK_AND_ASSERT_THROW_MES(new_total_children >= CurveTreesV1::LEAF_TUPLE_SIZE, "expected some new leaves");
CHECK_AND_ASSERT_THROW_MES(new_total_children >= new_offset, "expected more children than offset");
CHECK_AND_ASSERT_THROW_MES(new_total_parents > 0, "expected some new parents");
const std::size_t new_offset = trim_leaf_layer_instructions.new_offset;
std::vector<Selene::Scalar> leaves_to_trim;
// TODO: separate function
// TODO: calculate starting indexes in trim instructions, perhaps calculate end indexes also
if (trim_leaf_layer_instructions.need_last_chunk_children_to_trim)
if (trim_leaf_layer_instructions.need_last_chunk_children_to_trim ||
(trim_leaf_layer_instructions.need_last_chunk_remaining_children && new_offset > 0))
{
std::size_t idx = ((new_total_parents - 1) * parent_chunk_width) + new_offset;
std::size_t idx = trim_leaf_layer_instructions.start_trim_idx;
MDEBUG("Start trim from idx: " << idx);
do
{
@ -404,26 +368,7 @@ CurveTreesV1::LastChunkChildrenToTrim CurveTreesGlobalTree::get_all_last_chunk_c
idx += CurveTreesV1::LEAF_TUPLE_SIZE;
}
while ((idx < old_total_children) && (idx % parent_chunk_width != 0));
}
else if (trim_leaf_layer_instructions.need_last_chunk_remaining_children && new_offset > 0)
{
std::size_t idx = new_total_children - new_offset;
do
{
CHECK_AND_ASSERT_THROW_MES(idx % CurveTreesV1::LEAF_TUPLE_SIZE == 0, "expected divisble by leaf tuple size");
const std::size_t leaf_tuple_idx = idx / CurveTreesV1::LEAF_TUPLE_SIZE;
CHECK_AND_ASSERT_THROW_MES(m_tree.leaves.size() > leaf_tuple_idx, "leaf_tuple_idx too high");
const auto &leaf_tuple = m_tree.leaves[leaf_tuple_idx];
leaves_to_trim.push_back(leaf_tuple.O_x);
leaves_to_trim.push_back(leaf_tuple.I_x);
leaves_to_trim.push_back(leaf_tuple.C_x);
idx += CurveTreesV1::LEAF_TUPLE_SIZE;
}
while ((idx < new_total_children) && (idx % parent_chunk_width != 0));
while (idx < trim_leaf_layer_instructions.end_trim_idx);
}
all_children_to_trim.c2_children.emplace_back(std::move(leaves_to_trim));
@ -433,16 +378,28 @@ CurveTreesV1::LastChunkChildrenToTrim CurveTreesGlobalTree::get_all_last_chunk_c
std::size_t c2_idx = 0;
for (std::size_t i = 1; i < trim_instructions.size(); ++i)
{
MDEBUG("Getting trim instructions for layer " << i);
const auto &trim_layer_instructions = trim_instructions[i];
const bool need_last_chunk_children_to_trim = trim_layer_instructions.need_last_chunk_children_to_trim;
const bool need_last_chunk_remaining_children = trim_layer_instructions.need_last_chunk_remaining_children;
const std::size_t new_offset = trim_layer_instructions.new_offset;
const std::size_t start_trim_idx = trim_layer_instructions.start_trim_idx;
const std::size_t end_trim_idx = trim_layer_instructions.end_trim_idx;
if (parent_is_c2)
{
CHECK_AND_ASSERT_THROW_MES(m_tree.c1_layers.size() > c1_idx, "c1_idx too high");
auto children_to_trim = get_last_chunk_children_to_trim<Helios, Selene>(
m_curve_trees.m_c1,
trim_layer_instructions,
m_tree.c1_layers[c1_idx]);
m_tree.c1_layers[c1_idx],
need_last_chunk_children_to_trim,
need_last_chunk_remaining_children,
new_offset,
start_trim_idx,
end_trim_idx);
all_children_to_trim.c2_children.emplace_back(std::move(children_to_trim));
++c1_idx;
@ -453,8 +410,12 @@ CurveTreesV1::LastChunkChildrenToTrim CurveTreesGlobalTree::get_all_last_chunk_c
auto children_to_trim = get_last_chunk_children_to_trim<Selene, Helios>(
m_curve_trees.m_c2,
trim_layer_instructions,
m_tree.c2_layers[c2_idx]);
m_tree.c2_layers[c2_idx],
need_last_chunk_children_to_trim,
need_last_chunk_remaining_children,
new_offset,
start_trim_idx,
end_trim_idx);
all_children_to_trim.c1_children.emplace_back(std::move(children_to_trim));
++c2_idx;
@ -674,7 +635,7 @@ void CurveTreesGlobalTree::log_tree_extension(const CurveTreesV1::TreeExtension
MDEBUG("Tree extension has " << tree_extension.leaves.tuples.size() << " leaves, "
<< c1_extensions.size() << " helios layers, " << c2_extensions.size() << " selene layers");
MDEBUG("Leaf start idx: " << tree_extension.leaves.start_idx);
MDEBUG("Leaf start idx: " << tree_extension.leaves.start_leaf_tuple_idx);
for (std::size_t i = 0; i < tree_extension.leaves.tuples.size(); ++i)
{
const auto &leaf = tree_extension.leaves.tuples[i];
@ -683,7 +644,7 @@ void CurveTreesGlobalTree::log_tree_extension(const CurveTreesV1::TreeExtension
const auto I_x = m_curve_trees.m_c2.to_string(leaf.I_x);
const auto C_x = m_curve_trees.m_c2.to_string(leaf.C_x);
MDEBUG("Leaf idx " << ((i*CurveTreesV1::LEAF_TUPLE_SIZE) + tree_extension.leaves.start_idx)
MDEBUG("Leaf tuple idx " << (tree_extension.leaves.start_leaf_tuple_idx)
<< " : { O_x: " << O_x << " , I_x: " << I_x << " , C_x: " << C_x << " }");
}
@ -899,13 +860,15 @@ static bool grow_tree_db(const std::size_t init_leaves,
LOG_PRINT_L1("Adding " << init_leaves << " leaves to db, then extending by " << ext_leaves << " leaves");
test_db.m_db->grow_tree(curve_trees, generate_random_leaves(curve_trees, init_leaves));
CHECK_AND_ASSERT_MES(test_db.m_db->audit_tree(curve_trees), false, "failed to add initial leaves to db");
CHECK_AND_ASSERT_MES(test_db.m_db->audit_tree(curve_trees, init_leaves), false,
"failed to add initial leaves to db");
MDEBUG("Successfully added initial " << init_leaves << " leaves to db, extending by "
<< ext_leaves << " leaves");
test_db.m_db->grow_tree(curve_trees, generate_random_leaves(curve_trees, ext_leaves));
CHECK_AND_ASSERT_MES(test_db.m_db->audit_tree(curve_trees), false, "failed to extend tree in db");
CHECK_AND_ASSERT_MES(test_db.m_db->audit_tree(curve_trees, init_leaves + ext_leaves), false,
"failed to extend tree in db");
MDEBUG("Successfully extended tree in db by " << ext_leaves << " leaves");
}
@ -913,6 +876,35 @@ static bool grow_tree_db(const std::size_t init_leaves,
return true;
}
//----------------------------------------------------------------------------------------------------------------------
// Grow a fresh db-backed tree to init_leaves, trim it by trim_leaves, and audit after each step.
// Returns false (via CHECK_AND_ASSERT_MES) if either audit fails.
static bool trim_tree_db(const std::size_t init_leaves,
  const std::size_t trim_leaves,
  CurveTreesV1 &curve_trees,
  unit_test::BlockchainLMDBTest &test_db)
{
  INIT_BLOCKCHAIN_LMDB_TEST_DB();

  {
    cryptonote::db_wtxn_guard guard(test_db.m_db);

    LOG_PRINT_L1("Adding " << init_leaves << " leaves to db, then trimming by " << trim_leaves << " leaves");

    // The audit after trimming must see exactly this many leaf tuples remaining
    const std::size_t expected_leaves_after_trim = init_leaves - trim_leaves;

    // Build the initial tree and verify it before touching it
    test_db.m_db->grow_tree(curve_trees, generate_random_leaves(curve_trees, init_leaves));
    CHECK_AND_ASSERT_MES(test_db.m_db->audit_tree(curve_trees, init_leaves), false,
      "failed to add initial leaves to db");

    MDEBUG("Successfully added initial " << init_leaves << " leaves to db, trimming by "
      << trim_leaves << " leaves");

    // Trim and verify the reduced tree
    test_db.m_db->trim_tree(curve_trees, trim_leaves);
    CHECK_AND_ASSERT_MES(test_db.m_db->audit_tree(curve_trees, expected_leaves_after_trim), false,
      "failed to trim tree in db");

    MDEBUG("Successfully trimmed tree in db by " << trim_leaves << " leaves");
  }

  return true;
}
//----------------------------------------------------------------------------------------------------------------------
//----------------------------------------------------------------------------------------------------------------------
// Test
//----------------------------------------------------------------------------------------------------------------------
@ -999,6 +991,8 @@ TEST(curve_trees, trim_tree)
helios_chunk_width,
selene_chunk_width);
unit_test::BlockchainLMDBTest test_db;
// Increment to test for off-by-1
++leaves_needed_for_n_layers;
@ -1020,6 +1014,7 @@ TEST(curve_trees, trim_tree)
CurveTreesGlobalTree tree_copy(global_tree);
ASSERT_TRUE(trim_tree_in_memory(trim_leaves, std::move(tree_copy)));
ASSERT_TRUE(trim_tree_db(init_leaves, trim_leaves, curve_trees, test_db));
}
}
}
@ -1076,7 +1071,7 @@ TEST(curve_trees, hash_trim)
ASSERT_EQ(trim_res_bytes, grow_res_bytes);
}
// 3. Trim 2
// 2. Trim 2
{
// Start by hashing: {selene_scalar_0, selene_scalar_1, selene_scalar_2}
// Then trim to: {selene_scalar_0}
@ -1151,7 +1146,7 @@ TEST(curve_trees, hash_trim)
ASSERT_EQ(trim_res_bytes, grow_res_bytes);
}
// 4. Trim 2 then grow by 1
// 4. Trim 2 and grow back by 1
{
// Start by hashing: {selene_scalar_0, selene_scalar_1, selene_scalar_2}
// Then trim+grow to: {selene_scalar_0, selene_scalar_3}