From 34eafa85f358cfbd9dd1424989b2f5f5418b7e1a Mon Sep 17 00:00:00 2001
From: j-berman
Date: Thu, 1 Aug 2024 10:04:51 -0700
Subject: [PATCH] Store points in the tree in compressed encoding (32 bytes)

---
 src/blockchain_db/lmdb/db_lmdb.cpp | 88 ++++++++++++++----------
 src/blockchain_db/lmdb/db_lmdb.h   |  4 +-
 src/fcmp/curve_trees.cpp           |  1 -
 src/fcmp/curve_trees.h             |  1 -
 src/fcmp/fcmp_rust/fcmp++.h        |  4 ++
 src/fcmp/tower_cycle.cpp           | 10 ++++
 src/fcmp/tower_cycle.h             |  6 ++
 7 files changed, 65 insertions(+), 49 deletions(-)

diff --git a/src/blockchain_db/lmdb/db_lmdb.cpp b/src/blockchain_db/lmdb/db_lmdb.cpp
index 0573e1b96..dbb7fd903 100644
--- a/src/blockchain_db/lmdb/db_lmdb.cpp
+++ b/src/blockchain_db/lmdb/db_lmdb.cpp
@@ -216,6 +216,7 @@ namespace
 *
 * spent_keys       input hash       -
 *
+ * TODO: don't store leaf tuples, store reference to outputs
 * locked_leaves    block ID         [{output ID, leaf tuple}...]
 * leaves           leaf_idx         {leaf tuple}
 * layers           layer_idx        [{child_chunk_idx, child_chunk_hash}...]
@@ -392,11 +393,9 @@ typedef struct outtx {
   uint64_t local_index;
 } outtx;
 
-template<typename C>
 struct layer_val
 {
   uint64_t child_chunk_idx;
-  // TODO: use compressed 32 byte point; also need a from_bytes implemented on rust side
-  typename C::Point child_chunk_hash;
+  std::array<uint8_t, 32UL> child_chunk_hash;
 };
 
 std::atomic<uint64_t> mdb_txn_safe::num_active_txns{0};
@@ -1471,9 +1470,9 @@ void BlockchainLMDB::grow_layer(const C &curve,
   if (ext.update_existing_last_hash)
   {
     // We updated the last hash, so update it
-    layer_val<C> lv;
+    layer_val lv;
     lv.child_chunk_idx = ext.start_idx;
-    lv.child_chunk_hash = ext.hashes.front();
+    lv.child_chunk_hash = curve.to_bytes(ext.hashes.front());
     MDB_val_set(v, lv);
 
     // We expect to overwrite the existing hash
@@ -1486,9 +1485,9 @@
   // Now add all the new hashes found in the extension
   for (uint64_t i = ext.update_existing_last_hash ? 1 : 0; i < ext.hashes.size(); ++i)
   {
-    layer_val<C> lv;
+    layer_val lv;
     lv.child_chunk_idx = i + ext.start_idx;
-    lv.child_chunk_hash = ext.hashes[i];
+    lv.child_chunk_hash = curve.to_bytes(ext.hashes[i]);
     MDB_val_set(v, lv);
 
     // TODO: according to the docs, MDB_APPENDDUP isn't supposed to perform any key comparisons to maximize efficiency.
@@ -1570,14 +1569,14 @@ void BlockchainLMDB::trim_tree(const uint64_t trim_n_leaf_tuples)
     {
       CHECK_AND_ASSERT_THROW_MES(c2_idx < c2_layer_reductions.size(), "unexpected c2 layer reduction");
       const auto &c2_reduction = c2_layer_reductions[c2_idx];
-      trim_layer(c2_reduction, i);
+      this->trim_layer(m_curve_trees->m_c2, c2_reduction, i);
 
       ++c2_idx;
     }
     else
     {
       CHECK_AND_ASSERT_THROW_MES(c1_idx < c1_layer_reductions.size(), "unexpected c1 layer reduction");
       const auto &c1_reduction = c1_layer_reductions[c1_idx];
-      trim_layer(c1_reduction, i);
+      this->trim_layer(m_curve_trees->m_c1, c1_reduction, i);
 
       ++c1_idx;
     }
@@ -1614,7 +1613,8 @@ void BlockchainLMDB::trim_tree(const uint64_t trim_n_leaf_tuples)
 }
 
 template<typename C>
-void BlockchainLMDB::trim_layer(const fcmp::curve_trees::LayerReduction<C> &layer_reduction,
+void BlockchainLMDB::trim_layer(const C &curve,
+  const fcmp::curve_trees::LayerReduction<C> &layer_reduction,
   const uint64_t layer_idx)
 {
   LOG_PRINT_L3("BlockchainLMDB::" << __func__);
@@ -1639,7 +1639,7 @@ void BlockchainLMDB::trim_layer(const fcmp::curve_trees::LayerReduction<C> &laye
     if (result != MDB_SUCCESS)
       throw0(DB_ERROR(lmdb_error("Failed to get last elem: ", result).c_str()));
 
-    const auto *lv = (layer_val<C> *)v.mv_data;
+    const auto *lv = (layer_val *)v.mv_data;
     old_n_elems_in_layer = (1 + lv->child_chunk_idx);
   }
@@ -1669,9 +1669,9 @@ void BlockchainLMDB::trim_layer(const fcmp::curve_trees::LayerReduction<C> &laye
   // Update the last element if needed
   if (layer_reduction.update_existing_last_hash)
   {
-    layer_val<C> lv;
+    layer_val lv;
     lv.child_chunk_idx = layer_reduction.new_total_parents - 1;
-    lv.child_chunk_hash = layer_reduction.new_last_hash;
+    lv.child_chunk_hash = curve.to_bytes(layer_reduction.new_last_hash);
     MDB_val_set(v, lv);
 
     // We expect to overwrite the existing hash
@@ -1724,17 +1724,8 @@ std::array<uint8_t, 32UL> BlockchainLMDB::get_tree_root() const
   int result = mdb_cursor_get(m_cur_layers, &k, &v, MDB_LAST);
   if (result == MDB_SUCCESS)
   {
-    const uint64_t layer_idx = *(uint64_t*)k.mv_data;
-    if ((layer_idx % 2) == 0)
-    {
-      const auto *lv = (layer_val *)v.mv_data;
-      root = m_curve_trees->m_c2.to_bytes(lv->child_chunk_hash);
-    }
-    else
-    {
-      const auto *lv = (layer_val *)v.mv_data;
-      root = m_curve_trees->m_c1.to_bytes(lv->child_chunk_hash);
-    }
+    auto *lv = (layer_val *)v.mv_data;
+    root = std::move(lv->child_chunk_hash);
   }
   else if (result != MDB_NOTFOUND)
     throw0(DB_ERROR(lmdb_error("Failed to get last leaf: ", result).c_str()));
@@ -1776,18 +1767,19 @@ fcmp::curve_trees::CurveTreesV1::LastHashes BlockchainLMDB::get_tree_last_hashes
     if (result != MDB_SUCCESS)
       throw0(DB_ERROR(lmdb_error("Failed to get last record in layer: ", result).c_str()));
 
+    const auto *lv = (layer_val *)v.mv_data;
+    MDEBUG("Reading last hash at layer_idx: " << layer_idx << " , lv->child_chunk_idx: " << lv->child_chunk_idx);
+
     const bool use_c2 = (layer_idx % 2) == 0;
     if (use_c2)
     {
-      const auto *lv = (layer_val *)v.mv_data;
-      MDEBUG("Selene, layer_idx: " << layer_idx << " , lv->child_chunk_idx: " << lv->child_chunk_idx);
-      c2_last_hashes.emplace_back(std::move(lv->child_chunk_hash));
+      auto point = m_curve_trees->m_c2.from_bytes(lv->child_chunk_hash);
+      c2_last_hashes.emplace_back(std::move(point));
     }
     else
     {
-      const auto *lv = (layer_val *)v.mv_data;
-      MDEBUG("Helios, layer_idx: " << layer_idx << " , lv->child_chunk_idx: " << lv->child_chunk_idx);
-      c1_last_hashes.emplace_back(std::move(lv->child_chunk_hash));
+      auto point = m_curve_trees->m_c1.from_bytes(lv->child_chunk_hash);
+      c1_last_hashes.emplace_back(std::move(point));
     }
 
     ++layer_idx;
@@ -1884,16 +1876,17 @@ fcmp::curve_trees::CurveTreesV1::LastChunkChildrenToTrim BlockchainLMDB::get_las
     if (result != MDB_SUCCESS)
       throw0(DB_ERROR(lmdb_error("Failed to get layer elem: ", result).c_str()));
 
+    const auto *lv = (layer_val *)v.mv_data;
     if (parent_is_c1)
     {
-      const auto *lv = (layer_val *)v.mv_data;
-      auto child_scalar = m_curve_trees->m_c2.point_to_cycle_scalar(lv->child_chunk_hash);
+      const auto point = m_curve_trees->m_c2.from_bytes(lv->child_chunk_hash);
+      auto child_scalar = m_curve_trees->m_c2.point_to_cycle_scalar(point);
       c1_children.emplace_back(std::move(child_scalar));
     }
     else
     {
-      const auto *lv = (layer_val *)v.mv_data;
-      auto child_scalar = m_curve_trees->m_c1.point_to_cycle_scalar(lv->child_chunk_hash);
+      const auto point = m_curve_trees->m_c1.from_bytes(lv->child_chunk_hash);
+      auto child_scalar = m_curve_trees->m_c1.point_to_cycle_scalar(point);
       c2_children.emplace_back(std::move(child_scalar));
     }
 
@@ -1941,15 +1934,16 @@ fcmp::curve_trees::CurveTreesV1::LastHashes BlockchainLMDB::get_last_hashes_to_t
     if (result != MDB_SUCCESS)
       throw0(DB_ERROR(lmdb_error("Failed to get layer elem: ", result).c_str()));
 
+    const auto *lv = (layer_val *)v.mv_data;
     if ((layer_idx % 2) == 0)
     {
-      const auto *lv = (layer_val *)v.mv_data;
-      last_hashes_out.c2_last_hashes.push_back(lv->child_chunk_hash);
+      auto point = m_curve_trees->m_c2.from_bytes(lv->child_chunk_hash);
+      last_hashes_out.c2_last_hashes.emplace_back(std::move(point));
     }
     else
     {
-      const auto *lv = (layer_val *)v.mv_data;
-      last_hashes_out.c1_last_hashes.push_back(lv->child_chunk_hash);
+      auto point = m_curve_trees->m_c1.from_bytes(lv->child_chunk_hash);
+      last_hashes_out.c1_last_hashes.emplace_back(std::move(point));
     }
 
     ++layer_idx;
@@ -2050,11 +2044,11 @@ bool BlockchainLMDB::audit_tree(const uint64_t expected_n_leaf_tuples) const
       << m_curve_trees->m_c2.to_string(m_curve_trees->m_c2.m_hash_init_point) << " (" << leaves.size() << " leaves)");
 
     // Now compare to value from the db
-    const auto *lv = (layer_val *)v_parent.mv_data;
-    MDEBUG("Actual leaf chunk hash " << m_curve_trees->m_c2.to_string(lv->child_chunk_hash));
+    const auto *lv = (layer_val *)v_parent.mv_data;
+    MDEBUG("Actual leaf chunk hash " << epee::string_tools::pod_to_hex(lv->child_chunk_hash));
 
     const auto expected_bytes = m_curve_trees->m_c2.to_bytes(chunk_hash);
-    const auto actual_bytes = m_curve_trees->m_c2.to_bytes(lv->child_chunk_hash);
+    const auto actual_bytes = lv->child_chunk_hash;
     CHECK_AND_ASSERT_MES(expected_bytes == actual_bytes, false, "unexpected leaf chunk hash");
 
     ++child_chunk_idx;
@@ -2133,8 +2127,10 @@ bool BlockchainLMDB::audit_layer(const C_CHILD &c_child,
     if (result != MDB_SUCCESS)
       throw0(DB_ERROR(lmdb_error("Failed to get child: ", result).c_str()));
 
-    const auto *child = (layer_val<C_CHILD>*)v_child.mv_data;
-    child_chunk.push_back(child->child_chunk_hash);
+    const auto *lv = (layer_val *)v_child.mv_data;
+    auto child_point = c_child.from_bytes(lv->child_chunk_hash);
+
+    child_chunk.emplace_back(std::move(child_point));
 
     if (child_chunk.size() == chunk_width)
       break;
@@ -2192,10 +2188,10 @@ bool BlockchainLMDB::audit_layer(const C_CHILD &c_child,
     MDEBUG("chunk_hash " << c_parent.to_string(chunk_hash) << " , hash init point: " << c_parent.to_string(c_parent.m_hash_init_point) << " (" << child_scalars.size() << " children)");
 
-    const auto *lv = (layer_val<C_PARENT> *)v_parent.mv_data;
-    MDEBUG("Actual chunk hash " << c_parent.to_string(lv->child_chunk_hash));
+    const auto *lv = (layer_val *)v_parent.mv_data;
+    MDEBUG("Actual chunk hash " << epee::string_tools::pod_to_hex(lv->child_chunk_hash));
 
-    const auto actual_bytes = c_parent.to_bytes(lv->child_chunk_hash);
+    const auto actual_bytes = lv->child_chunk_hash;
     const auto expected_bytes = c_parent.to_bytes(chunk_hash);
     if (actual_bytes != expected_bytes)
       throw0(DB_ERROR(("unexpected hash at child_chunk_idx " + std::to_string(child_chunk_idx)).c_str()));
diff --git a/src/blockchain_db/lmdb/db_lmdb.h b/src/blockchain_db/lmdb/db_lmdb.h
index 5d15ecf59..834b2bf1e 100644
--- a/src/blockchain_db/lmdb/db_lmdb.h
+++ b/src/blockchain_db/lmdb/db_lmdb.h
@@ -425,7 +425,9 @@ private:
     const uint64_t layer_idx);
 
   template<typename C>
-  void trim_layer(const fcmp::curve_trees::LayerReduction<C> &layer_reduction, const uint64_t layer_idx);
+  void trim_layer(const C &curve,
+    const fcmp::curve_trees::LayerReduction<C> &layer_reduction,
+    const uint64_t layer_idx);
 
   uint64_t get_num_leaf_tuples() const;
 
diff --git a/src/fcmp/curve_trees.cpp b/src/fcmp/curve_trees.cpp
index 3e93ca3dc..90d84d871 100644
--- a/src/fcmp/curve_trees.cpp
+++ b/src/fcmp/curve_trees.cpp
@@ -913,7 +913,6 @@ typename CurveTrees<C1, C2>::TreeReduction CurveTrees<C1, C2>::get_tree_reductio
             ++c1_idx;
         }
 
-        use_c2 = !use_c2;
     }
 
diff --git a/src/fcmp/curve_trees.h b/src/fcmp/curve_trees.h
index 2e31bdce0..8218a9eda 100644
--- a/src/fcmp/curve_trees.h
+++ b/src/fcmp/curve_trees.h
@@ -37,7 +37,6 @@
 #include 
 
-
 namespace fcmp
 {
 namespace curve_trees
diff --git a/src/fcmp/fcmp_rust/fcmp++.h b/src/fcmp/fcmp_rust/fcmp++.h
index f1ef5c17c..d222dd910 100644
--- a/src/fcmp/fcmp_rust/fcmp++.h
+++ b/src/fcmp/fcmp_rust/fcmp++.h
@@ -100,6 +100,10 @@ uint8_t *helios_point_to_bytes(HeliosPoint helios_point);
 
 uint8_t *selene_point_to_bytes(SelenePoint selene_point);
 
+HeliosPoint helios_point_from_bytes(const uint8_t *helios_point_bytes);
+
+SelenePoint selene_point_from_bytes(const uint8_t *selene_point_bytes);
+
 SeleneScalar ed25519_point_to_selene_scalar(const uint8_t *ed25519_point);
 
 HeliosScalar selene_point_to_helios_scalar(SelenePoint selene_point);
diff --git a/src/fcmp/tower_cycle.cpp b/src/fcmp/tower_cycle.cpp
index e1ce3f6f9..75c73af1c 100644
--- a/src/fcmp/tower_cycle.cpp
+++ b/src/fcmp/tower_cycle.cpp
@@ -187,6 +187,16 @@ std::array<uint8_t, 32UL> Selene::to_bytes(const Selene::Point &point) const
     return res;
 }
 //----------------------------------------------------------------------------------------------------------------------
+Helios::Point Helios::from_bytes(const std::array<uint8_t, 32UL> &bytes) const
+{
+    return fcmp_rust::helios_point_from_bytes(bytes.data());
+}
+//----------------------------------------------------------------------------------------------------------------------
+Selene::Point Selene::from_bytes(const std::array<uint8_t, 32UL> &bytes) const
+{
+    return fcmp_rust::selene_point_from_bytes(bytes.data());
+}
+//----------------------------------------------------------------------------------------------------------------------
 std::string Helios::to_string(const typename Helios::Scalar &scalar) const
 {
     return epee::string_tools::pod_to_hex(this->to_bytes(scalar));
diff --git a/src/fcmp/tower_cycle.h b/src/fcmp/tower_cycle.h
index 6e8cb6f50..6b0836384 100644
--- a/src/fcmp/tower_cycle.h
+++ b/src/fcmp/tower_cycle.h
@@ -94,6 +94,8 @@ public:
     virtual std::array<uint8_t, 32UL> to_bytes(const typename C::Scalar &scalar) const = 0;
     virtual std::array<uint8_t, 32UL> to_bytes(const typename C::Point &point) const = 0;
 
+    virtual typename C::Point from_bytes(const std::array<uint8_t, 32UL> &bytes) const = 0;
+
     virtual std::string to_string(const typename C::Scalar &scalar) const = 0;
     virtual std::string to_string(const typename C::Point &point) const = 0;
@@ -139,6 +141,8 @@ public:
     std::array<uint8_t, 32UL> to_bytes(const Scalar &scalar) const override;
     std::array<uint8_t, 32UL> to_bytes(const Point &point) const override;
 
+    Point from_bytes(const std::array<uint8_t, 32UL> &bytes) const override;
+
     std::string to_string(const Scalar &scalar) const override;
     std::string to_string(const Point &point) const override;
 };
@@ -179,6 +183,8 @@ public:
     std::array<uint8_t, 32UL> to_bytes(const Scalar &scalar) const override;
     std::array<uint8_t, 32UL> to_bytes(const Point &point) const override;
 
+    Point from_bytes(const std::array<uint8_t, 32UL> &bytes) const override;
+
     std::string to_string(const Scalar &scalar) const override;
     std::string to_string(const Point &point) const override;
 };
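
The sketch below is not part of the patch; it is a minimal, self-contained illustration of the round trip the change relies on: parent hashes are compressed to their 32-byte encoding with curve.to_bytes() before being written into a layer_val record, and decompressed with curve.from_bytes() when the record is read back. MockCurve and the main() driver are hypothetical stand-ins for illustration only; the real code goes through the Helios/Selene wrappers over the Rust FFI and LMDB cursors shown in the diff above.

// Illustrative sketch only: MockCurve is a stand-in, not the real Helios/Selene types.
#include <array>
#include <cstdint>
#include <cstring>
#include <iostream>

struct MockCurve
{
  // In-memory "point": wider than its serialized form, as with real curve points.
  struct Point { std::array<uint8_t, 64> repr; };

  // Compress to the 32-byte encoding that gets persisted in the layers table.
  std::array<uint8_t, 32> to_bytes(const Point &p) const
  {
    std::array<uint8_t, 32> out{};
    std::memcpy(out.data(), p.repr.data(), out.size());
    return out;
  }

  // Decompress when a record is read back out of the db.
  Point from_bytes(const std::array<uint8_t, 32> &bytes) const
  {
    Point p{};
    std::memcpy(p.repr.data(), bytes.data(), bytes.size());
    return p;
  }
};

// Mirrors the patched layer_val: the stored value holds the compressed encoding,
// not the curve-specific Point type, so the struct no longer needs a template parameter.
struct layer_val
{
  uint64_t child_chunk_idx;
  std::array<uint8_t, 32> child_chunk_hash;
};

int main()
{
  MockCurve curve;
  MockCurve::Point hash{};
  hash.repr.fill(0x42);

  // Write path (cf. grow_layer): compress before storing.
  layer_val lv{7, curve.to_bytes(hash)};

  // Read path (cf. get_tree_last_hashes): decompress after loading.
  MockCurve::Point restored = curve.from_bytes(lv.child_chunk_hash);

  std::cout << "round trip ok: "
            << (std::memcmp(restored.repr.data(), hash.repr.data(), 32) == 0) << "\n";
  return 0;
}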