Mirror of https://github.com/monero-project/monero.git (synced 2025-01-22 06:41:27 -05:00)
fcmp++: compilation fixes + misc. cleanup
- Removed the call to hash_init_point in the constructor
- Replaced the global static CURVE_TREES_V1 with a smart pointer
- No need to link the Rust static lib when including curve_trees.h
- The leaves table doesn't need dupsort flags; all leaves should be unique by key
- Renamed fcmp -> fcmp_pp
- Return early when 0 leaves are passed into trim_tree
parent edded7e6e3
commit 10c6c12b18
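The smart-pointer change can be illustrated with a short usage sketch. This is not code from the commit: the include paths and the cryptonote namespace for BlockchainLMDB are assumptions, while the curve_trees_v1() factory, its default chunk widths, and the updated BlockchainLMDB constructor are taken from the signatures shown in the diff below.

// Hedged sketch: constructing the curve tree via the new factory instead of the old global static.
#include <memory>
#include "blockchain_db/lmdb/db_lmdb.h"   // BlockchainLMDB (include path assumed)
#include "fcmp_pp/curve_trees.h"          // fcmp::curve_trees::curve_trees_v1 (include path assumed)

int main()
{
    // Replaces the removed `static CurveTreesV1 CURVE_TREES_V1(...)` global:
    // build one CurveTreesV1 (Helios/Selene, default chunk widths 38/18) and share ownership.
    std::shared_ptr<fcmp::curve_trees::CurveTreesV1> curve_trees =
        fcmp::curve_trees::curve_trees_v1();

    // The LMDB backend now takes the shared_ptr; its default argument calls the factory too.
    cryptonote::BlockchainLMDB db(/*batch_transactions=*/true, curve_trees);
    return 0;
}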
@@ -45,7 +45,7 @@ target_link_libraries(blockchain_db
PUBLIC
common
cncrypto
fcmp
fcmp_pp
ringct
${LMDB_LIBRARY}
${Boost_FILESYSTEM_LIBRARY}
@@ -328,6 +328,7 @@ uint64_t BlockchainDB::add_block( const std::pair<block, blobdata>& blck
// Get all other txs' leaf tuples
for (std::size_t i = 0; i < txs.size(); ++i)
{
// TODO: this loop can be parallelized
m_curve_trees->tx_outs_to_leaf_tuple_contexts(
txs[i].first,
output_ids[i],
@@ -33,6 +33,7 @@
#include <string>
#include <exception>
#include <map>
#include <memory>
#include <boost/program_options.hpp>
#include "common/command_line.h"
#include "crypto/hash.h"
@@ -591,14 +592,14 @@ protected:
HardFork* m_hardfork;
fcmp::curve_trees::CurveTreesV1* m_curve_trees;
std::shared_ptr<fcmp::curve_trees::CurveTreesV1> m_curve_trees;
public:
/**
* @brief An empty constructor.
*/
BlockchainDB(): m_hardfork(NULL), m_open(false), m_curve_trees(NULL) { }
BlockchainDB(): m_hardfork(NULL), m_open(false), m_curve_trees() { }
/**
* @brief An empty destructor.
@@ -1788,6 +1789,8 @@ public:
// TODO: description
virtual bool audit_tree(const uint64_t expected_n_leaf_tuples) const = 0;
virtual uint64_t get_num_leaf_tuples() const = 0;
virtual std::array<uint8_t, 32UL> get_tree_root() const = 0;
//
// Hard fork related storage
@@ -1363,10 +1363,10 @@ void BlockchainLMDB::remove_spent_key(const crypto::key_image& k_image)
void BlockchainLMDB::grow_tree(std::vector<fcmp::curve_trees::LeafTupleContext> &&new_leaves)
{
LOG_PRINT_L3("BlockchainLMDB::" << __func__);
if (new_leaves.empty())
return;
LOG_PRINT_L3("BlockchainLMDB::" << __func__);
check_open();
mdb_txn_cursors *m_cursors = &m_wcursors;
@@ -1377,7 +1377,7 @@ void BlockchainLMDB::grow_tree(std::vector<fcmp::curve_trees::LeafTupleContext>
// Get the number of leaf tuples that exist in the tree
const uint64_t old_n_leaf_tuples = this->get_num_leaf_tuples();
// Read every layer's last hashes
// Read every layer's last hash
const auto last_hashes = this->get_tree_last_hashes();
// Use the number of leaf tuples and the existing last hashes to get a struct we can use to extend the tree
@@ -1392,10 +1392,10 @@ void BlockchainLMDB::grow_tree(std::vector<fcmp::curve_trees::LeafTupleContext>
MDB_val_copy<uint64_t> k(i + leaves.start_leaf_tuple_idx);
MDB_val_set(v, leaves.tuples[i]);
// TODO: according to the docs, MDB_APPENDDUP isn't supposed to perform any key comparisons to maximize efficiency.
// TODO: according to the docs, MDB_APPEND isn't supposed to perform any key comparisons to maximize efficiency.
// Adding MDB_NOOVERWRITE I assume re-introduces a key comparison. Benchmark NOOVERWRITE here
// MDB_NOOVERWRITE makes sure key doesn't already exist
int result = mdb_cursor_put(m_cur_leaves, &k, &v, MDB_APPENDDUP | MDB_NOOVERWRITE);
int result = mdb_cursor_put(m_cur_leaves, &k, &v, MDB_APPEND | MDB_NOOVERWRITE);
if (result != MDB_SUCCESS)
throw0(DB_ERROR(lmdb_error("Failed to add leaf: ", result).c_str()));
}
@@ -1446,7 +1446,7 @@ void BlockchainLMDB::grow_tree(std::vector<fcmp::curve_trees::LeafTupleContext>
}
template<typename C>
void BlockchainLMDB::grow_layer(const C &curve,
void BlockchainLMDB::grow_layer(const std::unique_ptr<C> &curve,
const std::vector<fcmp::curve_trees::LayerExtension<C>> &layer_extensions,
const uint64_t ext_idx,
const uint64_t layer_idx)
@@ -1471,7 +1471,7 @@ void BlockchainLMDB::grow_layer(const C &curve,
// We updated the last hash, so update it
layer_val lv;
lv.child_chunk_idx = ext.start_idx;
lv.child_chunk_hash = curve.to_bytes(ext.hashes.front());
lv.child_chunk_hash = curve->to_bytes(ext.hashes.front());
MDB_val_set(v, lv);
// We expect to overwrite the existing hash
@@ -1486,7 +1486,7 @@ void BlockchainLMDB::grow_layer(const C &curve,
{
layer_val lv;
lv.child_chunk_idx = i + ext.start_idx;
lv.child_chunk_hash = curve.to_bytes(ext.hashes[i]);
lv.child_chunk_hash = curve->to_bytes(ext.hashes[i]);
MDB_val_set(v, lv);
// TODO: according to the docs, MDB_APPENDDUP isn't supposed to perform any key comparisons to maximize efficiency.
@@ -1500,16 +1500,16 @@ void BlockchainLMDB::grow_layer(const C &curve,
void BlockchainLMDB::trim_tree(const uint64_t trim_n_leaf_tuples)
{
// TODO: block_wtxn_start like pop_block, then call BlockchainDB::trim_tree
LOG_PRINT_L3("BlockchainLMDB::" << __func__);
if (trim_n_leaf_tuples == 0)
return;
check_open();
mdb_txn_cursors *m_cursors = &m_wcursors;
CURSOR(leaves)
CURSOR(layers)
CHECK_AND_ASSERT_THROW_MES(trim_n_leaf_tuples > 0, "must be trimming some leaves");
const uint64_t old_n_leaf_tuples = this->get_num_leaf_tuples();
CHECK_AND_ASSERT_THROW_MES(old_n_leaf_tuples > trim_n_leaf_tuples, "cannot trim more leaves than exist");
@@ -1612,7 +1612,7 @@ void BlockchainLMDB::trim_tree(const uint64_t trim_n_leaf_tuples)
}
template<typename C>
void BlockchainLMDB::trim_layer(const C &curve,
void BlockchainLMDB::trim_layer(const std::unique_ptr<C> &curve,
const fcmp::curve_trees::LayerReduction<C> &layer_reduction,
const uint64_t layer_idx)
{
@@ -1670,7 +1670,7 @@ void BlockchainLMDB::trim_layer(const C &curve,
{
layer_val lv;
lv.child_chunk_idx = layer_reduction.new_total_parents - 1;
lv.child_chunk_hash = curve.to_bytes(layer_reduction.new_last_hash);
lv.child_chunk_hash = curve->to_bytes(layer_reduction.new_last_hash);
MDB_val_set(v, lv);
// We expect to overwrite the existing hash
@@ -1772,12 +1772,12 @@ fcmp::curve_trees::CurveTreesV1::LastHashes BlockchainLMDB::get_tree_last_hashes
const bool use_c2 = (layer_idx % 2) == 0;
if (use_c2)
{
auto point = m_curve_trees->m_c2.from_bytes(lv->child_chunk_hash);
auto point = m_curve_trees->m_c2->from_bytes(lv->child_chunk_hash);
c2_last_hashes.emplace_back(std::move(point));
}
else
{
auto point = m_curve_trees->m_c1.from_bytes(lv->child_chunk_hash);
auto point = m_curve_trees->m_c1->from_bytes(lv->child_chunk_hash);
c1_last_hashes.emplace_back(std::move(point));
}
@@ -1883,14 +1883,14 @@ fcmp::curve_trees::CurveTreesV1::LastChunkChildrenToTrim BlockchainLMDB::get_las
const auto *lv = (layer_val *)v.mv_data;
if (parent_is_c1)
{
const auto point = m_curve_trees->m_c2.from_bytes(lv->child_chunk_hash);
auto child_scalar = m_curve_trees->m_c2.point_to_cycle_scalar(point);
const auto point = m_curve_trees->m_c2->from_bytes(lv->child_chunk_hash);
auto child_scalar = m_curve_trees->m_c2->point_to_cycle_scalar(point);
c1_children.emplace_back(std::move(child_scalar));
}
else
{
const auto point = m_curve_trees->m_c1.from_bytes(lv->child_chunk_hash);
auto child_scalar = m_curve_trees->m_c1.point_to_cycle_scalar(point);
const auto point = m_curve_trees->m_c1->from_bytes(lv->child_chunk_hash);
auto child_scalar = m_curve_trees->m_c1->point_to_cycle_scalar(point);
c2_children.emplace_back(std::move(child_scalar));
}
@@ -1941,12 +1941,12 @@ fcmp::curve_trees::CurveTreesV1::LastHashes BlockchainLMDB::get_last_hashes_to_t
const auto *lv = (layer_val *)v.mv_data;
if ((layer_idx % 2) == 0)
{
auto point = m_curve_trees->m_c2.from_bytes(lv->child_chunk_hash);
auto point = m_curve_trees->m_c2->from_bytes(lv->child_chunk_hash);
last_hashes_out.c2_last_hashes.emplace_back(std::move(point));
}
else
{
auto point = m_curve_trees->m_c1.from_bytes(lv->child_chunk_hash);
auto point = m_curve_trees->m_c1->from_bytes(lv->child_chunk_hash);
last_hashes_out.c1_last_hashes.emplace_back(std::move(point));
}
@@ -2043,17 +2043,17 @@ bool BlockchainLMDB::audit_tree(const uint64_t expected_n_leaf_tuples) const
// Hash the chunk of leaves
for (uint64_t i = 0; i < leaves.size(); ++i)
MDEBUG("Hashing " << m_curve_trees->m_c2.to_string(leaves[i]));
MDEBUG("Hashing " << m_curve_trees->m_c2->to_string(leaves[i]));
const fcmp::curve_trees::Selene::Point chunk_hash = fcmp::curve_trees::get_new_parent(m_curve_trees->m_c2, chunk);
MDEBUG("chunk_hash " << m_curve_trees->m_c2.to_string(chunk_hash) << " , hash init point: "
<< m_curve_trees->m_c2.to_string(m_curve_trees->m_c2.m_hash_init_point) << " (" << leaves.size() << " leaves)");
MDEBUG("chunk_hash " << m_curve_trees->m_c2->to_string(chunk_hash) << " , hash init point: "
<< m_curve_trees->m_c2->to_string(m_curve_trees->m_c2->hash_init_point()) << " (" << leaves.size() << " leaves)");
// Now compare to value from the db
const auto *lv = (layer_val *)v_parent.mv_data;
MDEBUG("Actual leaf chunk hash " << epee::string_tools::pod_to_hex(lv->child_chunk_hash));
const auto expected_bytes = m_curve_trees->m_c2.to_bytes(chunk_hash);
const auto expected_bytes = m_curve_trees->m_c2->to_bytes(chunk_hash);
const auto actual_bytes = lv->child_chunk_hash;
CHECK_AND_ASSERT_MES(expected_bytes == actual_bytes, false, "unexpected leaf chunk hash");
@@ -2101,8 +2101,8 @@ bool BlockchainLMDB::audit_tree(const uint64_t expected_n_leaf_tuples) const
}
template<typename C_CHILD, typename C_PARENT>
bool BlockchainLMDB::audit_layer(const C_CHILD &c_child,
const C_PARENT &c_parent,
bool BlockchainLMDB::audit_layer(const std::unique_ptr<C_CHILD> &c_child,
const std::unique_ptr<C_PARENT> &c_parent,
const uint64_t layer_idx,
const uint64_t child_start_idx,
const uint64_t child_chunk_idx,
@@ -2134,7 +2134,7 @@ bool BlockchainLMDB::audit_layer(const C_CHILD &c_child,
throw0(DB_ERROR(lmdb_error("Failed to get child: ", result).c_str()));
const auto *lv = (layer_val *)v_child.mv_data;
auto child_point = c_child.from_bytes(lv->child_chunk_hash);
auto child_point = c_child->from_bytes(lv->child_chunk_hash);
child_chunk.emplace_back(std::move(child_point));
@@ -2184,21 +2184,21 @@ bool BlockchainLMDB::audit_layer(const C_CHILD &c_child,
std::vector<typename C_PARENT::Scalar> child_scalars;
child_scalars.reserve(child_chunk.size());
for (const auto &child : child_chunk)
child_scalars.emplace_back(c_child.point_to_cycle_scalar(child));
child_scalars.emplace_back(c_child->point_to_cycle_scalar(child));
const typename C_PARENT::Chunk chunk{child_scalars.data(), child_scalars.size()};
for (uint64_t i = 0; i < child_scalars.size(); ++i)
MDEBUG("Hashing " << c_parent.to_string(child_scalars[i]));
MDEBUG("Hashing " << c_parent->to_string(child_scalars[i]));
const auto chunk_hash = fcmp::curve_trees::get_new_parent(c_parent, chunk);
MDEBUG("chunk_hash " << c_parent.to_string(chunk_hash) << " , hash init point: "
<< c_parent.to_string(c_parent.m_hash_init_point) << " (" << child_scalars.size() << " children)");
MDEBUG("chunk_hash " << c_parent->to_string(chunk_hash) << " , hash init point: "
<< c_parent->to_string(c_parent->hash_init_point()) << " (" << child_scalars.size() << " children)");
const auto *lv = (layer_val *)v_parent.mv_data;
MDEBUG("Actual chunk hash " << epee::string_tools::pod_to_hex(lv->child_chunk_hash));
const auto actual_bytes = lv->child_chunk_hash;
const auto expected_bytes = c_parent.to_bytes(chunk_hash);
const auto expected_bytes = c_parent->to_bytes(chunk_hash);
if (actual_bytes != expected_bytes)
throw0(DB_ERROR(("unexpected hash at child_chunk_idx " + std::to_string(child_chunk_idx)).c_str()));
@@ -2296,7 +2296,7 @@ BlockchainLMDB::~BlockchainLMDB()
BlockchainLMDB::close();
}
BlockchainLMDB::BlockchainLMDB(bool batch_transactions, fcmp::curve_trees::CurveTreesV1 *curve_trees): BlockchainDB()
BlockchainLMDB::BlockchainLMDB(bool batch_transactions, std::shared_ptr<fcmp::curve_trees::CurveTreesV1> curve_trees): BlockchainDB()
{
LOG_PRINT_L3("BlockchainLMDB::" << __func__);
// initialize folder to something "safe" just in case
@@ -2437,7 +2437,7 @@ void BlockchainLMDB::open(const std::string& filename, const int db_flags)
lmdb_db_open(txn, LMDB_SPENT_KEYS, MDB_INTEGERKEY | MDB_CREATE | MDB_DUPSORT | MDB_DUPFIXED, m_spent_keys, "Failed to open db handle for m_spent_keys");
lmdb_db_open(txn, LMDB_LOCKED_LEAVES, MDB_INTEGERKEY | MDB_DUPSORT | MDB_DUPFIXED | MDB_CREATE, m_locked_leaves, "Failed to open db handle for m_locked_leaves");
lmdb_db_open(txn, LMDB_LEAVES, MDB_INTEGERKEY | MDB_DUPSORT | MDB_DUPFIXED | MDB_CREATE, m_leaves, "Failed to open db handle for m_leaves");
lmdb_db_open(txn, LMDB_LEAVES, MDB_INTEGERKEY | MDB_CREATE, m_leaves, "Failed to open db handle for m_leaves");
lmdb_db_open(txn, LMDB_LAYERS, MDB_INTEGERKEY | MDB_DUPSORT | MDB_DUPFIXED | MDB_CREATE, m_layers, "Failed to open db handle for m_layers");
lmdb_db_open(txn, LMDB_TXPOOL_META, MDB_CREATE, m_txpool_meta, "Failed to open db handle for m_txpool_meta");
@@ -194,7 +194,7 @@ struct mdb_txn_safe
class BlockchainLMDB : public BlockchainDB
{
public:
BlockchainLMDB(bool batch_transactions=true, fcmp::curve_trees::CurveTreesV1 *curve_trees=&fcmp::curve_trees::CURVE_TREES_V1);
BlockchainLMDB(bool batch_transactions=true, std::shared_ptr<fcmp::curve_trees::CurveTreesV1> curve_trees = fcmp::curve_trees::curve_trees_v1());
~BlockchainLMDB();
virtual void open(const std::string& filename, const int mdb_flags=0);
@@ -419,19 +419,19 @@ private:
virtual void remove_spent_key(const crypto::key_image& k_image);
template<typename C>
void grow_layer(const C &curve,
void grow_layer(const std::unique_ptr<C> &curve,
const std::vector<fcmp::curve_trees::LayerExtension<C>> &layer_extensions,
const uint64_t c_idx,
const uint64_t layer_idx);
template<typename C>
void trim_layer(const C &curve,
void trim_layer(const std::unique_ptr<C> &curve,
const fcmp::curve_trees::LayerReduction<C> &layer_reduction,
const uint64_t layer_idx);
uint64_t get_num_leaf_tuples() const;
virtual uint64_t get_num_leaf_tuples() const;
std::array<uint8_t, 32UL> get_tree_root() const;
virtual std::array<uint8_t, 32UL> get_tree_root() const;
fcmp::curve_trees::CurveTreesV1::LastHashes get_tree_last_hashes() const;
||||
@@ -442,8 +442,8 @@ private:
const std::vector<fcmp::curve_trees::TrimLayerInstructions> &trim_instructions) const;
template<typename C_CHILD, typename C_PARENT>
bool audit_layer(const C_CHILD &c_child,
const C_PARENT &c_parent,
bool audit_layer(const std::unique_ptr<C_CHILD> &c_child,
const std::unique_ptr<C_PARENT> &c_parent,
const uint64_t layer_idx,
const uint64_t child_start_idx,
const uint64_t child_chunk_idx,
@@ -119,6 +119,8 @@ public:
virtual void grow_tree(std::vector<fcmp::curve_trees::LeafTupleContext> &&new_leaves) override {};
virtual void trim_tree(const uint64_t trim_n_leaf_tuples) override {};
virtual bool audit_tree(const uint64_t expected_n_leaf_tuples) const override { return false; };
virtual std::array<uint8_t, 32UL> get_tree_root() const override { return {}; };
virtual uint64_t get_num_leaf_tuples() const override { return 0; };
virtual bool for_all_key_images(std::function<bool(const crypto::key_image&)>) const override { return true; }
virtual bool for_blocks_range(const uint64_t&, const uint64_t&, std::function<bool(uint64_t, const crypto::hash&, const cryptonote::block&)>) const override { return true; }
@@ -26,20 +26,20 @@
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
# THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
set(fcmp_sources
set(fcmp_pp_sources
curve_trees.cpp
tower_cycle.cpp)
monero_find_all_headers(fcmp_headers "${CMAKE_CURRENT_SOURCE_DIR}")
monero_find_all_headers(fcmp_pp_headers "${CMAKE_CURRENT_SOURCE_DIR}")
add_subdirectory(fcmp_rust)
monero_add_library_with_deps(
NAME fcmp
NAME fcmp_pp
DEPENDS fcmp_rust
SOURCES
${fcmp_sources}
${fcmp_headers})
${fcmp_pp_sources}
${fcmp_pp_headers})
if(WIN32)
set(EXTRA_RUST_LIBRARIES ws2_32 ntdll userenv)
@@ -47,7 +47,7 @@ else()
set(EXTRA_RUST_LIBRARIES )
endif()
target_link_libraries(fcmp
target_link_libraries(fcmp_pp
PUBLIC
crypto
cryptonote_basic
@@ -44,17 +44,33 @@ template class CurveTrees<Helios, Selene>;
// Public helper functions
//----------------------------------------------------------------------------------------------------------------------
template<typename C>
typename C::Point get_new_parent(const C &curve, const typename C::Chunk &new_children)
typename C::Point get_new_parent(const std::unique_ptr<C> &curve, const typename C::Chunk &new_children)
{
return curve.hash_grow(
curve.m_hash_init_point,
return curve->hash_grow(
curve->hash_init_point(),
0,/*offset*/
curve.zero_scalar(),
curve->zero_scalar(),
new_children
);
};
template Helios::Point get_new_parent<Helios>(const Helios &curve, const typename Helios::Chunk &new_children);
template Selene::Point get_new_parent<Selene>(const Selene &curve, const typename Selene::Chunk &new_children);
template Helios::Point get_new_parent<Helios>(const std::unique_ptr<Helios> &curve,
const typename Helios::Chunk &new_children);
template Selene::Point get_new_parent<Selene>(const std::unique_ptr<Selene> &curve,
const typename Selene::Chunk &new_children);
//----------------------------------------------------------------------------------------------------------------------
std::shared_ptr<CurveTreesV1> curve_trees_v1(const std::size_t helios_chunk_width, const std::size_t selene_chunk_width)
{
std::unique_ptr<Helios> helios(new Helios());
std::unique_ptr<Selene> selene(new Selene());
return std::shared_ptr<CurveTreesV1>(
new CurveTreesV1(
std::move(helios),
std::move(selene),
helios_chunk_width,
selene_chunk_width
)
);
};
//----------------------------------------------------------------------------------------------------------------------
//----------------------------------------------------------------------------------------------------------------------
// Static functions
@@ -62,7 +78,7 @@ template Selene::Point get_new_parent<Selene>(const Selene &curve, const typenam
// After hashing a layer of children points, convert those children x-coordinates into their respective cycle
// scalars, and prepare them to be hashed for the next layer
template<typename C_CHILD, typename C_PARENT>
static std::vector<typename C_PARENT::Scalar> next_child_scalars_from_children(const C_CHILD &c_child,
static std::vector<typename C_PARENT::Scalar> next_child_scalars_from_children(const std::unique_ptr<C_CHILD> &c_child,
const typename C_CHILD::Point *last_root,
const LayerExtension<C_CHILD> &children)
{
@@ -79,7 +95,7 @@ static std::vector<typename C_PARENT::Scalar> next_child_scalars_from_children(c
if (children.start_idx > 0)
{
MDEBUG("Updating root layer and including the existing root in next children");
child_scalars_out.emplace_back(c_child.point_to_cycle_scalar(*last_root));
child_scalars_out.emplace_back(c_child->point_to_cycle_scalar(*last_root));
}
}
@@ -91,7 +107,7 @@ static std::vector<typename C_PARENT::Scalar> next_child_scalars_from_children(c
//----------------------------------------------------------------------------------------------------------------------
// Hash chunks of a layer of new children, outputting the next layer's parents
template<typename C>
static LayerExtension<C> hash_children_chunks(const C &curve,
static LayerExtension<C> hash_children_chunks(const std::unique_ptr<C> &curve,
const typename C::Scalar *old_last_child,
const typename C::Point *old_last_parent,
const std::size_t start_offset,
@@ -119,30 +135,30 @@ static LayerExtension<C> hash_children_chunks(const C &curve,
// Prepare to hash
const auto &existing_hash = old_last_parent != nullptr
? *old_last_parent
: curve.m_hash_init_point;
: curve->hash_init_point();
const auto &prior_child_after_offset = old_last_child != nullptr
? *old_last_child
: curve.zero_scalar();
: curve->zero_scalar();
const auto chunk_start = new_child_scalars.data();
const typename C::Chunk chunk{chunk_start, chunk_size};
MDEBUG("existing_hash: " << curve.to_string(existing_hash) << " , start_offset: " << start_offset
<< " , prior_child_after_offset: " << curve.to_string(prior_child_after_offset));
MDEBUG("existing_hash: " << curve->to_string(existing_hash) << " , start_offset: " << start_offset
<< " , prior_child_after_offset: " << curve->to_string(prior_child_after_offset));
for (std::size_t i = 0; i < chunk_size; ++i)
MDEBUG("Hashing child " << curve.to_string(chunk_start[i]));
MDEBUG("Hashing child " << curve->to_string(chunk_start[i]));
// Do the hash
auto chunk_hash = curve.hash_grow(
auto chunk_hash = curve->hash_grow(
existing_hash,
start_offset,
prior_child_after_offset,
chunk
);
MDEBUG("Child chunk_start_idx " << 0 << " result: " << curve.to_string(chunk_hash)
MDEBUG("Child chunk_start_idx " << 0 << " result: " << curve->to_string(chunk_hash)
<< " , chunk_size: " << chunk_size);
// We've got our hash
@@ -154,17 +170,18 @@ static LayerExtension<C> hash_children_chunks(const C &curve,
while (chunk_start_idx < new_child_scalars.size())
{
// TODO: this loop can be parallelized
// Fill a complete chunk, or add the remaining new children to the last chunk
chunk_size = std::min(chunk_width, new_child_scalars.size() - chunk_start_idx);
const auto chunk_start = new_child_scalars.data() + chunk_start_idx;
const typename C::Chunk chunk{chunk_start, chunk_size};
for (std::size_t i = 0; i < chunk_size; ++i)
MDEBUG("Hashing child " << curve.to_string(chunk_start[i]));
MDEBUG("Hashing child " << curve->to_string(chunk_start[i]));
auto chunk_hash = get_new_parent(curve, chunk);
MDEBUG("Child chunk_start_idx " << chunk_start_idx << " result: " << curve.to_string(chunk_hash)
MDEBUG("Child chunk_start_idx " << chunk_start_idx << " result: " << curve->to_string(chunk_hash)
<< " , chunk_size: " << chunk_size);
// We've got our hash
@@ -173,7 +190,6 @@ static LayerExtension<C> hash_children_chunks(const C &curve,
// Advance to the next chunk
chunk_start_idx += chunk_size;
// Fill a complete chunk, or add the remaining new children to the last chunk
CHECK_AND_ASSERT_THROW_MES(chunk_start_idx <= new_child_scalars.size(), "unexpected chunk start idx");
}
@@ -351,8 +367,8 @@ static GrowLayerInstructions get_leaf_layer_grow_instructions(const uint64_t old
// - for example, if we just grew the parent layer after the leaf layer, the "next layer" would be the grandparent
// layer of the leaf layer
template<typename C_CHILD, typename C_PARENT>
static LayerExtension<C_PARENT> get_next_layer_extension(const C_CHILD &c_child,
const C_PARENT &c_parent,
static LayerExtension<C_PARENT> get_next_layer_extension(const std::unique_ptr<C_CHILD> &c_child,
const std::unique_ptr<C_PARENT> &c_parent,
const GrowLayerInstructions &grow_layer_instructions,
const std::vector<typename C_CHILD::Point> &child_last_hashes,
const std::vector<typename C_PARENT::Point> &parent_last_hashes,
@@ -391,7 +407,7 @@ static LayerExtension<C_PARENT> get_next_layer_extension(const C_CHILD &c_child,
if (grow_layer_instructions.need_old_last_child)
{
CHECK_AND_ASSERT_THROW_MES(child_last_hash != nullptr, "missing last child");
last_child_scalar = c_child.point_to_cycle_scalar(*child_last_hash);
last_child_scalar = c_child->point_to_cycle_scalar(*child_last_hash);
}
// Do the hashing
@@ -534,8 +550,8 @@ static TrimLayerInstructions get_trim_layer_instructions(
//----------------------------------------------------------------------------------------------------------------------
template<typename C_CHILD, typename C_PARENT>
static typename fcmp::curve_trees::LayerReduction<C_PARENT> get_next_layer_reduction(
const C_CHILD &c_child,
const C_PARENT &c_parent,
const std::unique_ptr<C_CHILD> &c_child,
const std::unique_ptr<C_PARENT> &c_parent,
const TrimLayerInstructions &trim_layer_instructions,
const std::vector<typename C_PARENT::Point> &parent_last_hashes,
const std::vector<std::vector<typename C_PARENT::Scalar>> &children_to_trim,
@@ -554,7 +570,7 @@ static typename fcmp::curve_trees::LayerReduction<C_PARENT> get_next_layer_reduc
const typename C_PARENT::Point &existing_hash = trim_layer_instructions.need_existing_last_hash
? parent_last_hashes[parent_layer_idx]
: c_parent.m_hash_init_point;
: c_parent->hash_init_point();
std::vector<typename C_PARENT::Scalar> child_scalars;
if (trim_layer_instructions.need_last_chunk_children_to_trim
@@ -564,7 +580,7 @@ static typename fcmp::curve_trees::LayerReduction<C_PARENT> get_next_layer_reduc
child_scalars = children_to_trim[parent_layer_idx];
}
typename C_PARENT::Scalar new_last_child_scalar = c_parent.zero_scalar();
typename C_PARENT::Scalar new_last_child_scalar = c_parent->zero_scalar();
if (trim_layer_instructions.need_new_last_child)
{
CHECK_AND_ASSERT_THROW_MES(child_layer_idx > 0, "child index cannot be 0 here");
@@ -572,7 +588,7 @@ static typename fcmp::curve_trees::LayerReduction<C_PARENT> get_next_layer_reduc
CHECK_AND_ASSERT_THROW_MES(child_reductions.back().update_existing_last_hash, "expected new last child");
const typename C_CHILD::Point &new_last_child = child_reductions.back().new_last_hash;
new_last_child_scalar = c_child.point_to_cycle_scalar(new_last_child);
new_last_child_scalar = c_child->point_to_cycle_scalar(new_last_child);
if (trim_layer_instructions.need_last_chunk_remaining_children)
{
@@ -585,40 +601,40 @@ static typename fcmp::curve_trees::LayerReduction<C_PARENT> get_next_layer_reduc
CHECK_AND_ASSERT_THROW_MES(child_last_hashes.size() > last_child_layer_idx, "missing last child hash");
const typename C_CHILD::Point &old_last_child = child_last_hashes[last_child_layer_idx];
auto old_last_child_scalar = c_child.point_to_cycle_scalar(old_last_child);
auto old_last_child_scalar = c_child->point_to_cycle_scalar(old_last_child);
child_scalars.emplace_back(std::move(old_last_child_scalar));
}
}
for (std::size_t i = 0; i < child_scalars.size(); ++i)
MDEBUG("Hashing child " << c_parent.to_string(child_scalars[i]));
MDEBUG("Hashing child " << c_parent->to_string(child_scalars[i]));
if (trim_layer_instructions.need_last_chunk_remaining_children)
{
MDEBUG("hash_grow: existing_hash: " << c_parent.to_string(existing_hash)
MDEBUG("hash_grow: existing_hash: " << c_parent->to_string(existing_hash)
<< " , hash_offset: " << trim_layer_instructions.hash_offset);
layer_reduction_out.new_last_hash = c_parent.hash_grow(
layer_reduction_out.new_last_hash = c_parent->hash_grow(
existing_hash,
trim_layer_instructions.hash_offset,
c_parent.zero_scalar(),
c_parent->zero_scalar(),
typename C_PARENT::Chunk{child_scalars.data(), child_scalars.size()});
}
else
{
MDEBUG("hash_trim: existing_hash: " << c_parent.to_string(existing_hash)
MDEBUG("hash_trim: existing_hash: " << c_parent->to_string(existing_hash)
<< " , hash_offset: " << trim_layer_instructions.hash_offset
<< " , child_to_grow_back: " << c_parent.to_string(new_last_child_scalar));
<< " , child_to_grow_back: " << c_parent->to_string(new_last_child_scalar));
layer_reduction_out.new_last_hash = c_parent.hash_trim(
layer_reduction_out.new_last_hash = c_parent->hash_trim(
existing_hash,
trim_layer_instructions.hash_offset,
typename C_PARENT::Chunk{child_scalars.data(), child_scalars.size()},
new_last_child_scalar);
}
MDEBUG("Result hash: " << c_parent.to_string(layer_reduction_out.new_last_hash));
MDEBUG("Result hash: " << c_parent->to_string(layer_reduction_out.new_last_hash));
return layer_reduction_out;
}
@@ -655,48 +671,6 @@ LeafTupleContext CurveTrees<Helios, Selene>::output_to_leaf_context(
};
};
//----------------------------------------------------------------------------------------------------------------------
template<>
CurveTrees<Helios, Selene>::LeafTuple CurveTrees<Helios, Selene>::leaf_tuple(
const PreprocessedLeafTuple &preprocessed_leaf_tuple) const
{
const rct::key &O = preprocessed_leaf_tuple.O;
const rct::key &C = preprocessed_leaf_tuple.C;
crypto::ec_point I;
crypto::derive_key_image_generator(rct::rct2pk(O), I);
rct::key O_x, I_x, C_x;
if (!rct::point_to_wei_x(O, O_x))
throw std::runtime_error("failed to get wei x scalar from O");
if (!rct::point_to_wei_x(rct::pt2rct(I), I_x))
throw std::runtime_error("failed to get wei x scalar from I");
if (!rct::point_to_wei_x(C, C_x))
throw std::runtime_error("failed to get wei x scalar from C");
return LeafTuple{
.O_x = tower_cycle::selene_scalar_from_bytes(O_x),
.I_x = tower_cycle::selene_scalar_from_bytes(I_x),
.C_x = tower_cycle::selene_scalar_from_bytes(C_x)
};
};
//----------------------------------------------------------------------------------------------------------------------
template<typename C1, typename C2>
std::vector<typename C2::Scalar> CurveTrees<C1, C2>::flatten_leaves(std::vector<LeafTuple> &&leaves) const
{
std::vector<typename C2::Scalar> flattened_leaves;
flattened_leaves.reserve(leaves.size() * LEAF_TUPLE_SIZE);
for (auto &l : leaves)
{
flattened_leaves.emplace_back(std::move(l.O_x));
flattened_leaves.emplace_back(std::move(l.I_x));
flattened_leaves.emplace_back(std::move(l.C_x));
}
return flattened_leaves;
};
//----------------------------------------------------------------------------------------------------------------------
template <>
void CurveTrees<Helios, Selene>::tx_outs_to_leaf_tuple_contexts(const cryptonote::transaction &tx,
const std::vector<uint64_t> &output_ids,
@@ -746,6 +720,48 @@ void CurveTrees<Helios, Selene>::tx_outs_to_leaf_tuple_contexts(const cryptonote
}
}
//----------------------------------------------------------------------------------------------------------------------
template<>
CurveTrees<Helios, Selene>::LeafTuple CurveTrees<Helios, Selene>::leaf_tuple(
const PreprocessedLeafTuple &preprocessed_leaf_tuple) const
{
const rct::key &O = preprocessed_leaf_tuple.O;
const rct::key &C = preprocessed_leaf_tuple.C;
crypto::ec_point I;
crypto::derive_key_image_generator(rct::rct2pk(O), I);
rct::key O_x, I_x, C_x;
if (!rct::point_to_wei_x(O, O_x))
throw std::runtime_error("failed to get wei x scalar from O");
if (!rct::point_to_wei_x(rct::pt2rct(I), I_x))
throw std::runtime_error("failed to get wei x scalar from I");
if (!rct::point_to_wei_x(C, C_x))
throw std::runtime_error("failed to get wei x scalar from C");
return LeafTuple{
.O_x = tower_cycle::selene_scalar_from_bytes(O_x),
.I_x = tower_cycle::selene_scalar_from_bytes(I_x),
.C_x = tower_cycle::selene_scalar_from_bytes(C_x)
};
};
//----------------------------------------------------------------------------------------------------------------------
template<typename C1, typename C2>
std::vector<typename C2::Scalar> CurveTrees<C1, C2>::flatten_leaves(std::vector<LeafTuple> &&leaves) const
{
std::vector<typename C2::Scalar> flattened_leaves;
flattened_leaves.reserve(leaves.size() * LEAF_TUPLE_SIZE);
for (auto &l : leaves)
{
flattened_leaves.emplace_back(std::move(l.O_x));
flattened_leaves.emplace_back(std::move(l.I_x));
flattened_leaves.emplace_back(std::move(l.C_x));
}
return flattened_leaves;
};
//----------------------------------------------------------------------------------------------------------------------
template<typename C1, typename C2>
typename CurveTrees<C1, C2>::TreeExtension CurveTrees<C1, C2>::get_tree_extension(
const uint64_t old_n_leaf_tuples,
@@ -28,12 +28,13 @@
#pragma once
#include "cryptonote_basic/cryptonote_basic.h"
#include "crypto/crypto.h"
#include "cryptonote_basic/cryptonote_basic.h"
#include "misc_log_ex.h"
#include "tower_cycle.h"
#include <map>
#include <memory>
#include <vector>
@@ -45,7 +46,7 @@ namespace curve_trees
//----------------------------------------------------------------------------------------------------------------------
// Hash a chunk of new children
template<typename C>
typename C::Point get_new_parent(const C &curve, const typename C::Chunk &new_children);
typename C::Point get_new_parent(const std::unique_ptr<C> &curve, const typename C::Chunk &new_children);
//----------------------------------------------------------------------------------------------------------------------
// A layer of contiguous hashes starting from a specific start_idx in the tree
template<typename C>
@@ -150,14 +151,14 @@ struct LeafTupleContext final
//----------------------------------------------------------------------------------------------------------------------
// This class is useful to help update the curve trees merkle tree without needing to keep the entire tree in memory
// - It requires instantiation with the C1 and C2 curve classes and widths, hardening the tree structure
// - It ties the C2 curve in the tree to the leaf layer
// - It ties the C2 curve in the tree to the leaf layer (the leaf layer is composed of C2 scalars)
template<typename C1, typename C2>
class CurveTrees
{
public:
CurveTrees(const C1 &c1, const C2 &c2, const uint64_t c1_width, const uint64_t c2_width):
m_c1{c1},
m_c2{c2},
CurveTrees(std::unique_ptr<C1> &&c1, std::unique_ptr<C2> &&c2, const uint64_t c1_width, const uint64_t c2_width):
m_c1{std::move(c1)},
m_c2{std::move(c2)},
m_c1_width{c1_width},
m_c2_width{c2_width},
m_leaf_layer_chunk_width{LEAF_TUPLE_SIZE * c2_width}
@@ -230,16 +231,11 @@ public:
//member functions
public:
// Convert cryptonote output pub key and commitment to a leaf tuple for the curve trees tree
// Convert cryptonote output pub key and commitment to a pre-processed leaf tuple ready for insertion to the tree
LeafTupleContext output_to_leaf_context(const std::uint64_t output_id,
const crypto::public_key &output_pubkey,
const rct::key &C) const;
LeafTuple leaf_tuple(const PreprocessedLeafTuple &preprocessed_leaf_tuple) const;
// Flatten leaves [(O.x, I.x, C.x),(O.x, I.x, C.x),...] -> [O.x, I.x, C.x, O.x, I.x, C.x...]
std::vector<typename C2::Scalar> flatten_leaves(std::vector<LeafTuple> &&leaves) const;
// Convert cryptonote tx outs to contexts ready to be converted to leaf tuples, grouped by unlock height
void tx_outs_to_leaf_tuple_contexts(const cryptonote::transaction &tx,
const std::vector<uint64_t> &output_ids,
@@ -247,7 +243,13 @@ public:
const bool miner_tx,
std::multimap<uint64_t, LeafTupleContext> &leaf_tuples_by_unlock_block_inout) const;
// Take in the existing number of leaf tuples and the existing last hashes of each layer in the tree, as well as new
// Derive a leaf tuple from a pre-processed leaf tuple {O,C} -> {O.x,I.x,C.x}
LeafTuple leaf_tuple(const PreprocessedLeafTuple &preprocessed_leaf_tuple) const;
// Flatten leaves [(O.x, I.x, C.x),(O.x, I.x, C.x),...] -> [O.x, I.x, C.x, O.x, I.x, C.x...]
std::vector<typename C2::Scalar> flatten_leaves(std::vector<LeafTuple> &&leaves) const;
// Take in the existing number of leaf tuples and the existing last hash in each layer in the tree, as well as new
// leaves to add to the tree, and return a tree extension struct that can be used to extend a tree
TreeExtension get_tree_extension(const uint64_t old_n_leaf_tuples,
const LastHashes &existing_last_hashes,
@@ -259,7 +261,7 @@ public:
const uint64_t trim_n_leaf_tuples) const;
// Take in the instructions useful for trimming all existing layers in the tree, all children to be trimmed from
// each last chunk, and the existing last hashes in what will become the new last parent of each layer, and return
// each last chunk, and the existing last hash in what will become the new last parent of each layer, and return
// a tree reduction struct that can be used to trim a tree
TreeReduction get_tree_reduction(
const std::vector<TrimLayerInstructions> &trim_instructions,
@@ -281,8 +283,8 @@ private:
//public member variables
public:
// The curve interfaces
const C1 &m_c1;
const C2 &m_c2;
const std::unique_ptr<C1> m_c1;
const std::unique_ptr<C2> m_c2;
// The leaf layer has a distinct chunk width than the other layers
const std::size_t m_leaf_layer_chunk_width;
@@ -300,9 +302,10 @@ using CurveTreesV1 = CurveTrees<Helios, Selene>;
// /b2742e86f3d18155fd34dd1ed69cb8f79b900fce/crypto/fcmps/src/tests.rs#L81-L82
const std::size_t HELIOS_CHUNK_WIDTH = 38;
const std::size_t SELENE_CHUNK_WIDTH = 18;
const Helios HELIOS;
const Selene SELENE;
static CurveTreesV1 CURVE_TREES_V1(HELIOS, SELENE, HELIOS_CHUNK_WIDTH, SELENE_CHUNK_WIDTH);
std::shared_ptr<CurveTreesV1> curve_trees_v1(
const std::size_t helios_chunk_width = HELIOS_CHUNK_WIDTH,
const std::size_t selene_chunk_width = SELENE_CHUNK_WIDTH);
//----------------------------------------------------------------------------------------------------------------------
//----------------------------------------------------------------------------------------------------------------------
} //namespace curve_trees
@@ -93,26 +93,26 @@ else ()
set(TARGET_DIR "release")
endif ()
set(FCMP_RUST_HEADER_DIR "${MONERO_GENERATED_HEADERS_DIR}/fcmp_rust")
set(FCMP_RUST_HEADER "${FCMP_RUST_HEADER_DIR}/fcmp++.h")
set(FCMP_RUST_LIB "${CMAKE_CURRENT_BINARY_DIR}/libfcmp_rust.a")
set(FCMP_PP_RUST_HEADER_DIR "${MONERO_GENERATED_HEADERS_DIR}/fcmp_rust")
set(FCMP_PP_RUST_HEADER "${FCMP_PP_RUST_HEADER_DIR}/fcmp++.h")
set(FCMP_PP_RUST_LIB "${CMAKE_CURRENT_BINARY_DIR}/libfcmp_rust.a")
# Removing OUTPUT files makes sure custom command runs every time
file(REMOVE_RECURSE "${FCMP_RUST_HEADER_DIR}")
file(MAKE_DIRECTORY "${FCMP_RUST_HEADER_DIR}")
file(REMOVE_RECURSE "${FCMP_PP_RUST_HEADER_DIR}")
file(MAKE_DIRECTORY "${FCMP_PP_RUST_HEADER_DIR}")
file(REMOVE "${FCMP_RUST_LIB}")
file(REMOVE "${FCMP_PP_RUST_LIB}")
add_custom_command(
COMMENT "Building rust fcmp lib"
OUTPUT ${FCMP_RUST_HEADER}
OUTPUT ${FCMP_RUST_LIB}
COMMENT "Building fcmp++ rust lib"
OUTPUT ${FCMP_PP_RUST_HEADER}
OUTPUT ${FCMP_PP_RUST_LIB}
COMMAND CARGO_TARGET_DIR=${CMAKE_CURRENT_BINARY_DIR} ${CARGO_CMD}
COMMAND cp ${CMAKE_CURRENT_SOURCE_DIR}/fcmp++.h ${FCMP_RUST_HEADER}
COMMAND cp ${CMAKE_CURRENT_BINARY_DIR}/${RUST_TARGET}/${TARGET_DIR}/libfcmp_rust.a ${FCMP_RUST_LIB}
COMMAND echo "Finished copying fcmp rust targets"
COMMAND cp ${CMAKE_CURRENT_SOURCE_DIR}/fcmp++.h ${FCMP_PP_RUST_HEADER}
COMMAND cp ${CMAKE_CURRENT_BINARY_DIR}/${RUST_TARGET}/${TARGET_DIR}/libfcmp_rust.a ${FCMP_PP_RUST_LIB}
COMMAND echo "Finished copying fcmp++ rust targets"
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
VERBATIM
)
add_custom_target(fcmp_rust DEPENDS ${FCMP_RUST_LIB})
add_custom_target(fcmp_rust DEPENDS ${FCMP_PP_RUST_LIB})
@@ -1,4 +1,4 @@
namespace fcmp_rust {
namespace fcmp_pp_rust {
#include <cstdarg>
#include <cstdint>
#include <cstdlib>
@@ -36,7 +36,7 @@ namespace fcmp
// Byte buffer containing the fcmp++ proof
using FcmpPpProof = std::vector<uint8_t>;
static inline std::size_t get_fcmp_pp_len_from_n_inputs(const std::size_t n_inputs)
static inline std::size_t fcmp_pp_len(const std::size_t n_inputs)
{
// TODO: implement
return n_inputs * 4;
@@ -35,14 +35,24 @@ namespace tower_cycle
{
//----------------------------------------------------------------------------------------------------------------------
//----------------------------------------------------------------------------------------------------------------------
Helios::Point Helios::hash_init_point() const
{
return fcmp_pp_rust::helios_hash_init_point();
}
//----------------------------------------------------------------------------------------------------------------------
Selene::Point Selene::hash_init_point() const
{
return fcmp_pp_rust::selene_hash_init_point();
}
//----------------------------------------------------------------------------------------------------------------------
Helios::CycleScalar Helios::point_to_cycle_scalar(const Helios::Point &point) const
{
return fcmp_rust::helios_point_to_selene_scalar(point);
return fcmp_pp_rust::helios_point_to_selene_scalar(point);
}
//----------------------------------------------------------------------------------------------------------------------
Selene::CycleScalar Selene::point_to_cycle_scalar(const Selene::Point &point) const
{
return fcmp_rust::selene_point_to_helios_scalar(point);
return fcmp_pp_rust::selene_point_to_helios_scalar(point);
}
//----------------------------------------------------------------------------------------------------------------------
Helios::Point Helios::hash_grow(
@@ -51,7 +61,7 @@ Helios::Point Helios::hash_grow(
const Helios::Scalar &existing_child_at_offset,
const Helios::Chunk &new_children) const
{
auto result = fcmp_rust::hash_grow_helios(
auto result = fcmp_pp_rust::hash_grow_helios(
existing_hash,
offset,
existing_child_at_offset,
@@ -75,7 +85,7 @@ Helios::Point Helios::hash_trim(
const Helios::Chunk &children,
const Helios::Scalar &child_to_grow_back) const
{
auto result = fcmp_rust::hash_trim_helios(
auto result = fcmp_pp_rust::hash_trim_helios(
existing_hash,
offset,
children,
@@ -99,7 +109,7 @@ Selene::Point Selene::hash_grow(
const Selene::Scalar &existing_child_at_offset,
const Selene::Chunk &new_children) const
{
auto result = fcmp_rust::hash_grow_selene(
auto result = fcmp_pp_rust::hash_grow_selene(
existing_hash,
offset,
existing_child_at_offset,
@@ -123,7 +133,7 @@ Selene::Point Selene::hash_trim(
const Selene::Chunk &children,
const Selene::Scalar &child_to_grow_back) const
{
auto result = fcmp_rust::hash_trim_selene(
auto result = fcmp_pp_rust::hash_trim_selene(
existing_hash,
offset,
children,
@@ -143,17 +153,17 @@ Selene::Point Selene::hash_trim(
//----------------------------------------------------------------------------------------------------------------------
Helios::Scalar Helios::zero_scalar() const
{
return fcmp_rust::helios_zero_scalar();
return fcmp_pp_rust::helios_zero_scalar();
}
//----------------------------------------------------------------------------------------------------------------------
Selene::Scalar Selene::zero_scalar() const
{
return fcmp_rust::selene_zero_scalar();
return fcmp_pp_rust::selene_zero_scalar();
}
//----------------------------------------------------------------------------------------------------------------------
std::array<uint8_t, 32UL> Helios::to_bytes(const Helios::Scalar &scalar) const
{
auto bytes = fcmp_rust::helios_scalar_to_bytes(scalar);
auto bytes = fcmp_pp_rust::helios_scalar_to_bytes(scalar);
std::array<uint8_t, 32UL> res;
memcpy(&res, bytes, 32);
free(bytes);
@@ -162,7 +172,7 @@ std::array<uint8_t, 32UL> Helios::to_bytes(const Helios::Scalar &scalar) const
//----------------------------------------------------------------------------------------------------------------------
std::array<uint8_t, 32UL> Selene::to_bytes(const Selene::Scalar &scalar) const
{
auto bytes = fcmp_rust::selene_scalar_to_bytes(scalar);
auto bytes = fcmp_pp_rust::selene_scalar_to_bytes(scalar);
std::array<uint8_t, 32UL> res;
memcpy(&res, bytes, 32);
free(bytes);
@@ -171,7 +181,7 @@ std::array<uint8_t, 32UL> Selene::to_bytes(const Selene::Scalar &scalar) const
//----------------------------------------------------------------------------------------------------------------------
std::array<uint8_t, 32UL> Helios::to_bytes(const Helios::Point &point) const
{
auto bytes = fcmp_rust::helios_point_to_bytes(point);
auto bytes = fcmp_pp_rust::helios_point_to_bytes(point);
std::array<uint8_t, 32UL> res;
memcpy(&res, bytes, 32);
free(bytes);
@@ -180,7 +190,7 @@ std::array<uint8_t, 32UL> Helios::to_bytes(const Helios::Point &point) const
//----------------------------------------------------------------------------------------------------------------------
std::array<uint8_t, 32UL> Selene::to_bytes(const Selene::Point &point) const
{
auto bytes = fcmp_rust::selene_point_to_bytes(point);
auto bytes = fcmp_pp_rust::selene_point_to_bytes(point);
std::array<uint8_t, 32UL> res;
memcpy(&res, bytes, 32);
free(bytes);
@@ -189,12 +199,12 @@ std::array<uint8_t, 32UL> Selene::to_bytes(const Selene::Point &point) const
//----------------------------------------------------------------------------------------------------------------------
Helios::Point Helios::from_bytes(const std::array<uint8_t, 32UL> &bytes) const
{
return fcmp_rust::helios_point_from_bytes(bytes.data());
return fcmp_pp_rust::helios_point_from_bytes(bytes.data());
}
//----------------------------------------------------------------------------------------------------------------------
Selene::Point Selene::from_bytes(const std::array<uint8_t, 32UL> &bytes) const
{
return fcmp_rust::selene_point_from_bytes(bytes.data());
return fcmp_pp_rust::selene_point_from_bytes(bytes.data());
}
//----------------------------------------------------------------------------------------------------------------------
std::string Helios::to_string(const typename Helios::Scalar &scalar) const
@@ -222,31 +232,31 @@ std::string Selene::to_string(const typename Selene::Point &point) const
//----------------------------------------------------------------------------------------------------------------------
SeleneScalar selene_scalar_from_bytes(const rct::key &scalar)
{
return fcmp_rust::selene_scalar_from_bytes(scalar.bytes);
return fcmp_pp_rust::selene_scalar_from_bytes(scalar.bytes);
}
//----------------------------------------------------------------------------------------------------------------------
template<typename C>
void extend_zeroes(const C &curve,
void extend_zeroes(const std::unique_ptr<C> &curve,
const std::size_t num_zeroes,
std::vector<typename C::Scalar> &zeroes_inout)
{
zeroes_inout.reserve(zeroes_inout.size() + num_zeroes);
for (std::size_t i = 0; i < num_zeroes; ++i)
zeroes_inout.emplace_back(curve.zero_scalar());
zeroes_inout.emplace_back(curve->zero_scalar());
}
// Explicit instantiations
template void extend_zeroes<Helios>(const Helios &curve,
template void extend_zeroes<Helios>(const std::unique_ptr<Helios> &curve,
const std::size_t num_zeroes,
std::vector<Helios::Scalar> &zeroes_inout);
template void extend_zeroes<Selene>(const Selene &curve,
template void extend_zeroes<Selene>(const std::unique_ptr<Selene> &curve,
const std::size_t num_zeroes,
std::vector<Selene::Scalar> &zeroes_inout);
//----------------------------------------------------------------------------------------------------------------------
template<typename C_POINTS, typename C_SCALARS>
void extend_scalars_from_cycle_points(const C_POINTS &curve,
void extend_scalars_from_cycle_points(const std::unique_ptr<C_POINTS> &curve,
const std::vector<typename C_POINTS::Point> &points,
std::vector<typename C_SCALARS::Scalar> &scalars_out)
{
@@ -254,17 +264,17 @@ void extend_scalars_from_cycle_points(const C_POINTS &curve,
for (const auto &point : points)
{
typename C_SCALARS::Scalar scalar = curve.point_to_cycle_scalar(point);
typename C_SCALARS::Scalar scalar = curve->point_to_cycle_scalar(point);
scalars_out.push_back(std::move(scalar));
}
}
// Explicit instantiations
template void extend_scalars_from_cycle_points<Helios, Selene>(const Helios &curve,
template void extend_scalars_from_cycle_points<Helios, Selene>(const std::unique_ptr<Helios> &curve,
const std::vector<Helios::Point> &points,
std::vector<Selene::Scalar> &scalars_out);
template void extend_scalars_from_cycle_points<Selene, Helios>(const Selene &curve,
template void extend_scalars_from_cycle_points<Selene, Helios>(const std::unique_ptr<Selene> &curve,
const std::vector<Selene::Point> &points,
std::vector<Helios::Scalar> &scalars_out);
//----------------------------------------------------------------------------------------------------------------------
|
@ -43,22 +43,22 @@ namespace tower_cycle
|
||||
// Rust types
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
// Need to forward declare Scalar types for point_to_cycle_scalar below
|
||||
using SeleneScalar = fcmp_rust::SeleneScalar;
|
||||
using HeliosScalar = fcmp_rust::HeliosScalar;
|
||||
using SeleneScalar = fcmp_pp_rust::SeleneScalar;
|
||||
using HeliosScalar = fcmp_pp_rust::HeliosScalar;
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
struct HeliosT final
|
||||
{
|
||||
using Scalar = HeliosScalar;
|
||||
using Point = fcmp_rust::HeliosPoint;
|
||||
using Chunk = fcmp_rust::HeliosScalarSlice;
|
||||
using Point = fcmp_pp_rust::HeliosPoint;
|
||||
using Chunk = fcmp_pp_rust::HeliosScalarSlice;
|
||||
using CycleScalar = SeleneScalar;
|
||||
};
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
struct SeleneT final
|
||||
{
|
||||
using Scalar = SeleneScalar;
|
||||
using Point = fcmp_rust::SelenePoint;
|
||||
using Chunk = fcmp_rust::SeleneScalarSlice;
|
||||
using Point = fcmp_pp_rust::SelenePoint;
|
||||
using Chunk = fcmp_pp_rust::SeleneScalarSlice;
|
||||
using CycleScalar = HeliosScalar;
|
||||
};
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
@@ -67,14 +67,10 @@ struct SeleneT final
template<typename C>
class Curve
{
//constructor
public:
Curve(const typename C::Point &hash_init_point):
m_hash_init_point{hash_init_point}
{};
//member functions
public:
virtual typename C::Point hash_init_point() const = 0;
// Read the x-coordinate from this curve's point to get this curve's cycle scalar
virtual typename C::CycleScalar point_to_cycle_scalar(const typename C::Point &point) const = 0;
@@ -99,11 +95,6 @@ public:
virtual std::string to_string(const typename C::Scalar &scalar) const = 0;
virtual std::string to_string(const typename C::Point &point) const = 0;
//member variables
public:
// kayabaNerve: this doesn't have a reference as doing so delays initialization and borks it
const typename C::Point m_hash_init_point;
};
//----------------------------------------------------------------------------------------------------------------------
class Helios final : public Curve<HeliosT>
@ -115,14 +106,10 @@ public:
|
||||
using Chunk = HeliosT::Chunk;
|
||||
using CycleScalar = HeliosT::CycleScalar;
|
||||
|
||||
//constructor
|
||||
public:
|
||||
Helios()
|
||||
: Curve<HeliosT>(fcmp_rust::helios_hash_init_point())
|
||||
{};
|
||||
|
||||
//member functions
|
||||
public:
|
||||
Point hash_init_point() const override;
|
||||
|
||||
CycleScalar point_to_cycle_scalar(const Point &point) const override;
|
||||
|
||||
Point hash_grow(
|
||||
@ -157,14 +144,10 @@ public:
|
||||
using Chunk = SeleneT::Chunk;
|
||||
using CycleScalar = SeleneT::CycleScalar;
|
||||
|
||||
//constructor
|
||||
public:
|
||||
Selene()
|
||||
: Curve<SeleneT>(fcmp_rust::selene_hash_init_point())
|
||||
{};
|
||||
|
||||
//member functions
|
||||
public:
|
||||
Point hash_init_point() const override;
|
||||
|
||||
CycleScalar point_to_cycle_scalar(const Point &point) const override;
|
||||
|
||||
Point hash_grow(
|
||||
@ -194,12 +177,12 @@ public:
|
||||
SeleneScalar selene_scalar_from_bytes(const rct::key &scalar);
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
template<typename C>
|
||||
void extend_zeroes(const C &curve,
|
||||
void extend_zeroes(const std::unique_ptr<C> &curve,
|
||||
const std::size_t num_zeroes,
|
||||
std::vector<typename C::Scalar> &zeroes_inout);
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
template<typename C_POINTS, typename C_SCALARS>
|
||||
void extend_scalars_from_cycle_points(const C_POINTS &curve,
|
||||
void extend_scalars_from_cycle_points(const std::unique_ptr<C_POINTS> &curve,
|
||||
const std::vector<typename C_POINTS::Point> &points,
|
||||
std::vector<typename C_SCALARS::Scalar> &scalars_out);
|
||||
//----------------------------------------------------------------------------------------------------------------------
|
||||
|
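Editor's note, not part of the commit: with the Curve refactor above, the public m_hash_init_point member is removed and the initialization point is reached through the virtual hash_init_point() accessor, while concrete curves are generally held behind std::unique_ptr. A minimal sketch of the resulting call pattern; the header path and the helper name are assumptions for illustration, not code from this commit:

// Sketch only; assumes the tower_cycle API shown in this diff and a
// "fcmp/tower_cycle.h" header path.
#include <memory>
#include "fcmp/tower_cycle.h"

// Hypothetical helper: hash one chunk of Selene scalars starting from the
// curve's initialization point, the same pattern the unit tests below use.
fcmp::tower_cycle::Selene::Point hash_first_chunk(
    const std::unique_ptr<fcmp::tower_cycle::Selene> &selene,
    const fcmp::tower_cycle::Selene::Chunk &children)
{
    return selene->hash_grow(
        /*existing_hash*/            selene->hash_init_point(),  // accessor replaces m_hash_init_point
        /*offset*/                   0,
        /*existing_child_at_offset*/ selene->zero_scalar(),
        /*children*/                 children);
}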
@ -189,6 +189,7 @@ namespace rct {
void ecdhEncode(ecdhTuple & unmasked, const key & sharedSec, bool v2);
void ecdhDecode(ecdhTuple & masked, const key & sharedSec, bool v2);

// TODO: tests for these functions specifically
bool clear_torsion(const key &k, key &k_out);
bool point_to_wei_x(const key &pub, key &wei_x);
}
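Editor's note, not part of the commit: the TODO above asks for tests covering the two new helpers. A sketch of what such a test could look like, using only the declarations added in this hunk; rct::pkGen() and the gtest macros are assumed to be available as in the existing unit tests:

// Sketch only -- illustrative, not the commit's code.
#include "gtest/gtest.h"
#include "ringct/rctOps.h"  // rct::pkGen (assumed header location)

TEST(ringct, clear_torsion_and_point_to_wei_x_sketch)
{
    // A freshly generated public key is a valid point, so both helpers are
    // expected to report success on it.
    const rct::key P = rct::pkGen();

    rct::key P_cleared;
    ASSERT_TRUE(rct::clear_torsion(P, P_cleared));

    rct::key wei_x;
    ASSERT_TRUE(rct::point_to_wei_x(P_cleared, wei_x));
}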
@ -326,7 +326,7 @@ namespace rct {
std::vector<ecdhTuple> ecdhInfo;
ctkeyV outPk;
xmr_amount txnFee; // contains b
crypto::hash referenceBlock; // block containing the merkle tree root used for fcmp's
crypto::hash referenceBlock; // block containing the merkle tree root used for fcmp++

rctSigBase() :
type(RCTTypeNull), message{}, mixRing{}, pseudoOuts{}, ecdhInfo{}, outPk{}, txnFee(0), referenceBlock{}
@ -503,7 +503,7 @@ namespace rct {
{
ar.tag("fcmp_pp");
ar.begin_object();
const std::size_t proof_len = fcmp::get_fcmp_pp_len_from_n_inputs(inputs);
const std::size_t proof_len = fcmp::fcmp_pp_len(inputs);
if (!typename Archive<W>::is_saving())
fcmp_pp.resize(proof_len);
if (fcmp_pp.size() != proof_len)
@ -38,7 +38,6 @@ target_link_libraries(block_weight
PRIVATE
cryptonote_core
blockchain_db
fcmp
${EXTRA_LIBRARIES})

add_test(
@ -120,7 +120,7 @@ target_link_libraries(unit_tests
daemon_messages
daemon_rpc_server
blockchain_db
fcmp
fcmp_pp
lmdb_lib
rpc
net
@ -41,7 +41,7 @@
// CurveTreesGlobalTree helpers
//----------------------------------------------------------------------------------------------------------------------
template<typename C>
static bool validate_layer(const C &curve,
static bool validate_layer(const std::unique_ptr<C> &curve,
const CurveTreesGlobalTree::Layer<C> &parents,
const std::vector<typename C::Scalar> &child_scalars,
const std::size_t max_chunk_size)
@ -60,14 +60,14 @@ static bool validate_layer(const C &curve,
const typename C::Chunk chunk{chunk_start, chunk_size};

for (std::size_t i = 0; i < chunk_size; ++i)
MDEBUG("Hashing " << curve.to_string(chunk_start[i]));
MDEBUG("Hashing " << curve->to_string(chunk_start[i]));

const typename C::Point chunk_hash = fcmp::curve_trees::get_new_parent(curve, chunk);

MDEBUG("chunk_start_idx: " << chunk_start_idx << " , chunk_size: " << chunk_size << " , chunk_hash: " << curve.to_string(chunk_hash));
MDEBUG("chunk_start_idx: " << chunk_start_idx << " , chunk_size: " << chunk_size << " , chunk_hash: " << curve->to_string(chunk_hash));

const auto actual_bytes = curve.to_bytes(parent);
const auto expected_bytes = curve.to_bytes(chunk_hash);
const auto actual_bytes = curve->to_bytes(parent);
const auto expected_bytes = curve->to_bytes(chunk_hash);
CHECK_AND_ASSERT_MES(actual_bytes == expected_bytes, false, "unexpected hash");

chunk_start_idx += chunk_size;
@ -79,7 +79,7 @@ static bool validate_layer(const C &curve,
}
//----------------------------------------------------------------------------------------------------------------------
template<typename C_CHILD, typename C_PARENT>
static std::vector<typename C_PARENT::Scalar> get_last_chunk_children_to_trim(const C_CHILD &c_child,
static std::vector<typename C_PARENT::Scalar> get_last_chunk_children_to_trim(const std::unique_ptr<C_CHILD> &c_child,
const CurveTreesGlobalTree::Layer<C_CHILD> &child_layer,
const bool need_last_chunk_children_to_trim,
const bool need_last_chunk_remaining_children,
@ -96,7 +96,7 @@ static std::vector<typename C_PARENT::Scalar> get_last_chunk_children_to_trim(co
CHECK_AND_ASSERT_THROW_MES(child_layer.size() > idx, "idx too high");
const auto &child_point = child_layer[idx];

auto child_scalar = c_child.point_to_cycle_scalar(child_point);
auto child_scalar = c_child->point_to_cycle_scalar(child_point);
children_to_trim_out.push_back(std::move(child_scalar));

++idx;
@ -608,7 +608,7 @@ void CurveTreesGlobalTree::log_last_hashes(const CurveTreesV1::LastHashes &last_
CHECK_AND_ASSERT_THROW_MES(c2_idx < c2_last_hashes.size(), "unexpected c2 layer");

const auto &last_hash = c2_last_hashes[c2_idx];
MDEBUG("c2_idx: " << c2_idx << " , last_hash: " << m_curve_trees.m_c2.to_string(last_hash));
MDEBUG("c2_idx: " << c2_idx << " , last_hash: " << m_curve_trees.m_c2->to_string(last_hash));

++c2_idx;
}
@ -617,7 +617,7 @@ void CurveTreesGlobalTree::log_last_hashes(const CurveTreesV1::LastHashes &last_
CHECK_AND_ASSERT_THROW_MES(c1_idx < c1_last_hashes.size(), "unexpected c1 layer");

const auto &last_hash = c1_last_hashes[c1_idx];
MDEBUG("c1_idx: " << c1_idx << " , last_hash: " << m_curve_trees.m_c1.to_string(last_hash));
MDEBUG("c1_idx: " << c1_idx << " , last_hash: " << m_curve_trees.m_c1->to_string(last_hash));

++c1_idx;
}
@ -643,9 +643,9 @@ void CurveTreesGlobalTree::log_tree_extension(const CurveTreesV1::TreeExtension
const auto &preprocessed_leaf_tuple = tree_extension.leaves.tuples[i];
const auto leaf = m_curve_trees.leaf_tuple(preprocessed_leaf_tuple);

const auto O_x = m_curve_trees.m_c2.to_string(leaf.O_x);
const auto I_x = m_curve_trees.m_c2.to_string(leaf.I_x);
const auto C_x = m_curve_trees.m_c2.to_string(leaf.C_x);
const auto O_x = m_curve_trees.m_c2->to_string(leaf.O_x);
const auto I_x = m_curve_trees.m_c2->to_string(leaf.I_x);
const auto C_x = m_curve_trees.m_c2->to_string(leaf.C_x);

MDEBUG("Leaf tuple idx " << (tree_extension.leaves.start_leaf_tuple_idx)
<< " : { O_x: " << O_x << " , I_x: " << I_x << " , C_x: " << C_x << " }");
@ -665,7 +665,7 @@ void CurveTreesGlobalTree::log_tree_extension(const CurveTreesV1::TreeExtension

for (std::size_t j = 0; j < c2_layer.hashes.size(); ++j)
MDEBUG("Child chunk start idx: " << (j + c2_layer.start_idx) << " , hash: "
<< m_curve_trees.m_c2.to_string(c2_layer.hashes[j]));
<< m_curve_trees.m_c2->to_string(c2_layer.hashes[j]));

++c2_idx;
}
@ -678,7 +678,7 @@ void CurveTreesGlobalTree::log_tree_extension(const CurveTreesV1::TreeExtension

for (std::size_t j = 0; j < c1_layer.hashes.size(); ++j)
MDEBUG("Child chunk start idx: " << (j + c1_layer.start_idx) << " , hash: "
<< m_curve_trees.m_c1.to_string(c1_layer.hashes[j]));
<< m_curve_trees.m_c1->to_string(c1_layer.hashes[j]));

++c1_idx;
}
@ -699,9 +699,9 @@ void CurveTreesGlobalTree::log_tree()
{
const auto &leaf = m_tree.leaves[i];

const auto O_x = m_curve_trees.m_c2.to_string(leaf.O_x);
const auto I_x = m_curve_trees.m_c2.to_string(leaf.I_x);
const auto C_x = m_curve_trees.m_c2.to_string(leaf.C_x);
const auto O_x = m_curve_trees.m_c2->to_string(leaf.O_x);
const auto I_x = m_curve_trees.m_c2->to_string(leaf.I_x);
const auto C_x = m_curve_trees.m_c2->to_string(leaf.C_x);

MDEBUG("Leaf idx " << i << " : { O_x: " << O_x << " , I_x: " << I_x << " , C_x: " << C_x << " }");
}
@ -719,7 +719,7 @@ void CurveTreesGlobalTree::log_tree()
MDEBUG("Selene layer size: " << c2_layer.size() << " , tree layer: " << i);

for (std::size_t j = 0; j < c2_layer.size(); ++j)
MDEBUG("Child chunk start idx: " << j << " , hash: " << m_curve_trees.m_c2.to_string(c2_layer[j]));
MDEBUG("Child chunk start idx: " << j << " , hash: " << m_curve_trees.m_c2->to_string(c2_layer[j]));

++c2_idx;
}
@ -731,7 +731,7 @@ void CurveTreesGlobalTree::log_tree()
MDEBUG("Helios layer size: " << c1_layer.size() << " , tree layer: " << i);

for (std::size_t j = 0; j < c1_layer.size(); ++j)
MDEBUG("Child chunk start idx: " << j << " , hash: " << m_curve_trees.m_c1.to_string(c1_layer[j]));
MDEBUG("Child chunk start idx: " << j << " , hash: " << m_curve_trees.m_c1->to_string(c1_layer[j]));

++c1_idx;
}
@ -864,17 +864,17 @@ static bool trim_tree_in_memory(const std::size_t trim_n_leaf_tuples,
//----------------------------------------------------------------------------------------------------------------------
static bool grow_tree_db(const std::size_t init_leaves,
const std::size_t ext_leaves,
CurveTreesV1 &curve_trees,
std::shared_ptr<CurveTreesV1> curve_trees,
unit_test::BlockchainLMDBTest &test_db)
{
INIT_BLOCKCHAIN_LMDB_TEST_DB(&curve_trees);
INIT_BLOCKCHAIN_LMDB_TEST_DB(curve_trees);

{
cryptonote::db_wtxn_guard guard(test_db.m_db);

LOG_PRINT_L1("Adding " << init_leaves << " leaves to db, then extending by " << ext_leaves << " leaves");

auto init_leaf_tuples = generate_random_leaves(curve_trees, 0, init_leaves);
auto init_leaf_tuples = generate_random_leaves(*curve_trees, 0, init_leaves);

test_db.m_db->grow_tree(std::move(init_leaf_tuples));
CHECK_AND_ASSERT_MES(test_db.m_db->audit_tree(init_leaves), false,
@ -883,7 +883,7 @@ static bool grow_tree_db(const std::size_t init_leaves,
MDEBUG("Successfully added initial " << init_leaves << " leaves to db, extending by "
<< ext_leaves << " leaves");

auto ext_leaf_tuples = generate_random_leaves(curve_trees, init_leaves, ext_leaves);
auto ext_leaf_tuples = generate_random_leaves(*curve_trees, init_leaves, ext_leaves);

test_db.m_db->grow_tree(std::move(ext_leaf_tuples));
CHECK_AND_ASSERT_MES(test_db.m_db->audit_tree(init_leaves + ext_leaves), false,
@ -897,17 +897,17 @@ static bool grow_tree_db(const std::size_t init_leaves,
//----------------------------------------------------------------------------------------------------------------------
static bool trim_tree_db(const std::size_t init_leaves,
const std::size_t trim_leaves,
CurveTreesV1 &curve_trees,
std::shared_ptr<CurveTreesV1> curve_trees,
unit_test::BlockchainLMDBTest &test_db)
{
INIT_BLOCKCHAIN_LMDB_TEST_DB(&curve_trees);
INIT_BLOCKCHAIN_LMDB_TEST_DB(curve_trees);

{
cryptonote::db_wtxn_guard guard(test_db.m_db);

LOG_PRINT_L1("Adding " << init_leaves << " leaves to db, then trimming by " << trim_leaves << " leaves");

auto init_leaf_tuples = generate_random_leaves(curve_trees, 0, init_leaves);
auto init_leaf_tuples = generate_random_leaves(*curve_trees, 0, init_leaves);

test_db.m_db->grow_tree(std::move(init_leaf_tuples));
CHECK_AND_ASSERT_MES(test_db.m_db->audit_tree(init_leaves), false,
@ -931,9 +931,6 @@ static bool trim_tree_db(const std::size_t init_leaves,
//----------------------------------------------------------------------------------------------------------------------
TEST(curve_trees, grow_tree)
{
Helios helios;
Selene selene;

// Use lower values for chunk width than prod so that we can quickly test a many-layer deep tree
static const std::size_t helios_chunk_width = 3;
static const std::size_t selene_chunk_width = 2;
@ -944,6 +941,8 @@ TEST(curve_trees, grow_tree)
LOG_PRINT_L1("Test grow tree with helios chunk width " << helios_chunk_width
<< ", selene chunk width " << selene_chunk_width);

const auto curve_trees = fcmp::curve_trees::curve_trees_v1(helios_chunk_width, selene_chunk_width);

// Constant for how deep we want the tree
static const std::size_t TEST_N_LAYERS = 4;

@ -955,12 +954,6 @@ TEST(curve_trees, grow_tree)
leaves_needed_for_n_layers *= width;
}

auto curve_trees = CurveTreesV1(
helios,
selene,
helios_chunk_width,
selene_chunk_width);

unit_test::BlockchainLMDBTest test_db;

// Increment to test for off-by-1
@ -973,7 +966,7 @@ TEST(curve_trees, grow_tree)
// Then extend the tree with ext_leaves
for (std::size_t ext_leaves = 1; (init_leaves + ext_leaves) <= leaves_needed_for_n_layers; ++ext_leaves)
{
ASSERT_TRUE(grow_tree_in_memory(init_leaves, ext_leaves, curve_trees));
ASSERT_TRUE(grow_tree_in_memory(init_leaves, ext_leaves, *curve_trees));
ASSERT_TRUE(grow_tree_db(init_leaves, ext_leaves, curve_trees, test_db));
}
}
@ -981,10 +974,6 @@ TEST(curve_trees, grow_tree)
//----------------------------------------------------------------------------------------------------------------------
TEST(curve_trees, trim_tree)
{
// TODO: consolidate code from grow_tree test
Helios helios;
Selene selene;

// Use lower values for chunk width than prod so that we can quickly test a many-layer deep tree
static const std::size_t helios_chunk_width = 3;
static const std::size_t selene_chunk_width = 3;
@ -995,6 +984,8 @@ TEST(curve_trees, trim_tree)
LOG_PRINT_L1("Test trim tree with helios chunk width " << helios_chunk_width
<< ", selene chunk width " << selene_chunk_width);

const auto curve_trees = fcmp::curve_trees::curve_trees_v1(helios_chunk_width, selene_chunk_width);

// Constant for how deep we want the tree
static const std::size_t TEST_N_LAYERS = 4;

@ -1006,12 +997,6 @@ TEST(curve_trees, trim_tree)
leaves_needed_for_n_layers *= width;
}

auto curve_trees = CurveTreesV1(
helios,
selene,
helios_chunk_width,
selene_chunk_width);

unit_test::BlockchainLMDBTest test_db;

// Increment to test for off-by-1
@ -1021,9 +1006,9 @@ TEST(curve_trees, trim_tree)
for (std::size_t init_leaves = 2; init_leaves <= leaves_needed_for_n_layers; ++init_leaves)
{
LOG_PRINT_L1("Initializing tree with " << init_leaves << " leaves in memory");
CurveTreesGlobalTree global_tree(curve_trees);
CurveTreesGlobalTree global_tree(*curve_trees);

ASSERT_TRUE(grow_tree(curve_trees, global_tree, init_leaves));
ASSERT_TRUE(grow_tree(*curve_trees, global_tree, init_leaves));

// Then extend the tree with ext_leaves
for (std::size_t trim_leaves = 1; trim_leaves < leaves_needed_for_n_layers; ++trim_leaves)
@ -1043,6 +1028,8 @@ TEST(curve_trees, trim_tree)
// Make sure the result of hash_trim is the same as the equivalent hash_grow excluding the trimmed children
TEST(curve_trees, hash_trim)
{
const auto curve_trees = fcmp::curve_trees::curve_trees_v1();

// 1. Trim 1
{
// Start by hashing: {selene_scalar_0, selene_scalar_1}
@ -1052,29 +1039,29 @@ TEST(curve_trees, hash_trim)

// Get the initial hash of the 2 scalars
std::vector<Selene::Scalar> init_children{selene_scalar_0, selene_scalar_1};
const auto init_hash = fcmp::curve_trees::CURVE_TREES_V1.m_c2.hash_grow(
/*existing_hash*/ fcmp::curve_trees::CURVE_TREES_V1.m_c2.m_hash_init_point,
const auto init_hash = curve_trees->m_c2->hash_grow(
/*existing_hash*/ curve_trees->m_c2->hash_init_point(),
/*offset*/ 0,
/*existing_child_at_offset*/ fcmp::curve_trees::CURVE_TREES_V1.m_c2.zero_scalar(),
/*existing_child_at_offset*/ curve_trees->m_c2->zero_scalar(),
/*children*/ Selene::Chunk{init_children.data(), init_children.size()});

// Trim selene_scalar_1
const auto &trimmed_children = Selene::Chunk{init_children.data() + 1, 1};
const auto trim_res = fcmp::curve_trees::CURVE_TREES_V1.m_c2.hash_trim(
const auto trim_res = curve_trees->m_c2->hash_trim(
init_hash,
1,
trimmed_children,
fcmp::curve_trees::CURVE_TREES_V1.m_c2.zero_scalar());
const auto trim_res_bytes = fcmp::curve_trees::CURVE_TREES_V1.m_c2.to_bytes(trim_res);
curve_trees->m_c2->zero_scalar());
const auto trim_res_bytes = curve_trees->m_c2->to_bytes(trim_res);

// Now compare to calling hash_grow{selene_scalar_0}
std::vector<Selene::Scalar> remaining_children{selene_scalar_0};
const auto grow_res = fcmp::curve_trees::CURVE_TREES_V1.m_c2.hash_grow(
/*existing_hash*/ fcmp::curve_trees::CURVE_TREES_V1.m_c2.m_hash_init_point,
const auto grow_res = curve_trees->m_c2->hash_grow(
/*existing_hash*/ curve_trees->m_c2->hash_init_point(),
/*offset*/ 0,
/*existing_child_at_offset*/ fcmp::curve_trees::CURVE_TREES_V1.m_c2.zero_scalar(),
/*existing_child_at_offset*/ curve_trees->m_c2->zero_scalar(),
/*children*/ Selene::Chunk{remaining_children.data(), remaining_children.size()});
const auto grow_res_bytes = fcmp::curve_trees::CURVE_TREES_V1.m_c2.to_bytes(grow_res);
const auto grow_res_bytes = curve_trees->m_c2->to_bytes(grow_res);

ASSERT_EQ(trim_res_bytes, grow_res_bytes);
}
@ -1089,29 +1076,29 @@ TEST(curve_trees, hash_trim)

// Get the initial hash of the 3 selene scalars
std::vector<Selene::Scalar> init_children{selene_scalar_0, selene_scalar_1, selene_scalar_2};
const auto init_hash = fcmp::curve_trees::CURVE_TREES_V1.m_c2.hash_grow(
/*existing_hash*/ fcmp::curve_trees::CURVE_TREES_V1.m_c2.m_hash_init_point,
const auto init_hash = curve_trees->m_c2->hash_grow(
/*existing_hash*/ curve_trees->m_c2->hash_init_point(),
/*offset*/ 0,
/*existing_child_at_offset*/ fcmp::curve_trees::CURVE_TREES_V1.m_c2.zero_scalar(),
/*existing_child_at_offset*/ curve_trees->m_c2->zero_scalar(),
/*children*/ Selene::Chunk{init_children.data(), init_children.size()});

// Trim the initial result by 2 children
const auto &trimmed_children = Selene::Chunk{init_children.data() + 1, 2};
const auto trim_res = fcmp::curve_trees::CURVE_TREES_V1.m_c2.hash_trim(
const auto trim_res = curve_trees->m_c2->hash_trim(
init_hash,
1,
trimmed_children,
fcmp::curve_trees::CURVE_TREES_V1.m_c2.zero_scalar());
const auto trim_res_bytes = fcmp::curve_trees::CURVE_TREES_V1.m_c2.to_bytes(trim_res);
curve_trees->m_c2->zero_scalar());
const auto trim_res_bytes = curve_trees->m_c2->to_bytes(trim_res);

// Now compare to calling hash_grow{selene_scalar_0}
std::vector<Selene::Scalar> remaining_children{selene_scalar_0};
const auto grow_res = fcmp::curve_trees::CURVE_TREES_V1.m_c2.hash_grow(
/*existing_hash*/ fcmp::curve_trees::CURVE_TREES_V1.m_c2.m_hash_init_point,
const auto grow_res = curve_trees->m_c2->hash_grow(
/*existing_hash*/ curve_trees->m_c2->hash_init_point(),
/*offset*/ 0,
/*existing_child_at_offset*/ fcmp::curve_trees::CURVE_TREES_V1.m_c2.zero_scalar(),
/*existing_child_at_offset*/ curve_trees->m_c2->zero_scalar(),
/*children*/ Selene::Chunk{remaining_children.data(), remaining_children.size()});
const auto grow_res_bytes = fcmp::curve_trees::CURVE_TREES_V1.m_c2.to_bytes(grow_res);
const auto grow_res_bytes = curve_trees->m_c2->to_bytes(grow_res);

ASSERT_EQ(trim_res_bytes, grow_res_bytes);
}
@ -1125,31 +1112,31 @@ TEST(curve_trees, hash_trim)

// Get the initial hash of the 2 selene scalars
std::vector<Selene::Scalar> init_children{selene_scalar_0, selene_scalar_1};
const auto init_hash = fcmp::curve_trees::CURVE_TREES_V1.m_c2.hash_grow(
/*existing_hash*/ fcmp::curve_trees::CURVE_TREES_V1.m_c2.m_hash_init_point,
const auto init_hash = curve_trees->m_c2->hash_grow(
/*existing_hash*/ curve_trees->m_c2->hash_init_point(),
/*offset*/ 0,
/*existing_child_at_offset*/ fcmp::curve_trees::CURVE_TREES_V1.m_c2.zero_scalar(),
/*existing_child_at_offset*/ curve_trees->m_c2->zero_scalar(),
/*children*/ Selene::Chunk{init_children.data(), init_children.size()});

const auto selene_scalar_2 = generate_random_selene_scalar();

// Trim the 2nd child and grow with new child
const auto &trimmed_children = Selene::Chunk{init_children.data() + 1, 1};
const auto trim_res = fcmp::curve_trees::CURVE_TREES_V1.m_c2.hash_trim(
const auto trim_res = curve_trees->m_c2->hash_trim(
init_hash,
1,
trimmed_children,
selene_scalar_2);
const auto trim_res_bytes = fcmp::curve_trees::CURVE_TREES_V1.m_c2.to_bytes(trim_res);
const auto trim_res_bytes = curve_trees->m_c2->to_bytes(trim_res);

// Now compare to calling hash_grow{selene_scalar_0, selene_scalar_2}
std::vector<Selene::Scalar> remaining_children{selene_scalar_0, selene_scalar_2};
const auto grow_res = fcmp::curve_trees::CURVE_TREES_V1.m_c2.hash_grow(
/*existing_hash*/ fcmp::curve_trees::CURVE_TREES_V1.m_c2.m_hash_init_point,
const auto grow_res = curve_trees->m_c2->hash_grow(
/*existing_hash*/ curve_trees->m_c2->hash_init_point(),
/*offset*/ 0,
/*existing_child_at_offset*/ fcmp::curve_trees::CURVE_TREES_V1.m_c2.zero_scalar(),
/*existing_child_at_offset*/ curve_trees->m_c2->zero_scalar(),
/*children*/ Selene::Chunk{remaining_children.data(), remaining_children.size()});
const auto grow_res_bytes = fcmp::curve_trees::CURVE_TREES_V1.m_c2.to_bytes(grow_res);
const auto grow_res_bytes = curve_trees->m_c2->to_bytes(grow_res);

ASSERT_EQ(trim_res_bytes, grow_res_bytes);
}
@ -1164,31 +1151,31 @@ TEST(curve_trees, hash_trim)

// Get the initial hash of the 3 selene scalars
std::vector<Selene::Scalar> init_children{selene_scalar_0, selene_scalar_1, selene_scalar_2};
const auto init_hash = fcmp::curve_trees::CURVE_TREES_V1.m_c2.hash_grow(
/*existing_hash*/ fcmp::curve_trees::CURVE_TREES_V1.m_c2.m_hash_init_point,
const auto init_hash = curve_trees->m_c2->hash_grow(
/*existing_hash*/ curve_trees->m_c2->hash_init_point(),
/*offset*/ 0,
/*existing_child_at_offset*/ fcmp::curve_trees::CURVE_TREES_V1.m_c2.zero_scalar(),
/*existing_child_at_offset*/ curve_trees->m_c2->zero_scalar(),
/*children*/ Selene::Chunk{init_children.data(), init_children.size()});

const auto selene_scalar_3 = generate_random_selene_scalar();

// Trim the initial result by 2 children+grow by 1
const auto &trimmed_children = Selene::Chunk{init_children.data() + 1, 2};
const auto trim_res = fcmp::curve_trees::CURVE_TREES_V1.m_c2.hash_trim(
const auto trim_res = curve_trees->m_c2->hash_trim(
init_hash,
1,
trimmed_children,
selene_scalar_3);
const auto trim_res_bytes = fcmp::curve_trees::CURVE_TREES_V1.m_c2.to_bytes(trim_res);
const auto trim_res_bytes = curve_trees->m_c2->to_bytes(trim_res);

// Now compare to calling hash_grow{selene_scalar_0, selene_scalar_3}
std::vector<Selene::Scalar> remaining_children{selene_scalar_0, selene_scalar_3};
const auto grow_res = fcmp::curve_trees::CURVE_TREES_V1.m_c2.hash_grow(
/*existing_hash*/ fcmp::curve_trees::CURVE_TREES_V1.m_c2.m_hash_init_point,
const auto grow_res = curve_trees->m_c2->hash_grow(
/*existing_hash*/ curve_trees->m_c2->hash_init_point(),
/*offset*/ 0,
/*existing_child_at_offset*/ fcmp::curve_trees::CURVE_TREES_V1.m_c2.zero_scalar(),
/*existing_child_at_offset*/ curve_trees->m_c2->zero_scalar(),
/*children*/ Selene::Chunk{remaining_children.data(), remaining_children.size()});
const auto grow_res_bytes = fcmp::curve_trees::CURVE_TREES_V1.m_c2.to_bytes(grow_res);
const auto grow_res_bytes = curve_trees->m_c2->to_bytes(grow_res);

ASSERT_EQ(trim_res_bytes, grow_res_bytes);
}
@ -1196,6 +1183,8 @@ TEST(curve_trees, hash_trim)
//----------------------------------------------------------------------------------------------------------------------
TEST(curve_trees, hash_grow)
{
const auto curve_trees = fcmp::curve_trees::curve_trees_v1();

// Start by hashing: {selene_scalar_0, selene_scalar_1}
// Then grow 1: {selene_scalar_0, selene_scalar_1, selene_scalar_2}
// Then grow 1: {selene_scalar_0, selene_scalar_1, selene_scalar_2, selene_scalar_3}
@ -1204,30 +1193,30 @@ TEST(curve_trees, hash_grow)

// Get the initial hash of the 2 selene scalars
std::vector<Selene::Scalar> all_children{selene_scalar_0, selene_scalar_1};
const auto init_hash = fcmp::curve_trees::CURVE_TREES_V1.m_c2.hash_grow(
/*existing_hash*/ fcmp::curve_trees::CURVE_TREES_V1.m_c2.m_hash_init_point,
const auto init_hash = curve_trees->m_c2->hash_grow(
/*existing_hash*/ curve_trees->m_c2->hash_init_point(),
/*offset*/ 0,
/*existing_child_at_offset*/ fcmp::curve_trees::CURVE_TREES_V1.m_c2.zero_scalar(),
/*existing_child_at_offset*/ curve_trees->m_c2->zero_scalar(),
/*children*/ Selene::Chunk{all_children.data(), all_children.size()});

// Extend with a new child
const auto selene_scalar_2 = generate_random_selene_scalar();
std::vector<Selene::Scalar> new_children{selene_scalar_2};
const auto ext_hash = fcmp::curve_trees::CURVE_TREES_V1.m_c2.hash_grow(
const auto ext_hash = curve_trees->m_c2->hash_grow(
init_hash,
all_children.size(),
fcmp::curve_trees::CURVE_TREES_V1.m_c2.zero_scalar(),
curve_trees->m_c2->zero_scalar(),
Selene::Chunk{new_children.data(), new_children.size()});
const auto ext_hash_bytes = fcmp::curve_trees::CURVE_TREES_V1.m_c2.to_bytes(ext_hash);
const auto ext_hash_bytes = curve_trees->m_c2->to_bytes(ext_hash);

// Now compare to calling hash_grow{selene_scalar_0, selene_scalar_1, selene_scalar_2}
all_children.push_back(selene_scalar_2);
const auto grow_res = fcmp::curve_trees::CURVE_TREES_V1.m_c2.hash_grow(
/*existing_hash*/ fcmp::curve_trees::CURVE_TREES_V1.m_c2.m_hash_init_point,
const auto grow_res = curve_trees->m_c2->hash_grow(
/*existing_hash*/ curve_trees->m_c2->hash_init_point(),
/*offset*/ 0,
/*existing_child_at_offset*/ fcmp::curve_trees::CURVE_TREES_V1.m_c2.zero_scalar(),
/*existing_child_at_offset*/ curve_trees->m_c2->zero_scalar(),
/*children*/ Selene::Chunk{all_children.data(), all_children.size()});
const auto grow_res_bytes = fcmp::curve_trees::CURVE_TREES_V1.m_c2.to_bytes(grow_res);
const auto grow_res_bytes = curve_trees->m_c2->to_bytes(grow_res);

ASSERT_EQ(ext_hash_bytes, grow_res_bytes);
@ -1235,21 +1224,21 @@ TEST(curve_trees, hash_grow)
const auto selene_scalar_3 = generate_random_selene_scalar();
new_children.clear();
new_children = {selene_scalar_3};
const auto ext_hash2 = fcmp::curve_trees::CURVE_TREES_V1.m_c2.hash_grow(
const auto ext_hash2 = curve_trees->m_c2->hash_grow(
ext_hash,
all_children.size(),
fcmp::curve_trees::CURVE_TREES_V1.m_c2.zero_scalar(),
curve_trees->m_c2->zero_scalar(),
Selene::Chunk{new_children.data(), new_children.size()});
const auto ext_hash_bytes2 = fcmp::curve_trees::CURVE_TREES_V1.m_c2.to_bytes(ext_hash2);
const auto ext_hash_bytes2 = curve_trees->m_c2->to_bytes(ext_hash2);

// Now compare to calling hash_grow{selene_scalar_0, selene_scalar_1, selene_scalar_2, selene_scalar_3}
all_children.push_back(selene_scalar_3);
const auto grow_res2 = fcmp::curve_trees::CURVE_TREES_V1.m_c2.hash_grow(
/*existing_hash*/ fcmp::curve_trees::CURVE_TREES_V1.m_c2.m_hash_init_point,
const auto grow_res2 = curve_trees->m_c2->hash_grow(
/*existing_hash*/ curve_trees->m_c2->hash_init_point(),
/*offset*/ 0,
/*existing_child_at_offset*/ fcmp::curve_trees::CURVE_TREES_V1.m_c2.zero_scalar(),
/*existing_child_at_offset*/ curve_trees->m_c2->zero_scalar(),
/*children*/ Selene::Chunk{all_children.data(), all_children.size()});
const auto grow_res_bytes2 = fcmp::curve_trees::CURVE_TREES_V1.m_c2.to_bytes(grow_res2);
const auto grow_res_bytes2 = curve_trees->m_c2->to_bytes(grow_res2);

ASSERT_EQ(ext_hash_bytes2, grow_res_bytes2);
}
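Editor's note, not part of the commit: the tests above replace the removed CURVE_TREES_V1 global with a call to curve_trees_v1(...). Judging from the std::shared_ptr<CurveTreesV1> parameters of grow_tree_db/trim_tree_db and the *curve_trees dereferences, the function appears to be a factory returning a shared pointer, and the per-curve members m_c1/m_c2 are now reached through ->. A compact sketch of that pattern; chunk widths are illustrative and the return type is an assumption:

// Sketch only; assumes fcmp::curve_trees::curve_trees_v1 returns a
// std::shared_ptr<fcmp::curve_trees::CurveTreesV1>.
#include "fcmp/curve_trees.h"

void curve_trees_factory_sketch()
{
    // Test-sized chunk widths, as in the grow_tree test above.
    const auto curve_trees = fcmp::curve_trees::curve_trees_v1(/*helios_chunk_width*/ 3,
                                                               /*selene_chunk_width*/ 2);

    // Per-curve objects are reached through smart pointers, e.g. the Selene
    // hash initialization point used by the hash_grow/hash_trim tests:
    const auto selene_init_point = curve_trees->m_c2->hash_init_point();
    (void)selene_init_point;
}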
@ -35,6 +35,7 @@
#include "cryptonote_basic/cryptonote_format_utils.h"
#include "cryptonote_basic/hardfork.h"
#include "blockchain_db/testdb.h"
#include "fcmp/curve_trees.h"

using namespace cryptonote;

@ -1371,7 +1371,7 @@ TEST(Serialization, tx_fcmp_pp)

// 1 fcmp++ proof
fcmp::FcmpPpProof fcmp_pp;
const std::size_t proof_len = fcmp::get_fcmp_pp_len_from_n_inputs(n_inputs);
const std::size_t proof_len = fcmp::fcmp_pp_len(n_inputs);
fcmp_pp.reserve(proof_len);
for (std::size_t i = 0; i < proof_len; ++i)
fcmp_pp.push_back(i);
@ -1400,7 +1400,7 @@ TEST(Serialization, tx_fcmp_pp)
string blob;

// Extend fcmp++ proof
ASSERT_TRUE(tx.rct_signatures.p.fcmp_pp.size() == fcmp::get_fcmp_pp_len_from_n_inputs(n_inputs));
ASSERT_TRUE(tx.rct_signatures.p.fcmp_pp.size() == fcmp::fcmp_pp_len(n_inputs));
tx.rct_signatures.p.fcmp_pp.push_back(0x01);

ASSERT_FALSE(serialization::dump_binary(tx, blob));
@ -1411,7 +1411,7 @@ TEST(Serialization, tx_fcmp_pp)
transaction tx = make_dummy_fcmp_pp_tx();

// Shorten the fcmp++ proof
ASSERT_TRUE(tx.rct_signatures.p.fcmp_pp.size() == fcmp::get_fcmp_pp_len_from_n_inputs(n_inputs));
ASSERT_TRUE(tx.rct_signatures.p.fcmp_pp.size() == fcmp::fcmp_pp_len(n_inputs));
ASSERT_TRUE(tx.rct_signatures.p.fcmp_pp.size() > 1);
tx.rct_signatures.p.fcmp_pp.pop_back();
@ -84,7 +84,7 @@ namespace unit_test
remove_files();
}

void init_new_db(fcmp::curve_trees::CurveTreesV1 *curve_trees)
void init_new_db(std::shared_ptr<fcmp::curve_trees::CurveTreesV1> curve_trees)
{
CHECK_AND_ASSERT_THROW_MES(this->m_db == nullptr, "expected nullptr m_db");
this->m_db = new cryptonote::BlockchainLMDB(true/*batch_transactions*/, curve_trees);
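Editor's note, not part of the commit: init_new_db now takes the curve-tree context as a std::shared_ptr passed by value, which the wrapped BlockchainLMDB copies, so the test fixture and the database share one CurveTreesV1 instance. A minimal sketch of constructing the LMDB backend directly under that assumption; header paths are assumed:

// Sketch only; assumes BlockchainLMDB's second constructor argument is the
// std::shared_ptr<CurveTreesV1> shown in the line above.
#include <memory>
#include "blockchain_db/lmdb/db_lmdb.h"
#include "fcmp/curve_trees.h"

void open_db_sketch()
{
    // Default-width curve trees, as in the hash_grow/hash_trim tests above.
    const auto curve_trees = fcmp::curve_trees::curve_trees_v1();

    // The DB stores its own copy of the shared_ptr.
    std::unique_ptr<cryptonote::BlockchainDB> db(
        new cryptonote::BlockchainLMDB(true /*batch_transactions*/, curve_trees));
}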