rough fcmp++ tree impl (lots of work remaining to clean it up and fix)
This commit is contained in:
parent caa62bc9ea
commit e1c03f4d5a

2 .gitignore (vendored)
@@ -120,3 +120,5 @@ nbproject
__pycache__/
*.pyc
*.log

Cargo.lock
3 .gitmodules (vendored)
@@ -14,3 +14,6 @@
	path = external/supercop
	url = https://github.com/monero-project/supercop
	branch = monero
[submodule "external/fcmp-plus-plus"]
	path = external/fcmp-plus-plus
	url = https://github.com/kayabaNerve/fcmp-plus-plus.git
@@ -89,6 +89,7 @@ add_subdirectory(ringct)
add_subdirectory(checkpoints)
add_subdirectory(cryptonote_basic)
add_subdirectory(cryptonote_core)
add_subdirectory(fcmp)
add_subdirectory(lmdb)
add_subdirectory(multisig)
add_subdirectory(net)
@@ -199,6 +199,9 @@ namespace
 *
 * spent_keys     input hash       -
 *
 * leaves         leaf_idx         {O.x, I.x, C.x}
 * branches       layer_idx        [{branch_idx, branch_hash}...]
 *
 * txpool_meta    txn hash         txn metadata
 * txpool_blob    txn hash         txn blob
 *
@@ -210,7 +213,7 @@ namespace
 * attached as a prefix on the Data to serve as the DUPSORT key.
 * (DUPFIXED saves 8 bytes per record.)
 *
 * The output_amounts table doesn't use a dummy key, but uses DUPSORT.
 * The output_amounts and branches tables don't use a dummy key, but use DUPSORT
 */
const char* const LMDB_BLOCKS = "blocks";
const char* const LMDB_BLOCK_HEIGHTS = "block_heights";
@@ -228,6 +231,10 @@ const char* const LMDB_OUTPUT_TXS = "output_txs";
const char* const LMDB_OUTPUT_AMOUNTS = "output_amounts";
const char* const LMDB_SPENT_KEYS = "spent_keys";

// Curve trees tree types
const char* const LMDB_LEAVES = "leaves";
const char* const LMDB_BRANCHES = "branches";

const char* const LMDB_TXPOOL_META = "txpool_meta";
const char* const LMDB_TXPOOL_BLOB = "txpool_blob";

@@ -1437,6 +1444,9 @@ void BlockchainLMDB::open(const std::string& filename, const int db_flags)

  lmdb_db_open(txn, LMDB_SPENT_KEYS, MDB_INTEGERKEY | MDB_CREATE | MDB_DUPSORT | MDB_DUPFIXED, m_spent_keys, "Failed to open db handle for m_spent_keys");

  lmdb_db_open(txn, LMDB_LEAVES, MDB_INTEGERKEY | MDB_DUPSORT | MDB_DUPFIXED | MDB_CREATE, m_leaves, "Failed to open db handle for m_leaves");
  lmdb_db_open(txn, LMDB_BRANCHES, MDB_INTEGERKEY | MDB_DUPSORT | MDB_DUPFIXED | MDB_CREATE, m_branches, "Failed to open db handle for m_branches");

  lmdb_db_open(txn, LMDB_TXPOOL_META, MDB_CREATE, m_txpool_meta, "Failed to open db handle for m_txpool_meta");
  lmdb_db_open(txn, LMDB_TXPOOL_BLOB, MDB_CREATE, m_txpool_blob, "Failed to open db handle for m_txpool_blob");

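[Editor's note] For orientation only — a rough illustration, not code from this commit, and the on-disk leaf record layout is still unsettled in this WIP: with m_leaves opened above as an MDB_INTEGERKEY/DUPSORT/DUPFIXED table (compare_uint64 dupsort), appending one serialized leaf tuple keyed by a hypothetical running leaf_idx could look roughly like this, using the m_cur_leaves cursor this commit adds:

  uint64_t leaf_idx = 0;                      // hypothetical running index of the leaf tuple
  std::array<uint8_t, 3 * 32> leaf_bytes{};   // O.x, I.x, C.x serialized (layout TBD in this WIP)
  MDB_val k{sizeof(leaf_idx), &leaf_idx};
  MDB_val v{leaf_bytes.size(), leaf_bytes.data()};
  const int ret = mdb_cursor_put(m_cur_leaves, &k, &v, MDB_APPENDDUP);
  if (ret)
    throw0(DB_ERROR(lmdb_error("Failed to add leaf: ", ret).c_str()));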
@@ -1456,6 +1466,8 @@ void BlockchainLMDB::open(const std::string& filename, const int db_flags)
  mdb_set_dupsort(txn, m_block_heights, compare_hash32);
  mdb_set_dupsort(txn, m_tx_indices, compare_hash32);
  mdb_set_dupsort(txn, m_output_amounts, compare_uint64);
  mdb_set_dupsort(txn, m_leaves, compare_uint64);
  mdb_set_dupsort(txn, m_branches, compare_uint64);
  mdb_set_dupsort(txn, m_output_txs, compare_uint64);
  mdb_set_dupsort(txn, m_block_info, compare_uint64);
  if (!(mdb_flags & MDB_RDONLY))
@@ -1633,6 +1645,10 @@ void BlockchainLMDB::reset()
    throw0(DB_ERROR(lmdb_error("Failed to drop m_output_amounts: ", result).c_str()));
  if (auto result = mdb_drop(txn, m_spent_keys, 0))
    throw0(DB_ERROR(lmdb_error("Failed to drop m_spent_keys: ", result).c_str()));
  if (auto result = mdb_drop(txn, m_leaves, 0))
    throw0(DB_ERROR(lmdb_error("Failed to drop m_leaves: ", result).c_str()));
  if (auto result = mdb_drop(txn, m_branches, 0))
    throw0(DB_ERROR(lmdb_error("Failed to drop m_branches: ", result).c_str()));
  (void)mdb_drop(txn, m_hf_starting_heights, 0); // this one is dropped in new code
  if (auto result = mdb_drop(txn, m_hf_versions, 0))
    throw0(DB_ERROR(lmdb_error("Failed to drop m_hf_versions: ", result).c_str()));
@@ -64,6 +64,9 @@ typedef struct mdb_txn_cursors

  MDB_cursor *m_txc_spent_keys;

  MDB_cursor *m_txc_leaves;
  MDB_cursor *m_txc_branches;

  MDB_cursor *m_txc_txpool_meta;
  MDB_cursor *m_txc_txpool_blob;

@@ -87,6 +90,8 @@ typedef struct mdb_txn_cursors
#define m_cur_tx_indices	m_cursors->m_txc_tx_indices
#define m_cur_tx_outputs	m_cursors->m_txc_tx_outputs
#define m_cur_spent_keys	m_cursors->m_txc_spent_keys
#define m_cur_leaves	m_cursors->m_txc_leaves
#define m_cur_branches	m_cursors->m_txc_branches
#define m_cur_txpool_meta	m_cursors->m_txc_txpool_meta
#define m_cur_txpool_blob	m_cursors->m_txc_txpool_blob
#define m_cur_alt_blocks	m_cursors->m_txc_alt_blocks
@@ -109,6 +114,8 @@ typedef struct mdb_rflags
  bool m_rf_tx_indices;
  bool m_rf_tx_outputs;
  bool m_rf_spent_keys;
  bool m_rf_leaves;
  bool m_rf_branches;
  bool m_rf_txpool_meta;
  bool m_rf_txpool_blob;
  bool m_rf_alt_blocks;
@@ -463,6 +470,9 @@ private:

  MDB_dbi m_spent_keys;

  MDB_dbi m_leaves;
  MDB_dbi m_branches;

  MDB_dbi m_txpool_meta;
  MDB_dbi m_txpool_blob;

@@ -618,6 +618,12 @@ namespace crypto {
    ge_p1p1_to_p3(&res, &point2);
  }

  void crypto_ops::derive_key_image_generator(const public_key &pub, ec_point &ki_gen) {
    ge_p3 point;
    hash_to_ec(pub, point);
    ge_p3_tobytes(&ki_gen, &point);
  }

  void crypto_ops::generate_key_image(const public_key &pub, const secret_key &sec, key_image &image) {
    ge_p3 point;
    ge_p2 point2;
@@ -145,6 +145,8 @@ namespace crypto {
    friend void generate_tx_proof_v1(const hash &, const public_key &, const public_key &, const boost::optional<public_key> &, const public_key &, const secret_key &, signature &);
    static bool check_tx_proof(const hash &, const public_key &, const public_key &, const boost::optional<public_key> &, const public_key &, const signature &, const int);
    friend bool check_tx_proof(const hash &, const public_key &, const public_key &, const boost::optional<public_key> &, const public_key &, const signature &, const int);
    static void derive_key_image_generator(const public_key &, ec_point &);
    friend void derive_key_image_generator(const public_key &, ec_point &);
    static void generate_key_image(const public_key &, const secret_key &, key_image &);
    friend void generate_key_image(const public_key &, const secret_key &, key_image &);
    static void generate_ring_signature(const hash &, const key_image &,
@@ -268,6 +270,10 @@ namespace crypto {
    return crypto_ops::check_tx_proof(prefix_hash, R, A, B, D, sig, version);
  }

  inline void derive_key_image_generator(const public_key &pub, ec_point &ki_gen) {
    crypto_ops::derive_key_image_generator(pub, ki_gen);
  }

  /* To send money to a key:
   * * The sender generates an ephemeral key and includes it in transaction output.
   * * To spend the money, the receiver generates a key image from it.
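[Editor's note] To make the relationship concrete, a small usage sketch (illustrative, not part of this commit): the new derive_key_image_generator exposes I = Hp(O), the same hash-to-point that the existing generate_key_image multiplies by the output secret key, so the key image is x*Hp(O) while the fcmp++ leaf tuple only needs I itself.

  crypto::public_key O;
  crypto::secret_key x;
  crypto::generate_keys(O, x);                 // fresh output key pair, O = x*G
  crypto::ec_point I;
  crypto::derive_key_image_generator(O, I);    // I = Hp(O), the point used for the leaf tuple's I.x
  crypto::key_image ki;
  crypto::generate_key_image(O, x, ki);        // ki = x*Hp(O)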
49 src/fcmp/CMakeLists.txt (new file)
@@ -0,0 +1,49 @@
|
||||
# Copyright (c) 2024, The Monero Project
|
||||
#
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification, are
|
||||
# permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice, this list of
|
||||
# conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice, this list
|
||||
# of conditions and the following disclaimer in the documentation and/or other
|
||||
# materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors may be
|
||||
# used to endorse or promote products derived from this software without specific
|
||||
# prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
|
||||
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
|
||||
# THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
|
||||
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
||||
# THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
set(fcmp_sources
  fcmp.cpp)

monero_find_all_headers(fcmp_headers "${CMAKE_CURRENT_SOURCE_DIR}")

add_subdirectory(fcmp_rust)

monero_add_library_with_deps(
  NAME fcmp
  DEPENDS rust_cxx
  SOURCES
    ${fcmp_sources}
    ${fcmp_headers})

target_link_libraries(fcmp
  PUBLIC
    crypto
    epee
  PRIVATE
    fcmp_rust
    ${EXTRA_LIBRARIES})
86 src/fcmp/fcmp.cpp (new file)
@@ -0,0 +1,86 @@
|
||||
// Copyright (c) 2024, The Monero Project
|
||||
//
|
||||
// All rights reserved.
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without modification, are
|
||||
// permitted provided that the following conditions are met:
|
||||
//
|
||||
// 1. Redistributions of source code must retain the above copyright notice, this list of
|
||||
// conditions and the following disclaimer.
|
||||
//
|
||||
// 2. Redistributions in binary form must reproduce the above copyright notice, this list
|
||||
// of conditions and the following disclaimer in the documentation and/or other
|
||||
// materials provided with the distribution.
|
||||
//
|
||||
// 3. Neither the name of the copyright holder nor the names of its contributors may be
|
||||
// used to endorse or promote products derived from this software without specific
|
||||
// prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
|
||||
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
|
||||
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
|
||||
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
||||
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
#include "fcmp.h"
|
||||
#include "misc_log_ex.h"
|
||||
|
||||
namespace fcmp
|
||||
{
|
||||
|
||||
// TODO: move into its own fcmp_crypto file
|
||||
static SeleneScalar ed_25519_point_to_selene_scalar(const crypto::ec_point &point)
|
||||
{
|
||||
static_assert(sizeof(fcmp::RustEd25519Point) == sizeof(crypto::ec_point),
|
||||
"expected same size ed25519 point to rust representation");
|
||||
|
||||
// TODO: implement reading just the x coordinate of ed25519 point in C/C++
|
||||
fcmp::RustEd25519Point rust_point;
|
||||
memcpy(&rust_point, &point, sizeof(fcmp::RustEd25519Point));
|
||||
return fcmp_rust::ed25519_point_to_selene_scalar(rust_point);
|
||||
};
|
||||
|
||||
// TODO: move into its own fcmp_crypto file
|
||||
LeafTuple output_to_leaf_tuple(const crypto::public_key &O, const crypto::public_key &C)
|
||||
{
|
||||
crypto::ec_point I;
|
||||
crypto::derive_key_image_generator(O, I);
|
||||
|
||||
return LeafTuple{
|
||||
.O_x = ed_25519_point_to_selene_scalar(O),
|
||||
.I_x = ed_25519_point_to_selene_scalar(I),
|
||||
.C_x = ed_25519_point_to_selene_scalar(C)
|
||||
};
|
||||
}
|
||||
|
||||
// TODO: move into its own fcmp_crypto file
|
||||
std::vector<SeleneScalar> flatten_leaves(const std::vector<LeafTuple> &leaves)
|
||||
{
|
||||
std::vector<SeleneScalar> flattened_leaves;
|
||||
flattened_leaves.reserve(leaves.size() * LEAF_TUPLE_SIZE);
|
||||
|
||||
for (const auto &l : leaves)
|
||||
{
|
||||
// TODO: implement without cloning
|
||||
flattened_leaves.emplace_back(fcmp_rust::clone_selene_scalar(l.O_x));
|
||||
flattened_leaves.emplace_back(fcmp_rust::clone_selene_scalar(l.I_x));
|
||||
flattened_leaves.emplace_back(fcmp_rust::clone_selene_scalar(l.C_x));
|
||||
}
|
||||
|
||||
return flattened_leaves;
|
||||
};
|
||||
|
||||
SeleneScalar Helios::point_to_cycle_scalar(const Helios::Point &point) const
|
||||
{
|
||||
return fcmp_rust::helios_point_to_selene_scalar(point);
|
||||
};
|
||||
|
||||
HeliosScalar Selene::point_to_cycle_scalar(const Selene::Point &point) const
|
||||
{
|
||||
return fcmp_rust::selene_point_to_helios_scalar(point);
|
||||
};
|
||||
} //namespace fcmp
|
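[Editor's note] A brief usage sketch of the two helpers above (illustrative only, not code from this commit; O and C stand in for an output's one-time key and amount commitment and are generated here just to have valid points):

  crypto::public_key O, C;
  crypto::secret_key o, c;
  crypto::generate_keys(O, o);
  crypto::generate_keys(C, c);

  std::vector<fcmp::LeafTuple> leaves;
  leaves.emplace_back(fcmp::output_to_leaf_tuple(O, C));

  // [{O.x, I.x, C.x}, ...] -> [O.x, I.x, C.x, O.x, ...], ready to be hashed in chunks
  const std::vector<fcmp::SeleneScalar> flattened = fcmp::flatten_leaves(leaves);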
921 src/fcmp/fcmp.h (new file)
@@ -0,0 +1,921 @@
|
||||
// Copyright (c) 2024, The Monero Project
|
||||
//
|
||||
// All rights reserved.
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without modification, are
|
||||
// permitted provided that the following conditions are met:
|
||||
//
|
||||
// 1. Redistributions of source code must retain the above copyright notice, this list of
|
||||
// conditions and the following disclaimer.
|
||||
//
|
||||
// 2. Redistributions in binary form must reproduce the above copyright notice, this list
|
||||
// of conditions and the following disclaimer in the documentation and/or other
|
||||
// materials provided with the distribution.
|
||||
//
|
||||
// 3. Neither the name of the copyright holder nor the names of its contributors may be
|
||||
// used to endorse or promote products derived from this software without specific
|
||||
// prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
|
||||
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
|
||||
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
|
||||
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
||||
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
#pragma once
|
||||
|
||||
#include "crypto/crypto.h"
|
||||
#include "fcmp_rust/cxx.h"
|
||||
#include "fcmp_rust/fcmp_rust.h"
|
||||
#include "misc_log_ex.h"
|
||||
#include "string_tools.h"
|
||||
|
||||
#include <boost/variant.hpp>
|
||||
|
||||
#include <string>
|
||||
|
||||
namespace fcmp
|
||||
{
|
||||
using RustEd25519Point = std::array<uint8_t, 32UL>;
|
||||
|
||||
// Need to forward declare Scalar types for point_to_cycle_scalar below
|
||||
using SeleneScalar = rust::Box<fcmp_rust::SeleneScalar>;
|
||||
using HeliosScalar = rust::Box<fcmp_rust::HeliosScalar>;
|
||||
|
||||
static struct Helios final
|
||||
{
|
||||
using Generators = rust::Box<fcmp_rust::HeliosGenerators>;
|
||||
using Scalar = HeliosScalar;
|
||||
using Point = rust::Box<fcmp_rust::HeliosPoint>;
|
||||
using Chunk = rust::Slice<const Scalar>;
|
||||
|
||||
// TODO: static constants
|
||||
const Generators GENERATORS = fcmp_rust::random_helios_generators();
|
||||
const Point HASH_INIT_POINT = fcmp_rust::random_helios_hash_init_point();
|
||||
|
||||
// TODO: use correct value
|
||||
static const std::size_t WIDTH = 5;
|
||||
|
||||
Point hash_grow(
|
||||
const Generators &generators,
|
||||
const Point &existing_hash,
|
||||
const std::size_t offset,
|
||||
const Chunk &prior_children,
|
||||
const Chunk &new_children) const
|
||||
{
|
||||
return fcmp_rust::hash_grow_helios(
|
||||
generators,
|
||||
existing_hash,
|
||||
offset,
|
||||
prior_children,
|
||||
new_children);
|
||||
}
|
||||
|
||||
SeleneScalar point_to_cycle_scalar(const Point &point) const;
|
||||
|
||||
Scalar clone(const Scalar &scalar) const { return fcmp_rust::clone_helios_scalar(scalar); }
|
||||
Point clone(const Point &point) const { return fcmp_rust::clone_helios_point(point); }
|
||||
|
||||
Scalar zero_scalar() const { return fcmp_rust::helios_zero_scalar(); }
|
||||
|
||||
std::array<uint8_t, 32UL> to_bytes(const Scalar &scalar) const
|
||||
{ return fcmp_rust::helios_scalar_to_bytes(scalar); }
|
||||
std::array<uint8_t, 32UL> to_bytes(const Point &point) const
|
||||
{ return fcmp_rust::helios_point_to_bytes(point); }
|
||||
|
||||
std::string to_string(const Scalar &scalar) const
|
||||
{ return epee::string_tools::pod_to_hex(to_bytes(scalar)); }
|
||||
std::string to_string(const Point &point) const
|
||||
{ return epee::string_tools::pod_to_hex(to_bytes(point)); }
|
||||
} HELIOS;
|
||||
|
||||
static struct Selene final
|
||||
{
|
||||
using Generators = rust::Box<fcmp_rust::SeleneGenerators>;
|
||||
using Scalar = SeleneScalar;
|
||||
using Point = rust::Box<fcmp_rust::SelenePoint>;
|
||||
using Chunk = rust::Slice<const Scalar>;
|
||||
|
||||
// TODO: static constants
|
||||
const Generators GENERATORS = fcmp_rust::random_selene_generators();
|
||||
const Point HASH_INIT_POINT = fcmp_rust::random_selene_hash_init_point();
|
||||
|
||||
// TODO: use correct value
|
||||
static const std::size_t WIDTH = 5;
|
||||
|
||||
Point hash_grow(
|
||||
const Generators &generators,
|
||||
const Point &existing_hash,
|
||||
const std::size_t offset,
|
||||
const Chunk &prior_children,
|
||||
const Chunk &new_children) const
|
||||
{
|
||||
return fcmp_rust::hash_grow_selene(
|
||||
generators,
|
||||
existing_hash,
|
||||
offset,
|
||||
prior_children,
|
||||
new_children);
|
||||
};
|
||||
|
||||
HeliosScalar point_to_cycle_scalar(const Point &point) const;
|
||||
|
||||
Scalar clone(const Scalar &scalar) const { return fcmp_rust::clone_selene_scalar(scalar); }
|
||||
Point clone(const Point &point) const { return fcmp_rust::clone_selene_point(point); }
|
||||
|
||||
Scalar zero_scalar() const { return fcmp_rust::selene_zero_scalar(); }
|
||||
|
||||
std::array<uint8_t, 32UL> to_bytes(const Scalar &scalar) const
|
||||
{ return fcmp_rust::selene_scalar_to_bytes(scalar); }
|
||||
std::array<uint8_t, 32UL> to_bytes(const Point &point) const
|
||||
{ return fcmp_rust::selene_point_to_bytes(point); }
|
||||
|
||||
std::string to_string(const Scalar &scalar) const
|
||||
{ return epee::string_tools::pod_to_hex(to_bytes(scalar)); }
|
||||
std::string to_string(const Point &point) const
|
||||
{ return epee::string_tools::pod_to_hex(to_bytes(point)); }
|
||||
} SELENE;
|
||||
|
||||
// TODO: cleanly separate everything below into another file. This current file should strictly be for the rust interface
|
||||
|
||||
// TODO: template all the curve things
|
||||
|
||||
// TODO: Curve class
|
||||
// TODO: CurveTree class instantiated with the curves and widths
|
||||
|
||||
// TODO: template
|
||||
struct LeafTuple final
|
||||
{
|
||||
Selene::Scalar O_x;
|
||||
Selene::Scalar I_x;
|
||||
Selene::Scalar C_x;
|
||||
};
|
||||
static const std::size_t LEAF_TUPLE_SIZE = 3;
|
||||
static const std::size_t LEAF_LAYER_CHUNK_SIZE = LEAF_TUPLE_SIZE * SELENE.WIDTH;
|
||||
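[Editor's note] A quick worked example of these constants (illustrative, using the placeholder WIDTH = 5 above):

  // LEAF_LAYER_CHUNK_SIZE = LEAF_TUPLE_SIZE * WIDTH = 3 * 5 = 15 Selene scalars per chunk,
  // i.e. 5 leaf tuples hash into one parent. 12 leaf tuples flatten to 36 scalars,
  // filling 2 full chunks plus a final partial chunk with offset 36 % 15 = 6,
  // so the layer above the leaves gets 3 parent hashes.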
|
||||
// Tree structure
|
||||
struct Leaves final
|
||||
{
|
||||
// Starting index in the leaf layer
|
||||
std::size_t start_idx;
|
||||
// Contiguous leaves in a tree that start at the start_idx
|
||||
std::vector<LeafTuple> tuples;
|
||||
};
|
||||
|
||||
// A layer of contiguous hashes starting from a specific start_idx in the tree
|
||||
template<typename C>
|
||||
struct LayerExtension final
|
||||
{
|
||||
std::size_t start_idx;
|
||||
std::vector<typename C::Point> hashes;
|
||||
};
|
||||
|
||||
// A struct useful to extend an existing tree, layers alternate between C1 and C2
|
||||
template<typename C1, typename C2>
|
||||
struct TreeExtension final
|
||||
{
|
||||
Leaves leaves;
|
||||
std::vector<LayerExtension<C1>> c1_layer_extensions;
|
||||
std::vector<LayerExtension<C2>> c2_layer_extensions;
|
||||
};
|
||||
|
||||
// Useful data from the last chunk in a layer
|
||||
template<typename C>
|
||||
struct LastChunkData final
|
||||
{
|
||||
// The total number of children % child layer chunk size
|
||||
/*TODO: const*/ std::size_t child_offset;
|
||||
// The last child in the chunk (and therefore the last child in the child layer)
|
||||
/*TODO: const*/ typename C::Scalar last_child;
|
||||
// The hash of the last chunk of child scalars
|
||||
/*TODO: const*/ typename C::Point last_parent;
|
||||
// Total number of children in the child layer
|
||||
/*TODO: const*/ std::size_t child_layer_size;
|
||||
// Total number of hashes in the parent layer
|
||||
/*TODO: const*/ std::size_t parent_layer_size;
|
||||
};
|
||||
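[Editor's note] To make the bookkeeping concrete, a hypothetical leaf-layer snapshot of this struct (assuming the placeholder WIDTH = 5, so 15 scalars per leaf chunk): with 7 leaf tuples in the tree, child_layer_size = 7 * 3 = 21, child_offset = 21 % 15 = 6, parent_layer_size = 2 (one full chunk of 15 scalars plus one partial chunk of 6), last_child is the 7th tuple's C.x, and last_parent is the hash of that partial chunk.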
|
||||
template<typename C1, typename C2>
|
||||
struct LastChunks final
|
||||
{
|
||||
std::vector<LastChunkData<C1>> c1_last_chunks;
|
||||
std::vector<LastChunkData<C2>> c2_last_chunks;
|
||||
};
|
||||
|
||||
template<typename C>
|
||||
using Layer = std::vector<typename C::Point>;
|
||||
|
||||
// A complete tree, useful for testing (can't fit the whole tree in memory otherwise)
|
||||
// TODO: move this to just the testing
|
||||
template<typename C1, typename C2>
|
||||
struct Tree final
|
||||
{
|
||||
std::vector<LeafTuple> leaves;
|
||||
std::vector<Layer<C1>> c1_layers;
|
||||
std::vector<Layer<C2>> c2_layers;
|
||||
};
|
||||
|
||||
LeafTuple output_to_leaf_tuple(const crypto::public_key &O, const crypto::public_key &C);
|
||||
std::vector<Selene::Scalar> flatten_leaves(const std::vector<LeafTuple> &leaves);
|
||||
|
||||
// TODO: move into its own fcmp_crypto file
|
||||
template <typename C_POINTS, typename C_SCALARS>
|
||||
static void extend_scalars_from_cycle_points(const C_POINTS &curve,
|
||||
const std::vector<typename C_POINTS::Point> &points,
|
||||
std::vector<typename C_SCALARS::Scalar> &scalars_out)
|
||||
{
|
||||
scalars_out.reserve(scalars_out.size() + points.size());
|
||||
|
||||
for (const auto &point : points)
|
||||
{
|
||||
// TODO: implement reading just the x coordinate of points on curves in curve cycle in C/C++
|
||||
typename C_SCALARS::Scalar scalar = curve.point_to_cycle_scalar(point);
|
||||
scalars_out.push_back(std::move(scalar));
|
||||
}
|
||||
}
|
||||
|
||||
template<typename C2>
|
||||
LastChunkData<C2> get_last_leaf_chunk(const C2 &c2,
|
||||
const std::vector<LeafTuple> &leaves,
|
||||
const std::vector<typename C2::Point> &parent_layer)
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(!leaves.empty(), "empty leaf layer");
|
||||
CHECK_AND_ASSERT_THROW_MES(!parent_layer.empty(), "empty leaf parent layer");
|
||||
|
||||
const std::size_t child_offset = (leaves.size() * LEAF_TUPLE_SIZE) % LEAF_LAYER_CHUNK_SIZE;
|
||||
|
||||
const typename C2::Scalar &last_child = leaves.back().C_x;
|
||||
const typename C2::Point &last_parent = parent_layer.back();
|
||||
|
||||
return LastChunkData<C2>{
|
||||
.child_offset = child_offset,
|
||||
.last_child = c2.clone(last_child),
|
||||
.last_parent = c2.clone(last_parent),
|
||||
.child_layer_size = leaves.size() * LEAF_TUPLE_SIZE,
|
||||
.parent_layer_size = parent_layer.size()
|
||||
};
|
||||
}
|
||||
|
||||
template<typename C_CHILD, typename C_PARENT>
|
||||
LastChunkData<C_PARENT> get_last_child_layer_chunk(const C_CHILD &c_child,
|
||||
const C_PARENT &c_parent,
|
||||
const std::vector<typename C_CHILD::Point> &child_layer,
|
||||
const std::vector<typename C_PARENT::Point> &parent_layer)
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(!child_layer.empty(), "empty child layer");
|
||||
CHECK_AND_ASSERT_THROW_MES(!parent_layer.empty(), "empty parent layer");
|
||||
|
||||
const std::size_t child_offset = child_layer.size() % c_parent.WIDTH;
|
||||
|
||||
const typename C_CHILD::Point &last_child_point = child_layer.back();
|
||||
const typename C_PARENT::Scalar &last_child = c_child.point_to_cycle_scalar(last_child_point);
|
||||
|
||||
const typename C_PARENT::Point &last_parent = parent_layer.back();
|
||||
|
||||
return LastChunkData<C_PARENT>{
|
||||
.child_offset = child_offset,
|
||||
.last_child = c_parent.clone(last_child),
|
||||
.last_parent = c_parent.clone(last_parent),
|
||||
.child_layer_size = child_layer.size(),
|
||||
.parent_layer_size = parent_layer.size()
|
||||
};
|
||||
}
|
||||
|
||||
// TODO: implement in the db, never want the entire tree in memory
|
||||
template<typename C1, typename C2>
|
||||
LastChunks<C1, C2> get_last_chunks(const C1 &c1,
|
||||
const C2 &c2,
|
||||
const Tree<C1, C2> &tree)
|
||||
{
|
||||
// const bool valid = validate_tree<C1, C2>(tree, C1, C2);
|
||||
// CHECK_AND_ASSERT_THROW_MES(valid, "invalid tree");
|
||||
|
||||
const auto &leaves = tree.leaves;
|
||||
const auto &c1_layers = tree.c1_layers;
|
||||
const auto &c2_layers = tree.c2_layers;
|
||||
|
||||
LastChunks<C1, C2> last_chunks;
|
||||
last_chunks.c1_last_chunks.reserve(c1_layers.size());
|
||||
last_chunks.c2_last_chunks.reserve(c2_layers.size());
|
||||
|
||||
// First push the last leaf chunk data into c2 chunks
|
||||
CHECK_AND_ASSERT_THROW_MES(!c2_layers.empty(), "empty curve 2 layers");
|
||||
auto last_leaf_chunk = get_last_leaf_chunk<C2>(c2,
|
||||
leaves,
|
||||
c2_layers[0]);
|
||||
last_chunks.c2_last_chunks.push_back(std::move(last_leaf_chunk));
|
||||
|
||||
// Next parents will be c1
|
||||
bool parent_is_c1 = true;
|
||||
|
||||
// Since we started with c2, the number of c2 layers should be == c1_layers.size() || (c1_layers.size() + 1)
|
||||
const std::size_t num_layers = c2_layers.size();
|
||||
CHECK_AND_ASSERT_THROW_MES(num_layers == c1_layers.size() || num_layers == (c1_layers.size() + 1),
|
||||
"unexpected number of curve layers");
|
||||
|
||||
// If there are no c1 layers, we're done
|
||||
if (c1_layers.empty())
|
||||
return last_chunks;
|
||||
|
||||
// Then get last chunks up until the root
|
||||
for (std::size_t layer_idx = 0; layer_idx < num_layers; ++layer_idx)
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(c1_layers.size() > layer_idx, "missing c1 layer");
|
||||
CHECK_AND_ASSERT_THROW_MES(c2_layers.size() > layer_idx, "missing c2 layer");
|
||||
|
||||
// TODO: template the below if statement into another function
|
||||
if (parent_is_c1)
|
||||
{
|
||||
const Layer<C2> &child_layer = c2_layers[layer_idx];
|
||||
CHECK_AND_ASSERT_THROW_MES(!child_layer.empty(), "child layer is empty");
|
||||
|
||||
const Layer<C1> &parent_layer = c1_layers[layer_idx];
|
||||
CHECK_AND_ASSERT_THROW_MES(!parent_layer.empty(), "parent layer is empty");
|
||||
|
||||
auto last_parent_chunk = get_last_child_layer_chunk<C2, C1>(c2,
|
||||
c1,
|
||||
child_layer,
|
||||
parent_layer);
|
||||
|
||||
last_chunks.c1_last_chunks.push_back(std::move(last_parent_chunk));
|
||||
}
|
||||
else
|
||||
{
|
||||
const Layer<C1> &child_layer = c1_layers[layer_idx];
|
||||
CHECK_AND_ASSERT_THROW_MES(!child_layer.empty(), "child layer is empty");
|
||||
|
||||
const Layer<C2> &parent_layer = c2_layers[layer_idx];
|
||||
CHECK_AND_ASSERT_THROW_MES(!parent_layer.empty(), "parent layer is empty");
|
||||
|
||||
auto last_parent_chunk = get_last_child_layer_chunk<C1, C2>(c1,
|
||||
c2,
|
||||
child_layer,
|
||||
parent_layer);
|
||||
|
||||
last_chunks.c2_last_chunks.push_back(std::move(last_parent_chunk));
|
||||
}
|
||||
|
||||
// Alternate curves every iteration
|
||||
parent_is_c1 = !parent_is_c1;
|
||||
}
|
||||
|
||||
return last_chunks;
|
||||
}
|
||||
|
||||
template <typename C>
|
||||
static void extend_zeroes(const C &curve,
|
||||
const std::size_t num_zeroes,
|
||||
std::vector<typename C::Scalar> &zeroes_inout)
|
||||
{
|
||||
zeroes_inout.reserve(zeroes_inout.size() + num_zeroes);
|
||||
|
||||
for (std::size_t i = 0; i < num_zeroes; ++i)
|
||||
zeroes_inout.emplace_back(curve.zero_scalar());
|
||||
}
|
||||
|
||||
template <typename C>
|
||||
static typename C::Point get_new_parent(const C &curve,
|
||||
const typename C::Chunk &new_children)
|
||||
{
|
||||
// New parent means no prior children, fill priors with 0
|
||||
std::vector<typename C::Scalar> prior_children;
|
||||
extend_zeroes(curve, new_children.size(), prior_children);
|
||||
|
||||
return curve.hash_grow(
|
||||
curve.GENERATORS,
|
||||
curve.HASH_INIT_POINT,
|
||||
0,/*offset*/
|
||||
typename C::Chunk{prior_children.data(), prior_children.size()},
|
||||
new_children
|
||||
);
|
||||
}
|
||||
|
||||
template <typename C>
|
||||
static typename C::Point get_first_leaf_parent(const C &curve,
|
||||
const typename C::Chunk &new_children,
|
||||
const LastChunkData<C> *last_chunk_ptr)
|
||||
{
|
||||
// If no last chunk exists, or if the last chunk is already full, then we can get a new parent
|
||||
if (last_chunk_ptr == nullptr || last_chunk_ptr->child_offset == 0)
|
||||
return get_new_parent<C>(curve, new_children);
|
||||
|
||||
// There won't be any existing children when growing the leaf layer, fill priors with 0
|
||||
std::vector<typename C::Scalar> prior_children;
|
||||
extend_zeroes(curve, new_children.size(), prior_children);
|
||||
|
||||
return curve.hash_grow(
|
||||
curve.GENERATORS,
|
||||
last_chunk_ptr->last_parent,
|
||||
last_chunk_ptr->child_offset,
|
||||
typename C::Chunk{prior_children.data(), prior_children.size()},
|
||||
new_children
|
||||
);
|
||||
}
|
||||
|
||||
template <typename C>
|
||||
static typename C::Point get_first_non_leaf_parent(const C &curve,
|
||||
const typename C::Chunk &new_children,
|
||||
const bool child_layer_last_hash_updated,
|
||||
const LastChunkData<C> *last_chunk_ptr)
|
||||
{
|
||||
// If no last chunk exists, we can get a new parent
|
||||
if (last_chunk_ptr == nullptr)
|
||||
return get_new_parent<C>(curve, new_children);
|
||||
|
||||
std::vector<typename C::Scalar> prior_children;
|
||||
std::size_t offset = last_chunk_ptr->child_offset;
|
||||
|
||||
if (child_layer_last_hash_updated)
|
||||
{
|
||||
// If the last chunk has updated children in it, then we need to get the delta to the old children, and
|
||||
// subtract the offset by 1 since we're updating the prior last hash
|
||||
prior_children.emplace_back(curve.clone(last_chunk_ptr->last_child));
|
||||
offset = offset > 0 ? (offset - 1) : (curve.WIDTH - 1);
|
||||
|
||||
// Extend prior children by zeroes for any additional new children, since they must be new
|
||||
if (new_children.size() > 1)
|
||||
extend_zeroes(curve, new_children.size() - 1, prior_children);
|
||||
}
|
||||
else if (offset > 0)
|
||||
{
|
||||
// If we're updating the parent hash and no children were updated, then we're just adding new children
|
||||
// to the existing last chunk and can fill priors with 0
|
||||
extend_zeroes(curve, new_children.size(), prior_children);
|
||||
}
|
||||
else
|
||||
{
|
||||
// If the last chunk is already full and isn't updated in any way, then we just get a new parent
|
||||
return get_new_parent<C>(curve, new_children);
|
||||
}
|
||||
|
||||
return curve.hash_grow(
|
||||
curve.GENERATORS,
|
||||
last_chunk_ptr->last_parent,
|
||||
offset,
|
||||
typename C::Chunk{prior_children.data(), prior_children.size()},
|
||||
new_children
|
||||
);
|
||||
}
|
||||
|
||||
template<typename C_CHILD, typename C_PARENT>
|
||||
void hash_layer(const C_CHILD &c_child,
|
||||
const C_PARENT &c_parent,
|
||||
const LastChunkData<C_CHILD> *last_child_chunk_ptr,
|
||||
const LastChunkData<C_PARENT> *last_parent_chunk_ptr,
|
||||
const LayerExtension<C_CHILD> &children,
|
||||
LayerExtension<C_PARENT> &parents_out)
|
||||
{
|
||||
parents_out.start_idx = (last_parent_chunk_ptr == nullptr) ? 0 : last_parent_chunk_ptr->parent_layer_size;
|
||||
parents_out.hashes.clear();
|
||||
|
||||
CHECK_AND_ASSERT_THROW_MES(!children.hashes.empty(), "empty children hashes");
|
||||
|
||||
const std::size_t max_chunk_size = c_parent.WIDTH;
|
||||
std::size_t offset = (last_parent_chunk_ptr == nullptr) ? 0 : last_parent_chunk_ptr->child_offset;
|
||||
|
||||
// TODO: work through all edge cases, then try to simplify the approach to avoid them
|
||||
// If we're adding new children to an existing last chunk, then we need to pull the parent start idx back 1
|
||||
// since we'll be updating the existing parent hash of the last chunk
|
||||
if (offset > 0)
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(parents_out.start_idx > 0, "parent start idx should be > 0");
|
||||
--parents_out.start_idx;
|
||||
}
|
||||
|
||||
// If the child layer had its existing last hash updated, then we need to update the existing last parent
|
||||
// hash in this layer as well
|
||||
bool child_layer_last_hash_updated = (last_parent_chunk_ptr == nullptr)
|
||||
? false
|
||||
: last_parent_chunk_ptr->child_layer_size == (children.start_idx + 1);
|
||||
|
||||
if (offset == 0 && child_layer_last_hash_updated)
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(parents_out.start_idx > 0, "parent start idx should be > 0");
|
||||
--parents_out.start_idx;
|
||||
}
|
||||
|
||||
// TODO: clean this up so I don't have to do it twice here and in get_first_non_leaf_parent
|
||||
CHECK_AND_ASSERT_THROW_MES(max_chunk_size > offset, "unexpected offset");
|
||||
if (child_layer_last_hash_updated)
|
||||
offset = offset > 0 ? (offset - 1) : (max_chunk_size - 1);
|
||||
|
||||
// If we're creating a *new* root at the existing root layer, we may need to include the *existing* root when
|
||||
// hashing the *existing* root layer
|
||||
std::vector<typename C_PARENT::Scalar> child_scalars;
|
||||
if (last_child_chunk_ptr != nullptr && last_child_chunk_ptr->parent_layer_size == 1)
|
||||
{
|
||||
MDEBUG("Here I have captured what I want to capture... children.start_idx: " << children.start_idx
|
||||
<< " , children.hashes.size(): " << children.hashes.size() << " , max_chunk_size: " << max_chunk_size);
|
||||
|
||||
// We should be updating the existing root, there shouldn't be a last parent chunk
|
||||
CHECK_AND_ASSERT_THROW_MES(last_parent_chunk_ptr == nullptr, "last parent chunk exists at root");
|
||||
|
||||
// If the children don't already include the existing root at start_idx 0 (they would if the existing
|
||||
// root was updated in the child layer), then we need to add it to the first chunk to be hashed
|
||||
if (children.start_idx > 0)
|
||||
child_scalars.emplace_back(c_child.point_to_cycle_scalar(last_child_chunk_ptr->last_parent));
|
||||
}
|
||||
|
||||
// Convert child points to scalars
|
||||
extend_scalars_from_cycle_points<C_CHILD, C_PARENT>(c_child, children.hashes, child_scalars);
|
||||
|
||||
// See how many children we need to fill up the existing last chunk
|
||||
std::size_t chunk_size = std::min(child_scalars.size(), max_chunk_size - offset);
|
||||
MDEBUG("Starting chunk_size: " << chunk_size << " , child_scalars.size(): " << child_scalars.size() << " , offset: " << offset);
|
||||
|
||||
// Hash chunks of child scalars to create the parent hashes
|
||||
std::size_t chunk_start_idx = 0;
|
||||
while (chunk_start_idx < child_scalars.size())
|
||||
{
|
||||
const auto chunk_start = child_scalars.data() + chunk_start_idx;
|
||||
const typename C_PARENT::Chunk chunk{chunk_start, chunk_size};
|
||||
|
||||
for (const auto &c : chunk)
|
||||
MDEBUG("Hash chunk_start_idx " << chunk_start_idx << " : " << c_parent.to_string(c));
|
||||
|
||||
// Hash the chunk of children
|
||||
typename C_PARENT::Point chunk_hash = chunk_start_idx == 0
|
||||
? get_first_non_leaf_parent<C_PARENT>(c_parent, chunk, child_layer_last_hash_updated, last_parent_chunk_ptr)
|
||||
: get_new_parent<C_PARENT>(c_parent, chunk);
|
||||
|
||||
MDEBUG("Hash chunk_start_idx " << chunk_start_idx << " result: " << c_parent.to_string(chunk_hash));
|
||||
|
||||
// We've got our hash
|
||||
parents_out.hashes.emplace_back(std::move(chunk_hash));
|
||||
|
||||
// Advance to the next chunk
|
||||
chunk_start_idx += chunk_size;
|
||||
|
||||
// Prepare for next loop if there should be one
|
||||
if (chunk_start_idx == child_scalars.size())
|
||||
break;
|
||||
|
||||
// Fill a complete chunk, or add the remaining new children to the last chunk
|
||||
CHECK_AND_ASSERT_THROW_MES(chunk_start_idx < child_scalars.size(), "unexpected chunk start idx");
|
||||
chunk_size = std::min(max_chunk_size, child_scalars.size() - chunk_start_idx);
|
||||
}
|
||||
}
|
||||
|
||||
template<typename C2>
|
||||
void hash_leaf_layer(const C2 &c2,
|
||||
const LastChunkData<C2> *last_chunk_ptr,
|
||||
const Leaves &leaves,
|
||||
LayerExtension<C2> &parents_out)
|
||||
{
|
||||
parents_out.start_idx = (last_chunk_ptr == nullptr) ? 0 : last_chunk_ptr->parent_layer_size;
|
||||
parents_out.hashes.clear();
|
||||
|
||||
if (leaves.tuples.empty())
|
||||
return;
|
||||
|
||||
// Flatten leaves [(O.x, I.x, C.x),(O.x, I.x, C.x),...] -> [scalar, scalar, scalar, scalar, scalar, scalar,...]
|
||||
const std::vector<typename C2::Scalar> children = fcmp::flatten_leaves(leaves.tuples);
|
||||
|
||||
const std::size_t max_chunk_size = LEAF_LAYER_CHUNK_SIZE;
|
||||
const std::size_t offset = (last_chunk_ptr == nullptr) ? 0 : last_chunk_ptr->child_offset;
|
||||
|
||||
// If we're adding new children to an existing last chunk, then we need to pull the parent start idx back 1
|
||||
// since we'll be updating the existing parent hash of the last chunk
|
||||
if (offset > 0)
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(parents_out.start_idx > 0, "parent start idx should be > 0");
|
||||
--parents_out.start_idx;
|
||||
}
|
||||
|
||||
// See how many new children are needed to fill up the existing last chunk
|
||||
CHECK_AND_ASSERT_THROW_MES(max_chunk_size > offset, "unexpected offset");
|
||||
std::size_t chunk_size = std::min(children.size(), max_chunk_size - offset);
|
||||
|
||||
std::size_t chunk_start_idx = 0;
|
||||
while (chunk_start_idx < children.size())
|
||||
{
|
||||
const auto chunk_start = children.data() + chunk_start_idx;
|
||||
const typename C2::Chunk chunk{chunk_start, chunk_size};
|
||||
|
||||
for (const auto &c : chunk)
|
||||
MDEBUG("Hash chunk_start_idx " << chunk_start_idx << " : " << c2.to_string(c));
|
||||
|
||||
// Hash the chunk of children
|
||||
typename C2::Point chunk_hash = chunk_start_idx == 0
|
||||
? get_first_leaf_parent<C2>(c2, chunk, last_chunk_ptr)
|
||||
: get_new_parent<C2>(c2, chunk);
|
||||
|
||||
MDEBUG("Hash chunk_start_idx " << chunk_start_idx << " result: " << c2.to_string(chunk_hash) << " , chunk_size: " << chunk_size);
|
||||
|
||||
// We've got our hash
|
||||
parents_out.hashes.emplace_back(std::move(chunk_hash));
|
||||
|
||||
// Advance to the next chunk
|
||||
chunk_start_idx += chunk_size;
|
||||
|
||||
// Prepare for next loop if there should be one
|
||||
if (chunk_start_idx == children.size())
|
||||
break;
|
||||
|
||||
// Fill a complete chunk, or add the remaining new children to the last chunk
|
||||
CHECK_AND_ASSERT_THROW_MES(chunk_start_idx < children.size(), "unexpected chunk start idx");
|
||||
chunk_size = std::min(max_chunk_size, children.size() - chunk_start_idx);
|
||||
}
|
||||
}
|
||||
|
||||
template<typename C1, typename C2>
|
||||
TreeExtension<C1, C2> get_tree_extension(const LastChunks<C1, C2> &existing_last_chunks,
|
||||
const Leaves &new_leaves,
|
||||
const C1 &c1,
|
||||
const C2 &c2)
|
||||
{
|
||||
TreeExtension<C1, C2> tree_extension;
|
||||
|
||||
if (new_leaves.tuples.empty())
|
||||
return tree_extension;
|
||||
|
||||
const auto &c1_last_chunks = existing_last_chunks.c1_last_chunks;
|
||||
const auto &c2_last_chunks = existing_last_chunks.c2_last_chunks;
|
||||
|
||||
// Set the leaf start idx
|
||||
tree_extension.leaves.start_idx = c2_last_chunks.empty()
|
||||
? 0
|
||||
: c2_last_chunks[0].child_layer_size;
|
||||
|
||||
// Copy the leaves
|
||||
// TODO: don't copy here
|
||||
tree_extension.leaves.tuples.reserve(new_leaves.tuples.size());
|
||||
for (const auto &leaf : new_leaves.tuples)
|
||||
{
|
||||
tree_extension.leaves.tuples.emplace_back(LeafTuple{
|
||||
.O_x = SELENE.clone(leaf.O_x),
|
||||
.I_x = SELENE.clone(leaf.I_x),
|
||||
.C_x = SELENE.clone(leaf.C_x)
|
||||
});
|
||||
}
|
||||
|
||||
auto &c1_layer_extensions_out = tree_extension.c1_layer_extensions;
|
||||
auto &c2_layer_extensions_out = tree_extension.c2_layer_extensions;
|
||||
|
||||
// Hash the leaf layer
|
||||
LayerExtension<C2> parents;
|
||||
hash_leaf_layer<C2>(c2,
|
||||
c2_last_chunks.empty() ? nullptr : &c2_last_chunks[0],
|
||||
new_leaves,
|
||||
parents);
|
||||
|
||||
c2_layer_extensions_out.emplace_back(std::move(parents));
|
||||
|
||||
// Check if we just added the root
|
||||
if (c2_layer_extensions_out.back().hashes.size() == 1 && c2_layer_extensions_out.back().start_idx == 0)
|
||||
return tree_extension;
|
||||
|
||||
// Alternate between hashing c2 children, c1 children, c2, c1, ...
|
||||
bool parent_is_c1 = true;
|
||||
|
||||
std::size_t c1_last_idx = 0;
|
||||
std::size_t c2_last_idx = 0;
|
||||
// TODO: calculate max number of layers it should take to add all leaves (existing leaves + new leaves)
|
||||
while (true)
|
||||
{
|
||||
if (parent_is_c1)
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(c2_layer_extensions_out.size() > c2_last_idx, "missing c2 layer");
|
||||
|
||||
LayerExtension<C1> c1_layer_extension;
|
||||
fcmp::hash_layer<C2, C1>(c2,
|
||||
c1,
|
||||
(c2_last_chunks.size() <= c2_last_idx) ? nullptr : &c2_last_chunks[c2_last_idx],
|
||||
(c1_last_chunks.size() <= c1_last_idx) ? nullptr : &c1_last_chunks[c1_last_idx],
|
||||
c2_layer_extensions_out[c2_last_idx],
|
||||
c1_layer_extension);
|
||||
|
||||
c1_layer_extensions_out.emplace_back(std::move(c1_layer_extension));
|
||||
|
||||
// Check if we just added the root
|
||||
if (c1_layer_extensions_out.back().hashes.size() == 1 && c1_layer_extensions_out.back().start_idx == 0)
|
||||
return tree_extension;
|
||||
|
||||
++c2_last_idx;
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(c1_layer_extensions_out.size() > c1_last_idx, "missing c1 layer");
|
||||
|
||||
LayerExtension<C2> c2_layer_extension;
|
||||
fcmp::hash_layer<C1, C2>(c1,
|
||||
c2,
|
||||
(c1_last_chunks.size() <= c1_last_idx) ? nullptr : &c1_last_chunks[c1_last_idx],
|
||||
(c2_last_chunks.size() <= c2_last_idx) ? nullptr : &c2_last_chunks[c2_last_idx],
|
||||
c1_layer_extensions_out[c1_last_idx],
|
||||
c2_layer_extension);
|
||||
|
||||
c2_layer_extensions_out.emplace_back(std::move(c2_layer_extension));
|
||||
|
||||
// Check if we just added the root
|
||||
if (c2_layer_extensions_out.back().hashes.size() == 1 && c2_layer_extensions_out.back().start_idx == 0)
|
||||
return tree_extension;
|
||||
|
||||
++c1_last_idx;
|
||||
}
|
||||
|
||||
parent_is_c1 = !parent_is_c1;
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: this is only useful for testing, can't fit entire tree in memory
|
||||
template<typename C1, typename C2>
|
||||
void extend_tree(const TreeExtension<C1, C2> &tree_extension,
|
||||
const C1 &c1,
|
||||
const C2 &c2,
|
||||
Tree<C1, C2> &tree_inout)
|
||||
{
|
||||
// Add the leaves
|
||||
CHECK_AND_ASSERT_THROW_MES((tree_inout.leaves.size() * LEAF_TUPLE_SIZE) == tree_extension.leaves.start_idx,
|
||||
"unexpected leaf start idx");
|
||||
|
||||
tree_inout.leaves.reserve(tree_inout.leaves.size() + tree_extension.leaves.tuples.size());
|
||||
for (const auto &leaf : tree_extension.leaves.tuples)
|
||||
{
|
||||
tree_inout.leaves.emplace_back(LeafTuple{
|
||||
.O_x = c2.clone(leaf.O_x),
|
||||
.I_x = c2.clone(leaf.I_x),
|
||||
.C_x = c2.clone(leaf.C_x)
|
||||
});
|
||||
}
|
||||
|
||||
// Add the layers
|
||||
const auto &c2_extensions = tree_extension.c2_layer_extensions;
|
||||
const auto &c1_extensions = tree_extension.c1_layer_extensions;
|
||||
CHECK_AND_ASSERT_THROW_MES(!c2_extensions.empty(), "empty c2 extensions");
|
||||
|
||||
bool use_c2 = true;
|
||||
std::size_t c2_idx = 0;
|
||||
std::size_t c1_idx = 0;
|
||||
for (std::size_t i = 0; i < (c2_extensions.size() + c1_extensions.size()); ++i)
|
||||
{
|
||||
if (use_c2)
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(c2_idx < c2_extensions.size(), "unexpected c2 layer extension");
|
||||
const LayerExtension<C2> &c2_ext = c2_extensions[c2_idx];
|
||||
|
||||
CHECK_AND_ASSERT_THROW_MES(!c2_ext.hashes.empty(), "empty c2 layer extension");
|
||||
|
||||
CHECK_AND_ASSERT_THROW_MES(c2_idx <= tree_inout.c2_layers.size(), "missing c2 layer");
|
||||
if (tree_inout.c2_layers.size() == c2_idx)
|
||||
tree_inout.c2_layers.emplace_back(Layer<C2>{});
|
||||
|
||||
auto &c2_inout = tree_inout.c2_layers[c2_idx];
|
||||
|
||||
const bool started_after_tip = (c2_inout.size() == c2_ext.start_idx);
|
||||
const bool started_at_tip = (c2_inout.size() == (c2_ext.start_idx + 1));
|
||||
CHECK_AND_ASSERT_THROW_MES(started_after_tip || started_at_tip, "unexpected c2 layer start");
|
||||
|
||||
// We updated the last hash
|
||||
if (started_at_tip)
|
||||
c2_inout.back() = c2.clone(c2_ext.hashes.front());
|
||||
|
||||
for (std::size_t i = started_at_tip ? 1 : 0; i < c2_ext.hashes.size(); ++i)
|
||||
c2_inout.emplace_back(c2.clone(c2_ext.hashes[i]));
|
||||
|
||||
++c2_idx;
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(c1_idx < c1_extensions.size(), "unexpected c1 layer extension");
|
||||
const fcmp::LayerExtension<C1> &c1_ext = c1_extensions[c1_idx];
|
||||
|
||||
CHECK_AND_ASSERT_THROW_MES(!c1_ext.hashes.empty(), "empty c1 layer extension");
|
||||
|
||||
CHECK_AND_ASSERT_THROW_MES(c1_idx <= tree_inout.c1_layers.size(), "missing c1 layer");
|
||||
if (tree_inout.c1_layers.size() == c1_idx)
|
||||
tree_inout.c1_layers.emplace_back(Layer<C1>{});
|
||||
|
||||
auto &c1_inout = tree_inout.c1_layers[c1_idx];
|
||||
|
||||
const bool started_after_tip = (c1_inout.size() == c1_ext.start_idx);
|
||||
const bool started_at_tip = (c1_inout.size() == (c1_ext.start_idx + 1));
|
||||
CHECK_AND_ASSERT_THROW_MES(started_after_tip || started_at_tip, "unexpected c1 layer start");
|
||||
|
||||
// We updated the last hash
|
||||
if (started_at_tip)
|
||||
c1_inout.back() = c1.clone(c1_ext.hashes.front());
|
||||
|
||||
for (std::size_t i = started_at_tip ? 1 : 0; i < c1_ext.hashes.size(); ++i)
|
||||
c1_inout.emplace_back(c1.clone(c1_ext.hashes[i]));
|
||||
|
||||
++c1_idx;
|
||||
}
|
||||
|
||||
use_c2 = !use_c2;
|
||||
}
|
||||
|
||||
// existing tree should be valid
|
||||
// TODO: only do this in debug build
|
||||
// assert(validate_tree<C1, C2>(existing_tree_inout, c1, c2));
|
||||
}
|
||||
|
||||
template<typename C_PARENT, typename C_CHILD>
|
||||
bool validate_layer(const C_PARENT &c_parent,
|
||||
const C_CHILD &c_child,
|
||||
const Layer<C_PARENT> &parents,
|
||||
const Layer<C_CHILD> &children)
|
||||
{
|
||||
// Get scalar representation of children
|
||||
std::vector<typename C_PARENT::Scalar> child_scalars;
|
||||
extend_scalars_from_cycle_points<C_CHILD, C_PARENT>(c_child, children, child_scalars);
|
||||
|
||||
const std::size_t max_chunk_size = c_parent.WIDTH;
|
||||
|
||||
// Hash chunk of children scalars, then see if the hash matches up to respective parent
|
||||
std::size_t chunk_start_idx = 0;
|
||||
for (std::size_t i = 0; i < parents.size(); ++i)
|
||||
{
|
||||
CHECK_AND_ASSERT_MES(child_scalars.size() > chunk_start_idx, false, "chunk start too high");
|
||||
const std::size_t chunk_size = std::min(child_scalars.size() - chunk_start_idx, max_chunk_size);
|
||||
CHECK_AND_ASSERT_MES(child_scalars.size() >= (chunk_start_idx + chunk_size), false, "chunk size too large");
|
||||
|
||||
const typename C_PARENT::Point &parent = parents[i];
|
||||
|
||||
const auto chunk_start = child_scalars.data() + chunk_start_idx;
|
||||
const typename C_PARENT::Chunk chunk{chunk_start, chunk_size};
|
||||
|
||||
const typename C_PARENT::Point chunk_hash = get_new_parent(c_parent, chunk);
|
||||
|
||||
const auto actual_bytes = c_parent.to_bytes(parent);
|
||||
const auto expected_bytes = c_parent.to_bytes(chunk_hash);
|
||||
CHECK_AND_ASSERT_MES(actual_bytes == expected_bytes, false, "unexpected hash");
|
||||
|
||||
chunk_start_idx += chunk_size;
|
||||
}
|
||||
|
||||
CHECK_AND_ASSERT_THROW_MES(chunk_start_idx == child_scalars.size(), "unexpected ending chunk start idx");
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
template<typename C1, typename C2>
|
||||
bool validate_tree(const Tree<C1, C2> &tree, const C1 &c1, const C2 &c2)
|
||||
{
|
||||
const auto &leaves = tree.leaves;
|
||||
const auto &c1_layers = tree.c1_layers;
|
||||
const auto &c2_layers = tree.c2_layers;
|
||||
|
||||
CHECK_AND_ASSERT_MES(!leaves.empty(), false, "must have at least 1 leaf in tree");
|
||||
CHECK_AND_ASSERT_MES(!c2_layers.empty(), false, "must have at least 1 c2 layer in tree");
|
||||
CHECK_AND_ASSERT_MES(c2_layers.size() == c1_layers.size() || c2_layers.size() == (c1_layers.size() + 1),
|
||||
false, "unexpected mismatch of c2 and c1 layers");
|
||||
|
||||
// Verify root has 1 member in it
|
||||
const bool c2_is_root = c2_layers.size() > c1_layers.size();
|
||||
CHECK_AND_ASSERT_MES(c2_is_root ? c2_layers.back().size() == 1 : c1_layers.back().size() == 1, false,
|
||||
"root must have 1 member in it");
|
||||
|
||||
// Iterate from root down to layer above leaves, and check hashes match up correctly
|
||||
bool parent_is_c2 = c2_is_root;
|
||||
std::size_t c2_idx = c2_layers.size() - 1;
|
||||
std::size_t c1_idx = c1_layers.empty() ? 0 : (c1_layers.size() - 1);
|
||||
for (std::size_t i = 1; i < (c2_layers.size() + c1_layers.size()); ++i)
|
||||
{
|
||||
if (parent_is_c2)
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(c2_idx < c2_layers.size(), "unexpected c2_idx");
|
||||
CHECK_AND_ASSERT_THROW_MES(c1_idx < c1_layers.size(), "unexpected c1_idx");
|
||||
|
||||
const Layer<C2> &parents = c2_layers[c2_idx];
|
||||
const Layer<C1> &children = c1_layers[c1_idx];
|
||||
|
||||
CHECK_AND_ASSERT_MES(!parents.empty(), false, "no parents at c2_idx " + std::to_string(c2_idx));
|
||||
CHECK_AND_ASSERT_MES(!children.empty(), false, "no children at c1_idx " + std::to_string(c1_idx));
|
||||
|
||||
const bool valid = validate_layer<C2, C1>(c2, c1, parents, children);
|
||||
|
||||
CHECK_AND_ASSERT_MES(valid, false, "failed to validate c2_idx " + std::to_string(c2_idx));
|
||||
|
||||
--c2_idx;
|
||||
}
|
||||
else
|
||||
{
|
||||
CHECK_AND_ASSERT_THROW_MES(c1_idx < c1_layers.size(), "unexpected c1_idx");
|
||||
CHECK_AND_ASSERT_THROW_MES(c2_idx < c2_layers.size(), "unexpected c2_idx");
|
||||
|
||||
const Layer<C1> &parents = c1_layers[c1_idx];
|
||||
const Layer<C2> &children = c2_layers[c2_idx];
|
||||
|
||||
CHECK_AND_ASSERT_MES(!parents.empty(), false, "no parents at c1_idx " + std::to_string(c1_idx));
|
||||
CHECK_AND_ASSERT_MES(!children.empty(), false, "no children at c2_idx " + std::to_string(c2_idx));
|
||||
|
||||
const bool valid = validate_layer<C1, C2>(c1, c2, parents, children);
|
||||
|
||||
CHECK_AND_ASSERT_MES(valid, false, "failed to validate c1_idx " + std::to_string(c1_idx));
|
||||
|
||||
--c1_idx;
|
||||
}
|
||||
|
||||
parent_is_c2 = !parent_is_c2;
|
||||
}
|
||||
|
||||
// // Now validate leaves
|
||||
// return validate_leaves<C2>(c2, layers[0], leaves);
|
||||
return true;
|
||||
}
|
||||
}
|
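[Editor's note] Putting the pieces of this header together, an end-to-end sketch of growing the test-only in-memory tree (illustrative usage, not code from this commit; new_leaf_tuples is a hypothetical vector of tuples built with output_to_leaf_tuple):

  fcmp::Tree<fcmp::Helios, fcmp::Selene> tree;               // test-only: whole tree in memory
  fcmp::LastChunks<fcmp::Helios, fcmp::Selene> last_chunks;  // empty for a brand-new tree
  // for a non-empty tree: last_chunks = fcmp::get_last_chunks(fcmp::HELIOS, fcmp::SELENE, tree);

  fcmp::Leaves new_leaves;
  new_leaves.start_idx = tree.leaves.size() * fcmp::LEAF_TUPLE_SIZE;
  new_leaves.tuples = std::move(new_leaf_tuples);            // hypothetical std::vector<fcmp::LeafTuple>

  const auto tree_extension = fcmp::get_tree_extension(last_chunks, new_leaves, fcmp::HELIOS, fcmp::SELENE);
  fcmp::extend_tree(tree_extension, fcmp::HELIOS, fcmp::SELENE, tree);
  CHECK_AND_ASSERT_THROW_MES(fcmp::validate_tree(tree, fcmp::HELIOS, fcmp::SELENE), "tree validation failed");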
70 src/fcmp/fcmp_rust/CMakeLists.txt (new file)
@@ -0,0 +1,70 @@
|
||||
# Copyright (c) 2016-2024, The Monero Project
|
||||
#
|
||||
# All rights reserved.
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without modification, are
|
||||
# permitted provided that the following conditions are met:
|
||||
#
|
||||
# 1. Redistributions of source code must retain the above copyright notice, this list of
|
||||
# conditions and the following disclaimer.
|
||||
#
|
||||
# 2. Redistributions in binary form must reproduce the above copyright notice, this list
|
||||
# of conditions and the following disclaimer in the documentation and/or other
|
||||
# materials provided with the distribution.
|
||||
#
|
||||
# 3. Neither the name of the copyright holder nor the names of its contributors may be
|
||||
# used to endorse or promote products derived from this software without specific
|
||||
# prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
|
||||
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
|
||||
# THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
|
||||
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
|
||||
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
|
||||
# THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
if (CMAKE_BUILD_TYPE STREQUAL "Debug")
  set(CARGO_CMD cargo build)
  set(TARGET_DIR "debug")
else ()
  set(CARGO_CMD cargo build --release)
  set(TARGET_DIR "release")
endif ()

set(FCMP_RUST_CXX "${CMAKE_CURRENT_BINARY_DIR}/fcmp_rust.cc")
set(FCMP_RUST_HEADER_DIR "${MONERO_GENERATED_HEADERS_DIR}/fcmp_rust")
set(FCMP_RUST_HEADER "${FCMP_RUST_HEADER_DIR}/fcmp_rust.h")
set(CXX_HEADER "${FCMP_RUST_HEADER_DIR}/cxx.h")

# Removing OUTPUT files makes sure custom command runs every time
file(REMOVE_RECURSE "${FCMP_RUST_CXX}")
file(REMOVE_RECURSE "${FCMP_RUST_HEADER_DIR}")
file(MAKE_DIRECTORY "${FCMP_RUST_HEADER_DIR}")

add_custom_command(
  COMMENT "Building rust fcmp lib"
  OUTPUT ${FCMP_RUST_CXX} ${FCMP_RUST_HEADER} ${CXX_HEADER}
  COMMAND CARGO_TARGET_DIR=${CMAKE_CURRENT_BINARY_DIR} ${CARGO_CMD}
  COMMAND cp ${CMAKE_CURRENT_BINARY_DIR}/cxxbridge/fcmp_rust/src/lib.rs.cc ${FCMP_RUST_CXX}
  COMMAND cp ${CMAKE_CURRENT_BINARY_DIR}/cxxbridge/fcmp_rust/src/lib.rs.h ${FCMP_RUST_HEADER}
  COMMAND cp ${CMAKE_CURRENT_BINARY_DIR}/cxxbridge/rust/cxx.h ${CXX_HEADER}
  COMMAND echo "Finished copying fcmp rust targets"
  WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}
  VERBATIM
)

add_custom_target(rust_cxx ALL DEPENDS ${CXX_HEADER})

set(fcmp_rust_sources ${FCMP_RUST_CXX})

monero_find_all_headers(fcmp_rust_headers "${FCMP_RUST_HEADER_DIR}")

monero_add_library(fcmp_rust
  ${fcmp_rust_sources}
  ${fcmp_rust_headers})

set(FCMP_RUST_LIB "${CMAKE_CURRENT_BINARY_DIR}/${TARGET_DIR}/libfcmp_rust.a")
target_link_libraries(fcmp_rust dl ${FCMP_RUST_LIB})
21 src/fcmp/fcmp_rust/Cargo.toml (new file)
@@ -0,0 +1,21 @@
[package]
name = "fcmp_rust"
version = "0.0.0"
edition = "2021"

[lib]
name = "fcmp_rust"
crate-type = ["staticlib"]

[dependencies]
cxx = "1.0"
full-chain-membership-proofs = { path = "../../../external/fcmp-plus-plus/crypto/fcmps" }
ciphersuite = { path = "../../../external/fcmp-plus-plus/crypto/ciphersuite", features = ["helioselene", "ed25519"] }

ec-divisors = { path = "../../../external/fcmp-plus-plus/crypto/divisors", features = ["ed25519"] }
rand_core = { version = "0.6", features = ["getrandom"] }
transcript = { package = "flexible-transcript", path = "../../../external/fcmp-plus-plus/crypto/transcript", features = ["recommended"] }
generalized-bulletproofs = { path = "../../../external/fcmp-plus-plus/crypto/generalized-bulletproofs", features = ["tests"] }

[build-dependencies]
cxx-build = "1.0"
5
src/fcmp/fcmp_rust/build.rs
Normal file
@ -0,0 +1,5 @@
fn main() {
  let _ = cxx_build::bridge("src/lib.rs");

  println!("cargo:rerun-if-changed=src/lib.rs");
}
226
src/fcmp/fcmp_rust/src/lib.rs
Normal file
@ -0,0 +1,226 @@
use rand_core::OsRng;

use std::io;

use full_chain_membership_proofs::tree::hash_grow;

use transcript::RecommendedTranscript;

use ciphersuite::{group::{Group, GroupEncoding, ff::{PrimeField, Field}}, Ciphersuite, Ed25519, Selene, Helios};

use ec_divisors::DivisorCurve;

use generalized_bulletproofs::Generators;

// TODO: lint
#[cxx::bridge]
mod ffi {
  // Rust types and signatures exposed to C++.
  #[namespace = "fcmp_rust"]
  extern "Rust" {
    // TODO: Separate Helios and Selene namespaces
    type HeliosGenerators;
    type HeliosPoint;
    type HeliosScalar;

    type SeleneGenerators;
    type SelenePoint;
    type SeleneScalar;

    fn random_helios_generators() -> Box<HeliosGenerators>;
    fn random_helios_hash_init_point() -> Box<HeliosPoint>;

    fn random_selene_generators() -> Box<SeleneGenerators>;
    fn random_selene_hash_init_point() -> Box<SelenePoint>;

    fn clone_helios_scalar(helios_scalar: &Box<HeliosScalar>) -> Box<HeliosScalar>;
    fn clone_selene_scalar(selene_scalar: &Box<SeleneScalar>) -> Box<SeleneScalar>;
    fn clone_helios_point(helios_point: &Box<HeliosPoint>) -> Box<HeliosPoint>;
    fn clone_selene_point(selene_point: &Box<SelenePoint>) -> Box<SelenePoint>;

    fn helios_scalar_to_bytes(helios_scalar: &Box<HeliosScalar>) -> [u8; 32];
    fn selene_scalar_to_bytes(selene_scalar: &Box<SeleneScalar>) -> [u8; 32];
    fn helios_point_to_bytes(helios_point: &Box<HeliosPoint>) -> [u8; 32];
    fn selene_point_to_bytes(selene_point: &Box<SelenePoint>) -> [u8; 32];

    fn ed25519_point_to_selene_scalar(ed25519_point: &[u8; 32]) -> Box<SeleneScalar>;
    fn selene_point_to_helios_scalar(selene_point: &Box<SelenePoint>) -> Box<HeliosScalar>;
    fn helios_point_to_selene_scalar(helios_point: &Box<HeliosPoint>) -> Box<SeleneScalar>;

    fn helios_zero_scalar() -> Box<HeliosScalar>;
    fn selene_zero_scalar() -> Box<SeleneScalar>;

    pub fn hash_grow_helios(
      helios_generators: &Box<HeliosGenerators>,
      existing_hash: &Box<HeliosPoint>,
      offset: usize,
      prior_children: &[Box<HeliosScalar>],
      new_children: &[Box<HeliosScalar>]
    ) -> Result<Box<HeliosPoint>>;

    pub fn hash_grow_selene(
      selene_generators: &Box<SeleneGenerators>,
      existing_hash: &Box<SelenePoint>,
      offset: usize,
      prior_children: &[Box<SeleneScalar>],
      new_children: &[Box<SeleneScalar>]
    ) -> Result<Box<SelenePoint>>;
  }
}

// TODO: cleaner const usage of generators
// TODO: try to get closer to underlying types
// TODO: maybe don't do both tuple and Box? Just make these all boxes
pub struct HeliosGenerators(Generators<RecommendedTranscript, Helios>);
pub struct HeliosPoint(<Helios as Ciphersuite>::G);
pub struct HeliosScalar(<Helios as Ciphersuite>::F);

pub struct SeleneGenerators(Generators<RecommendedTranscript, Selene>);
pub struct SelenePoint(<Selene as Ciphersuite>::G);
pub struct SeleneScalar(<Selene as Ciphersuite>::F);

#[allow(non_snake_case)]
pub fn random_helios_generators() -> Box<HeliosGenerators> {
  let helios_generators = generalized_bulletproofs::tests::generators::<Helios>(512);
  Box::new(HeliosGenerators(helios_generators))
}

#[allow(non_snake_case)]
pub fn random_selene_generators() -> Box<SeleneGenerators> {
  let selene_generators = generalized_bulletproofs::tests::generators::<Selene>(512);
  Box::new(SeleneGenerators(selene_generators))
}

#[allow(non_snake_case)]
pub fn random_helios_hash_init_point() -> Box<HeliosPoint> {
  let helios_hash_init_point = <Helios as Ciphersuite>::G::random(&mut OsRng);
  dbg!(&helios_hash_init_point);
  Box::new(HeliosPoint(helios_hash_init_point))
}

#[allow(non_snake_case)]
pub fn random_selene_hash_init_point() -> Box<SelenePoint> {
  let selene_hash_init_point = <Selene as Ciphersuite>::G::random(&mut OsRng);
  dbg!(&selene_hash_init_point);
  Box::new(SelenePoint(selene_hash_init_point))
}

// TODO: should be able to use generics
// TODO: shorter names
pub fn clone_helios_scalar(helios_scalar: &Box<HeliosScalar>) -> Box<HeliosScalar> {
  Box::new(HeliosScalar(helios_scalar.0))
}

pub fn clone_selene_scalar(selene_scalar: &Box<SeleneScalar>) -> Box<SeleneScalar> {
  Box::new(SeleneScalar(selene_scalar.0))
}

pub fn clone_helios_point(helios_point: &Box<HeliosPoint>) -> Box<HeliosPoint> {
  Box::new(HeliosPoint(helios_point.0))
}

pub fn clone_selene_point(selene_point: &Box<SelenePoint>) -> Box<SelenePoint> {
  Box::new(SelenePoint(selene_point.0))
}

// TODO: generics
pub fn helios_scalar_to_bytes(helios_scalar: &Box<HeliosScalar>) -> [u8; 32] {
  helios_scalar.0.to_repr()
}

pub fn selene_scalar_to_bytes(selene_scalar: &Box<SeleneScalar>) -> [u8; 32] {
  selene_scalar.0.to_repr()
}

pub fn helios_point_to_bytes(helios_point: &Box<HeliosPoint>) -> [u8; 32] {
  helios_point.0.to_bytes()
}

pub fn selene_point_to_bytes(selene_point: &Box<SelenePoint>) -> [u8; 32] {
  selene_point.0.to_bytes()
}

// Get the x coordinate of the ed25519 point
// TODO: use generics for below logic
pub fn ed25519_point_to_selene_scalar(ed25519_point: &[u8; 32]) -> Box<SeleneScalar> {
  // TODO: unwrap or else error
  let ed25519_point = <Ed25519>::read_G(&mut ed25519_point.as_slice()).unwrap();

  let xy_coords = <Ed25519 as Ciphersuite>::G::to_xy(ed25519_point);
  let x: <Selene as Ciphersuite>::F = xy_coords.0;
  Box::new(SeleneScalar(x))
}

// TODO: use generics for below logic
pub fn selene_point_to_helios_scalar(selene_point: &Box<SelenePoint>) -> Box<HeliosScalar> {
  let xy_coords = <Selene as Ciphersuite>::G::to_xy(selene_point.0);
  let x: <Helios as Ciphersuite>::F = xy_coords.0;
  Box::new(HeliosScalar(x))
}

// TODO: use generics for below logic
pub fn helios_point_to_selene_scalar(helios_point: &Box<HeliosPoint>) -> Box<SeleneScalar> {
  let xy_coords = <Helios as Ciphersuite>::G::to_xy(helios_point.0);
  let x: <Selene as Ciphersuite>::F = xy_coords.0;
  Box::new(SeleneScalar(x))
}

pub fn helios_zero_scalar() -> Box<HeliosScalar> {
  Box::new(HeliosScalar(<Helios as Ciphersuite>::F::ZERO))
}

pub fn selene_zero_scalar() -> Box<SeleneScalar> {
  Box::new(SeleneScalar(<Selene as Ciphersuite>::F::ZERO))
}

// TODO: use generics for curves
pub fn hash_grow_helios(
  helios_generators: &Box<HeliosGenerators>,
  existing_hash: &Box<HeliosPoint>,
  offset: usize,
  prior_children: &[Box<HeliosScalar>],
  new_children: &[Box<HeliosScalar>]
) -> Result<Box<HeliosPoint>, io::Error> {
  let prior_children = prior_children.iter().map(|c| c.0).collect::<Vec<_>>();
  let new_children = new_children.iter().map(|c| c.0).collect::<Vec<_>>();

  let hash = hash_grow(
    &helios_generators.0,
    existing_hash.0,
    offset,
    &prior_children,
    &new_children
  );

  if let Some(hash) = hash {
    Ok(Box::new(HeliosPoint(hash)))
  } else {
    Err(io::Error::new(io::ErrorKind::Other, "failed to grow hash"))
  }
}

// TODO: use generics for curves
pub fn hash_grow_selene(
  selene_generators: &Box<SeleneGenerators>,
  existing_hash: &Box<SelenePoint>,
  offset: usize,
  prior_children: &[Box<SeleneScalar>],
  new_children: &[Box<SeleneScalar>]
) -> Result<Box<SelenePoint>, io::Error> {
  let prior_children = prior_children.iter().map(|c| c.0).collect::<Vec<_>>();
  let new_children = new_children.iter().map(|c| c.0).collect::<Vec<_>>();

  let hash = hash_grow(
    &selene_generators.0,
    existing_hash.0,
    offset,
    &prior_children,
    &new_children
  );

  if let Some(hash) = hash {
    Ok(Box::new(SelenePoint(hash)))
  } else {
    Err(io::Error::new(io::ErrorKind::Other, "failed to grow hash"))
  }
}
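A hypothetical C++ usage sketch of the bridge above, not part of this commit. It assumes the cxx-generated header is reachable as "fcmp_rust/fcmp_rust.h" (per the CMake copy step earlier) and that the translation unit links fcmp_rust; cxx maps Box<T> to rust::Box<T>, &[T] to rust::Slice<const T>, and a Rust Result<T> to a C++ call that throws rust::Error.

// Hypothetical usage sketch of the fcmp_rust bridge; names follow the #[cxx::bridge] above.
#include "fcmp_rust/fcmp_rust.h"

#include <array>
#include <cstdint>

int main()
{
  rust::Box<fcmp_rust::SeleneGenerators> gens = fcmp_rust::random_selene_generators();
  rust::Box<fcmp_rust::SelenePoint> parent = fcmp_rust::random_selene_hash_init_point();

  // The Ed25519 basepoint encoding (0x58 followed by 31 bytes of 0x66) stands in for a
  // real output key; ed25519_point_to_selene_scalar maps it to its Wei25519 x coordinate.
  std::array<std::uint8_t, 32> point_bytes;
  point_bytes.fill(0x66);
  point_bytes[0] = 0x58;

  std::array<rust::Box<fcmp_rust::SeleneScalar>, 1> new_children{
    fcmp_rust::ed25519_point_to_selene_scalar(point_bytes)};

  // Grow an empty chunk by one child; an Err on the Rust side surfaces as rust::Error.
  try
  {
    parent = fcmp_rust::hash_grow_selene(
      gens,
      parent,
      0,
      rust::Slice<const rust::Box<fcmp_rust::SeleneScalar>>(),
      rust::Slice<const rust::Box<fcmp_rust::SeleneScalar>>(new_children.data(), new_children.size()));
  }
  catch (const rust::Error&)
  {
    return 1;
  }
  return 0;
}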
@ -51,6 +51,7 @@ set(unit_tests_sources
  epee_serialization.cpp
  epee_utils.cpp
  expect.cpp
  fcmp_tree.cpp
  json_serialization.cpp
  get_xtype_from_string.cpp
  hashchain.cpp
@ -113,11 +114,13 @@ monero_add_minimal_executable(unit_tests
target_link_libraries(unit_tests
  PRIVATE
    ringct
    crypto
    cryptonote_protocol
    cryptonote_core
    daemon_messages
    daemon_rpc_server
    blockchain_db
    fcmp
    lmdb_lib
    rpc
    net
304
tests/unit_tests/fcmp_tree.cpp
Normal file
@ -0,0 +1,304 @@
// Copyright (c) 2014, The Monero Project
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification, are
// permitted provided that the following conditions are met:
//
// 1. Redistributions of source code must retain the above copyright notice, this list of
//    conditions and the following disclaimer.
//
// 2. Redistributions in binary form must reproduce the above copyright notice, this list
//    of conditions and the following disclaimer in the documentation and/or other
//    materials provided with the distribution.
//
// 3. Neither the name of the copyright holder nor the names of its contributors may be
//    used to endorse or promote products derived from this software without specific
//    prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY
// EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
// THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
// PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
// STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
// THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

#include "gtest/gtest.h"
|
||||
|
||||
#include "fcmp/fcmp.h"
|
||||
#include "misc_log_ex.h"
|
||||
|
||||
#include <cmath>
|
||||
|
||||
static const fcmp::Leaves generate_leaves(const std::size_t num_leaves)
|
||||
{
|
||||
std::vector<fcmp::LeafTuple> tuples;
|
||||
tuples.reserve(num_leaves);
|
||||
|
||||
for (std::size_t i = 0; i < num_leaves; ++i)
|
||||
{
|
||||
// Generate random output tuple
|
||||
crypto::secret_key o,c;
|
||||
crypto::public_key O,C;
|
||||
crypto::generate_keys(O, o, o, false);
|
||||
crypto::generate_keys(C, c, c, false);
|
||||
|
||||
tuples.emplace_back(fcmp::output_to_leaf_tuple(O, C));
|
||||
}
|
||||
|
||||
return fcmp::Leaves{
|
||||
.start_idx = 0,
|
||||
.tuples = std::move(tuples)
|
||||
};
|
||||
}
|
||||
|
||||
static void log_tree_extension(const fcmp::TreeExtension<fcmp::Helios, fcmp::Selene> &tree_extension)
{
  const auto &c1_extensions = tree_extension.c1_layer_extensions;
  const auto &c2_extensions = tree_extension.c2_layer_extensions;

  MDEBUG("Tree extension has " << tree_extension.leaves.tuples.size() << " leaves, "
    << c1_extensions.size() << " helios layers, " << c2_extensions.size() << " selene layers");

  MDEBUG("Leaf start idx: " << tree_extension.leaves.start_idx);
  for (std::size_t i = 0; i < tree_extension.leaves.tuples.size(); ++i)
  {
    const auto &leaf = tree_extension.leaves.tuples[i];

    const auto O_x = fcmp::SELENE.to_string(leaf.O_x);
    const auto I_x = fcmp::SELENE.to_string(leaf.I_x);
    const auto C_x = fcmp::SELENE.to_string(leaf.C_x);

    MDEBUG("Leaf idx " << ((i*fcmp::LEAF_TUPLE_SIZE) + tree_extension.leaves.start_idx) << " : { O_x: " << O_x << " , I_x: " << I_x << " , C_x: " << C_x << " }");
  }

  bool use_c2 = true;
  std::size_t c1_idx = 0;
  std::size_t c2_idx = 0;
  for (std::size_t i = 0; i < (c1_extensions.size() + c2_extensions.size()); ++i)
  {
    if (use_c2)
    {
      CHECK_AND_ASSERT_THROW_MES(c2_idx < c2_extensions.size(), "unexpected c2 layer");

      const fcmp::LayerExtension<fcmp::Selene> &c2_layer = c2_extensions[c2_idx];
      MDEBUG("Selene tree extension start idx: " << c2_layer.start_idx);

      for (std::size_t j = 0; j < c2_layer.hashes.size(); ++j)
        MDEBUG("Hash idx: " << (j + c2_layer.start_idx) << " , hash: " << fcmp::SELENE.to_string(c2_layer.hashes[j]));

      ++c2_idx;
    }
    else
    {
      CHECK_AND_ASSERT_THROW_MES(c1_idx < c1_extensions.size(), "unexpected c1 layer");

      const fcmp::LayerExtension<fcmp::Helios> &c1_layer = c1_extensions[c1_idx];
      MDEBUG("Helios tree extension start idx: " << c1_layer.start_idx);

      for (std::size_t j = 0; j < c1_layer.hashes.size(); ++j)
        MDEBUG("Hash idx: " << (j + c1_layer.start_idx) << " , hash: " << fcmp::HELIOS.to_string(c1_layer.hashes[j]));

      ++c1_idx;
    }

    use_c2 = !use_c2;
  }
}

static void log_tree(const fcmp::Tree<fcmp::Helios, fcmp::Selene> &tree)
{
  MDEBUG("Tree has " << tree.leaves.size() << " leaves, "
    << tree.c1_layers.size() << " helios layers, " << tree.c2_layers.size() << " selene layers");

  for (std::size_t i = 0; i < tree.leaves.size(); ++i)
  {
    const auto &leaf = tree.leaves[i];

    const auto O_x = fcmp::SELENE.to_string(leaf.O_x);
    const auto I_x = fcmp::SELENE.to_string(leaf.I_x);
    const auto C_x = fcmp::SELENE.to_string(leaf.C_x);

    MDEBUG("Leaf idx " << i << " : { O_x: " << O_x << " , I_x: " << I_x << " , C_x: " << C_x << " }");
  }

  bool use_c2 = true;
  std::size_t c1_idx = 0;
  std::size_t c2_idx = 0;
  for (std::size_t i = 0; i < (tree.c1_layers.size() + tree.c2_layers.size()); ++i)
  {
    if (use_c2)
    {
      CHECK_AND_ASSERT_THROW_MES(c2_idx < tree.c2_layers.size(), "unexpected c2 layer");

      const fcmp::Layer<fcmp::Selene> &c2_layer = tree.c2_layers[c2_idx];
      MDEBUG("Selene layer size: " << c2_layer.size() << " , tree layer: " << i);

      for (std::size_t j = 0; j < c2_layer.size(); ++j)
        MDEBUG("Hash idx: " << j << " , hash: " << fcmp::SELENE.to_string(c2_layer[j]));

      ++c2_idx;
    }
    else
    {
      CHECK_AND_ASSERT_THROW_MES(c1_idx < tree.c1_layers.size(), "unexpected c1 layer");

      const fcmp::Layer<fcmp::Helios> &c1_layer = tree.c1_layers[c1_idx];
      MDEBUG("Helios layer size: " << c1_layer.size() << " , tree layer: " << i);

      for (std::size_t j = 0; j < c1_layer.size(); ++j)
        MDEBUG("Hash idx: " << j << " , hash: " << fcmp::HELIOS.to_string(c1_layer[j]));

      ++c1_idx;
    }

    use_c2 = !use_c2;
  }
}

static void log_last_chunks(const fcmp::LastChunks<fcmp::Helios, fcmp::Selene> &last_chunks)
{
  const auto &c1_last_chunks = last_chunks.c1_last_chunks;
  const auto &c2_last_chunks = last_chunks.c2_last_chunks;

  MDEBUG("Total of " << c1_last_chunks.size() << " Helios last chunks and "
    << c2_last_chunks.size() << " Selene last chunks");

  bool use_c2 = true;
  std::size_t c1_idx = 0;
  std::size_t c2_idx = 0;
  for (std::size_t i = 0; i < (c1_last_chunks.size() + c2_last_chunks.size()); ++i)
  {
    if (use_c2)
    {
      CHECK_AND_ASSERT_THROW_MES(c2_idx < c2_last_chunks.size(), "unexpected c2 layer");

      const fcmp::LastChunkData<fcmp::Selene> &last_chunk = c2_last_chunks[c2_idx];

      MDEBUG("child_offset: " << last_chunk.child_offset
        << " , last_child: " << fcmp::SELENE.to_string(last_chunk.last_child)
        << " , last_parent: " << fcmp::SELENE.to_string(last_chunk.last_parent)
        << " , child_layer_size: " << last_chunk.child_layer_size
        << " , parent_layer_size: " << last_chunk.parent_layer_size);

      ++c2_idx;
    }
    else
    {
      CHECK_AND_ASSERT_THROW_MES(c1_idx < c1_last_chunks.size(), "unexpected c1 layer");

      const fcmp::LastChunkData<fcmp::Helios> &last_chunk = c1_last_chunks[c1_idx];

      MDEBUG("child_offset: " << last_chunk.child_offset
        << " , last_child: " << fcmp::HELIOS.to_string(last_chunk.last_child)
        << " , last_parent: " << fcmp::HELIOS.to_string(last_chunk.last_parent)
        << " , child_layer_size: " << last_chunk.child_layer_size
        << " , parent_layer_size: " << last_chunk.parent_layer_size);

      ++c1_idx;
    }

    use_c2 = !use_c2;
  }
}

TEST(fcmp_tree, grow_tree)
{
  // TODO: 1 .. std::pow(fcmp::SELENE.WIDTH, 5)+2
  const std::vector<std::size_t> N_LEAVES{
    1,
    2,
    3,
    fcmp::SELENE.WIDTH - 1,
    fcmp::SELENE.WIDTH,
    fcmp::SELENE.WIDTH + 1,
    (std::size_t)std::pow(fcmp::SELENE.WIDTH, 2) - 1,
    (std::size_t)std::pow(fcmp::SELENE.WIDTH, 2),
    (std::size_t)std::pow(fcmp::SELENE.WIDTH, 2) + 1,
    (std::size_t)std::pow(fcmp::SELENE.WIDTH, 3)
    // (std::size_t)std::pow(fcmp::SELENE.WIDTH, 4),
    // (std::size_t)std::pow(fcmp::SELENE.WIDTH, 5)
  };

  for (const auto &init_leaves : N_LEAVES)
  {
    for (const auto &ext_leaves : N_LEAVES)
    {
      MDEBUG("Adding " << init_leaves << " leaves to tree, then extending by " << ext_leaves << " leaves");

      fcmp::Tree<fcmp::Helios, fcmp::Selene> global_tree;

      // TODO: use a class that's initialized with the curve cycle and don't need to call templated functions with curve instances every time

      // Initially extend global tree by `init_leaves`
      {
        MDEBUG("Adding " << init_leaves << " leaves to tree");

        const auto tree_extension = fcmp::get_tree_extension<fcmp::Helios, fcmp::Selene>(
          fcmp::LastChunks<fcmp::Helios, fcmp::Selene>{},
          generate_leaves(init_leaves),
          fcmp::HELIOS,
          fcmp::SELENE);

        log_tree_extension(tree_extension);

        fcmp::extend_tree<fcmp::Helios, fcmp::Selene>(
          tree_extension,
          fcmp::HELIOS,
          fcmp::SELENE,
          global_tree);

        log_tree(global_tree);

        const bool validated = fcmp::validate_tree<fcmp::Helios, fcmp::Selene>(
          global_tree,
          fcmp::HELIOS,
          fcmp::SELENE);

        ASSERT_TRUE(validated);

        MDEBUG("Successfully added initial " << init_leaves << " leaves to tree");
      }

      // Then extend the global tree again by `ext_leaves`
      {
        MDEBUG("Extending tree by " << ext_leaves << " leaves");

        const fcmp::LastChunks<fcmp::Helios, fcmp::Selene> &last_chunks = fcmp::get_last_chunks<fcmp::Helios, fcmp::Selene>(
          fcmp::HELIOS,
          fcmp::SELENE,
          global_tree);

        log_last_chunks(last_chunks);

        const auto tree_extension = fcmp::get_tree_extension<fcmp::Helios, fcmp::Selene>(
          last_chunks,
          generate_leaves(ext_leaves),
          fcmp::HELIOS,
          fcmp::SELENE);

        log_tree_extension(tree_extension);

        fcmp::extend_tree<fcmp::Helios, fcmp::Selene>(
          tree_extension,
          fcmp::HELIOS,
          fcmp::SELENE,
          global_tree);

        log_tree(global_tree);

        const bool validated = fcmp::validate_tree<fcmp::Helios, fcmp::Selene>(
          global_tree,
          fcmp::HELIOS,
          fcmp::SELENE);

        ASSERT_TRUE(validated);

        MDEBUG("Successfully extended by " << ext_leaves << " leaves");
      }
    }
  }
}