expose and test hash_trim from rust lib

j-berman 2024-06-05 19:39:43 -07:00
parent ae89fddc00
commit e8af7090b0
5 changed files with 181 additions and 1 deletion

View File

@@ -116,11 +116,19 @@ CResult<HeliosPoint> hash_grow_helios(HeliosPoint existing_hash,
                                      HeliosScalar first_child_after_offset,
                                      HeliosScalarSlice new_children);

CResult<HeliosPoint> hash_trim_helios(HeliosPoint existing_hash,
                                      uintptr_t offset,
                                      HeliosScalarSlice children);

CResult<SelenePoint> hash_grow_selene(SelenePoint existing_hash,
                                      uintptr_t offset,
                                      SeleneScalar first_child_after_offset,
                                      SeleneScalarSlice new_children);

CResult<SelenePoint> hash_trim_selene(SelenePoint existing_hash,
                                      uintptr_t offset,
                                      SeleneScalarSlice children);
} // extern "C"
}

View File

@@ -17,7 +17,7 @@ use transcript::RecommendedTranscript;
use generalized_bulletproofs::Generators;
use ec_divisors::DivisorCurve;
use full_chain_membership_proofs::tree::hash_grow;
use full_chain_membership_proofs::tree::{hash_grow, hash_trim};
// TODO: Use a macro to de-duplicate some of this code
@@ -173,6 +173,29 @@ pub extern "C" fn hash_grow_helios(
    }
}

#[no_mangle]
pub extern "C" fn hash_trim_helios(
    existing_hash: HeliosPoint,
    offset: usize,
    children: HeliosScalarSlice,
) -> CResult<HeliosPoint, io::Error> {
    let hash = hash_trim(
        helios_generators(),
        existing_hash,
        offset,
        children.into(),
    );
    if let Some(hash) = hash {
        CResult::ok(hash)
    } else {
        CResult::err(
            HeliosPoint::identity(),
            io::Error::new(io::ErrorKind::Other, "failed to trim hash"),
        )
    }
}

#[no_mangle]
pub extern "C" fn hash_grow_selene(
    existing_hash: SelenePoint,
@@ -197,3 +220,26 @@ pub extern "C" fn hash_grow_selene(
        )
    }
}

#[no_mangle]
pub extern "C" fn hash_trim_selene(
    existing_hash: SelenePoint,
    offset: usize,
    children: SeleneScalarSlice,
) -> CResult<SelenePoint, io::Error> {
    let hash = hash_trim(
        selene_generators(),
        existing_hash,
        offset,
        children.into(),
    );
    if let Some(hash) = hash {
        CResult::ok(hash)
    } else {
        CResult::err(
            SelenePoint::identity(),
            io::Error::new(io::ErrorKind::Other, "failed to trim hash"),
        )
    }
}

View File

@@ -62,6 +62,21 @@ Helios::Point Helios::hash_grow(
    return res.value;
}
//----------------------------------------------------------------------------------------------------------------------
Helios::Point Helios::hash_trim(
    const Helios::Point &existing_hash,
    const std::size_t offset,
    const Helios::Chunk &children) const
{
    fcmp_rust::CResult<Helios::Point> res = fcmp_rust::hash_trim_helios(
        existing_hash,
        offset,
        children);
    if (res.err != 0) {
        throw std::runtime_error("failed to hash trim");
    }
    return res.value;
}
//----------------------------------------------------------------------------------------------------------------------
Selene::Point Selene::hash_grow(
    const Selene::Point &existing_hash,
    const std::size_t offset,
@@ -79,6 +94,21 @@ Selene::Point Selene::hash_grow(
    return res.value;
}
//----------------------------------------------------------------------------------------------------------------------
Selene::Point Selene::hash_trim(
    const Selene::Point &existing_hash,
    const std::size_t offset,
    const Selene::Chunk &children) const
{
    fcmp_rust::CResult<Selene::Point> res = fcmp_rust::hash_trim_selene(
        existing_hash,
        offset,
        children);
    if (res.err != 0) {
        throw std::runtime_error("failed to hash trim");
    }
    return res.value;
}
//----------------------------------------------------------------------------------------------------------------------
Helios::Scalar Helios::zero_scalar() const
{
    return fcmp_rust::helios_zero_scalar();

View File

@@ -83,6 +83,11 @@ public:
        const typename C::Scalar &first_child_after_offset,
        const typename C::Chunk &new_children) const = 0;

    virtual typename C::Point hash_trim(
        const typename C::Point &existing_hash,
        const std::size_t offset,
        const typename C::Chunk &children) const = 0;

    virtual typename C::Scalar zero_scalar() const = 0;
    virtual std::array<uint8_t, 32UL> to_bytes(const typename C::Scalar &scalar) const = 0;
@@ -122,6 +127,11 @@ public:
        const Scalar &first_child_after_offset,
        const Chunk &new_children) const override;

    Point hash_trim(
        const Point &existing_hash,
        const std::size_t offset,
        const Chunk &children) const override;

    Scalar zero_scalar() const override;
    std::array<uint8_t, 32UL> to_bytes(const Scalar &scalar) const override;
@@ -156,6 +166,11 @@ public:
        const Scalar &first_child_after_offset,
        const Chunk &new_children) const override;

    Point hash_trim(
        const Point &existing_hash,
        const std::size_t offset,
        const Chunk &children) const override;

    Scalar zero_scalar() const override;
    std::array<uint8_t, 32UL> to_bytes(const Scalar &scalar) const override;

View File

@@ -788,3 +788,84 @@ TEST(curve_trees, trim_tree)
        }
    }
}
//----------------------------------------------------------------------------------------------------------------------
// Make sure the result of hash_trim is the same as the equivalent hash_grow excluding the trimmed children
TEST(curve_trees, hash_trim)
{
    Helios helios;
    Selene selene;
    auto curve_trees = CurveTreesV1(
        helios,
        selene,
        HELIOS_CHUNK_WIDTH,
        SELENE_CHUNK_WIDTH);

    // Selene
    // Generate 3 random leaf tuples
    const std::size_t NUM_LEAF_TUPLES = 3;
    const std::size_t NUM_LEAVES = NUM_LEAF_TUPLES * CurveTreesV1::LEAF_TUPLE_SIZE;
    const auto grow_leaves = generate_random_leaves(curve_trees, NUM_LEAF_TUPLES);
    const auto grow_children = curve_trees.flatten_leaves(grow_leaves);
    const auto &grow_chunk = Selene::Chunk{grow_children.data(), grow_children.size()};

    // Hash the leaves
    const auto init_grow_result = curve_trees.m_c2.hash_grow(
        /*existing_hash*/            curve_trees.m_c2.m_hash_init_point,
        /*offset*/                   0,
        /*first_child_after_offset*/ curve_trees.m_c2.zero_scalar(),
        /*children*/                 grow_chunk);

    // Trim the initial result
    const std::size_t trim_offset = NUM_LEAVES - CurveTreesV1::LEAF_TUPLE_SIZE;
    const auto &trimmed_child = Selene::Chunk{grow_children.data() + trim_offset, CurveTreesV1::LEAF_TUPLE_SIZE};
    const auto trim_result = curve_trees.m_c2.hash_trim(
        init_grow_result,
        trim_offset,
        trimmed_child);
    const auto trim_res_bytes = curve_trees.m_c2.to_bytes(trim_result);

    // Now compare to calling hash_grow with the remaining children, excluding the trimmed child
    const auto &remaining_children = Selene::Chunk{grow_children.data(), trim_offset};
    const auto remaining_children_hash = curve_trees.m_c2.hash_grow(
        /*existing_hash*/            curve_trees.m_c2.m_hash_init_point,
        /*offset*/                   0,
        /*first_child_after_offset*/ curve_trees.m_c2.zero_scalar(),
        /*children*/                 remaining_children);
    const auto grow_res_bytes = curve_trees.m_c2.to_bytes(remaining_children_hash);

    ASSERT_EQ(trim_res_bytes, grow_res_bytes);

    // Helios
    // Get 2 helios scalars
    std::vector<Helios::Scalar> grow_helios_scalars;
    fcmp::tower_cycle::extend_scalars_from_cycle_points<Selene, Helios>(curve_trees.m_c2,
        {init_grow_result, trim_result},
        grow_helios_scalars);
    const auto &grow_helios_chunk = Helios::Chunk{grow_helios_scalars.data(), grow_helios_scalars.size()};

    // Get the initial hash of the 2 helios scalars
    const auto helios_grow_result = curve_trees.m_c1.hash_grow(
        /*existing_hash*/            curve_trees.m_c1.m_hash_init_point,
        /*offset*/                   0,
        /*first_child_after_offset*/ curve_trees.m_c1.zero_scalar(),
        /*children*/                 grow_helios_chunk);

    // Trim the initial result by 1 child
    const auto &trimmed_helios_child = Helios::Chunk{grow_helios_scalars.data() + 1, 1};
    const auto trim_helios_result = curve_trees.m_c1.hash_trim(
        helios_grow_result,
        1,
        trimmed_helios_child);
    const auto trim_helios_res_bytes = curve_trees.m_c1.to_bytes(trim_helios_result);

    // Now compare to calling hash_grow with the remaining children, excluding the trimmed child
    const auto &remaining_helios_children = Helios::Chunk{grow_helios_scalars.data(), 1};
    const auto remaining_helios_children_hash = curve_trees.m_c1.hash_grow(
        /*existing_hash*/            curve_trees.m_c1.m_hash_init_point,
        /*offset*/                   0,
        /*first_child_after_offset*/ curve_trees.m_c1.zero_scalar(),
        /*children*/                 remaining_helios_children);
    const auto grow_helios_res_bytes = curve_trees.m_c1.to_bytes(remaining_helios_children_hash);

    ASSERT_EQ(trim_helios_res_bytes, grow_helios_res_bytes);
}
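
The property the test pins down can be stated compactly: trimming children off an existing parent hash must land on the same point as growing a fresh hash over only the remaining children. Below is a minimal sketch of that check written against the curve interface extended in this commit (hash_grow, hash_trim, zero_scalar, to_bytes, m_hash_init_point); the helper name trim_matches_grow and its parameters are illustrative and not part of the commit.

#include <cstddef>
#include <vector>

// Sketch only: C is one of the concrete curve classes (Helios or Selene);
// trim_matches_grow, curve, children, and trim_n are illustrative names.
template <typename C>
static bool trim_matches_grow(const C &curve,
    const std::vector<typename C::Scalar> &children,
    const std::size_t trim_n)
{
    const std::size_t remaining = children.size() - trim_n;

    // Grow a parent hash over all children, then trim the last trim_n of them.
    const auto full_hash = curve.hash_grow(
        curve.m_hash_init_point,
        0,
        curve.zero_scalar(),
        typename C::Chunk{children.data(), children.size()});
    const auto trimmed_hash = curve.hash_trim(
        full_hash,
        remaining,
        typename C::Chunk{children.data() + remaining, trim_n});

    // Grow a parent hash over only the remaining children from scratch.
    const auto regrown_hash = curve.hash_grow(
        curve.m_hash_init_point,
        0,
        curve.zero_scalar(),
        typename C::Chunk{children.data(), remaining});

    // Both paths should serialize to the same bytes.
    return curve.to_bytes(trimmed_hash) == curve.to_bytes(regrown_hash);
}

With the fixtures above, something like trim_matches_grow(curve_trees.m_c2, grow_children, CurveTreesV1::LEAF_TUPLE_SIZE) would exercise essentially the same Selene path as the first half of the test.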