frozen-abi: Fix overflow in digester child depth calculation
parent 14985420fd
commit 193e370186
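
In short: AbiDigester::create_child and create_enum_child become fallible. The child depth is computed with checked_add and an overflow is reported through the new DigestError::ArithmeticOverflow variant rather than via the unchecked `self.depth + 1`; the log-indentation width switches from `*` to saturating_mul; and every call site, including the code generated by the derive macro, propagates the error with `?`. A minimal stand-alone sketch of the core pattern (the `Digester` and `DigestError` below are simplified stand-ins, not the real solana_frozen_abi types):

```rust
// Minimal sketch of the checked-depth pattern this commit introduces. `Digester`
// and this `DigestError` are simplified stand-ins, not the real crate types.
#[derive(Debug, PartialEq)]
enum DigestError {
    ArithmeticOverflow,
}

#[derive(Debug)]
struct Digester {
    depth: usize,
}

impl Digester {
    // Before: `fn create_child(&self) -> Self` with `depth: self.depth + 1`,
    // which can overflow. After: a Result and a checked_add.
    fn create_child(&self) -> Result<Self, DigestError> {
        let depth = self
            .depth
            .checked_add(1)
            .ok_or(DigestError::ArithmeticOverflow)?;
        Ok(Self { depth })
    }
}

fn main() {
    // The common case is unchanged: the child is one level deeper.
    let parent = Digester { depth: 0 };
    assert_eq!(parent.create_child().unwrap().depth, 1);

    // At the (theoretical) maximum depth the overflow becomes a typed error
    // instead of a panic (debug) or a silent wraparound (release).
    let deepest = Digester { depth: usize::MAX };
    assert_eq!(
        deepest.create_child().unwrap_err(),
        DigestError::ArithmeticOverflow
    );
}
```
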
@@ -271,7 +271,7 @@ fn do_derive_abi_enum_visitor(input: ItemEnum) -> TokenStream {
         };
         serialized_variants.extend(quote! {
             #sample_variant;
-            Serialize::serialize(&sample_variant, digester.create_enum_child())?;
+            Serialize::serialize(&sample_variant, digester.create_enum_child()?)?;
         });
     }
 
@@ -284,7 +284,7 @@ fn do_derive_abi_enum_visitor(input: ItemEnum) -> TokenStream {
                 use ::solana_frozen_abi::abi_example::AbiExample;
                 digester.update_with_string(format!("enum {} (variants = {})", enum_name, #variant_count));
                 #serialized_variants
-                Ok(digester.create_child())
+                digester.create_child()
             }
         }
     }).into()
@@ -27,6 +27,8 @@ pub enum DigestError {
     Node(Sstr, Box<DigestError>),
     #[error("leaf error")]
     Leaf(Sstr, Sstr, Box<DigestError>),
+    #[error("arithmetic overflow")]
+    ArithmeticOverflow,
 }
 
 impl SerdeError for DigestError {
@@ -77,22 +79,30 @@ impl AbiDigester {
         }
     }
 
-    pub fn create_child(&self) -> Self {
-        Self {
+    pub fn create_child(&self) -> Result<Self, DigestError> {
+        let depth = self
+            .depth
+            .checked_add(1)
+            .ok_or(DigestError::ArithmeticOverflow)?;
+        Ok(Self {
             data_types: self.data_types.clone(),
-            depth: self.depth + 1,
+            depth,
             for_enum: false,
             opaque_scope: self.opaque_scope.clone(),
-        }
+        })
     }
 
-    pub fn create_enum_child(&self) -> Self {
-        Self {
+    pub fn create_enum_child(&self) -> Result<Self, DigestError> {
+        let depth = self
+            .depth
+            .checked_add(1)
+            .ok_or(DigestError::ArithmeticOverflow)?;
+        Ok(Self {
             data_types: self.data_types.clone(),
-            depth: self.depth + 1,
+            depth,
             for_enum: true,
             opaque_scope: self.opaque_scope.clone(),
-        }
+        })
     }
 
     pub fn digest_data<T: ?Sized + Serialize>(&mut self, value: &T) -> DigestResult {
@@ -120,7 +130,12 @@ impl AbiDigester {
             })
             .collect::<Vec<_>>()
             .join(" ");
-        buf = format!("{:0width$}{}\n", "", buf, width = self.depth * INDENT_WIDTH);
+        buf = format!(
+            "{:0width$}{}\n",
+            "",
+            buf,
+            width = self.depth.saturating_mul(INDENT_WIDTH)
+        );
         info!("updating with: {}", buf.trim_end());
         (*self.data_types.borrow_mut()).push(buf);
     }
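
The indentation arithmetic above gets the same hardening: `self.depth * INDENT_WIDTH` can overflow in principle, while saturating_mul clamps at usize::MAX instead of panicking in debug builds or wrapping in release builds. A tiny self-contained illustration (the INDENT_WIDTH value here is a stand-in, not necessarily the crate's constant):

```rust
fn main() {
    // Stand-in for the crate's INDENT_WIDTH constant; the actual value is an assumption.
    const INDENT_WIDTH: usize = 4;

    // Normal depths behave exactly like plain multiplication.
    let depth: usize = 3;
    assert_eq!(depth.saturating_mul(INDENT_WIDTH), 12);

    // A pathological depth no longer overflows; the product is clamped to usize::MAX.
    assert_eq!(usize::MAX.saturating_mul(INDENT_WIDTH), usize::MAX);
}
```
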
@@ -141,7 +156,7 @@ impl AbiDigester {
 
     fn digest_element<T: ?Sized + Serialize>(&mut self, v: &T) -> Result<(), DigestError> {
         self.update_with_type::<T>("element");
-        self.create_child().digest_data(v).map(|_| ())
+        self.create_child()?.digest_data(v).map(|_| ())
     }
 
     fn digest_named_field<T: ?Sized + Serialize>(
@@ -150,7 +165,7 @@ impl AbiDigester {
         v: &T,
     ) -> Result<(), DigestError> {
         self.update_with_string(format!("field {}: {}", key, type_name::<T>()));
-        self.create_child()
+        self.create_child()?
             .digest_data(v)
             .map(|_| ())
             .map_err(|e| DigestError::wrap_by_str(e, key))
@@ -158,7 +173,7 @@ impl AbiDigester {
 
     fn digest_unnamed_field<T: ?Sized + Serialize>(&mut self, v: &T) -> Result<(), DigestError> {
         self.update_with_type::<T>("field");
-        self.create_child().digest_data(v).map(|_| ())
+        self.create_child()?.digest_data(v).map(|_| ())
     }
 
     #[allow(clippy::unnecessary_wraps)]
@@ -293,12 +308,12 @@ impl Serializer for AbiDigester {
     {
         // emulate the ABI digest for the Option enum; see TestMyOption
         self.update(&["enum Option (variants = 2)"]);
-        let mut variant_digester = self.create_child();
+        let mut variant_digester = self.create_child()?;
 
         variant_digester.update_with_string("variant(0) None (unit)".to_owned());
         variant_digester
             .update_with_string(format!("variant(1) Some({}) (newtype)", type_name::<T>()));
-        variant_digester.create_child().digest_data(v)
+        variant_digester.create_child()?.digest_data(v)
     }
 
     fn serialize_unit_struct(mut self, name: Sstr) -> DigestResult {
@@ -317,7 +332,7 @@ impl Serializer for AbiDigester {
         T: ?Sized + Serialize,
     {
         self.update_with_string(format!("struct {}({}) (newtype)", name, type_name::<T>()));
-        self.create_child()
+        self.create_child()?
             .digest_data(v)
             .map_err(|e| DigestError::wrap_by_str(e, "newtype_struct"))
     }
@@ -339,7 +354,7 @@ impl Serializer for AbiDigester {
             variant,
             type_name::<T>()
         ));
-        self.create_child()
+        self.create_child()?
             .digest_data(v)
             .map_err(|e| DigestError::wrap_by_str(e, "newtype_variant"))
     }
@@ -351,17 +366,17 @@ impl Serializer for AbiDigester {
             "Exactly 1 seq element is needed to generate the ABI digest precisely"
         );
         self.update_with_string(format!("seq (elements = {})", len));
-        Ok(self.create_child())
+        self.create_child()
     }
 
     fn serialize_tuple(mut self, len: usize) -> DigestResult {
         self.update_with_string(format!("tuple (elements = {})", len));
-        Ok(self.create_child())
+        self.create_child()
     }
 
     fn serialize_tuple_struct(mut self, name: Sstr, len: usize) -> DigestResult {
         self.update_with_string(format!("struct {} (fields = {}) (tuple)", name, len));
-        Ok(self.create_child())
+        self.create_child()
     }
 
     fn serialize_tuple_variant(
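
In the Serializer methods above and below, the change is the removal of an `Ok(...)` wrapper rather than the addition of a `?`: these methods return DigestResult, which this diff suggests is a Result over the digester with DigestError as its error type, so the now-fallible create_child() can be returned as-is. A hedged sketch of that typing with stand-in definitions (the exact DigestResult alias is an inference from the diff, not a quote of the crate):

```rust
#[derive(Debug)]
struct AbiDigester;

#[derive(Debug)]
enum DigestError {
    ArithmeticOverflow,
}

// Assumed shape of the alias, inferred from how the diff uses it.
type DigestResult = Result<AbiDigester, DigestError>;

impl AbiDigester {
    // After this commit, create_child already yields Result<Self, DigestError>.
    fn create_child(&self) -> Result<AbiDigester, DigestError> {
        Ok(AbiDigester)
    }

    // Before the commit create_child returned Self, so `Ok(self.create_child())`
    // built a DigestResult. Now the call itself has the right shape and is
    // returned directly; keeping the old Ok(...) would nest a Result inside a
    // Result and no longer type-check.
    fn serialize_tuple_like(&mut self) -> DigestResult {
        self.create_child()
    }
}

fn main() {
    let mut digester = AbiDigester;
    let _child = digester.serialize_tuple_like();
}
```
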
@@ -373,7 +388,7 @@ impl Serializer for AbiDigester {
     ) -> DigestResult {
         self.check_for_enum("tuple_variant", variant)?;
         self.update_with_string(format!("variant({}) {} (fields = {})", i, variant, len));
-        Ok(self.create_child())
+        self.create_child()
     }
 
     fn serialize_map(mut self, len: Option<usize>) -> DigestResult {
@@ -383,12 +398,12 @@ impl Serializer for AbiDigester {
             "Exactly 1 map entry is needed to generate the ABI digest precisely"
         );
         self.update_with_string(format!("map (entries = {})", len));
-        Ok(self.create_child())
+        self.create_child()
     }
 
     fn serialize_struct(mut self, name: Sstr, len: usize) -> DigestResult {
         self.update_with_string(format!("struct {} (fields = {})", name, len));
-        Ok(self.create_child())
+        self.create_child()
     }
 
     fn serialize_struct_variant(
@@ -403,7 +418,7 @@ impl Serializer for AbiDigester {
             "variant({}) struct {} (fields = {})",
             i, variant, len
         ));
-        Ok(self.create_child())
+        self.create_child()
     }
 }
 
@@ -464,12 +479,12 @@ impl SerializeMap for AbiDigester {
 
     fn serialize_key<T: ?Sized + Serialize>(&mut self, key: &T) -> Result<(), DigestError> {
         self.update_with_type::<T>("key");
-        self.create_child().digest_data(key).map(|_| ())
+        self.create_child()?.digest_data(key).map(|_| ())
     }
 
     fn serialize_value<T: ?Sized + Serialize>(&mut self, value: &T) -> Result<(), DigestError> {
         self.update_with_type::<T>("value");
-        self.create_child().digest_data(value).map(|_| ())
+        self.create_child()?.digest_data(value).map(|_| ())
     }
 
     fn end(self) -> DigestResult {
@@ -512,11 +512,11 @@ impl<O: AbiEnumVisitor, E: AbiEnumVisitor> AbiEnumVisitor for Result<O, E> {
 
         digester.update(&["enum Result (variants = 2)"]);
         let variant: Self = Result::Ok(O::example());
-        variant.serialize(digester.create_enum_child())?;
+        variant.serialize(digester.create_enum_child()?)?;
 
         let variant: Self = Result::Err(E::example());
-        variant.serialize(digester.create_enum_child())?;
+        variant.serialize(digester.create_enum_child()?)?;
 
-        Ok(digester.create_child())
+        digester.create_child()
     }
 }