[pull] master from paritytech:master #353

Merged · 1 commit · Mar 21, 2025
2 changes: 1 addition & 1 deletion .github/workflows/ci.yml
@@ -41,7 +41,7 @@ jobs:
- uses: actions-rs/toolchain@v1
with:
profile: minimal
-toolchain: 1.79.0
+toolchain: 1.81.0
override: true

- name: Install Clang (Ubuntu)
3 changes: 3 additions & 0 deletions bounded-collections/CHANGELOG.md
@@ -4,6 +4,9 @@ The format is based on [Keep a Changelog].

[Keep a Changelog]: http://keepachangelog.com/en/1.0.0/

## [0.2.4] - 2025-03-20
- Implement DecodeWithMemTracking for BoundedBTreeMap [#906](https://github.com/paritytech/parity-common/pull/906)

## [0.2.3] - 2025-02-11
- Implement DecodeWithMemTracking for some structs [#897](https://github.com/paritytech/parity-common/pull/897)

2 changes: 1 addition & 1 deletion bounded-collections/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "bounded-collections"
version = "0.2.3"
version = "0.2.4"
description = "Bounded types and their supporting traits"
readme = "README.md"
rust-version = "1.79.0"
126 changes: 117 additions & 9 deletions bounded-collections/src/bounded_btree_map.rs
@@ -19,7 +19,7 @@

use crate::{Get, TryCollect};
use alloc::collections::BTreeMap;
-use codec::{Compact, Decode, Encode, MaxEncodedLen};
+use codec::{Compact, Decode, DecodeWithMemTracking, Encode, MaxEncodedLen};
use core::{borrow::Borrow, marker::PhantomData, ops::Deref};
#[cfg(feature = "serde")]
use serde::{
@@ -99,22 +99,62 @@ where
}
}

// Struct which allows prepending an already-read compact length back onto an input.
pub(crate) struct PrependCompactInput<'a, I> {
encoded_len: &'a [u8],
read: usize,
inner: &'a mut I,
}

impl<'a, I: codec::Input> codec::Input for PrependCompactInput<'a, I> {
fn remaining_len(&mut self) -> Result<Option<usize>, codec::Error> {
let remaining_compact = self.encoded_len.len().saturating_sub(self.read);
Ok(self.inner.remaining_len()?.map(|len| len.saturating_add(remaining_compact)))
}

fn read(&mut self, into: &mut [u8]) -> Result<(), codec::Error> {
if into.is_empty() {
return Ok(());
}

let remaining_compact = self.encoded_len.len().saturating_sub(self.read);
if remaining_compact > 0 {
let to_read = into.len().min(remaining_compact);
into[..to_read].copy_from_slice(&self.encoded_len[self.read..][..to_read]);
self.read += to_read;

if to_read < into.len() {
// Buffer not full, keep reading the inner.
self.inner.read(&mut into[to_read..])
} else {
// Buffer was filled by the compact.
Ok(())
}
} else {
// Prepended compact has been read, just read from inner.
self.inner.read(into)
}
}
}
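
For orientation (not part of this diff): SCALE's compact encoding stores a small length such as 3 in a single byte (the value shifted left by two bits), so `remaining_len` above reports the unread compact bytes plus whatever the wrapped input still holds. A minimal sketch, mirroring the unit tests added further down:

```rust
use codec::{Compact, Encode, Input};

fn sketch_remaining_len() {
    let encoded_len = Compact(3u32).encode();
    assert_eq!(encoded_len, [12u8]); // single-byte mode: 3 << 2

    let inner = [2u8, 3, 4];
    let mut input =
        PrependCompactInput { encoded_len: encoded_len.as_ref(), read: 0, inner: &mut &inner[..] };
    // One unread compact byte plus three bytes left in the inner input.
    assert_eq!(input.remaining_len(), Ok(Some(4)));
}
```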

impl<K, V, S> Decode for BoundedBTreeMap<K, V, S>
where
K: Decode + Ord,
V: Decode,
S: Get<u32>,
{
fn decode<I: codec::Input>(input: &mut I) -> Result<Self, codec::Error> {
-// Same as the underlying implementation for `Decode` on `BTreeMap`, except we fail early if
-// the len is too big.
-let len: u32 = <Compact<u32>>::decode(input)?.into();
-if len > S::get() {
+// Fail early if the len is too big. This is a compact u32 which we will later put back.
+let compact = <Compact<u32>>::decode(input)?;
+if compact.0 > S::get() {
return Err("BoundedBTreeMap exceeds its limit".into());
}
-input.descend_ref()?;
-let inner = Result::from_iter((0..len).map(|_| Decode::decode(input)))?;
-input.ascend_ref();
+// Reconstruct the original input by prepending the length we just read, then delegate the decoding to BTreeMap.
+let inner = BTreeMap::decode(&mut PrependCompactInput {
+    encoded_len: compact.encode().as_ref(),
+    read: 0,
+    inner: input,
+})?;
Ok(Self(inner, PhantomData))
}
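
Taken together (not part of the diff): the rewritten `decode` still rejects an over-long length before decoding a single key/value pair, and anything within the bound now goes through `BTreeMap`'s own `Decode` implementation. A minimal sketch of the fail-early behaviour, using the crate's `ConstU32` bound type:

```rust
use alloc::collections::BTreeMap;
use codec::{Decode, Encode};
use crate::ConstU32;

fn sketch_fail_early() {
    // Ten entries, but the bound only allows seven.
    let oversized: BTreeMap<u32, ()> = (0..10u32).map(|k| (k, ())).collect();
    let encoded = oversized.encode();
    // The compact length prefix (10) exceeds `S::get()` (7), so decoding errors out
    // before any entry is read from the input.
    assert!(BoundedBTreeMap::<u32, (), ConstU32<7>>::decode(&mut &encoded[..]).is_err());
}
```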

@@ -123,6 +163,15 @@ where
}
}

impl<K, V, S> DecodeWithMemTracking for BoundedBTreeMap<K, V, S>
where
K: DecodeWithMemTracking + Ord,
V: DecodeWithMemTracking,
S: Get<u32>,
BoundedBTreeMap<K, V, S>: Decode,
{
}
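
The impl above is an empty marker on top of `Decode`; the `BoundedBTreeMap<K, V, S>: Decode` bound does the real work. A hedged sketch of what it buys downstream, where the `decode_tracked` helper is hypothetical and only illustrates the generic bound:

```rust
use codec::{Decode, DecodeWithMemTracking};

// Hypothetical helper, not from this diff or the codec API: any type that is
// `DecodeWithMemTracking` (now including `BoundedBTreeMap`) can be passed here.
fn decode_tracked<T: DecodeWithMemTracking>(bytes: &mut &[u8]) -> Result<T, codec::Error> {
    // Assumes `DecodeWithMemTracking: Decode`, consistent with the empty marker impl above.
    T::decode(bytes)
}
```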

impl<K, V, S> BoundedBTreeMap<K, V, S>
where
S: Get<u32>,
@@ -479,7 +528,7 @@ mod test {
use super::*;
use crate::ConstU32;
use alloc::{vec, vec::Vec};
-use codec::CompactLen;
+use codec::{CompactLen, Input};

fn map_from_keys<K>(keys: &[K]) -> BTreeMap<K, ()>
where
@@ -504,6 +553,14 @@
assert_eq!(b.encode(), m.encode());
}

#[test]
fn encode_then_decode_gives_original_map() {
let b = boundedmap_from_keys::<u32, ConstU32<7>>(&[1, 2, 3, 4, 5, 6]);
let b_encode_decode = BoundedBTreeMap::<u32, (), ConstU32<7>>::decode(&mut &b.encode()[..]).unwrap();

assert_eq!(b_encode_decode, b);
}

#[test]
fn try_insert_works() {
let mut bounded = boundedmap_from_keys::<u32, ConstU32<4>>(&[1, 2, 3]);
@@ -721,6 +778,57 @@ mod test {
assert_eq!(Ok(b2), b1.try_map(|(_, v)| (v as u16).checked_mul(100_u16).ok_or("overflow")));
}

#[test]
fn prepend_compact_input_works() {
let encoded_len = Compact(3u32).encode();
let inner = [2, 3, 4];
let mut input = PrependCompactInput { encoded_len: encoded_len.as_ref(), read: 0, inner: &mut &inner[..] };
assert_eq!(input.remaining_len(), Ok(Some(4)));

// Passing an empty buffer should leave input unchanged.
let mut empty_buf = [];
assert_eq!(input.read(&mut empty_buf), Ok(()));
assert_eq!(input.remaining_len(), Ok(Some(4)));
assert_eq!(input.read, 0);

// Passing a correctly-sized buffer will read correctly.
let mut buf = [0; 4];
assert_eq!(input.read(&mut buf), Ok(()));
assert_eq!(buf[0], encoded_len[0]);
assert_eq!(buf[1..], inner[..]);
// And the bookkeeping agrees.
assert_eq!(input.remaining_len(), Ok(Some(0)));
assert_eq!(input.read, encoded_len.len());

// And we can't read more.
assert!(input.read(&mut buf).is_err());
}

#[test]
fn prepend_compact_input_incremental_read_works() {
let encoded_len = Compact(3u32).encode();
let inner = [2, 3, 4];
let mut input = PrependCompactInput { encoded_len: encoded_len.as_ref(), read: 0, inner: &mut &inner[..] };
assert_eq!(input.remaining_len(), Ok(Some(4)));

// The compact is only the first byte - ensure a buffer longer than that is also filled from the inner input.
let mut buf = [0u8; 2];
assert_eq!(input.read(&mut buf), Ok(()));
assert_eq!(buf[0], encoded_len[0]);
assert_eq!(buf[1], inner[0]);
assert_eq!(input.remaining_len(), Ok(Some(2)));
assert_eq!(input.read, encoded_len.len());

// Check the last two bytes are read correctly.
assert_eq!(input.read(&mut buf), Ok(()));
assert_eq!(buf[..], inner[1..]);
assert_eq!(input.remaining_len(), Ok(Some(0)));
assert_eq!(input.read, encoded_len.len());

// And we can't read more.
assert!(input.read(&mut buf).is_err());
}

// Just a test that structs containing `BoundedBTreeMap` can derive `Hash`. (This was broken
// when it was deriving `Hash`).
#[test]