use crate::Config;
use common_primitives::{
msa::MessageSourceId,
schema::SchemaId,
stateful_storage::{PageHash, PageId, PageNonce},
};
use frame_support::pallet_prelude::*;
use frame_system::pallet_prelude::*;
use parity_scale_codec::{Decode, Encode, MaxEncodedLen};
use scale_info::TypeInfo;
use sp_core::bounded::BoundedVec;
use sp_std::{
cmp::*,
collections::btree_map::BTreeMap,
fmt::Debug,
hash::{Hash, Hasher},
prelude::*,
};
use twox_hash::XxHash64;
/// Byte prefix identifying this pallet's storage namespace.
pub const PALLET_STORAGE_PREFIX: &[u8] = b"stateful-storage";
/// Sub-prefix for itemized-page storage entries.
pub const ITEMIZED_STORAGE_PREFIX: &[u8] = b"itemized";
/// Sub-prefix for paginated-page storage entries.
pub const PAGINATED_STORAGE_PREFIX: &[u8] = b"paginated";
/// Storage key for an itemized page: keyed by schema only, so there is a single
/// itemized entry per schema in a given key space.
pub type ItemizedKey = (SchemaId,);
/// Storage key for a paginated page: schema plus page number.
pub type PaginatedKey = (SchemaId, PageId);
/// Prefix key used to address all pages belonging to one schema.
pub type PaginatedPrefixKey = (SchemaId,);
/// A page bounded by the pallet's configured maximum itemized page size.
pub type ItemizedPage<T> = Page<<T as Config>::MaxItemizedPageSizeBytes>;
/// A page bounded by the pallet's configured maximum paginated page size.
pub type PaginatedPage<T> = Page<<T as Config>::MaxPaginatedPageSizeBytes>;
/// Operations for parsing and mutating itemized pages.
pub trait ItemizedOperations<T: Config> {
/// Applies a sequence of add/delete actions to this page and returns the
/// resulting page, or a [`PageError`] if an action is invalid or the result
/// would overflow the page bound.
fn apply_item_actions(
&self,
actions: &[ItemAction<T::MaxItemizedBlobSizeBytes>],
) -> Result<ItemizedPage<T>, PageError>;
/// Parses the page buffer into individual items. `include_header` controls
/// whether each returned item slice retains its leading [`ItemHeader`] bytes.
fn try_parse(&self, include_header: bool) -> Result<ParsedItemPage, PageError>;
}
/// A single mutation to apply to an itemized page.
#[derive(Clone, Encode, Decode, Debug, TypeInfo, MaxEncodedLen, PartialEq)]
#[scale_info(skip_type_params(DataSize))]
#[codec(mel_bound(DataSize: MaxEncodedLen))]
pub enum ItemAction<DataSize: Get<u32> + Clone + sp_std::fmt::Debug + PartialEq> {
/// Append a new item whose payload is bounded by `DataSize`.
Add {
data: BoundedVec<u8, DataSize>,
},
/// Remove the item at the given ordinal position within the parsed page
/// (positions are assigned in parse order; see `try_parse`).
Delete {
index: u16,
},
}
/// Fixed-size header prepended to each item inside an itemized page buffer.
#[derive(Encode, Decode, PartialEq, MaxEncodedLen, Debug)]
pub struct ItemHeader {
/// Length in bytes of the item payload that immediately follows this header.
pub payload_len: u16,
}
/// Errors produced while parsing or mutating a page buffer.
#[derive(Debug, PartialEq)]
pub enum PageError {
/// The page buffer could not be parsed; the message names the failing step.
ErrorParsing(&'static str),
/// An action referenced a missing item index or carried an oversized payload.
InvalidAction(&'static str),
/// An internal counter overflowed (e.g. more than `u16::MAX` items).
ArithmeticOverflow,
/// The resulting page would exceed its bounded maximum size.
PageSizeOverflow,
}
/// Signed payload authorizing itemized actions on behalf of an MSA (V1).
#[derive(Encode, Decode, TypeInfo, MaxEncodedLen, PartialEq, RuntimeDebugNoBound, Clone)]
#[scale_info(skip_type_params(T))]
pub struct ItemizedSignaturePayload<T: Config> {
/// Message source account the actions apply to.
#[codec(compact)]
pub msa_id: MessageSourceId,
/// Schema identifying the itemized data being modified.
#[codec(compact)]
pub schema_id: SchemaId,
/// Hash of the page state the signer expects to modify — presumably checked
/// against the current page hash to detect concurrent updates; confirm at call site.
#[codec(compact)]
pub target_hash: PageHash,
/// Block number bound for the payload's validity — presumably the signature
/// is rejected after this block; confirm against the verification logic.
pub expiration: BlockNumberFor<T>,
/// Bounded list of add/delete actions to apply.
pub actions: BoundedVec<
ItemAction<<T as Config>::MaxItemizedBlobSizeBytes>,
<T as Config>::MaxItemizedActionsCount,
>,
}
/// Signed payload authorizing itemized actions (V2 — omits `msa_id`, which is
/// presumably resolved from the signature elsewhere; confirm at call site).
#[derive(Encode, Decode, TypeInfo, MaxEncodedLen, PartialEq, RuntimeDebugNoBound, Clone)]
#[scale_info(skip_type_params(T))]
pub struct ItemizedSignaturePayloadV2<T: Config> {
/// Schema identifying the itemized data being modified.
#[codec(compact)]
pub schema_id: SchemaId,
/// Hash of the page state the signer expects to modify — presumably an
/// optimistic-concurrency guard; confirm at call site.
#[codec(compact)]
pub target_hash: PageHash,
/// Block number bound for the payload's validity — confirm semantics against
/// the verification logic.
pub expiration: BlockNumberFor<T>,
/// Bounded list of add/delete actions to apply.
pub actions: BoundedVec<
ItemAction<<T as Config>::MaxItemizedBlobSizeBytes>,
<T as Config>::MaxItemizedActionsCount,
>,
}
/// Signed payload authorizing an upsert of one paginated page (V1).
#[derive(Encode, Decode, TypeInfo, MaxEncodedLen, PartialEq, RuntimeDebugNoBound, Clone)]
#[scale_info(skip_type_params(T))]
pub struct PaginatedUpsertSignaturePayload<T: Config> {
/// Message source account owning the page.
#[codec(compact)]
pub msa_id: MessageSourceId,
/// Schema identifying the paginated data.
#[codec(compact)]
pub schema_id: SchemaId,
/// Page number being upserted.
#[codec(compact)]
pub page_id: PageId,
/// Hash of the existing page state — presumably an optimistic-concurrency
/// guard; confirm at call site.
#[codec(compact)]
pub target_hash: PageHash,
/// Block number bound for the payload's validity — confirm semantics against
/// the verification logic.
pub expiration: BlockNumberFor<T>,
/// New page content, bounded by the configured maximum paginated page size.
pub payload: BoundedVec<u8, <T as Config>::MaxPaginatedPageSizeBytes>,
}
/// Signed payload authorizing an upsert of one paginated page (V2 — omits
/// `msa_id`, presumably resolved from the signature elsewhere; confirm).
#[derive(Encode, Decode, TypeInfo, MaxEncodedLen, PartialEq, RuntimeDebugNoBound, Clone)]
#[scale_info(skip_type_params(T))]
pub struct PaginatedUpsertSignaturePayloadV2<T: Config> {
/// Schema identifying the paginated data.
#[codec(compact)]
pub schema_id: SchemaId,
/// Page number being upserted.
#[codec(compact)]
pub page_id: PageId,
/// Hash of the existing page state — presumably an optimistic-concurrency
/// guard; confirm at call site.
#[codec(compact)]
pub target_hash: PageHash,
/// Block number bound for the payload's validity — confirm semantics against
/// the verification logic.
pub expiration: BlockNumberFor<T>,
/// New page content, bounded by the configured maximum paginated page size.
pub payload: BoundedVec<u8, <T as Config>::MaxPaginatedPageSizeBytes>,
}
/// Signed payload authorizing deletion of one paginated page (V1).
#[derive(Encode, Decode, TypeInfo, MaxEncodedLen, PartialEq, RuntimeDebugNoBound, Clone)]
#[scale_info(skip_type_params(T))]
pub struct PaginatedDeleteSignaturePayload<T: Config> {
/// Message source account owning the page.
#[codec(compact)]
pub msa_id: MessageSourceId,
/// Schema identifying the paginated data.
#[codec(compact)]
pub schema_id: SchemaId,
/// Page number to delete.
#[codec(compact)]
pub page_id: PageId,
/// Hash of the existing page state — presumably an optimistic-concurrency
/// guard; confirm at call site.
#[codec(compact)]
pub target_hash: PageHash,
/// Block number bound for the payload's validity — confirm semantics against
/// the verification logic.
pub expiration: BlockNumberFor<T>,
}
/// Signed payload authorizing deletion of one paginated page (V2 — omits
/// `msa_id`, presumably resolved from the signature elsewhere; confirm).
#[derive(Encode, Decode, TypeInfo, MaxEncodedLen, PartialEq, RuntimeDebugNoBound, Clone)]
#[scale_info(skip_type_params(T))]
pub struct PaginatedDeleteSignaturePayloadV2<T: Config> {
/// Schema identifying the paginated data.
#[codec(compact)]
pub schema_id: SchemaId,
/// Page number to delete.
#[codec(compact)]
pub page_id: PageId,
/// Hash of the existing page state — presumably an optimistic-concurrency
/// guard; confirm at call site.
#[codec(compact)]
pub target_hash: PageHash,
/// Block number bound for the payload's validity — confirm semantics against
/// the verification logic.
pub expiration: BlockNumberFor<T>,
}
/// A generic page of stateful data: a nonce followed by a size-bounded payload.
#[derive(Encode, Decode, TypeInfo, MaxEncodedLen, Debug, Default)]
#[scale_info(skip_type_params(PageDataSize))]
#[codec(mel_bound(PageDataSize: MaxEncodedLen))]
pub struct Page<PageDataSize: Get<u32>> {
/// Page nonce — presumably advanced on each update by the pallet; confirm
/// against the calling logic (conversions here only set the default).
pub nonce: PageNonce,
/// Raw page payload, bounded by `PageDataSize`.
pub data: BoundedVec<u8, PageDataSize>,
}
/// Result of parsing an itemized page buffer into its individual items.
#[derive(Debug, PartialEq)]
pub struct ParsedItemPage<'a> {
/// Total size in bytes of the parsed page data.
pub page_size: usize,
/// Items keyed by their ordinal parse position, each a slice borrowed from
/// the page buffer (with or without header, depending on the parse mode).
pub items: BTreeMap<u16, &'a [u8]>,
}
impl<PageDataSize: Get<u32>> Page<PageDataSize> {
    /// Returns `true` when the page carries no payload bytes.
    pub fn is_empty(&self) -> bool {
        self.data.len() == 0
    }

    /// Computes the page's 32-bit fingerprint.
    ///
    /// An empty page always hashes to `PageHash::default()`. Otherwise the
    /// nonce and data are fed through `XxHash64` (seed 0) and the most
    /// significant four big-endian bytes of the 64-bit digest form the result.
    pub fn get_hash(&self) -> PageHash {
        if !self.is_empty() {
            let mut hasher = XxHash64::with_seed(0);
            self.hash(&mut hasher);
            let digest = hasher.finish().to_be_bytes();
            let truncated: [u8; 4] = digest[..4].try_into().expect("incorrect hash size");
            return PageHash::from_be_bytes(truncated)
        }
        PageHash::default()
    }
}
impl<PageDataSize: Get<u32>> Hash for Page<PageDataSize> {
    /// Feeds the SCALE-encoded nonce, then the raw payload bytes, into `hasher`.
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        let nonce_bytes = self.nonce.encode();
        hasher.write(nonce_bytes.as_slice());
        hasher.write(&self.data[..]);
    }
}
impl<PageDataSize: Get<u32>> PartialEq for Page<PageDataSize> {
    /// Two pages are equal when both their nonce and their payload match.
    fn eq(&self, rhs: &Self) -> bool {
        self.nonce == rhs.nonce && self.data == rhs.data
    }
}
impl<PageDataSize: Get<u32>> From<BoundedVec<u8, PageDataSize>> for Page<PageDataSize> {
    /// Wraps an already-bounded payload in a `Page`, starting from the default nonce.
    fn from(data: BoundedVec<u8, PageDataSize>) -> Self {
        Self { data, nonce: PageNonce::default() }
    }
}
impl<PageDataSize: Get<u32>> TryFrom<Vec<u8>> for Page<PageDataSize> {
    type Error = ();

    /// Decodes a raw buffer into a `Page`: a SCALE-encoded `PageNonce` header
    /// followed by the page payload.
    ///
    /// # Errors
    /// Returns `Err(())` when the buffer is too short to contain a nonce, the
    /// nonce fails to decode, or the remaining payload exceeds `PageDataSize`.
    fn try_from(data: Vec<u8>) -> Result<Self, Self::Error> {
        let nonce_len = PageNonce::max_encoded_len();
        // BUGFIX: the previous `&data[..nonce_len]` slicing panicked on buffers
        // shorter than the nonce encoding (e.g. an empty `Vec`); a fallible
        // conversion must return `Err` instead, so use checked range indexing.
        let nonce_bytes = data.get(..nonce_len).ok_or(())?;
        let nonce: PageNonce = PageNonce::decode(&mut &nonce_bytes[..]).map_err(|_| ())?;
        // Everything after the nonce is the page payload; `try_from` enforces
        // the `PageDataSize` bound. Indexing is safe: length was checked above.
        let bounded: BoundedVec<u8, PageDataSize> =
            BoundedVec::try_from(data[nonce_len..].to_vec()).map_err(|_| ())?;
        Ok(Self { nonce, data: bounded })
    }
}
impl<T: Config> ItemizedOperations<T> for ItemizedPage<T> {
/// Applies a sequence of `Add`/`Delete` actions and returns the rebuilt page.
///
/// Surviving items (those not deleted) keep their original relative order;
/// all newly added items are appended after them. `Delete` indices refer to
/// the item's ordinal position in the *current* page as produced by
/// `try_parse` — not a stable identifier.
///
/// NOTE(review): the returned page gets `PageNonce::default()` via the `From`
/// conversion; nonce management appears to be the caller's responsibility —
/// confirm against the pallet's update logic.
fn apply_item_actions(
&self,
actions: &[ItemAction<T::MaxItemizedBlobSizeBytes>],
) -> Result<Self, PageError> {
// Parse with headers included so surviving items can be copied verbatim
// (header + payload) into the rebuilt buffer.
let mut parsed = ItemizedOperations::<T>::try_parse(self, true)?;
let mut updated_page_buffer = Vec::with_capacity(parsed.page_size);
// New items are staged separately and appended after all survivors.
let mut add_buffer = Vec::new();
for action in actions {
match action {
ItemAction::Delete { index } => {
// Deleting a non-existent (or already-deleted) index is an error.
ensure!(
parsed.items.contains_key(&index),
PageError::InvalidAction("item index is invalid")
);
parsed.items.remove(&index);
},
ItemAction::Add { data } => {
// Payload length must fit the u16 header field.
let header = ItemHeader {
payload_len: data
.len()
.try_into()
.map_err(|_| PageError::InvalidAction("invalid payload size"))?,
};
add_buffer.extend_from_slice(&header.encode()[..]);
add_buffer.extend_from_slice(&data[..]);
},
}
}
// BTreeMap iteration is ordered by index, preserving original item order.
for (_, slice) in parsed.items.iter() {
updated_page_buffer.extend_from_slice(slice);
}
updated_page_buffer.append(&mut add_buffer);
// Re-bound the buffer; overflow of the page size limit is a hard error.
Ok(ItemizedPage::<T>::from(
BoundedVec::try_from(updated_page_buffer).map_err(|_| PageError::PageSizeOverflow)?,
))
}
/// Walks the page buffer item by item: each item is an `ItemHeader` followed
/// by `payload_len` bytes. Items are keyed by their ordinal position (0, 1, …)
/// in parse order. With `include_header == false` the returned slices cover
/// only the payload bytes.
fn try_parse(&self, include_header: bool) -> Result<ParsedItemPage, PageError> {
let mut count = 0u16;
let mut items = BTreeMap::new();
let mut offset = 0;
while offset < self.data.len() {
// There must be room for a full header before decoding it.
ensure!(
offset + ItemHeader::max_encoded_len() <= self.data.len(),
PageError::ErrorParsing("wrong header size")
);
let header = <ItemHeader>::decode(&mut &self.data[offset..])
.map_err(|_| PageError::ErrorParsing("decoding header"))?;
let item_total_length = ItemHeader::max_encoded_len() + header.payload_len as usize;
// The declared payload must not run past the end of the buffer.
ensure!(
offset + item_total_length <= self.data.len(),
PageError::ErrorParsing("wrong payload size")
);
items.insert(
count,
match include_header {
true => &self.data[offset..(offset + item_total_length)],
false =>
&self.data
[(offset + ItemHeader::max_encoded_len())..(offset + item_total_length)],
},
);
offset += item_total_length;
// Guard against more than u16::MAX items rather than silently wrapping.
count = count.checked_add(1).ok_or(PageError::ArithmeticOverflow)?;
}
Ok(ParsedItemPage { page_size: self.data.len(), items })
}
}