1use crate::Config;
3use alloc::boxed::Box;
4use common_primitives::{
5 node::EIP712Encode,
6 schema::SchemaId,
7 stateful_storage::{PageHash, PageId, PageNonce},
8};
9use frame_support::pallet_prelude::*;
10use frame_system::pallet_prelude::*;
11use lazy_static::lazy_static;
12use parity_scale_codec::{Decode, Encode, MaxEncodedLen};
13use scale_info::TypeInfo;
14use sp_core::bounded::BoundedVec;
15extern crate alloc;
16use alloc::{collections::btree_map::BTreeMap, vec::Vec};
17use core::{
18 cmp::*,
19 fmt::Debug,
20 hash::{Hash, Hasher},
21};
22use sp_core::U256;
23
24use common_primitives::{signatures::get_eip712_encoding_prefix, utils::to_abi_compatible_number};
25use twox_hash::XxHash64;
26
/// Page size used when migrating storage in batches.
pub const MIGRATION_PAGE_SIZE: u32 = 20;
/// Current on-chain storage version for this pallet.
pub const STATEFUL_STORAGE_VERSION: StorageVersion = StorageVersion::new(1);
/// Storage prefix identifying this pallet's stateful-storage keys.
pub const PALLET_STORAGE_PREFIX: &[u8] = b"stateful-storage";
/// Storage prefix for itemized pages.
pub const ITEMIZED_STORAGE_PREFIX: &[u8] = b"itemized";
/// Storage prefix for paginated pages.
pub const PAGINATED_STORAGE_PREFIX: &[u8] = b"paginated";

/// Key for an itemized page: the schema id alone.
pub type ItemizedKey = (SchemaId,);
/// Full key for a paginated page: schema id plus page id.
pub type PaginatedKey = (SchemaId, PageId);
/// Prefix key used to address all paginated pages under one schema.
pub type PaginatedPrefixKey = (SchemaId,);
/// A page bounded by the configured maximum itemized page size.
pub type ItemizedPage<T> = Page<<T as Config>::MaxItemizedPageSizeBytes>;
/// A page bounded by the configured maximum paginated page size.
pub type PaginatedPage<T> = Page<<T as Config>::MaxPaginatedPageSizeBytes>;
48
/// Operations supported on an itemized storage page.
pub trait ItemizedOperations<T: Config> {
	/// Applies the given add/delete actions to this page and returns the
	/// resulting page; `self` is not mutated.
	fn apply_item_actions(
		&self,
		actions: &[ItemAction<T::MaxItemizedBlobSizeBytes>],
	) -> Result<ItemizedPage<T>, PageError>;

	/// Parses the page buffer into indexed item slices. When `include_header`
	/// is true each slice keeps its `ItemHeader` bytes; otherwise only the
	/// payload bytes are returned.
	fn try_parse(&self, include_header: bool) -> Result<ParsedItemPage, PageError>;
}
/// An action applicable to an itemized storage page.
#[derive(
	Clone, Encode, Decode, DecodeWithMemTracking, Debug, TypeInfo, MaxEncodedLen, PartialEq,
)]
#[scale_info(skip_type_params(DataSize))]
#[codec(mel_bound(DataSize: MaxEncodedLen))]
pub enum ItemAction<DataSize: Get<u32> + Clone + core::fmt::Debug + PartialEq> {
	/// Append a new item to the page.
	Add {
		/// Item payload, bounded by `DataSize`.
		data: BoundedVec<u8, DataSize>,
	},
	/// Remove an existing item from the page.
	Delete {
		/// Index of the item to remove, as assigned during page parsing.
		index: u16,
	},
}
78
/// Header preceding each item inside an itemized page buffer.
#[derive(Encode, Decode, PartialEq, MaxEncodedLen, Debug)]
pub struct ItemHeader {
	/// Length in bytes of the item payload that follows this header.
	pub payload_len: u16,
}
86
/// Errors produced while parsing or mutating a page.
#[derive(Debug, PartialEq)]
pub enum PageError {
	/// The page buffer could not be parsed; carries a static reason string.
	ErrorParsing(&'static str),
	/// The requested action is invalid for the current page contents.
	InvalidAction(&'static str),
	/// An arithmetic operation overflowed (e.g. item count exceeded `u16`).
	ArithmeticOverflow,
	/// The resulting page would exceed its maximum allowed size.
	PageSizeOverflow,
}
99
/// Payload authorizing a signed itemized-storage update.
#[derive(
	Encode,
	Decode,
	DecodeWithMemTracking,
	TypeInfo,
	MaxEncodedLen,
	PartialEq,
	RuntimeDebugNoBound,
	Clone,
)]
#[scale_info(skip_type_params(T))]
pub struct ItemizedSignaturePayloadV2<T: Config> {
	/// Schema id of the itemized storage being updated.
	#[codec(compact)]
	pub schema_id: SchemaId,

	/// Hash of the page state the signer expects to act on
	/// (presumably a stale-write guard — verify against the extrinsic).
	#[codec(compact)]
	pub target_hash: PageHash,

	/// Block number after which this signed payload is no longer valid.
	pub expiration: BlockNumberFor<T>,

	/// Actions to apply, bounded by the configured maximum action count.
	pub actions: BoundedVec<
		ItemAction<<T as Config>::MaxItemizedBlobSizeBytes>,
		<T as Config>::MaxItemizedActionsCount,
	>,
}
132
impl<T: Config> EIP712Encode for ItemizedSignaturePayloadV2<T> {
	/// Produces the EIP-712 encoding of this payload: the domain-separator
	/// prefix followed by the keccak-256 struct hash of all fields. These are
	/// the bytes an Ethereum-style wallet hashes and signs.
	fn encode_eip_712(&self, chain_id: u32) -> Box<[u8]> {
		lazy_static! {
			// Hash of the primary type signature, with the nested ItemAction
			// type definition appended per the EIP-712 encoding rules.
			static ref MAIN_TYPE_HASH: [u8; 32] =
				sp_io::hashing::keccak_256(b"ItemizedSignaturePayloadV2(uint16 schemaId,uint32 targetHash,uint32 expiration,ItemAction[] actions)ItemAction(string actionType,bytes data,uint16 index)");

			// Hash of the nested ItemAction type signature.
			static ref SUB_TYPE_HASH: [u8; 32] =
				sp_io::hashing::keccak_256(b"ItemAction(string actionType,bytes data,uint16 index)");

			// Hashes of the `actionType` string discriminators.
			static ref ITEM_ACTION_ADD: [u8; 32] = sp_io::hashing::keccak_256(b"Add");
			static ref ITEM_ACTION_DELETE: [u8; 32] = sp_io::hashing::keccak_256(b"Delete");

			// keccak of the empty byte string — stands in for Delete's unused `data` field.
			static ref EMPTY_BYTES_HASH: [u8; 32] = sp_io::hashing::keccak_256([].as_slice());
		}
		// NOTE(review): the verifying-contract address looks like a fixed
		// placeholder shared by all payloads in this file — confirm intent.
		let prefix_domain_separator: Box<[u8]> =
			get_eip712_encoding_prefix("0xcccccccccccccccccccccccccccccccccccccccc", chain_id);
		// Scalar fields are ABI-encoded as 32-byte words.
		let coded_schema_id = to_abi_compatible_number(self.schema_id);
		let coded_target_hash = to_abi_compatible_number(self.target_hash);
		let expiration: U256 = self.expiration.into();
		let coded_expiration = to_abi_compatible_number(expiration.as_u128());
		// An EIP-712 array encodes as keccak of the concatenated struct hashes
		// of its elements. Each ItemAction hashes (typeHash, actionType, data,
		// index), using a zero word / empty-bytes hash for the field a given
		// variant does not use.
		let coded_actions = {
			let values: Vec<u8> = self
				.actions
				.iter()
				.flat_map(|a| match a {
					ItemAction::Add { data } => sp_io::hashing::keccak_256(
						&[
							SUB_TYPE_HASH.as_slice(),
							ITEM_ACTION_ADD.as_slice(),
							&sp_io::hashing::keccak_256(data.as_slice()),
							[0u8; 32].as_slice(),
						]
						.concat(),
					),
					ItemAction::Delete { index } => sp_io::hashing::keccak_256(
						&[
							SUB_TYPE_HASH.as_slice(),
							ITEM_ACTION_DELETE.as_slice(),
							EMPTY_BYTES_HASH.as_slice(),
							to_abi_compatible_number(*index).as_slice(),
						]
						.concat(),
					),
				})
				.collect();
			sp_io::hashing::keccak_256(&values)
		};
		// Struct hash = keccak(typeHash || encoded fields in declaration order).
		let message = sp_io::hashing::keccak_256(
			&[
				MAIN_TYPE_HASH.as_slice(),
				&coded_schema_id,
				&coded_target_hash,
				&coded_expiration,
				&coded_actions,
			]
			.concat(),
		);
		let combined = [prefix_domain_separator.as_ref(), &message].concat();
		combined.into_boxed_slice()
	}
}
196
/// Payload authorizing a signed upsert of a paginated-storage page.
#[derive(
	Encode,
	Decode,
	DecodeWithMemTracking,
	TypeInfo,
	MaxEncodedLen,
	PartialEq,
	RuntimeDebugNoBound,
	Clone,
)]
#[scale_info(skip_type_params(T))]
pub struct PaginatedUpsertSignaturePayloadV2<T: Config> {
	/// Schema id of the paginated storage being updated.
	#[codec(compact)]
	pub schema_id: SchemaId,

	/// Id of the page being upserted.
	#[codec(compact)]
	pub page_id: PageId,

	/// Hash of the page state the signer expects to act on
	/// (presumably a stale-write guard — verify against the extrinsic).
	#[codec(compact)]
	pub target_hash: PageHash,

	/// Block number after which this signed payload is no longer valid.
	pub expiration: BlockNumberFor<T>,

	/// New page contents, bounded by the configured maximum page size.
	pub payload: BoundedVec<u8, <T as Config>::MaxPaginatedPageSizeBytes>,
}
230
231impl<T: Config> EIP712Encode for PaginatedUpsertSignaturePayloadV2<T> {
232 fn encode_eip_712(&self, chain_id: u32) -> Box<[u8]> {
233 lazy_static! {
234 static ref MAIN_TYPE_HASH: [u8; 32] =
236 sp_io::hashing::keccak_256(b"PaginatedUpsertSignaturePayloadV2(uint16 schemaId,uint16 pageId,uint32 targetHash,uint32 expiration,bytes payload)");
237 }
238 let prefix_domain_separator: Box<[u8]> =
240 get_eip712_encoding_prefix("0xcccccccccccccccccccccccccccccccccccccccc", chain_id);
241 let coded_schema_id = to_abi_compatible_number(self.schema_id);
242 let coded_page_id = to_abi_compatible_number(self.page_id);
243 let coded_target_hash = to_abi_compatible_number(self.target_hash);
244 let expiration: U256 = self.expiration.into();
245 let coded_expiration = to_abi_compatible_number(expiration.as_u128());
246 let coded_payload = sp_io::hashing::keccak_256(self.payload.as_slice());
247 let message = sp_io::hashing::keccak_256(
248 &[
249 MAIN_TYPE_HASH.as_slice(),
250 &coded_schema_id,
251 &coded_page_id,
252 &coded_target_hash,
253 &coded_expiration,
254 &coded_payload,
255 ]
256 .concat(),
257 );
258 let combined = [prefix_domain_separator.as_ref(), &message].concat();
259 combined.into_boxed_slice()
260 }
261}
262
/// Payload authorizing a signed deletion of a paginated-storage page.
#[derive(
	Encode,
	Decode,
	DecodeWithMemTracking,
	TypeInfo,
	MaxEncodedLen,
	PartialEq,
	RuntimeDebugNoBound,
	Clone,
)]
#[scale_info(skip_type_params(T))]
pub struct PaginatedDeleteSignaturePayloadV2<T: Config> {
	/// Schema id of the paginated storage being updated.
	#[codec(compact)]
	pub schema_id: SchemaId,

	/// Id of the page being deleted.
	#[codec(compact)]
	pub page_id: PageId,

	/// Hash of the page state the signer expects to act on
	/// (presumably a stale-write guard — verify against the extrinsic).
	#[codec(compact)]
	pub target_hash: PageHash,

	/// Block number after which this signed payload is no longer valid.
	pub expiration: BlockNumberFor<T>,
}
293
294impl<T: Config> EIP712Encode for PaginatedDeleteSignaturePayloadV2<T> {
295 fn encode_eip_712(&self, chain_id: u32) -> Box<[u8]> {
296 lazy_static! {
297 static ref MAIN_TYPE_HASH: [u8; 32] =
299 sp_io::hashing::keccak_256(b"PaginatedDeleteSignaturePayloadV2(uint16 schemaId,uint16 pageId,uint32 targetHash,uint32 expiration)");
300 }
301 let prefix_domain_separator: Box<[u8]> =
303 get_eip712_encoding_prefix("0xcccccccccccccccccccccccccccccccccccccccc", chain_id);
304 let coded_schema_id = to_abi_compatible_number(self.schema_id);
305 let coded_page_id = to_abi_compatible_number(self.page_id);
306 let coded_target_hash = to_abi_compatible_number(self.target_hash);
307 let expiration: U256 = self.expiration.into();
308 let coded_expiration = to_abi_compatible_number(expiration.as_u128());
309 let message = sp_io::hashing::keccak_256(
310 &[
311 MAIN_TYPE_HASH.as_slice(),
312 &coded_schema_id,
313 &coded_page_id,
314 &coded_target_hash,
315 &coded_expiration,
316 ]
317 .concat(),
318 );
319 let combined = [prefix_domain_separator.as_ref(), &message].concat();
320 combined.into_boxed_slice()
321 }
322}
323
/// A generic page of data with a nonce, bounded to `PageDataSize` bytes.
#[derive(Encode, Decode, TypeInfo, MaxEncodedLen, Debug, Default)]
#[scale_info(skip_type_params(PageDataSize))]
#[codec(mel_bound(PageDataSize: MaxEncodedLen))]
pub struct Page<PageDataSize: Get<u32>> {
	/// Nonce stored alongside the data; included in the page hash.
	pub nonce: PageNonce,
	/// Raw page bytes, bounded by `PageDataSize`.
	pub data: BoundedVec<u8, PageDataSize>,
}
336
/// Borrowed view of an itemized page after parsing its buffer.
#[derive(Debug, PartialEq)]
pub struct ParsedItemPage<'a> {
	/// Total size in bytes of the parsed page buffer.
	pub page_size: usize,
	/// Item slices keyed by their parse-order index, borrowed from the page.
	pub items: BTreeMap<u16, &'a [u8]>,
}
345
346impl<PageDataSize: Get<u32>> Page<PageDataSize> {
347 pub fn is_empty(&self) -> bool {
349 self.data.is_empty()
350 }
351
352 pub fn get_hash(&self) -> PageHash {
354 if self.is_empty() {
355 return PageHash::default();
356 }
357 let mut hasher = XxHash64::with_seed(0);
358 self.hash(&mut hasher);
359 let value_bytes: [u8; 4] =
360 hasher.finish().to_be_bytes()[..4].try_into().expect("incorrect hash size");
361 PageHash::from_be_bytes(value_bytes)
362 }
363}
364
365impl<PageDataSize: Get<u32>> Hash for Page<PageDataSize> {
367 fn hash<H: Hasher>(&self, state: &mut H) {
368 state.write(&self.nonce.encode());
369 state.write(&self.data[..]);
370 }
371}
372
373impl<PageDataSize: Get<u32>> PartialEq for Page<PageDataSize> {
375 fn eq(&self, other: &Self) -> bool {
376 self.nonce.eq(&other.nonce) && self.data.eq(&other.data)
377 }
378}
379
380impl<PageDataSize: Get<u32>> From<BoundedVec<u8, PageDataSize>> for Page<PageDataSize> {
383 fn from(bounded: BoundedVec<u8, PageDataSize>) -> Self {
384 Self { nonce: PageNonce::default(), data: bounded }
385 }
386}
387
388impl<PageDataSize: Get<u32>> TryFrom<Vec<u8>> for Page<PageDataSize> {
391 type Error = ();
392
393 fn try_from(data: Vec<u8>) -> Result<Self, Self::Error> {
394 let nonce: PageNonce =
395 PageNonce::decode(&mut &data[..PageNonce::max_encoded_len()]).map_err(|_| ())?;
396 let bounded: BoundedVec<u8, PageDataSize> =
397 BoundedVec::try_from(data[PageNonce::max_encoded_len()..].to_vec()).map_err(|_| ())?;
398 Ok(Self { nonce, data: bounded })
399 }
400}
401
impl<T: Config> ItemizedOperations<T> for ItemizedPage<T> {
	/// Applies all add/delete actions to this page and returns the new page.
	///
	/// Deletes are resolved against the parsed items first; all adds are then
	/// appended (with their headers) after the surviving items. Fails with
	/// `InvalidAction` for an unknown delete index or an oversized payload,
	/// and `PageSizeOverflow` when the rebuilt page exceeds its size bound.
	fn apply_item_actions(
		&self,
		actions: &[ItemAction<T::MaxItemizedBlobSizeBytes>],
	) -> Result<Self, PageError> {
		// Parse with headers retained so surviving items can be copied verbatim.
		let mut parsed = ItemizedOperations::<T>::try_parse(self, true)?;

		let mut updated_page_buffer = Vec::with_capacity(parsed.page_size);
		let mut add_buffer = Vec::new();

		for action in actions {
			match action {
				ItemAction::Delete { index } => {
					ensure!(
						parsed.items.contains_key(index),
						PageError::InvalidAction("item index is invalid")
					);
					parsed.items.remove(index);
				},
				ItemAction::Add { data } => {
					let header = ItemHeader {
						// The payload length must fit in the u16 header field.
						payload_len: data
							.len()
							.try_into()
							.map_err(|_| PageError::InvalidAction("invalid payload size"))?,
					};
					add_buffer.extend_from_slice(&header.encode()[..]);
					add_buffer.extend_from_slice(&data[..]);
				},
			}
		}

		// Rebuild the page: surviving items in index order, then new items.
		for (_, slice) in parsed.items.iter() {
			updated_page_buffer.extend_from_slice(slice);
		}
		updated_page_buffer.append(&mut add_buffer);

		Ok(ItemizedPage::<T>::from(
			BoundedVec::try_from(updated_page_buffer).map_err(|_| PageError::PageSizeOverflow)?,
		))
	}

	/// Parses the page buffer into `(index, slice)` items.
	///
	/// Walks the buffer as a sequence of `ItemHeader` + payload records. When
	/// `include_header` is true each returned slice includes its header bytes,
	/// otherwise only the payload. Fails with `ErrorParsing` on a truncated
	/// header or payload, and `ArithmeticOverflow` if the item count would
	/// exceed `u16`.
	fn try_parse(&self, include_header: bool) -> Result<ParsedItemPage, PageError> {
		let mut count = 0u16;
		let mut items = BTreeMap::new();
		let mut offset = 0;
		while offset < self.data.len() {
			// At least one full header must remain in the buffer.
			ensure!(
				offset + ItemHeader::max_encoded_len() <= self.data.len(),
				PageError::ErrorParsing("wrong header size")
			);
			let header = <ItemHeader>::decode(&mut &self.data[offset..])
				.map_err(|_| PageError::ErrorParsing("decoding header"))?;
			let item_total_length = ItemHeader::max_encoded_len() + header.payload_len as usize;
			// The declared payload must not run past the end of the buffer.
			ensure!(
				offset + item_total_length <= self.data.len(),
				PageError::ErrorParsing("wrong payload size")
			);

			items.insert(
				count,
				match include_header {
					true => &self.data[offset..(offset + item_total_length)],
					false =>
						&self.data
							[(offset + ItemHeader::max_encoded_len())..(offset + item_total_length)],
				},
			);
			offset += item_total_length;
			count = count.checked_add(1).ok_or(PageError::ArithmeticOverflow)?;
		}

		Ok(ParsedItemPage { page_size: self.data.len(), items })
	}
}