Merge pull request #238 from ia0/v2_optim

Make store operations constant with respect to flash operations
This commit is contained in:
Julien Cretin
2021-01-18 16:38:11 +01:00
committed by GitHub
6 changed files with 188 additions and 123 deletions

View File

@@ -13,6 +13,11 @@ jobs:
steps: steps:
- uses: actions/checkout@v2 - uses: actions/checkout@v2
- uses: actions-rs/toolchain@v1
with:
toolchain: nightly
override: true
- name: Unit testing of Persistent store library (release mode) - name: Unit testing of Persistent store library (release mode)
uses: actions-rs/cargo@v1 uses: actions-rs/cargo@v1
with: with:

View File

@@ -124,15 +124,15 @@ fn main() {
compute_latency(&timer, 20, 1, 50); compute_latency(&timer, 20, 1, 50);
// Those overwritten 1 word entries simulate counters. // Those overwritten 1 word entries simulate counters.
compute_latency(&timer, 3, 0, 1); compute_latency(&timer, 3, 0, 1);
compute_latency(&timer, 6, 0, 1); compute_latency(&timer, 20, 0, 1);
writeln!(Console::new(), "\nDone.").unwrap(); writeln!(Console::new(), "\nDone.").unwrap();
// Results on nrf52840dk: // Results on nrf52840dk:
// //
// | Pages | Overwrite | Length | Boot | Compaction | Insert | Remove | // | Pages | Overwrite | Length | Boot | Compaction | Insert | Remove |
// | ----- | --------- | --------- | ------- | ---------- | ------ | ------- | // | ----- | --------- | --------- | ------- | ---------- | ------ | ------ |
// | 3 | no | 50 words | 2.0 ms | 132.5 ms | 4.8 ms | 1.2 ms | // | 3 | no | 50 words | 2.0 ms | 132.8 ms | 4.3 ms | 1.2 ms |
// | 20 | no | 50 words | 7.4 ms | 135.5 ms | 10.2 ms | 3.9 ms | // | 20 | no | 50 words | 7.8 ms | 135.7 ms | 9.9 ms | 4.0 ms |
// | 3 | yes | 1 word | 21.9 ms | 94.5 ms | 12.4 ms | 5.9 ms | // | 3 | yes | 1 word | 19.6 ms | 90.8 ms | 4.7 ms | 2.3 ms |
// | 6 | yes | 1 word | 55.2 ms | 100.8 ms | 24.8 ms | 12.1 ms | // | 20 | yes | 1 word | 183.3 ms | 90.9 ms | 4.8 ms | 2.3 ms |
} }

View File

@@ -1077,4 +1077,15 @@ mod tests {
0xff800000 0xff800000
); );
} }
#[test]
fn position_offsets_fit_in_a_halfword() {
// The store stores in RAM the entry positions as their offsets from the head. Those offsets
// are represented as u16. The bound below is a large over-approximation of the maximal
// offset. We first make sure it fits in a u16.
const MAX_POS: Nat = (MAX_PAGE_INDEX + 1) * MAX_VIRT_PAGE_SIZE;
assert!(MAX_POS <= u16::MAX as Nat);
// We also check the actual value for up-to-date documentation, since it's a constant.
assert_eq!(MAX_POS, 0xff80);
}
} }

View File

@@ -344,6 +344,7 @@
//! storage, the store is checked not to crash. //! storage, the store is checked not to crash.
#![cfg_attr(not(feature = "std"), no_std)] #![cfg_attr(not(feature = "std"), no_std)]
#![feature(try_trait)]
#[macro_use] #[macro_use]
extern crate alloc; extern crate alloc;

View File

@@ -23,8 +23,11 @@ use crate::{usize_to_nat, Nat, Storage, StorageError, StorageIndex};
pub use crate::{ pub use crate::{
BufferStorage, StoreDriver, StoreDriverOff, StoreDriverOn, StoreInterruption, StoreInvariant, BufferStorage, StoreDriver, StoreDriverOff, StoreDriverOn, StoreInterruption, StoreInvariant,
}; };
use alloc::boxed::Box;
use alloc::vec::Vec; use alloc::vec::Vec;
use core::cmp::{max, min, Ordering}; use core::cmp::{max, min, Ordering};
use core::convert::TryFrom;
use core::option::NoneError;
#[cfg(feature = "std")] #[cfg(feature = "std")]
use std::collections::HashSet; use std::collections::HashSet;
@@ -75,6 +78,14 @@ impl From<StorageError> for StoreError {
} }
} }
impl From<NoneError> for StoreError {
fn from(error: NoneError) -> StoreError {
match error {
NoneError => StoreError::InvalidStorage,
}
}
}
/// Result of store operations. /// Result of store operations.
pub type StoreResult<T> = Result<T, StoreError>; pub type StoreResult<T> = Result<T, StoreError>;
@@ -174,6 +185,8 @@ impl StoreUpdate {
} }
} }
pub type StoreIter<'a> = Box<dyn Iterator<Item = StoreResult<StoreHandle>> + 'a>;
/// Implements a store with a map interface over a storage. /// Implements a store with a map interface over a storage.
#[derive(Clone)] #[derive(Clone)]
pub struct Store<S: Storage> { pub struct Store<S: Storage> {
@@ -182,6 +195,14 @@ pub struct Store<S: Storage> {
/// The storage configuration. /// The storage configuration.
format: Format, format: Format,
/// The position of the first word in the store.
head: Option<Position>,
/// The list of the position of the user entries.
///
/// The position is encoded as the word offset from the [head](Store#structfield.head).
entries: Option<Vec<u16>>,
} }
impl<S: Storage> Store<S> { impl<S: Storage> Store<S> {
@@ -199,7 +220,12 @@ impl<S: Storage> Store<S> {
None => return Err((StoreError::InvalidArgument, storage)), None => return Err((StoreError::InvalidArgument, storage)),
Some(x) => x, Some(x) => x,
}; };
let mut store = Store { storage, format }; let mut store = Store {
storage,
format,
head: None,
entries: None,
};
if let Err(error) = store.recover() { if let Err(error) = store.recover() {
return Err((error, store.storage)); return Err((error, store.storage));
} }
@@ -207,8 +233,19 @@ impl<S: Storage> Store<S> {
} }
/// Iterates over the entries. /// Iterates over the entries.
pub fn iter<'a>(&'a self) -> StoreResult<StoreIter<'a, S>> { pub fn iter<'a>(&'a self) -> StoreResult<StoreIter<'a>> {
StoreIter::new(self) let head = self.head?;
Ok(Box::new(self.entries.as_ref()?.iter().map(
move |&offset| {
let pos = head + offset as Nat;
match self.parse_entry(&mut pos.clone())? {
ParsedEntry::User(Header {
key, length: len, ..
}) => Ok(StoreHandle { key, pos, len }),
_ => Err(StoreError::InvalidStorage),
}
},
)))
} }
/// Returns the current capacity in words. /// Returns the current capacity in words.
@@ -217,16 +254,9 @@ impl<S: Storage> Store<S> {
pub fn capacity(&self) -> StoreResult<StoreRatio> { pub fn capacity(&self) -> StoreResult<StoreRatio> {
let total = self.format.total_capacity(); let total = self.format.total_capacity();
let mut used = 0; let mut used = 0;
let mut pos = self.head()?; for handle in self.iter()? {
let end = pos + self.format.virt_size(); let handle = handle?;
while pos < end { used += 1 + self.format.bytes_to_words(handle.len);
let entry_pos = pos;
match self.parse_entry(&mut pos)? {
ParsedEntry::Tail => break,
ParsedEntry::Padding => (),
ParsedEntry::User(_) => used += pos - entry_pos,
_ => return Err(StoreError::InvalidStorage),
}
} }
Ok(StoreRatio { used, total }) Ok(StoreRatio { used, total })
} }
@@ -381,6 +411,7 @@ impl<S: Storage> Store<S> {
let footer = entry_len / word_size - 1; let footer = entry_len / word_size - 1;
self.write_slice(tail, &entry[..(footer * word_size) as usize])?; self.write_slice(tail, &entry[..(footer * word_size) as usize])?;
self.write_slice(tail + footer, &entry[(footer * word_size) as usize..])?; self.write_slice(tail + footer, &entry[(footer * word_size) as usize..])?;
self.push_entry(tail)?;
self.insert_init(tail, footer, key) self.insert_init(tail, footer, key)
} }
@@ -398,7 +429,8 @@ impl<S: Storage> Store<S> {
/// Removes an entry given a handle. /// Removes an entry given a handle.
pub fn remove_handle(&mut self, handle: &StoreHandle) -> StoreResult<()> { pub fn remove_handle(&mut self, handle: &StoreHandle) -> StoreResult<()> {
self.check_handle(handle)?; self.check_handle(handle)?;
self.delete_pos(handle.pos, self.format.bytes_to_words(handle.len)) self.delete_pos(handle.pos, self.format.bytes_to_words(handle.len))?;
self.remove_entry(handle.pos)
} }
/// Returns the maximum length in bytes of a value. /// Returns the maximum length in bytes of a value.
@@ -460,7 +492,9 @@ impl<S: Storage> Store<S> {
/// Recovers a possible compaction interrupted while copying the entries. /// Recovers a possible compaction interrupted while copying the entries.
fn recover_compaction(&mut self) -> StoreResult<()> { fn recover_compaction(&mut self) -> StoreResult<()> {
let head_page = self.head()?.page(&self.format); let head = self.get_extremum_page_head(Ordering::Less)?;
self.head = Some(head);
let head_page = head.page(&self.format);
match self.parse_compact(head_page)? { match self.parse_compact(head_page)? {
WordState::Erased => Ok(()), WordState::Erased => Ok(()),
WordState::Partial => self.compact(), WordState::Partial => self.compact(),
@@ -470,14 +504,15 @@ impl<S: Storage> Store<S> {
/// Recovers a possibly interrupted operation which is not a compaction. /// Recovers a possibly interrupted operation which is not a compaction.
fn recover_operation(&mut self) -> StoreResult<()> { fn recover_operation(&mut self) -> StoreResult<()> {
let mut pos = self.head()?; self.entries = Some(Vec::new());
let mut pos = self.head?;
let mut prev_pos = pos; let mut prev_pos = pos;
let end = pos + self.format.virt_size(); let end = pos + self.format.virt_size();
while pos < end { while pos < end {
let entry_pos = pos; let entry_pos = pos;
match self.parse_entry(&mut pos)? { match self.parse_entry(&mut pos)? {
ParsedEntry::Tail => break, ParsedEntry::Tail => break,
ParsedEntry::User(_) => (), ParsedEntry::User(_) => self.push_entry(entry_pos)?,
ParsedEntry::Padding => { ParsedEntry::Padding => {
self.wipe_span(entry_pos + 1, pos - entry_pos - 1)?; self.wipe_span(entry_pos + 1, pos - entry_pos - 1)?;
} }
@@ -610,7 +645,7 @@ impl<S: Storage> Store<S> {
/// ///
/// In particular, the handle has not been compacted. /// In particular, the handle has not been compacted.
fn check_handle(&self, handle: &StoreHandle) -> StoreResult<()> { fn check_handle(&self, handle: &StoreHandle) -> StoreResult<()> {
if handle.pos < self.head()? { if handle.pos < self.head? {
Err(StoreError::InvalidArgument) Err(StoreError::InvalidArgument)
} else { } else {
Ok(()) Ok(())
@@ -640,7 +675,7 @@ impl<S: Storage> Store<S> {
/// Compacts one page. /// Compacts one page.
fn compact(&mut self) -> StoreResult<()> { fn compact(&mut self) -> StoreResult<()> {
let head = self.head()?; let head = self.head?;
if head.cycle(&self.format) >= self.format.max_page_erases() { if head.cycle(&self.format) >= self.format.max_page_erases() {
return Err(StoreError::NoLifetime); return Err(StoreError::NoLifetime);
} }
@@ -653,7 +688,7 @@ impl<S: Storage> Store<S> {
/// Continues a compaction after its compact page info has been written. /// Continues a compaction after its compact page info has been written.
fn compact_copy(&mut self) -> StoreResult<()> { fn compact_copy(&mut self) -> StoreResult<()> {
let mut head = self.head()?; let mut head = self.head?;
let page = head.page(&self.format); let page = head.page(&self.format);
let end = head.next_page(&self.format); let end = head.next_page(&self.format);
let mut tail = match self.parse_compact(page)? { let mut tail = match self.parse_compact(page)? {
@@ -667,8 +702,12 @@ impl<S: Storage> Store<S> {
let pos = head; let pos = head;
match self.parse_entry(&mut head)? { match self.parse_entry(&mut head)? {
ParsedEntry::Tail => break, ParsedEntry::Tail => break,
// This can happen if we copy to the next page. We actually reached the tail but we
// read what we just copied.
ParsedEntry::Partial if head > end => break,
ParsedEntry::User(_) => (), ParsedEntry::User(_) => (),
_ => continue, ParsedEntry::Padding => continue,
_ => return Err(StoreError::InvalidStorage),
}; };
let length = head - pos; let length = head - pos;
// We have to copy the slice for 2 reasons: // We have to copy the slice for 2 reasons:
@@ -676,7 +715,9 @@ impl<S: Storage> Store<S> {
// 2. We can't pass a flash slice to the kernel. This should get fixed with // 2. We can't pass a flash slice to the kernel. This should get fixed with
// https://github.com/tock/tock/issues/1274. // https://github.com/tock/tock/issues/1274.
let entry = self.read_slice(pos, length * self.format.word_size()); let entry = self.read_slice(pos, length * self.format.word_size());
self.remove_entry(pos)?;
self.write_slice(tail, &entry)?; self.write_slice(tail, &entry)?;
self.push_entry(tail)?;
self.init_page(tail, tail + (length - 1))?; self.init_page(tail, tail + (length - 1))?;
tail += length; tail += length;
} }
@@ -688,14 +729,31 @@ impl<S: Storage> Store<S> {
/// Continues a compaction after its erase entry has been written. /// Continues a compaction after its erase entry has been written.
fn compact_erase(&mut self, erase: Position) -> StoreResult<()> { fn compact_erase(&mut self, erase: Position) -> StoreResult<()> {
let page = match self.parse_entry(&mut erase.clone())? { // Read the page to erase from the erase entry.
let mut page = match self.parse_entry(&mut erase.clone())? {
ParsedEntry::Internal(InternalEntry::Erase { page }) => page, ParsedEntry::Internal(InternalEntry::Erase { page }) => page,
_ => return Err(StoreError::InvalidStorage), _ => return Err(StoreError::InvalidStorage),
}; };
// Erase the page.
self.storage_erase_page(page)?; self.storage_erase_page(page)?;
let head = self.head()?; // Update the head.
page = (page + 1) % self.format.num_pages();
let init = match self.parse_init(page)? {
WordState::Valid(x) => x,
_ => return Err(StoreError::InvalidStorage),
};
let head = self.format.page_head(init, page);
if let Some(entries) = &mut self.entries {
let head_offset = u16::try_from(head - self.head?).ok()?;
for entry in entries {
*entry = entry.checked_sub(head_offset)?;
}
}
self.head = Some(head);
// Wipe the overlapping entry from the erased page.
let pos = head.page_begin(&self.format); let pos = head.page_begin(&self.format);
self.wipe_span(pos, head - pos)?; self.wipe_span(pos, head - pos)?;
// Mark the erase entry as done.
self.set_padding(erase)?; self.set_padding(erase)?;
Ok(()) Ok(())
} }
@@ -704,13 +762,13 @@ impl<S: Storage> Store<S> {
fn transaction_apply(&mut self, sorted_keys: &[Nat], marker: Position) -> StoreResult<()> { fn transaction_apply(&mut self, sorted_keys: &[Nat], marker: Position) -> StoreResult<()> {
self.delete_keys(&sorted_keys, marker)?; self.delete_keys(&sorted_keys, marker)?;
self.set_padding(marker)?; self.set_padding(marker)?;
let end = self.head()? + self.format.virt_size(); let end = self.head? + self.format.virt_size();
let mut pos = marker + 1; let mut pos = marker + 1;
while pos < end { while pos < end {
let entry_pos = pos; let entry_pos = pos;
match self.parse_entry(&mut pos)? { match self.parse_entry(&mut pos)? {
ParsedEntry::Tail => break, ParsedEntry::Tail => break,
ParsedEntry::User(_) => (), ParsedEntry::User(_) => self.push_entry(entry_pos)?,
ParsedEntry::Internal(InternalEntry::Remove { .. }) => { ParsedEntry::Internal(InternalEntry::Remove { .. }) => {
self.set_padding(entry_pos)? self.set_padding(entry_pos)?
} }
@@ -727,37 +785,38 @@ impl<S: Storage> Store<S> {
ParsedEntry::Internal(InternalEntry::Clear { min_key }) => min_key, ParsedEntry::Internal(InternalEntry::Clear { min_key }) => min_key,
_ => return Err(StoreError::InvalidStorage), _ => return Err(StoreError::InvalidStorage),
}; };
let mut pos = self.head()?; self.delete_if(clear, |key| key >= min_key)?;
let end = pos + self.format.virt_size();
while pos < end {
let entry_pos = pos;
match self.parse_entry(&mut pos)? {
ParsedEntry::Internal(InternalEntry::Clear { .. }) if entry_pos == clear => break,
ParsedEntry::User(header) if header.key >= min_key => {
self.delete_pos(entry_pos, pos - entry_pos - 1)?;
}
ParsedEntry::Padding | ParsedEntry::User(_) => (),
_ => return Err(StoreError::InvalidStorage),
}
}
self.set_padding(clear)?; self.set_padding(clear)?;
Ok(()) Ok(())
} }
/// Deletes a set of entries up to a certain position. /// Deletes a set of entries up to a certain position.
fn delete_keys(&mut self, sorted_keys: &[Nat], end: Position) -> StoreResult<()> { fn delete_keys(&mut self, sorted_keys: &[Nat], end: Position) -> StoreResult<()> {
let mut pos = self.head()?; self.delete_if(end, |key| sorted_keys.binary_search(&key).is_ok())
while pos < end { }
let entry_pos = pos;
match self.parse_entry(&mut pos)? { /// Deletes entries matching a predicate up to a certain position.
ParsedEntry::Tail => break, fn delete_if(&mut self, end: Position, delete: impl Fn(Nat) -> bool) -> StoreResult<()> {
ParsedEntry::User(header) if sorted_keys.binary_search(&header.key).is_ok() => { let head = self.head?;
self.delete_pos(entry_pos, pos - entry_pos - 1)?; let mut entries = self.entries.take()?;
} let mut i = 0;
ParsedEntry::Padding | ParsedEntry::User(_) => (), while i < entries.len() {
let pos = head + entries[i] as Nat;
if pos >= end {
break;
}
let header = match self.parse_entry(&mut pos.clone())? {
ParsedEntry::User(x) => x,
_ => return Err(StoreError::InvalidStorage), _ => return Err(StoreError::InvalidStorage),
};
if delete(header.key) {
self.delete_pos(pos, self.format.bytes_to_words(header.length))?;
entries.swap_remove(i);
} else {
i += 1;
} }
} }
self.entries = Some(entries);
Ok(()) Ok(())
} }
@@ -836,19 +895,20 @@ impl<S: Storage> Store<S> {
} }
} }
// There is always at least one initialized page. // There is always at least one initialized page.
best.ok_or(StoreError::InvalidStorage) Ok(best?)
} }
/// Returns the number of words that can be written without compaction. /// Returns the number of words that can be written without compaction.
fn immediate_capacity(&self) -> StoreResult<Nat> { fn immediate_capacity(&self) -> StoreResult<Nat> {
let tail = self.tail()?; let tail = self.tail()?;
let end = self.head()? + self.format.virt_size(); let end = self.head? + self.format.virt_size();
Ok(end.get().saturating_sub(tail.get())) Ok(end.get().saturating_sub(tail.get()))
} }
/// Returns the position of the first word in the store. /// Returns the position of the first word in the store.
#[cfg(feature = "std")]
pub(crate) fn head(&self) -> StoreResult<Position> { pub(crate) fn head(&self) -> StoreResult<Position> {
self.get_extremum_page_head(Ordering::Less) Ok(self.head?)
} }
/// Returns one past the position of the last word in the store. /// Returns one past the position of the last word in the store.
@@ -863,6 +923,30 @@ impl<S: Storage> Store<S> {
Ok(pos) Ok(pos)
} }
fn push_entry(&mut self, pos: Position) -> StoreResult<()> {
let entries = match &mut self.entries {
None => return Ok(()),
Some(x) => x,
};
let head = self.head?;
let offset = u16::try_from(pos - head).ok()?;
debug_assert!(!entries.contains(&offset));
entries.push(offset);
Ok(())
}
fn remove_entry(&mut self, pos: Position) -> StoreResult<()> {
let entries = match &mut self.entries {
None => return Ok(()),
Some(x) => x,
};
let head = self.head?;
let offset = u16::try_from(pos - head).ok()?;
let i = entries.iter().position(|x| *x == offset)?;
entries.swap_remove(i);
Ok(())
}
/// Parses the entry at a given position. /// Parses the entry at a given position.
/// ///
/// The position is updated to point to the next entry. /// The position is updated to point to the next entry.
@@ -1061,7 +1145,7 @@ impl Store<BufferStorage> {
/// If the value has been partially compacted, only return the non-compacted part. Returns an /// If the value has been partially compacted, only return the non-compacted part. Returns an
/// empty value if it has been fully compacted. /// empty value if it has been fully compacted.
pub fn inspect_value(&self, handle: &StoreHandle) -> Vec<u8> { pub fn inspect_value(&self, handle: &StoreHandle) -> Vec<u8> {
let head = self.head().unwrap(); let head = self.head.unwrap();
let length = self.format.bytes_to_words(handle.len); let length = self.format.bytes_to_words(handle.len);
if head <= handle.pos { if head <= handle.pos {
// The value has not been compacted. // The value has not been compacted.
@@ -1087,20 +1171,21 @@ impl Store<BufferStorage> {
store store
.iter() .iter()
.unwrap() .unwrap()
.map(|x| x.unwrap()) .filter(|x| x.is_err() || delete_key(x.as_ref().unwrap().key as usize))
.filter(|x| delete_key(x.key as usize)) .collect::<Result<Vec<_>, _>>()
.collect::<Vec<_>>()
}; };
match *operation { match *operation {
StoreOperation::Transaction { ref updates } => { StoreOperation::Transaction { ref updates } => {
let keys: HashSet<usize> = updates.iter().map(|x| x.key()).collect(); let keys: HashSet<usize> = updates.iter().map(|x| x.key()).collect();
let deleted = deleted(self, &|key| keys.contains(&key)); match deleted(self, &|key| keys.contains(&key)) {
(deleted, self.transaction(updates)) Ok(deleted) => (deleted, self.transaction(updates)),
} Err(error) => (Vec::new(), Err(error)),
StoreOperation::Clear { min_key } => { }
let deleted = deleted(self, &|key| key >= min_key);
(deleted, self.clear(min_key))
} }
StoreOperation::Clear { min_key } => match deleted(self, &|key| key >= min_key) {
Ok(deleted) => (deleted, self.clear(min_key)),
Err(error) => (Vec::new(), Err(error)),
},
StoreOperation::Prepare { length } => (Vec::new(), self.prepare(length)), StoreOperation::Prepare { length } => (Vec::new(), self.prepare(length)),
} }
} }
@@ -1165,61 +1250,6 @@ enum ParsedEntry {
Tail, Tail,
} }
/// Iterates over the entries of a store.
pub struct StoreIter<'a, S: Storage> {
/// The store being iterated.
store: &'a Store<S>,
/// The position of the next entry.
pos: Position,
/// Iteration stops when reaching this position.
end: Position,
}
impl<'a, S: Storage> StoreIter<'a, S> {
/// Creates an iterator over the entries of a store.
fn new(store: &'a Store<S>) -> StoreResult<StoreIter<'a, S>> {
let pos = store.head()?;
let end = pos + store.format.virt_size();
Ok(StoreIter { store, pos, end })
}
}
impl<'a, S: Storage> StoreIter<'a, S> {
/// Returns the next entry and advances the iterator.
fn transposed_next(&mut self) -> StoreResult<Option<StoreHandle>> {
if self.pos >= self.end {
return Ok(None);
}
while self.pos < self.end {
let entry_pos = self.pos;
match self.store.parse_entry(&mut self.pos)? {
ParsedEntry::Tail => break,
ParsedEntry::Padding => (),
ParsedEntry::User(header) => {
return Ok(Some(StoreHandle {
key: header.key,
pos: entry_pos,
len: header.length,
}))
}
_ => return Err(StoreError::InvalidStorage),
}
}
self.pos = self.end;
Ok(None)
}
}
impl<'a, S: Storage> Iterator for StoreIter<'a, S> {
type Item = StoreResult<StoreHandle>;
fn next(&mut self) -> Option<StoreResult<StoreHandle>> {
self.transposed_next().transpose()
}
}
/// Returns whether 2 slices are different. /// Returns whether 2 slices are different.
/// ///
/// Returns an error if `target` has a bit set to one for which `source` is set to zero. /// Returns an error if `target` has a bit set to one for which `source` is set to zero.
@@ -1438,4 +1468,22 @@ mod tests {
driver = driver.power_off().power_on().unwrap(); driver = driver.power_off().power_on().unwrap();
driver.check().unwrap(); driver.check().unwrap();
} }
#[test]
fn entries_ok() {
let mut driver = MINIMAL.new_driver().power_on().unwrap();
// The store is initially empty.
assert!(driver.store().entries.as_ref().unwrap().is_empty());
// Inserted elements are added.
const LEN: usize = 6;
driver.insert(0, &[0x38; (LEN - 1) * 4]).unwrap();
driver.insert(1, &[0x5c; 4]).unwrap();
assert_eq!(driver.store().entries, Some(vec![0, LEN as u16]));
// Deleted elements are removed.
driver.remove(0).unwrap();
assert_eq!(driver.store().entries, Some(vec![LEN as u16]));
}
} }

View File

@@ -573,7 +573,7 @@ pub struct IterCredentials<'a> {
store: &'a persistent_store::Store<Storage>, store: &'a persistent_store::Store<Storage>,
/// The store iterator. /// The store iterator.
iter: persistent_store::StoreIter<'a, Storage>, iter: persistent_store::StoreIter<'a>,
/// The iteration result. /// The iteration result.
/// ///