Merge branch 'develop' into command-cred-mgmt
Changed file: .github/workflows/persistent_store_test.yml (vendored, +5 lines)
@@ -13,6 +13,11 @@ jobs:
     steps:
       - uses: actions/checkout@v2

+      - uses: actions-rs/toolchain@v1
+        with:
+          toolchain: nightly
+          override: true
+
       - name: Unit testing of Persistent store library (release mode)
         uses: actions-rs/cargo@v1
         with:
@@ -124,15 +124,15 @@ fn main() {
     compute_latency(&timer, 20, 1, 50);
     // Those overwritten 1 word entries simulate counters.
     compute_latency(&timer, 3, 0, 1);
-    compute_latency(&timer, 6, 0, 1);
+    compute_latency(&timer, 20, 0, 1);
     writeln!(Console::new(), "\nDone.").unwrap();

     // Results on nrf52840dk:
     //
     // | Pages | Overwrite | Length | Boot | Compaction | Insert | Remove |
-    // | ----- | --------- | --------- | ------- | ---------- | ------ | ------- |
-    // | 3 | no | 50 words | 2.0 ms | 132.5 ms | 4.8 ms | 1.2 ms |
-    // | 20 | no | 50 words | 7.4 ms | 135.5 ms | 10.2 ms | 3.9 ms |
-    // | 3 | yes | 1 word | 21.9 ms | 94.5 ms | 12.4 ms | 5.9 ms |
-    // | 6 | yes | 1 word | 55.2 ms | 100.8 ms | 24.8 ms | 12.1 ms |
+    // | ----- | --------- | --------- | ------- | ---------- | ------ | ------ |
+    // | 3 | no | 50 words | 2.0 ms | 132.8 ms | 4.3 ms | 1.2 ms |
+    // | 20 | no | 50 words | 7.8 ms | 135.7 ms | 9.9 ms | 4.0 ms |
+    // | 3 | yes | 1 word | 19.6 ms | 90.8 ms | 4.7 ms | 2.3 ms |
+    // | 20 | yes | 1 word | 183.3 ms | 90.9 ms | 4.8 ms | 2.3 ms |
 }
@@ -335,12 +335,12 @@ impl Format {
     }

     /// Builds the storage representation of an init info.
-    pub fn build_init(&self, init: InitInfo) -> WordSlice {
+    pub fn build_init(&self, init: InitInfo) -> StoreResult<WordSlice> {
         let mut word = ERASED_WORD;
-        INIT_CYCLE.set(&mut word, init.cycle);
-        INIT_PREFIX.set(&mut word, init.prefix);
-        WORD_CHECKSUM.set(&mut word, 0);
-        word.as_slice()
+        INIT_CYCLE.set(&mut word, init.cycle)?;
+        INIT_PREFIX.set(&mut word, init.prefix)?;
+        WORD_CHECKSUM.set(&mut word, 0)?;
+        Ok(word.as_slice())
     }

     /// Returns the storage index of the compact info of a page.
@@ -368,36 +368,36 @@ impl Format {
     }

     /// Builds the storage representation of a compact info.
-    pub fn build_compact(&self, compact: CompactInfo) -> WordSlice {
+    pub fn build_compact(&self, compact: CompactInfo) -> StoreResult<WordSlice> {
         let mut word = ERASED_WORD;
-        COMPACT_TAIL.set(&mut word, compact.tail);
-        WORD_CHECKSUM.set(&mut word, 0);
-        word.as_slice()
+        COMPACT_TAIL.set(&mut word, compact.tail)?;
+        WORD_CHECKSUM.set(&mut word, 0)?;
+        Ok(word.as_slice())
     }

     /// Builds the storage representation of an internal entry.
-    pub fn build_internal(&self, internal: InternalEntry) -> WordSlice {
+    pub fn build_internal(&self, internal: InternalEntry) -> StoreResult<WordSlice> {
         let mut word = ERASED_WORD;
         match internal {
             InternalEntry::Erase { page } => {
-                ID_ERASE.set(&mut word);
-                ERASE_PAGE.set(&mut word, page);
+                ID_ERASE.set(&mut word)?;
+                ERASE_PAGE.set(&mut word, page)?;
             }
             InternalEntry::Clear { min_key } => {
-                ID_CLEAR.set(&mut word);
-                CLEAR_MIN_KEY.set(&mut word, min_key);
+                ID_CLEAR.set(&mut word)?;
+                CLEAR_MIN_KEY.set(&mut word, min_key)?;
             }
             InternalEntry::Marker { count } => {
-                ID_MARKER.set(&mut word);
-                MARKER_COUNT.set(&mut word, count);
+                ID_MARKER.set(&mut word)?;
+                MARKER_COUNT.set(&mut word, count)?;
             }
             InternalEntry::Remove { key } => {
-                ID_REMOVE.set(&mut word);
-                REMOVE_KEY.set(&mut word, key);
+                ID_REMOVE.set(&mut word)?;
+                REMOVE_KEY.set(&mut word, key)?;
             }
         }
-        WORD_CHECKSUM.set(&mut word, 0);
-        word.as_slice()
+        WORD_CHECKSUM.set(&mut word, 0)?;
+        Ok(word.as_slice())
     }

     /// Parses the first word of an entry from its storage representation.
@@ -459,31 +459,31 @@ impl Format {
     }

     /// Builds the storage representation of a user entry.
-    pub fn build_user(&self, key: Nat, value: &[u8]) -> Vec<u8> {
+    pub fn build_user(&self, key: Nat, value: &[u8]) -> StoreResult<Vec<u8>> {
         let length = usize_to_nat(value.len());
         let word_size = self.word_size();
         let footer = self.bytes_to_words(length);
         let mut result = vec![0xff; ((1 + footer) * word_size) as usize];
         result[word_size as usize..][..length as usize].copy_from_slice(value);
         let mut word = ERASED_WORD;
-        ID_HEADER.set(&mut word);
+        ID_HEADER.set(&mut word)?;
         if footer > 0 && is_erased(&result[(footer * word_size) as usize..]) {
             HEADER_FLIPPED.set(&mut word);
             *result.last_mut().unwrap() = 0x7f;
         }
-        HEADER_LENGTH.set(&mut word, length);
-        HEADER_KEY.set(&mut word, key);
+        HEADER_LENGTH.set(&mut word, length)?;
+        HEADER_KEY.set(&mut word, key)?;
         HEADER_CHECKSUM.set(
             &mut word,
             count_zeros(&result[(footer * word_size) as usize..]),
-        );
+        )?;
         result[..word_size as usize].copy_from_slice(&word.as_slice());
-        result
+        Ok(result)
     }

     /// Sets the padding bit in the first word of a user entry.
-    pub fn set_padding(&self, word: &mut Word) {
-        ID_PADDING.set(word);
+    pub fn set_padding(&self, word: &mut Word) -> StoreResult<()> {
+        ID_PADDING.set(word)
     }

     /// Sets the deleted bit in the first word of a user entry.
@@ -1077,4 +1077,15 @@ mod tests {
             0xff800000
         );
     }
+
+    #[test]
+    fn position_offsets_fit_in_a_halfword() {
+        // The store stores in RAM the entry positions as their offset from the head. Those offsets
+        // are represented as u16. The bound below is a large over-approximation of the maximal
+        // offset. We first make sure it fits in a u16.
+        const MAX_POS: Nat = (MAX_PAGE_INDEX + 1) * MAX_VIRT_PAGE_SIZE;
+        assert!(MAX_POS <= u16::MAX as Nat);
+        // We also check the actual value for up-to-date documentation, since it's a constant.
+        assert_eq!(MAX_POS, 0xff80);
+    }
 }
@@ -42,15 +42,20 @@ impl Field {

     /// Sets the value of a bit field.
     ///
-    /// # Preconditions
+    /// # Errors
     ///
     /// - The value must fit in the bit field: `num_bits(value) < self.len`.
     /// - The value must only change bits from 1 to 0: `self.get(*word) & value == value`.
-    pub fn set(&self, word: &mut Word, value: Nat) {
-        debug_assert_eq!(value & self.mask(), value);
+    pub fn set(&self, word: &mut Word, value: Nat) -> StoreResult<()> {
+        if value & self.mask() != value {
+            return Err(StoreError::InvalidStorage);
+        }
         let mask = !(self.mask() << self.pos);
         word.0 &= mask | (value << self.pos);
-        debug_assert_eq!(self.get(*word), value);
+        if self.get(*word) != value {
+            return Err(StoreError::InvalidStorage);
+        }
+        Ok(())
     }

     /// Returns a bit mask the length of the bit field.
@@ -82,8 +87,8 @@ impl ConstField {
     }

     /// Sets the bit field to its value.
-    pub fn set(&self, word: &mut Word) {
-        self.field.set(word, self.value);
+    pub fn set(&self, word: &mut Word) -> StoreResult<()> {
+        self.field.set(word, self.value)
     }
 }

@@ -135,15 +140,15 @@ impl Checksum {

     /// Sets the checksum to the external increment value.
     ///
-    /// # Preconditions
+    /// # Errors
     ///
     /// - The bits of the checksum bit field should be set to one: `self.field.get(*word) ==
     ///   self.field.mask()`.
     /// - The checksum value should fit in the checksum bit field: `num_bits(word.count_zeros() +
     ///   value) < self.field.len`.
-    pub fn set(&self, word: &mut Word, value: Nat) {
+    pub fn set(&self, word: &mut Word, value: Nat) -> StoreResult<()> {
         debug_assert_eq!(self.field.get(*word), self.field.mask());
-        self.field.set(word, word.0.count_zeros() + value);
+        self.field.set(word, word.0.count_zeros() + value)
     }
 }

@@ -290,7 +295,7 @@ mod tests {
     assert_eq!(field.get(Word(0x000000f8)), 0x1f);
     assert_eq!(field.get(Word(0x0000ff37)), 6);
     let mut word = Word(0xffffffff);
-    field.set(&mut word, 3);
+    field.set(&mut word, 3).unwrap();
     assert_eq!(word, Word(0xffffff1f));
 }

@@ -305,7 +310,7 @@ mod tests {
     assert!(field.check(Word(0x00000048)));
     assert!(field.check(Word(0x0000ff4f)));
     let mut word = Word(0xffffffff);
-    field.set(&mut word);
+    field.set(&mut word).unwrap();
     assert_eq!(word, Word(0xffffff4f));
 }

@@ -333,7 +338,7 @@ mod tests {
     assert_eq!(field.get(Word(0x00ffff67)), Ok(4));
     assert_eq!(field.get(Word(0x7fffff07)), Err(StoreError::InvalidStorage));
     let mut word = Word(0x0fffffff);
-    field.set(&mut word, 4);
+    field.set(&mut word, 4).unwrap();
     assert_eq!(word, Word(0x0fffff47));
 }

@@ -344,6 +344,7 @@
 //! storage, the store is checked not to crash.

 #![cfg_attr(not(feature = "std"), no_std)]
+#![feature(try_trait)]

 #[macro_use]
 extern crate alloc;
@@ -23,8 +23,11 @@ use crate::{usize_to_nat, Nat, Storage, StorageError, StorageIndex};
 pub use crate::{
     BufferStorage, StoreDriver, StoreDriverOff, StoreDriverOn, StoreInterruption, StoreInvariant,
 };
+use alloc::boxed::Box;
 use alloc::vec::Vec;
 use core::cmp::{max, min, Ordering};
+use core::convert::TryFrom;
+use core::option::NoneError;
 #[cfg(feature = "std")]
 use std::collections::HashSet;

@@ -75,6 +78,14 @@ impl From<StorageError> for StoreError {
     }
 }

+impl From<NoneError> for StoreError {
+    fn from(error: NoneError) -> StoreError {
+        match error {
+            NoneError => StoreError::InvalidStorage,
+        }
+    }
+}
+
 /// Result of store operations.
 pub type StoreResult<T> = Result<T, StoreError>;

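Note on the two additions above: `#![feature(try_trait)]` together with this `From<NoneError>` impl is what lets the rest of the commit apply `?` directly to `Option` values (for example `self.head?` and `self.entries.take()?`) inside functions returning `StoreResult`, mapping a `None` to `StoreError::InvalidStorage`. A minimal standalone sketch of the pattern, assuming the 2020-era nightly `try_trait` feature (since superseded by the `Try` v2 trait):

    #![feature(try_trait)]

    use core::option::NoneError;

    #[derive(Debug)]
    enum StoreError {
        InvalidStorage,
    }

    impl From<NoneError> for StoreError {
        fn from(_: NoneError) -> StoreError {
            StoreError::InvalidStorage
        }
    }

    type StoreResult<T> = Result<T, StoreError>;

    // `?` on an Option yields NoneError on None, which converts into
    // StoreError::InvalidStorage through the impl above.
    fn require<T>(value: Option<T>) -> StoreResult<T> {
        Ok(value?)
    }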
@@ -174,6 +185,8 @@ impl StoreUpdate {
     }
 }

+pub type StoreIter<'a> = Box<dyn Iterator<Item = StoreResult<StoreHandle>> + 'a>;
+
 /// Implements a store with a map interface over a storage.
 #[derive(Clone)]
 pub struct Store<S: Storage> {
@@ -182,6 +195,14 @@ pub struct Store<S: Storage> {

     /// The storage configuration.
     format: Format,
+
+    /// The position of the first word in the store.
+    head: Option<Position>,
+
+    /// The list of the position of the user entries.
+    ///
+    /// The position is encoded as the word offset from the [head](Store#structfield.head).
+    entries: Option<Vec<u16>>,
 }

 impl<S: Storage> Store<S> {
@@ -199,7 +220,12 @@ impl<S: Storage> Store<S> {
             None => return Err((StoreError::InvalidArgument, storage)),
             Some(x) => x,
         };
-        let mut store = Store { storage, format };
+        let mut store = Store {
+            storage,
+            format,
+            head: None,
+            entries: None,
+        };
         if let Err(error) = store.recover() {
             return Err((error, store.storage));
         }
@@ -207,8 +233,19 @@ impl<S: Storage> Store<S> {
     }

     /// Iterates over the entries.
-    pub fn iter<'a>(&'a self) -> StoreResult<StoreIter<'a, S>> {
-        StoreIter::new(self)
+    pub fn iter<'a>(&'a self) -> StoreResult<StoreIter<'a>> {
+        let head = self.head?;
+        Ok(Box::new(self.entries.as_ref()?.iter().map(
+            move |&offset| {
+                let pos = head + offset as Nat;
+                match self.parse_entry(&mut pos.clone())? {
+                    ParsedEntry::User(Header {
+                        key, length: len, ..
+                    }) => Ok(StoreHandle { key, pos, len }),
+                    _ => Err(StoreError::InvalidStorage),
+                }
+            },
+        )))
     }

     /// Returns the current capacity in words.
@@ -217,16 +254,9 @@ impl<S: Storage> Store<S> {
     pub fn capacity(&self) -> StoreResult<StoreRatio> {
         let total = self.format.total_capacity();
         let mut used = 0;
-        let mut pos = self.head()?;
-        let end = pos + self.format.virt_size();
-        while pos < end {
-            let entry_pos = pos;
-            match self.parse_entry(&mut pos)? {
-                ParsedEntry::Tail => break,
-                ParsedEntry::Padding => (),
-                ParsedEntry::User(_) => used += pos - entry_pos,
-                _ => return Err(StoreError::InvalidStorage),
-            }
+        for handle in self.iter()? {
+            let handle = handle?;
+            used += 1 + self.format.bytes_to_words(handle.len);
         }
         Ok(StoreRatio { used, total })
     }
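For the arithmetic in the rewritten `capacity` loop: each user entry costs one header word plus its value rounded up to whole words, which is what `1 + self.format.bytes_to_words(handle.len)` accumulates. A small illustration with a hypothetical helper (not from the commit), assuming a 4-byte word and that `bytes_to_words` rounds up:

    /// Words consumed by one user entry: 1 header word plus the value rounded
    /// up to whole words.
    fn words_used(value_len: u32, word_size: u32) -> u32 {
        1 + (value_len + word_size - 1) / word_size
    }

    #[test]
    fn five_byte_value_uses_three_words() {
        // One header word + ceil(5 / 4) = 2 data words.
        assert_eq!(words_used(5, 4), 3);
    }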
@@ -270,7 +300,9 @@ impl<S: Storage> Store<S> {
         self.reserve(self.format.transaction_capacity(updates))?;
         // Write the marker entry.
         let marker = self.tail()?;
-        let entry = self.format.build_internal(InternalEntry::Marker { count });
+        let entry = self
+            .format
+            .build_internal(InternalEntry::Marker { count })?;
         self.write_slice(marker, &entry)?;
         self.init_page(marker, marker)?;
         // Write the updates.
@@ -278,7 +310,7 @@ impl<S: Storage> Store<S> {
         for update in updates {
             let length = match *update {
                 StoreUpdate::Insert { key, ref value } => {
-                    let entry = self.format.build_user(usize_to_nat(key), value);
+                    let entry = self.format.build_user(usize_to_nat(key), value)?;
                     let word_size = self.format.word_size();
                     let footer = usize_to_nat(entry.len()) / word_size - 1;
                     self.write_slice(tail, &entry[..(footer * word_size) as usize])?;
@@ -287,7 +319,7 @@ impl<S: Storage> Store<S> {
                 }
                 StoreUpdate::Remove { key } => {
                     let key = usize_to_nat(key);
-                    let remove = self.format.build_internal(InternalEntry::Remove { key });
+                    let remove = self.format.build_internal(InternalEntry::Remove { key })?;
                     self.write_slice(tail, &remove)?;
                     0
                 }
@@ -307,7 +339,9 @@ impl<S: Storage> Store<S> {
         if min_key > self.format.max_key() {
             return Err(StoreError::InvalidArgument);
         }
-        let clear = self.format.build_internal(InternalEntry::Clear { min_key });
+        let clear = self
+            .format
+            .build_internal(InternalEntry::Clear { min_key })?;
         // We always have one word available. We can't use `reserve` because this is internal
         // capacity, not user capacity.
         while self.immediate_capacity()? < 1 {
@@ -373,7 +407,7 @@ impl<S: Storage> Store<S> {
         if key > self.format.max_key() || value_len > self.format.max_value_len() {
             return Err(StoreError::InvalidArgument);
         }
-        let entry = self.format.build_user(key, value);
+        let entry = self.format.build_user(key, value)?;
         let entry_len = usize_to_nat(entry.len());
         self.reserve(entry_len / self.format.word_size())?;
         let tail = self.tail()?;
@@ -381,6 +415,7 @@ impl<S: Storage> Store<S> {
         let footer = entry_len / word_size - 1;
         self.write_slice(tail, &entry[..(footer * word_size) as usize])?;
         self.write_slice(tail + footer, &entry[(footer * word_size) as usize..])?;
+        self.push_entry(tail)?;
         self.insert_init(tail, footer, key)
     }

@@ -398,7 +433,8 @@ impl<S: Storage> Store<S> {
     /// Removes an entry given a handle.
     pub fn remove_handle(&mut self, handle: &StoreHandle) -> StoreResult<()> {
         self.check_handle(handle)?;
-        self.delete_pos(handle.pos, self.format.bytes_to_words(handle.len))
+        self.delete_pos(handle.pos, self.format.bytes_to_words(handle.len))?;
+        self.remove_entry(handle.pos)
     }

     /// Returns the maximum length in bytes of a value.
@@ -437,7 +473,7 @@ impl<S: Storage> Store<S> {
         let init_info = self.format.build_init(InitInfo {
             cycle: 0,
             prefix: 0,
-        });
+        })?;
         self.storage_write_slice(index, &init_info)
     }

@@ -460,7 +496,9 @@ impl<S: Storage> Store<S> {

     /// Recovers a possible compaction interrupted while copying the entries.
     fn recover_compaction(&mut self) -> StoreResult<()> {
-        let head_page = self.head()?.page(&self.format);
+        let head = self.get_extremum_page_head(Ordering::Less)?;
+        self.head = Some(head);
+        let head_page = head.page(&self.format);
         match self.parse_compact(head_page)? {
             WordState::Erased => Ok(()),
             WordState::Partial => self.compact(),
@@ -470,14 +508,15 @@ impl<S: Storage> Store<S> {

     /// Recover a possible interrupted operation which is not a compaction.
     fn recover_operation(&mut self) -> StoreResult<()> {
-        let mut pos = self.head()?;
+        self.entries = Some(Vec::new());
+        let mut pos = self.head?;
         let mut prev_pos = pos;
         let end = pos + self.format.virt_size();
         while pos < end {
             let entry_pos = pos;
             match self.parse_entry(&mut pos)? {
                 ParsedEntry::Tail => break,
-                ParsedEntry::User(_) => (),
+                ParsedEntry::User(_) => self.push_entry(entry_pos)?,
                 ParsedEntry::Padding => {
                     self.wipe_span(entry_pos + 1, pos - entry_pos - 1)?;
                 }
@@ -610,7 +649,7 @@ impl<S: Storage> Store<S> {
     ///
     /// In particular, the handle has not been compacted.
     fn check_handle(&self, handle: &StoreHandle) -> StoreResult<()> {
-        if handle.pos < self.head()? {
+        if handle.pos < self.head? {
             Err(StoreError::InvalidArgument)
         } else {
             Ok(())
@@ -640,20 +679,22 @@ impl<S: Storage> Store<S> {

     /// Compacts one page.
     fn compact(&mut self) -> StoreResult<()> {
-        let head = self.head()?;
+        let head = self.head?;
         if head.cycle(&self.format) >= self.format.max_page_erases() {
             return Err(StoreError::NoLifetime);
         }
         let tail = max(self.tail()?, head.next_page(&self.format));
         let index = self.format.index_compact(head.page(&self.format));
-        let compact_info = self.format.build_compact(CompactInfo { tail: tail - head });
+        let compact_info = self
+            .format
+            .build_compact(CompactInfo { tail: tail - head })?;
         self.storage_write_slice(index, &compact_info)?;
         self.compact_copy()
     }

     /// Continues a compaction after its compact page info has been written.
     fn compact_copy(&mut self) -> StoreResult<()> {
-        let mut head = self.head()?;
+        let mut head = self.head?;
         let page = head.page(&self.format);
         let end = head.next_page(&self.format);
         let mut tail = match self.parse_compact(page)? {
@@ -667,8 +708,12 @@ impl<S: Storage> Store<S> {
             let pos = head;
             match self.parse_entry(&mut head)? {
                 ParsedEntry::Tail => break,
+                // This can happen if we copy to the next page. We actually reached the tail but we
+                // read what we just copied.
+                ParsedEntry::Partial if head > end => break,
                 ParsedEntry::User(_) => (),
-                _ => continue,
+                ParsedEntry::Padding => continue,
+                _ => return Err(StoreError::InvalidStorage),
             };
             let length = head - pos;
             // We have to copy the slice for 2 reasons:
@@ -676,11 +721,13 @@ impl<S: Storage> Store<S> {
             // 2. We can't pass a flash slice to the kernel. This should get fixed with
             // https://github.com/tock/tock/issues/1274.
             let entry = self.read_slice(pos, length * self.format.word_size());
+            self.remove_entry(pos)?;
             self.write_slice(tail, &entry)?;
+            self.push_entry(tail)?;
             self.init_page(tail, tail + (length - 1))?;
             tail += length;
         }
-        let erase = self.format.build_internal(InternalEntry::Erase { page });
+        let erase = self.format.build_internal(InternalEntry::Erase { page })?;
         self.write_slice(tail, &erase)?;
         self.init_page(tail, tail)?;
         self.compact_erase(tail)
@@ -688,14 +735,31 @@ impl<S: Storage> Store<S> {

     /// Continues a compaction after its erase entry has been written.
     fn compact_erase(&mut self, erase: Position) -> StoreResult<()> {
-        let page = match self.parse_entry(&mut erase.clone())? {
+        // Read the page to erase from the erase entry.
+        let mut page = match self.parse_entry(&mut erase.clone())? {
             ParsedEntry::Internal(InternalEntry::Erase { page }) => page,
             _ => return Err(StoreError::InvalidStorage),
         };
+        // Erase the page.
         self.storage_erase_page(page)?;
-        let head = self.head()?;
+        // Update the head.
+        page = (page + 1) % self.format.num_pages();
+        let init = match self.parse_init(page)? {
+            WordState::Valid(x) => x,
+            _ => return Err(StoreError::InvalidStorage),
+        };
+        let head = self.format.page_head(init, page);
+        if let Some(entries) = &mut self.entries {
+            let head_offset = u16::try_from(head - self.head?).ok()?;
+            for entry in entries {
+                *entry = entry.checked_sub(head_offset)?;
+            }
+        }
+        self.head = Some(head);
+        // Wipe the overlapping entry from the erased page.
         let pos = head.page_begin(&self.format);
         self.wipe_span(pos, head - pos)?;
+        // Mark the erase entry as done.
         self.set_padding(erase)?;
         Ok(())
     }
@@ -704,13 +768,13 @@ impl<S: Storage> Store<S> {
     fn transaction_apply(&mut self, sorted_keys: &[Nat], marker: Position) -> StoreResult<()> {
         self.delete_keys(&sorted_keys, marker)?;
         self.set_padding(marker)?;
-        let end = self.head()? + self.format.virt_size();
+        let end = self.head? + self.format.virt_size();
         let mut pos = marker + 1;
         while pos < end {
             let entry_pos = pos;
             match self.parse_entry(&mut pos)? {
                 ParsedEntry::Tail => break,
-                ParsedEntry::User(_) => (),
+                ParsedEntry::User(_) => self.push_entry(entry_pos)?,
                 ParsedEntry::Internal(InternalEntry::Remove { .. }) => {
                     self.set_padding(entry_pos)?
                 }
@@ -727,37 +791,38 @@ impl<S: Storage> Store<S> {
             ParsedEntry::Internal(InternalEntry::Clear { min_key }) => min_key,
             _ => return Err(StoreError::InvalidStorage),
         };
-        let mut pos = self.head()?;
-        let end = pos + self.format.virt_size();
-        while pos < end {
-            let entry_pos = pos;
-            match self.parse_entry(&mut pos)? {
-                ParsedEntry::Internal(InternalEntry::Clear { .. }) if entry_pos == clear => break,
-                ParsedEntry::User(header) if header.key >= min_key => {
-                    self.delete_pos(entry_pos, pos - entry_pos - 1)?;
-                }
-                ParsedEntry::Padding | ParsedEntry::User(_) => (),
-                _ => return Err(StoreError::InvalidStorage),
-            }
-        }
+        self.delete_if(clear, |key| key >= min_key)?;
         self.set_padding(clear)?;
         Ok(())
     }

     /// Deletes a set of entries up to a certain position.
     fn delete_keys(&mut self, sorted_keys: &[Nat], end: Position) -> StoreResult<()> {
-        let mut pos = self.head()?;
-        while pos < end {
-            let entry_pos = pos;
-            match self.parse_entry(&mut pos)? {
-                ParsedEntry::Tail => break,
-                ParsedEntry::User(header) if sorted_keys.binary_search(&header.key).is_ok() => {
-                    self.delete_pos(entry_pos, pos - entry_pos - 1)?;
-                }
-                ParsedEntry::Padding | ParsedEntry::User(_) => (),
-                _ => return Err(StoreError::InvalidStorage),
-            }
-        }
+        self.delete_if(end, |key| sorted_keys.binary_search(&key).is_ok())
+    }
+
+    /// Deletes entries matching a predicate up to a certain position.
+    fn delete_if(&mut self, end: Position, delete: impl Fn(Nat) -> bool) -> StoreResult<()> {
+        let head = self.head?;
+        let mut entries = self.entries.take()?;
+        let mut i = 0;
+        while i < entries.len() {
+            let pos = head + entries[i] as Nat;
+            if pos >= end {
+                break;
+            }
+            let header = match self.parse_entry(&mut pos.clone())? {
+                ParsedEntry::User(x) => x,
+                _ => return Err(StoreError::InvalidStorage),
+            };
+            if delete(header.key) {
+                self.delete_pos(pos, self.format.bytes_to_words(header.length))?;
+                entries.swap_remove(i);
+            } else {
+                i += 1;
+            }
+        }
+        self.entries = Some(entries);
         Ok(())
     }

@@ -792,7 +857,7 @@ impl<S: Storage> Store<S> {
         let init_info = self.format.build_init(InitInfo {
             cycle: new_first.cycle(&self.format),
             prefix: new_first.word(&self.format),
-        });
+        })?;
         self.storage_write_slice(index, &init_info)?;
         Ok(())
     }
@@ -800,7 +865,7 @@ impl<S: Storage> Store<S> {
     /// Sets the padding bit of a user header.
     fn set_padding(&mut self, pos: Position) -> StoreResult<()> {
         let mut word = Word::from_slice(self.read_word(pos));
-        self.format.set_padding(&mut word);
+        self.format.set_padding(&mut word)?;
         self.write_slice(pos, &word.as_slice())?;
         Ok(())
     }
@@ -836,19 +901,20 @@ impl<S: Storage> Store<S> {
             }
         }
         // There is always at least one initialized page.
-        best.ok_or(StoreError::InvalidStorage)
+        Ok(best?)
     }

     /// Returns the number of words that can be written without compaction.
     fn immediate_capacity(&self) -> StoreResult<Nat> {
         let tail = self.tail()?;
-        let end = self.head()? + self.format.virt_size();
+        let end = self.head? + self.format.virt_size();
         Ok(end.get().saturating_sub(tail.get()))
     }

     /// Returns the position of the first word in the store.
+    #[cfg(feature = "std")]
     pub(crate) fn head(&self) -> StoreResult<Position> {
-        self.get_extremum_page_head(Ordering::Less)
+        Ok(self.head?)
     }

     /// Returns one past the position of the last word in the store.
@@ -863,6 +929,30 @@ impl<S: Storage> Store<S> {
         Ok(pos)
     }

+    fn push_entry(&mut self, pos: Position) -> StoreResult<()> {
+        let entries = match &mut self.entries {
+            None => return Ok(()),
+            Some(x) => x,
+        };
+        let head = self.head?;
+        let offset = u16::try_from(pos - head).ok()?;
+        debug_assert!(!entries.contains(&offset));
+        entries.push(offset);
+        Ok(())
+    }
+
+    fn remove_entry(&mut self, pos: Position) -> StoreResult<()> {
+        let entries = match &mut self.entries {
+            None => return Ok(()),
+            Some(x) => x,
+        };
+        let head = self.head?;
+        let offset = u16::try_from(pos - head).ok()?;
+        let i = entries.iter().position(|x| *x == offset)?;
+        entries.swap_remove(i);
+        Ok(())
+    }
+
     /// Parses the entry at a given position.
     ///
     /// The position is updated to point to the next entry.
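The `push_entry`/`remove_entry` helpers above maintain the new RAM index: each tracked entry is stored as the `u16` word offset of its position from `head`, which is why `compact_erase` rebases every offset with `checked_sub` when the head moves forward. A standalone sketch of that encoding (not from the commit; it uses plain `u32` positions in place of the crate's `Position` and `Nat` types):

    use core::convert::TryFrom;

    /// Offset of `pos` from `head` as kept in the RAM index; `None` if `pos` is
    /// before `head` or the offset does not fit in a u16.
    fn entry_offset(head: u32, pos: u32) -> Option<u16> {
        u16::try_from(pos.checked_sub(head)?).ok()
    }

    /// Rebase an offset after the head moved forward by `delta` words (compaction).
    fn rebase(offset: u16, delta: u16) -> Option<u16> {
        offset.checked_sub(delta)
    }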
@@ -1061,7 +1151,7 @@ impl Store<BufferStorage> {
     /// If the value has been partially compacted, only return the non-compacted part. Returns an
     /// empty value if it has been fully compacted.
     pub fn inspect_value(&self, handle: &StoreHandle) -> Vec<u8> {
-        let head = self.head().unwrap();
+        let head = self.head.unwrap();
         let length = self.format.bytes_to_words(handle.len);
         if head <= handle.pos {
             // The value has not been compacted.
@@ -1087,20 +1177,21 @@ impl Store<BufferStorage> {
             store
                 .iter()
                 .unwrap()
-                .map(|x| x.unwrap())
-                .filter(|x| delete_key(x.key as usize))
-                .collect::<Vec<_>>()
+                .filter(|x| x.is_err() || delete_key(x.as_ref().unwrap().key as usize))
+                .collect::<Result<Vec<_>, _>>()
         };
         match *operation {
             StoreOperation::Transaction { ref updates } => {
                 let keys: HashSet<usize> = updates.iter().map(|x| x.key()).collect();
-                let deleted = deleted(self, &|key| keys.contains(&key));
-                (deleted, self.transaction(updates))
+                match deleted(self, &|key| keys.contains(&key)) {
+                    Ok(deleted) => (deleted, self.transaction(updates)),
+                    Err(error) => (Vec::new(), Err(error)),
+                }
             }
-            StoreOperation::Clear { min_key } => {
-                let deleted = deleted(self, &|key| key >= min_key);
-                (deleted, self.clear(min_key))
-            }
+            StoreOperation::Clear { min_key } => match deleted(self, &|key| key >= min_key) {
+                Ok(deleted) => (deleted, self.clear(min_key)),
+                Err(error) => (Vec::new(), Err(error)),
+            },
             StoreOperation::Prepare { length } => (Vec::new(), self.prepare(length)),
         }
     }
@@ -1110,10 +1201,12 @@ impl Store<BufferStorage> {
         let format = Format::new(storage).unwrap();
         // Write the init info of the first page.
         let mut index = format.index_init(0);
-        let init_info = format.build_init(InitInfo {
+        let init_info = format
+            .build_init(InitInfo {
             cycle: usize_to_nat(cycle),
             prefix: 0,
-        });
+            })
+            .unwrap();
         storage.write_slice(index, &init_info).unwrap();
         // Pad the first word of the page. This makes the store looks used, otherwise we may confuse
         // it with a partially initialized store.
@@ -1165,61 +1258,6 @@ enum ParsedEntry {
     Tail,
 }

-/// Iterates over the entries of a store.
-pub struct StoreIter<'a, S: Storage> {
-    /// The store being iterated.
-    store: &'a Store<S>,
-
-    /// The position of the next entry.
-    pos: Position,
-
-    /// Iteration stops when reaching this position.
-    end: Position,
-}
-
-impl<'a, S: Storage> StoreIter<'a, S> {
-    /// Creates an iterator over the entries of a store.
-    fn new(store: &'a Store<S>) -> StoreResult<StoreIter<'a, S>> {
-        let pos = store.head()?;
-        let end = pos + store.format.virt_size();
-        Ok(StoreIter { store, pos, end })
-    }
-}
-
-impl<'a, S: Storage> StoreIter<'a, S> {
-    /// Returns the next entry and advances the iterator.
-    fn transposed_next(&mut self) -> StoreResult<Option<StoreHandle>> {
-        if self.pos >= self.end {
-            return Ok(None);
-        }
-        while self.pos < self.end {
-            let entry_pos = self.pos;
-            match self.store.parse_entry(&mut self.pos)? {
-                ParsedEntry::Tail => break,
-                ParsedEntry::Padding => (),
-                ParsedEntry::User(header) => {
-                    return Ok(Some(StoreHandle {
-                        key: header.key,
-                        pos: entry_pos,
-                        len: header.length,
-                    }))
-                }
-                _ => return Err(StoreError::InvalidStorage),
-            }
-        }
-        self.pos = self.end;
-        Ok(None)
-    }
-}
-
-impl<'a, S: Storage> Iterator for StoreIter<'a, S> {
-    type Item = StoreResult<StoreHandle>;
-
-    fn next(&mut self) -> Option<StoreResult<StoreHandle>> {
-        self.transposed_next().transpose()
-    }
-}
-
 /// Returns whether 2 slices are different.
 ///
 /// Returns an error if `target` has a bit set to one for which `source` is set to zero.
@@ -1438,4 +1476,22 @@ mod tests {
         driver = driver.power_off().power_on().unwrap();
         driver.check().unwrap();
     }
+
+    #[test]
+    fn entries_ok() {
+        let mut driver = MINIMAL.new_driver().power_on().unwrap();
+
+        // The store is initially empty.
+        assert!(driver.store().entries.as_ref().unwrap().is_empty());
+
+        // Inserted elements are added.
+        const LEN: usize = 6;
+        driver.insert(0, &[0x38; (LEN - 1) * 4]).unwrap();
+        driver.insert(1, &[0x5c; 4]).unwrap();
+        assert_eq!(driver.store().entries, Some(vec![0, LEN as u16]));
+
+        // Deleted elements are removed.
+        driver.remove(0).unwrap();
+        assert_eq!(driver.store().entries, Some(vec![LEN as u16]));
+    }
 }
@@ -573,7 +573,7 @@ pub struct IterCredentials<'a> {
     store: &'a persistent_store::Store<Storage>,

     /// The store iterator.
-    iter: persistent_store::StoreIter<'a, Storage>,
+    iter: persistent_store::StoreIter<'a>,

     /// The iteration result.
     ///