Merge pull request #16 from tweedegolf/shutdown-strengthening
Shutdown strengthening
diondokter authored Jan 2, 2024
2 parents 513e857 + a0261d5 commit 51ad715
Showing 10 changed files with 1,205 additions and 455 deletions.
4 changes: 1 addition & 3 deletions Cargo.toml
@@ -14,12 +14,10 @@ keywords = ["no_std", "embedded", "flash", "storage"]
[dependencies]
embedded-storage = "0.3.0"
defmt = { version = "0.3", optional = true }
rand = { version = "0.8.5", optional = true }

[dev-dependencies]
rand = "0.8.5"
approx = "0.5.1"

[features]
defmt = ["dep:defmt"]
_test = ["rand"]
_test = []
8 changes: 8 additions & 0 deletions README.md
@@ -82,6 +82,14 @@ When using peek_many, you can look at all data from oldest to newest.

(DD-MM-YY)

### Unreleased

- *Breaking* The data CRC has been upgraded from 16-bit to 32-bit, since 16-bit turned out to have too many collisions.
This increases the item header size from 6 to 8 bytes. The CRC was also moved to the front of the header to
aid with shutdown/cancellation issues.
- When the state is corrupted, many issues can now be repaired with the repair functions in the map and queue modules (a usage sketch follows this diff).
- Made changes throughout the crate to better survive shutoffs.

### 0.6.2 - 22-12-23

- Small bug fixes and refactorings, including an off-by-one error found with the fuzzing added in [#13](https://github.com/tweedegolf/sequential-storage/pull/13)
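
The repair functions mentioned in the changelog above are exercised by the updated `map.rs` fuzz target later in this diff. As a rough illustration only, the sketch below shows that flow using the `TestItem` type and `MockFlashBase` mock flash defined in that fuzz target (so it assumes the crate's `_test` feature); the constructor arguments and the `try_repair` call are modeled on the fuzz code, and `demo_repair` is just an illustrative name.

```rust
use core::ops::Range;

use sequential_storage::{
    map::{self, MapError},
    mock_flash::{MockFlashBase, WriteCountCheck},
};

// Assumes the `TestItem` StorageItem implementation from the fuzz target below.
fn demo_repair() {
    // Same mock flash geometry and flash range as the fuzz target; no fuel limit here
    // (the fuzz target passes `Some(fuel)` to make the flash simulate early shutoffs).
    let mut flash = MockFlashBase::<4, 4, 256>::new(WriteCountCheck::OnceOnly, None);
    const FLASH_RANGE: Range<u32> = 0x000..0x1000;
    let mut buf = [0; 260];

    let item = TestItem { key: 0, value: vec![1, 2, 3] };

    match map::store_item(&mut flash, FLASH_RANGE, &mut buf, item.clone()) {
        // Stored fine: nothing else to do.
        Ok(_) => {}
        // New in this release: a corrupted map state can be repaired, after which the
        // failed operation can simply be retried.
        Err(MapError::Corrupted { .. }) => {
            map::try_repair::<TestItem, _>(&mut flash, FLASH_RANGE, &mut buf).unwrap();
            map::store_item(&mut flash, FLASH_RANGE, &mut buf, item).unwrap();
        }
        Err(e) => panic!("{e:?}"),
    }
}
```

In the fuzz target itself the same pattern sits inside a retry loop, so each operation is repaired and retried at most once.
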
2 changes: 1 addition & 1 deletion fuzz/Cargo.toml
@@ -12,7 +12,7 @@ libfuzzer-sys = "0.4"
sequential-storage = { path = "..", features = ["_test"] }
arbitrary = { version = "1.2.2", features = ["derive"] }
rand = "0.8.5"
embedded-storage = "0.3.0"
rand_pcg = "0.3.1"

# Prevent this from interfering with workspaces
[workspace]
200 changes: 141 additions & 59 deletions fuzz/fuzz_targets/map.rs
@@ -2,71 +2,74 @@

use libfuzzer_sys::arbitrary::Arbitrary;
use libfuzzer_sys::fuzz_target;
use rand::SeedableRng;
use sequential_storage::{
map::{MapError, StorageItem},
mock_flash::MockFlashBase,
mock_flash::{MockFlashBase, MockFlashError, WriteCountCheck},
};
use std::{collections::HashMap, ops::Range};

fuzz_target!(|data: Input| fuzz(data));

#[derive(Arbitrary, Debug)]
#[derive(Arbitrary, Debug, Clone)]
struct Input {
seed: u64,
fuel: u16,
ops: Vec<Op>,
}

#[derive(Arbitrary, Debug)]
#[derive(Arbitrary, Debug, Clone)]
enum Op {
Store(StoreOp),
Fetch(u16),
Fetch(u8),
}

#[derive(Arbitrary, Debug)]
#[derive(Arbitrary, Debug, Clone)]
struct StoreOp {
key: u16,
key: u8,
value_len: u8,
}

impl StoreOp {
fn into_test_item(self) -> TestItem {
fn into_test_item(self, rng: &mut impl rand::Rng) -> TestItem {
TestItem {
key: self.key,
value: (0..self.value_len as usize)
.map(|_| rand::random::<u8>())
value: (0..(self.value_len % 8) as usize)
.map(|_| rng.gen())
.collect(),
}
}
}

#[derive(Debug, Clone, PartialEq, Eq)]
struct TestItem {
key: u16,
key: u8,
value: Vec<u8>,
}

impl StorageItem for TestItem {
type Key = u16;
type Key = u8;

type Error = ();

fn serialize_into(&self, buffer: &mut [u8]) -> Result<usize, Self::Error> {
if buffer.len() < 2 + self.value.len() {
if buffer.len() < 1 + self.value.len() {
return Err(());
}

buffer[0..2].copy_from_slice(&self.key.to_ne_bytes());
buffer[2..][..self.value.len()].copy_from_slice(&self.value);
buffer[0] = self.key;
buffer[1..][..self.value.len()].copy_from_slice(&self.value);

Ok(2 + self.value.len())
Ok(1 + self.value.len())
}

fn deserialize_from(buffer: &[u8]) -> Result<Self, Self::Error>
where
Self: Sized,
{
Ok(Self {
key: u16::from_ne_bytes(buffer[0..2].try_into().unwrap()),
value: buffer[2..].to_vec(),
key: buffer[0],
value: buffer[1..].to_vec(),
})
}

@@ -78,7 +81,7 @@ impl StorageItem for TestItem {
where
Self: Sized,
{
Ok(u16::from_ne_bytes(buffer[0..2].try_into().unwrap()))
Ok(buffer[0])
}
}

@@ -87,55 +90,134 @@ fn fuzz(ops: Input) {
const WORD_SIZE: usize = 4;
const WORDS_PER_PAGE: usize = 256;

let mut flash = MockFlashBase::<PAGES, WORD_SIZE, WORDS_PER_PAGE>::default();
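// The mock flash now checks that every word is written at most once (`WriteCountCheck::OnceOnly`)
// and takes an optional `fuel` budget after which it simulates an early shutoff
// (surfaced as `MockFlashError::EarlyShutoff` and handled below).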
let mut flash = MockFlashBase::<PAGES, WORD_SIZE, WORDS_PER_PAGE>::new(
WriteCountCheck::OnceOnly,
Some(ops.fuel as u32),
);
const FLASH_RANGE: Range<u32> = 0x000..0x1000;

let mut map = HashMap::new();
let mut buf = [0; 260]; // Max length of test item serialized, rounded up to align to flash word.

let mut rng = rand_pcg::Pcg32::seed_from_u64(ops.seed);

#[cfg(fuzzing_repro)]
eprintln!("\n=== START ===\n");

for op in ops.ops.into_iter() {
// println!(
// "==================================================== op: {:?}",
// op,
// );
match op {
Op::Store(op) => {
let item = op.into_test_item();
match sequential_storage::map::store_item(
&mut flash,
FLASH_RANGE,
&mut buf,
item.clone(),
) {
Ok(_) => {
map.insert(item.key, item.value);
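// Run each op in a retry loop: when it fails with a `Corrupted` error, repair the flash
// state once and retry the op; a second corruption after repairing falls through to the
// catch-all `panic!` arms below.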
let mut retry = true;
let mut corruption_repaired = false;

while std::mem::replace(&mut retry, false) {
#[cfg(fuzzing_repro)]
eprintln!("{}", flash.print_items());
#[cfg(fuzzing_repro)]
eprintln!("=== OP: {op:?} ===");

match op.clone() {
Op::Store(op) => {
let item = op.into_test_item(&mut rng);
match sequential_storage::map::store_item(
&mut flash,
FLASH_RANGE,
&mut buf,
item.clone(),
) {
Ok(_) => {
map.insert(item.key, item.value);
}
Err(MapError::FullStorage) => {}
Err(MapError::Storage {
value: MockFlashError::EarlyShutoff(_),
backtrace: _backtrace,
}) => {
match sequential_storage::map::fetch_item::<TestItem, _>(
&mut flash,
FLASH_RANGE,
&mut buf,
item.key,
) {
Ok(Some(check_item))
if check_item.key == item.key
&& check_item.value == item.value =>
{
#[cfg(fuzzing_repro)]
eprintln!("Early shutoff when storing {item:?}! (but it still stored fully). Originated from:\n{_backtrace:#}");
// Even though we got a shutoff, it still managed to store well
map.insert(item.key, item.value);
}
_ => {
// Could not fetch the item we stored...
#[cfg(fuzzing_repro)]
eprintln!("Early shutoff when storing {item:?}! Originated from:\n{_backtrace:#}");
}
}
}
Err(MapError::Corrupted {
backtrace: _backtrace,
}) if !corruption_repaired => {
#[cfg(fuzzing_repro)]
eprintln!(
"### Encountered curruption while storing! Repairing now. Originated from:\n{_backtrace:#}"
);

sequential_storage::map::try_repair::<TestItem, _>(
&mut flash,
FLASH_RANGE,
&mut buf,
)
.unwrap();
corruption_repaired = true;
retry = true;
}
Err(e) => panic!("{e:?}"),
}
Err(MapError::FullStorage) => {}
Err(e) => panic!("{e:?}"),
}
}
Op::Fetch(key) => {
let fetch_result = sequential_storage::map::fetch_item::<TestItem, _>(
&mut flash,
FLASH_RANGE,
&mut buf,
key,
)
.unwrap();

if let Some(existing_value) = map.get(&key) {
assert_eq!(
fetch_result.as_ref().map(|item| &item.key),
Some(&key),
"Mismatching keys"
);
assert_eq!(
fetch_result.as_ref().map(|item| &item.value),
Some(existing_value),
"Mismatching values"
);
} else {
assert_eq!(fetch_result, None)
Op::Fetch(key) => {
match sequential_storage::map::fetch_item::<TestItem, _>(
&mut flash,
FLASH_RANGE,
&mut buf,
key,
) {
Ok(Some(fetch_result)) => {
let map_value = map
.get(&key)
.expect(&format!("Map doesn't contain: {fetch_result:?}"));
assert_eq!(key, fetch_result.key, "Mismatching keys");
assert_eq!(map_value, &fetch_result.value, "Mismatching values");
}
Ok(None) => {
assert_eq!(None, map.get(&key));
}
Err(MapError::Storage {
value: MockFlashError::EarlyShutoff(_),
backtrace: _backtrace,
}) => {
#[cfg(fuzzing_repro)]
eprintln!(
"Early shutoff when fetching! Originated from:\n{_backtrace:#}"
);
}
Err(MapError::Corrupted {
backtrace: _backtrace,
}) if !corruption_repaired => {
#[cfg(fuzzing_repro)]
eprintln!(
"### Encountered curruption while fetching! Repairing now. Originated from:\n{_backtrace:#}"
);

sequential_storage::map::try_repair::<TestItem, _>(
&mut flash,
FLASH_RANGE,
&mut buf,
)
.unwrap();
corruption_repaired = true;
retry = true;
}
Err(e) => panic!("{e:?}"),
}
}
}
}
(Diffs for the remaining changed files are not shown.)
