Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Error when chunk coordinates are invalid #395

Open
wants to merge 8 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
102 changes: 101 additions & 1 deletion icechunk/src/format/snapshot.rs
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@ use crate::metadata::{
use super::{
format_constants, manifest::ManifestRef, AttributesId, IcechunkFormatError,
IcechunkFormatVersion, IcechunkResult, ManifestId, NodeId, ObjectId, Path,
SnapshotId, TableOffset,
SnapshotId, TableOffset, ChunkIndices
};

#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
Expand Down Expand Up @@ -49,6 +49,58 @@ pub struct ZarrArrayMetadata {
pub dimension_names: Option<DimensionNames>,
}

impl ZarrArrayMetadata {
    /// Returns the maximum permitted chunk index for each dimension of the array.
    ///
    /// Computed as `(shape - 1) / chunk_shape` per dimension; because integer
    /// division truncates, this yields the index of the last (possibly
    /// partial) chunk along each axis. A zero-length dimension maps to 0.
    ///
    /// # Returns
    ///
    /// A `ChunkIndices` containing the max chunk index for each dimension.
    fn max_chunk_indices_permitted(&self) -> ChunkIndices {
        debug_assert_eq!(self.shape.len(), self.chunk_shape.0.len());

        ChunkIndices(
            self.shape
                .iter()
                .zip(self.chunk_shape.0.iter())
                // NOTE(review): the `as u32` cast truncates for arrays with more
                // than u32::MAX chunks along one axis — presumably out of range
                // in practice, but worth confirming upstream.
                .map(|(s, cs)| if *s == 0 { 0 } else { ((s - 1) / cs.get()) as u32 })
                .collect(),
        )
    }

    /// Validates the provided chunk coordinates for the array.
    ///
    /// Checks every index in `coord` against the maximum chunk index
    /// permitted by the array's shape and chunk shape.
    ///
    /// # Arguments
    ///
    /// * `coord` - The chunk indices to validate.
    ///
    /// # Returns
    ///
    /// `true` if each index is within bounds for its dimension, `false`
    /// otherwise.
    pub fn valid_chunk_coord(&self, coord: &ChunkIndices) -> bool {
        debug_assert_eq!(self.shape.len(), coord.0.len());

        coord
            .0
            .iter()
            .zip(self.max_chunk_indices_permitted().0)
            .all(|(index, index_permitted)| *index <= index_permitted)
    }
}

#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub enum NodeData {
Array(ZarrArrayMetadata, Vec<ManifestRef>),
Expand Down Expand Up @@ -450,4 +502,52 @@ mod tests {
);
Ok(())
}

#[test]
fn test_valid_chunk_coord() {
    // Array whose last chunk is partial along every dimension: the maximum
    // valid chunk index per axis is (shape - 1) / chunk_size = [9, 10, 9].
    let meta_regular = ZarrArrayMetadata {
        shape: vec![10000, 10001, 9999],
        data_type: DataType::Float32,
        chunk_shape: ChunkShape(vec![
            NonZeroU64::new(1000).unwrap(),
            NonZeroU64::new(1000).unwrap(),
            NonZeroU64::new(1000).unwrap(),
        ]),
        chunk_key_encoding: ChunkKeyEncoding::Slash,
        fill_value: FillValue::Float32(0f32),
        codecs: vec![Codec {
            name: "mycodec".to_string(),
            configuration: Some(HashMap::from_iter(iter::once((
                "foo".to_string(),
                serde_json::Value::from(42),
            )))),
        }],
        storage_transformers: None,
        dimension_names: None,
    };

    // Degenerate array: every dimension has zero length, so only the
    // all-zeros coordinate is valid.
    let meta_empty = ZarrArrayMetadata {
        shape: vec![0, 0, 0],
        chunk_shape: ChunkShape(vec![
            NonZeroU64::new(1000).unwrap(),
            NonZeroU64::new(1000).unwrap(),
            NonZeroU64::new(1000).unwrap(),
        ]),
        ..meta_regular.clone()
    };

    let on_boundary = ChunkIndices(vec![9, 10, 9]);
    let past_boundary = ChunkIndices(vec![10, 11, 10]);
    let origin = ChunkIndices(vec![0, 0, 0]);

    assert!(meta_regular.valid_chunk_coord(&on_boundary));
    assert!(!meta_regular.valid_chunk_coord(&past_boundary));
    assert!(meta_empty.valid_chunk_coord(&origin));
}
}
119 changes: 110 additions & 9 deletions icechunk/src/repository.rs
Original file line number Diff line number Diff line change
Expand Up @@ -172,6 +172,8 @@ pub enum RepositoryError {
SerializationError(#[from] rmp_serde::encode::Error),
#[error("error in repository deserialization `{0}`")]
DeserializationError(#[from] rmp_serde::decode::Error),
#[error("invalid chunk index: coordinates {coords:?} are not valid for array at {path}")]
InvalidIndex { coords: ChunkIndices, path: Path }
}

pub type RepositoryResult<T> = Result<T, RepositoryError>;
Expand Down Expand Up @@ -424,9 +426,27 @@ impl Repository {
coord: ChunkIndices,
data: Option<ChunkPayload>,
) -> RepositoryResult<()> {
self.get_array(&path)
.await
.map(|node| self.change_set.set_chunk_ref(node.id, coord, data))

let node_snapshot = self.get_array(&path).await?;

if let NodeData::Array(zarr_metadata, _, ) = node_snapshot.node_data {
if zarr_metadata.valid_chunk_coord(&coord) {
self.change_set.set_chunk_ref(node_snapshot.id, coord, data);
Ok(())
} else {
Err(RepositoryError::InvalidIndex {
coords: coord,
path: path.clone()
})
}

} else {
Err(RepositoryError::NotAnArray {
node: node_snapshot,
message: "getting an array".to_string(),
})
}

Copy link
Contributor

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

maybe we can add a test for Repository too? that validates that the error is correctly bubbling up.

Copy link
Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Done!

}

pub async fn get_node(&self, path: &Path) -> RepositoryResult<NodeSnapshot> {
Expand Down Expand Up @@ -1574,7 +1594,11 @@ mod tests {
let zarr_meta = ZarrArrayMetadata {
shape: vec![1, 1, 2],
data_type: DataType::Float16,
chunk_shape: ChunkShape(vec![NonZeroU64::new(2).unwrap()]),
chunk_shape: ChunkShape(vec![
NonZeroU64::new(2).unwrap(),
NonZeroU64::new(2).unwrap(),
NonZeroU64::new(1).unwrap(),
]),
chunk_key_encoding: ChunkKeyEncoding::Slash,
fill_value: FillValue::Float16(f32::NEG_INFINITY),
codecs: vec![Codec { name: "mycodec".to_string(), configuration: None }],
Expand Down Expand Up @@ -1820,7 +1844,7 @@ mod tests {
let zarr_meta = ZarrArrayMetadata {
shape: vec![5, 5],
data_type: DataType::Float16,
chunk_shape: ChunkShape(vec![NonZeroU64::new(2).unwrap()]),
chunk_shape: ChunkShape(vec![NonZeroU64::new(2).unwrap(), NonZeroU64::new(2).unwrap()]),
chunk_key_encoding: ChunkKeyEncoding::Slash,
fill_value: FillValue::Float16(f32::NEG_INFINITY),
codecs: vec![Codec { name: "mycodec".to_string(), configuration: None }],
Expand Down Expand Up @@ -1975,9 +1999,13 @@ mod tests {
// add a new array and retrieve its node
ds.add_group(Path::root()).await?;
let zarr_meta = ZarrArrayMetadata {
shape: vec![1, 1, 2],
shape: vec![4, 2, 4],
data_type: DataType::Int32,
chunk_shape: ChunkShape(vec![NonZeroU64::new(2).unwrap()]),
chunk_shape: ChunkShape(vec![
NonZeroU64::new(2).unwrap(),
NonZeroU64::new(1).unwrap(),
NonZeroU64::new(2).unwrap()
]),
chunk_key_encoding: ChunkKeyEncoding::Slash,
fill_value: FillValue::Int32(0),
codecs: vec![Codec { name: "mycodec".to_string(), configuration: None }],
Expand Down Expand Up @@ -2009,6 +2037,12 @@ mod tests {
Some(ChunkPayload::Inline("hello".into())),
)
.await?;
ds.set_chunk_ref(
new_array_path.clone(),
ChunkIndices(vec![0, 1, 0]),
Some(ChunkPayload::Inline("hello".into())),
)
.await?;
let snapshot_id = ds.flush("commit", SnapshotProperties::default()).await?;
let ds = Repository::update(Arc::clone(&storage), snapshot_id).build();
let coords = ds
Expand All @@ -2022,7 +2056,8 @@ mod tests {
vec![
ChunkIndices(vec![0, 0, 0]),
ChunkIndices(vec![0, 0, 1]),
ChunkIndices(vec![1, 0, 0])
ChunkIndices(vec![1, 0, 0]),
ChunkIndices(vec![0, 1, 0])
]
.into_iter()
.collect()
Expand Down Expand Up @@ -2067,7 +2102,11 @@ mod tests {
let zarr_meta = ZarrArrayMetadata {
shape: vec![1, 1, 2],
data_type: DataType::Int32,
chunk_shape: ChunkShape(vec![NonZeroU64::new(2).unwrap()]),
chunk_shape: ChunkShape(vec![
NonZeroU64::new(2).unwrap(),
NonZeroU64::new(2).unwrap(),
NonZeroU64::new(2).unwrap()],
),
chunk_key_encoding: ChunkKeyEncoding::Slash,
fill_value: FillValue::Int32(0),
codecs: vec![Codec { name: "mycodec".to_string(), configuration: None }],
Expand Down Expand Up @@ -2159,6 +2198,68 @@ mod tests {
Ok(())
}

#[tokio::test]
async fn test_setting_w_invalid_coords() -> Result<(), Box<dyn Error>> {
    // Build a repository over in-memory storage.
    let in_mem_storage =
        Arc::new(ObjectStorage::new_in_memory_store(Some("prefix".into())));
    let storage: Arc<dyn Storage + Send + Sync> = in_mem_storage.clone();
    let mut ds = Repository::init(Arc::clone(&storage), false).await?.build();

    ds.add_group(Path::root()).await?;

    // 5x5 array with 2x2 chunks: valid chunk indices are 0..=2 per axis.
    let zarr_meta = ZarrArrayMetadata {
        shape: vec![5, 5],
        data_type: DataType::Float16,
        chunk_shape: ChunkShape(vec![
            NonZeroU64::new(2).unwrap(),
            NonZeroU64::new(2).unwrap(),
        ]),
        chunk_key_encoding: ChunkKeyEncoding::Slash,
        fill_value: FillValue::Float16(f32::NEG_INFINITY),
        codecs: vec![Codec { name: "mycodec".to_string(), configuration: None }],
        storage_transformers: None,
        dimension_names: None,
    };

    let apath: Path = "/array1".try_into()?;
    ds.add_array(apath.clone(), zarr_meta.clone()).await?;
    ds.commit("main", "first commit", None).await?;

    // Two in-bounds writes must succeed.
    let first = ds
        .set_chunk_ref(
            apath.clone(),
            ChunkIndices(vec![0, 0]),
            Some(ChunkPayload::Inline("hello".into())),
        )
        .await;
    assert!(first.is_ok());

    let boundary = ds
        .set_chunk_ref(
            apath.clone(),
            ChunkIndices(vec![2, 2]),
            Some(ChunkPayload::Inline("hello".into())),
        )
        .await;
    assert!(boundary.is_ok());

    // An out-of-bounds coordinate must surface InvalidIndex with the
    // offending coordinates and array path attached.
    let bad_result = ds
        .set_chunk_ref(
            apath.clone(),
            ChunkIndices(vec![3, 0]),
            Some(ChunkPayload::Inline("hello".into())),
        )
        .await;

    match bad_result {
        Err(RepositoryError::InvalidIndex { coords, path }) => {
            assert_eq!(coords, ChunkIndices(vec![3, 0]));
            assert_eq!(path, apath);
        }
        _ => panic!("Expected InvalidIndex Error"),
    }
    Ok(())
}

#[cfg(test)]
mod state_machine_test {
use crate::format::snapshot::NodeData;
Expand Down