From c488b0ff484d7204ecbaa4a52b77b88a53771e59 Mon Sep 17 00:00:00 2001
From: Sebastian Galkin
Date: Tue, 6 Aug 2024 17:49:32 -0300
Subject: [PATCH] Justfile, CI and formatting config

---
 .github/dependabot.yml         |  20 +++++
 .github/workflows/rust-ci.yaml |  72 +++++++++++++++++
 Justfile                       |  37 +++++++++
 deny.toml                      |  68 ++++++++++++++++
 rustfmt.toml                   |   2 +
 shell.nix                      |  57 +++++++------
 src/dataset.rs                 |  27 ++-----
 src/lib.rs                     |  18 +++--
 src/storage.rs                 |  19 ++++-
 src/structure.rs               | 141 ++++++++++++---------------------
 10 files changed, 312 insertions(+), 149 deletions(-)
 create mode 100644 .github/dependabot.yml
 create mode 100644 .github/workflows/rust-ci.yaml
 create mode 100644 Justfile
 create mode 100644 deny.toml
 create mode 100644 rustfmt.toml

diff --git a/.github/dependabot.yml b/.github/dependabot.yml
new file mode 100644
index 00000000..cc0cdfd8
--- /dev/null
+++ b/.github/dependabot.yml
@@ -0,0 +1,20 @@
+version: 2
+updates:
+  - package-ecosystem: "cargo"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "05:00"
+      timezone: "US/Pacific"
+    groups:
+      rust-dependencies:
+        patterns:
+          - "*"
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "weekly"
+      day: "monday"
+      time: "05:00"
+      timezone: "US/Pacific"
diff --git a/.github/workflows/rust-ci.yaml b/.github/workflows/rust-ci.yaml
new file mode 100644
index 00000000..bb55a769
--- /dev/null
+++ b/.github/workflows/rust-ci.yaml
@@ -0,0 +1,72 @@
+# This is mostly copied from the rust-analyzer repo
+# https://github.com/rust-lang/rust-analyzer/blob/12e7aa3132217cc6a6c1151d468be35d7b365999/.github/workflows/ci.yaml
+
+name: Rust CI
+on:
+  pull_request:
+    types: [opened, reopened, synchronize, labeled]
+  push:
+    branches:
+      - main
+
+env:
+  CARGO_INCREMENTAL: 0
+  CARGO_NET_RETRY: 10
+  CI: 1
+  RUST_BACKTRACE: short
+  RUSTFLAGS: "-D warnings -W unreachable-pub -W bare-trait-objects"
+  RUSTUP_MAX_RETRIES: 10
+
+jobs:
+  rust:
+    name: Rust CI
+    timeout-minutes: 20
+    runs-on: ${{ matrix.os }}
+    defaults:
+      run:
+        working-directory: ./
+    #permissions:
+      #contents: read
+      #actions: read
+      #pull-requests: read
+    env:
+      #CC: deny_c
+      RUST_CHANNEL: 'stable'
+
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [ubuntu-latest-m]
+
+    steps:
+      - name: Checkout repository
+        uses: actions/checkout@v4
+        with:
+          ref: ${{ github.event.pull_request.head.sha }}
+
+      - name: Install Just
+        run: sudo snap install --edge --classic just
+
+      - name: Install Rust toolchain
+        run: |
+          rustup update --no-self-update ${{ env.RUST_CHANNEL }}
+          rustup component add --toolchain ${{ env.RUST_CHANNEL }} rustfmt rust-src
+          rustup default ${{ env.RUST_CHANNEL }}
+
+      - name: Cache Dependencies
+        uses: Swatinem/rust-cache@v2
+        with:
+          workspaces: "rust -> target"
+          key: ${{ env.RUST_CHANNEL }}
+
+      - name: Install cargo-deny
+        run: cargo install --locked cargo-deny
+
+      - name: Check
+        if: matrix.os == 'ubuntu-latest-m' || github.event_name == 'push'
+        env:
+          AWS_ACCESS_KEY_ID: minio123
+          AWS_SECRET_ACCESS_KEY: minio123
+          AWS_DEFAULT_REGION: "us-east-1"
+        run: |
+          just pre-commit
diff --git a/Justfile b/Justfile
new file mode 100644
index 00000000..ff1afa29
--- /dev/null
+++ b/Justfile
@@ -0,0 +1,37 @@
+alias fmt := format
+alias pre := pre-commit
+
+# run all tests
+test *args='':
+    cargo test {{args}}
+
+# compile but don't run all tests
+compile-tests *args='':
+    cargo test --no-run {{args}}
+
+# build debug version
+build *args='':
+    cargo build {{args}}
+
+# build release version
+build-release *args='':
+    cargo build --release {{args}}
+
+# run clippy
+lint *args='':
+    cargo clippy --all-targets --all-features {{args}}
+
+# reformat all rust files
+format *args='':
+    cargo fmt --all {{args}}
+
+# reformat all nix files
+format-nix *args='':
+    alejandra .
+
+# run cargo deny to check dependencies
+check-deps *args='':
+    cargo deny --all-features check {{args}}
+
+# run all checks that CI actions will run
+pre-commit: (compile-tests "--locked") build (format "--check") lint test check-deps
diff --git a/deny.toml b/deny.toml
new file mode 100644
index 00000000..787ca2a0
--- /dev/null
+++ b/deny.toml
@@ -0,0 +1,68 @@
+[graph]
+all-features = true
+
+[advisories]
+version = 2
+
+[licenses]
+# List of explicitly allowed licenses
+# See https://spdx.org/licenses/ for list of possible licenses
+# [possible values: any SPDX 3.11 short identifier (+ optional exception)].
+# This list was generated using:
+# https://github.com/earth-mover/arraylake/pull/1067/files
+allow = [
+    "MIT",
+    "Apache-2.0",
+    "BSD-2-Clause",
+    "BSD-3-Clause",
+    "MPL-2.0",
+    "ISC",
+    "OpenSSL",
+    "Unicode-DFS-2016",
+    "CC0-1.0",
+]
+# The confidence threshold for detecting a license from license text.
+# The higher the value, the more closely the license text must be to the
+# canonical license text of a valid SPDX license file.
+# [possible values: any between 0.0 and 1.0].
+confidence-threshold = 0.8
+# Allow 1 or more licenses on a per-crate basis, so that particular licenses
+# aren't accepted for every possible crate as with the normal allow list
+exceptions = [
+    # Each entry is the crate and version constraint, and its specific allow
+    # list
+    #{ allow = ["Zlib"], name = "adler32", version = "*" },
+]

+[licenses.private]
+ignore = true
+
+[[licenses.clarify]]
+name = "ring"
+expression = "MIT AND ISC AND OpenSSL"
+license-files = [
+    # Each entry is a crate relative path, and the (opaque) hash of its contents
+    { path = "LICENSE", hash = 0xbd0eed23 },
+]
+
+
+[bans]
+# Lint level for when multiple versions of the same crate are detected
+multiple-versions = "warn"
+# Lint level for when a crate version requirement is `*`
+wildcards = "warn"
+workspace-default-features = "allow"
+external-default-features = "allow"
+allow = [
+    #{ name = "ansi_term", version = "=0.11.0" },
+]
+# List of crates to deny
+deny = [
+    # Each entry the name of a crate and a version range. If version is
+    # not specified, all versions will be matched.
+    #{ name = "ansi_term", version = "=0.11.0" },
+    #
+    # Wrapper crates can optionally be specified to allow the crate when it
+    # is a direct dependency of the otherwise banned crate
+    #{ name = "ansi_term", version = "=0.11.0", wrappers = [] },
+]
diff --git a/rustfmt.toml b/rustfmt.toml
new file mode 100644
index 00000000..cc131176
--- /dev/null
+++ b/rustfmt.toml
@@ -0,0 +1,2 @@
+max_width = 90
+use_small_heuristics = "Max"
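A note on the two rustfmt settings just added, since they drive every Rust hunk in this patch: use_small_heuristics = "Max" raises all of rustfmt's width heuristics (struct literals, call arguments, method chains, and so on) to max_width, so any expression that fits in 90 columns collapses onto one line. A minimal, self-contained illustration with hypothetical code, not taken from this repo:

// Hypothetical example: under the default heuristics the struct literals
// below would be wrapped across several lines; with max_width = 90 and
// use_small_heuristics = "Max" they stay on one line because they fit.
#[derive(Clone, Debug)]
struct Meta {
    name: String,
    rank: u32,
}

fn main() {
    let base = Meta { name: "temperature".to_string(), rank: 3 };
    let copy = Meta { rank: 4, ..base.clone() };
    println!("{:?} {:?}", base, copy);
}

This is exactly the rewrapping seen throughout the src/ hunks below: multi-line builder calls and struct patterns become single lines wherever they fit within 90 columns.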
-# pkgs = import (fetchTarball("channel:nixpkgs-unstable")) {}; - -alejandra = -(import (builtins.fetchTarball { - url = "https://github.com/kamadorueda/alejandra/tarball/3.0.0"; - sha256 = "sha256:18jm0d5xrxk38hw5sa470zgfz9xzdcyaskjhgjwhnmzd5fgacny4"; - }) {}) -.outPath; + alejandra = + (import (builtins.fetchTarball { + url = "https://github.com/kamadorueda/alejandra/tarball/3.0.0"; + sha256 = "sha256:18jm0d5xrxk38hw5sa470zgfz9xzdcyaskjhgjwhnmzd5fgacny4"; + }) {}) + .outPath; in -pkgs.mkShell.override { - stdenv = pkgs.stdenvAdapters.useMoldLinker pkgs.clangStdenv; -} { - packages = with pkgs; [ - rustc + pkgs.mkShell.override { + stdenv = pkgs.stdenvAdapters.useMoldLinker pkgs.clangStdenv; + } { + packages = with pkgs; [ + rustc cargo cargo-watch - cargo-nextest # test runner + cargo-nextest # test runner cargo-deny - rust-analyzer # rust lsp server + rust-analyzer # rust lsp server rustfmt clippy - taplo # toml lsp server + taplo # toml lsp server awscli2 - just # script launcher with a make flavor - alejandra # nix code formatter - ]; + just # script launcher with a make flavor + alejandra # nix code formatter + ]; - shellHook = '' - export PYTHONPATH=".:$PYTHONPATH" + shellHook = '' + export PYTHONPATH=".:$PYTHONPATH" - export AWS_ACCESS_KEY_ID=minio123 - export AWS_SECRET_ACCESS_KEY=minio123 - export AWS_DEFAULT_REGION=us-east-1 - export RUSTFLAGS="-W unreachable-pub -W bare-trait-objects" + export AWS_ACCESS_KEY_ID=minio123 + export AWS_SECRET_ACCESS_KEY=minio123 + export AWS_DEFAULT_REGION=us-east-1 + export RUSTFLAGS="-W unreachable-pub -W bare-trait-objects" ''; -} + } diff --git a/src/dataset.rs b/src/dataset.rs index c6f58022..502c227e 100644 --- a/src/dataset.rs +++ b/src/dataset.rs @@ -1,8 +1,9 @@ use std::sync::Arc; use crate::{ - AddNodeError, ArrayIndices, AttributesTable, ChunkPayload, Dataset, ManifestsTable, NodeData, - NodeStructure, Path, StructureTable, UpdateNodeError, UserAttributes, ZarrArrayMetadata, + AddNodeError, ArrayIndices, AttributesTable, ChunkPayload, Dataset, ManifestsTable, + NodeData, NodeStructure, Path, StructureTable, UpdateNodeError, UserAttributes, + ZarrArrayMetadata, }; /// FIXME: what do we want to do with implicit groups? @@ -46,10 +47,7 @@ impl Dataset { ) -> Result<(), UpdateNodeError> { match self.get_node(&path).await { None => Err(UpdateNodeError::NotFound), - Some(NodeStructure { - node_data: NodeData::Array(..), - .. - }) => { + Some(NodeStructure { node_data: NodeData::Array(..), .. }) => { self.updated_arrays.insert(path, metadata); Ok(()) } @@ -83,10 +81,7 @@ impl Dataset { ) -> Result<(), UpdateNodeError> { match self.get_node(&path).await { None => Err(UpdateNodeError::NotFound), - Some(NodeStructure { - node_data: NodeData::Array(..), - .. - }) => { + Some(NodeStructure { node_data: NodeData::Array(..), .. 
}) => { self.set_chunks.insert((path, coord), data); Ok(()) } @@ -98,11 +93,7 @@ impl Dataset { // FIXME: we should have errros here, not only None pub async fn get_node(&self, path: &Path) -> Option { - let structure = self - .storage - .fetch_structure(&self.structure_id) - .await - .ok()?; + let structure = self.storage.fetch_structure(&self.structure_id).await.ok()?; structure.get_node(path) } @@ -121,11 +112,7 @@ impl Dataset { /// Files that are reused from previous commits are not returned because they don't need saving pub async fn consolidate( &mut self, - ) -> ( - Arc, - Vec>, - Vec>, - ) { + ) -> (Arc, Vec>, Vec>) { todo!() } } diff --git a/src/lib.rs b/src/lib.rs index e7e6fdd5..74f689c4 100644 --- a/src/lib.rs +++ b/src/lib.rs @@ -204,8 +204,7 @@ impl TryFrom<&[u8]> for ObjectId { fn try_from(value: &[u8]) -> Result { let buf = value.try_into(); - buf.map(ObjectId) - .map_err(|_| "Invalid ObjectId buffer length") + buf.map(ObjectId).map_err(|_| "Invalid ObjectId buffer length") } } @@ -327,9 +326,18 @@ pub enum StorageError { /// Implementations are free to assume files are never overwritten. #[async_trait] trait Storage { - async fn fetch_structure(&self, id: &ObjectId) -> Result, StorageError>; // FIXME: format flags - async fn fetch_attributes(&self, id: &ObjectId) -> Result, StorageError>; // FIXME: format flags - async fn fetch_manifests(&self, id: &ObjectId) -> Result, StorageError>; // FIXME: format flags + async fn fetch_structure( + &self, + id: &ObjectId, + ) -> Result, StorageError>; // FIXME: format flags + async fn fetch_attributes( + &self, + id: &ObjectId, + ) -> Result, StorageError>; // FIXME: format flags + async fn fetch_manifests( + &self, + id: &ObjectId, + ) -> Result, StorageError>; // FIXME: format flags async fn write_structure( &self, diff --git a/src/storage.rs b/src/storage.rs index 6163d820..3d9b0a5d 100644 --- a/src/storage.rs +++ b/src/storage.rs @@ -5,7 +5,9 @@ use std::{ use async_trait::async_trait; -use crate::{AttributesTable, ManifestsTable, ObjectId, Storage, StorageError, StructureTable}; +use crate::{ + AttributesTable, ManifestsTable, ObjectId, Storage, StorageError, StructureTable, +}; #[derive(Default)] pub struct InMemoryStorage { @@ -26,7 +28,10 @@ impl InMemoryStorage { #[async_trait] impl Storage for InMemoryStorage { - async fn fetch_structure(&self, id: &ObjectId) -> Result, StorageError> { + async fn fetch_structure( + &self, + id: &ObjectId, + ) -> Result, StorageError> { self.struct_files .read() .or(Err(StorageError::Deadlock))? @@ -35,7 +40,10 @@ impl Storage for InMemoryStorage { .ok_or(StorageError::NotFound) } - async fn fetch_attributes(&self, id: &ObjectId) -> Result, StorageError> { + async fn fetch_attributes( + &self, + id: &ObjectId, + ) -> Result, StorageError> { self.attr_files .read() .or(Err(StorageError::Deadlock))? @@ -44,7 +52,10 @@ impl Storage for InMemoryStorage { .ok_or(StorageError::NotFound) } - async fn fetch_manifests(&self, id: &ObjectId) -> Result, StorageError> { + async fn fetch_manifests( + &self, + id: &ObjectId, + ) -> Result, StorageError> { self.man_files .read() .or(Err(StorageError::Deadlock))? 
diff --git a/src/structure.rs b/src/structure.rs
index 256ddfc1..39fa0861 100644
--- a/src/structure.rs
+++ b/src/structure.rs
@@ -2,9 +2,9 @@ use std::{num::NonZeroU64, sync::Arc};
 
 use arrow::{
     array::{
-        Array, ArrayRef, AsArray, FixedSizeBinaryArray, FixedSizeBinaryBuilder, ListArray,
-        ListBuilder, RecordBatch, StringArray, StringBuilder, StructArray, UInt32Array,
-        UInt32Builder, UInt8Array,
+        Array, ArrayRef, AsArray, FixedSizeBinaryArray, FixedSizeBinaryBuilder,
+        ListArray, ListBuilder, RecordBatch, StringArray, StringBuilder, StructArray,
+        UInt32Array, UInt32Builder, UInt8Array,
     },
     datatypes::{Field, Fields, Schema, UInt32Type, UInt64Type, UInt8Type},
 };
@@ -12,9 +12,9 @@ use itertools::izip;
 
 use crate::{
     ChunkKeyEncoding, ChunkShape, Codecs, DataType, DimensionName, FillValue, Flags,
-    ManifestExtents, ManifestRef, NodeData, NodeId, NodeStructure, NodeType, ObjectId, Path,
-    StorageTransformers, TableRegion, UserAttributes, UserAttributesRef, UserAttributesStructure,
-    ZarrArrayMetadata,
+    ManifestExtents, ManifestRef, NodeData, NodeId, NodeStructure, NodeType, ObjectId,
+    Path, StorageTransformers, TableRegion, UserAttributes, UserAttributesRef,
+    UserAttributesStructure, ZarrArrayMetadata,
 };
 
 pub struct StructureTable {
@@ -43,10 +43,7 @@ impl StructureTable {
             .flatten()
             .collect();
         let data_type = DataType::try_from(
-            self.batch
-                .column_by_name("data_type")?
-                .as_string_opt::<i32>()?
-                .value(idx),
+            self.batch.column_by_name("data_type")?.as_string_opt::<i32>()?.value(idx),
         )
         .ok()?;
         let chunk_shape = ChunkShape(
@@ -75,22 +72,16 @@ impl StructureTable {
                 .to_string(),
         );
 
-        let storage_transformers = self
-            .batch
-            .column_by_name("storage_transformers")?
-            .as_string_opt::<i32>()?;
+        let storage_transformers =
+            self.batch.column_by_name("storage_transformers")?.as_string_opt::<i32>()?;
         let storage_transformers = if storage_transformers.is_null(idx) {
             None
         } else {
-            Some(StorageTransformers(
-                storage_transformers.value(idx).to_string(),
-            ))
+            Some(StorageTransformers(storage_transformers.value(idx).to_string()))
         };
 
-        let dimension_names = self
-            .batch
-            .column_by_name("dimension_names")?
-            .as_list_opt::<i32>()?;
+        let dimension_names =
+            self.batch.column_by_name("dimension_names")?.as_list_opt::<i32>()?;
         let dimension_names = if dimension_names.is_null(idx) {
             None
         } else {
@@ -118,10 +109,8 @@ impl StructureTable {
 
     // FIXME: there should be a failure reason here, so return a Result
     fn build_manifest_refs(&self, idx: usize) -> Option<Vec<ManifestRef>> {
-        let manifest_refs_array = self
-            .batch
-            .column_by_name("manifest_references")?
-            .as_struct_opt()?;
+        let manifest_refs_array =
+            self.batch.column_by_name("manifest_references")?.as_struct_opt()?;
         if manifest_refs_array.is_valid(idx) {
             let refs = manifest_refs_array
                 .column_by_name("reference")?
@@ -162,16 +151,10 @@ impl StructureTable {
     }
 
     fn build_node_structure(&self, path: &Path, idx: usize) -> Option<NodeStructure> {
-        let node_type = self
-            .batch
-            .column_by_name("type")?
-            .as_string_opt::<i32>()?
-            .value(idx);
-        let id = self
-            .batch
-            .column_by_name("id")?
-            .as_primitive_opt::<UInt32Type>()?
-            .value(idx);
+        let node_type =
+            self.batch.column_by_name("type")?.as_string_opt::<i32>()?.value(idx);
+        let id =
+            self.batch.column_by_name("id")?.as_primitive_opt::<UInt32Type>()?.value(idx);
         let user_attributes = self.build_user_attributes(idx);
         match node_type {
             "group" => Some(NodeStructure {
@@ -194,14 +177,10 @@ impl StructureTable {
     }
 
     fn build_user_attributes(&self, idx: usize) -> Option<UserAttributesStructure> {
-        let inline = self
-            .batch
-            .column_by_name("user_attributes")?
-            .as_string_opt::<i32>()?;
+        let inline =
+            self.batch.column_by_name("user_attributes")?.as_string_opt::<i32>()?;
         if inline.is_valid(idx) {
-            Some(UserAttributesStructure::Inline(
-                inline.value(idx).to_string(),
-            ))
+            Some(UserAttributesStructure::Inline(inline.value(idx).to_string()))
         } else {
             self.build_user_attributes_ref(idx)
        }
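The remaining structure.rs hunks rework the helpers that build Arrow list columns, and they all preserve one non-obvious trick that the in-code comment ("I don't know how to create a ListArray that has not nullable elements") alludes to: ListArray::from_iter_primitive always declares list items nullable, so the code takes the array apart with into_parts and rebuilds it around a non-nullable "item" field. A self-contained distillation, assuming only the arrow crate (the function name here is illustrative):

// Build a ListArray whose item field is non-nullable, mirroring the helpers below.
use std::sync::Arc;

use arrow::array::{Array, ListArray};
use arrow::datatypes::{DataType, Field, UInt64Type};

fn uint64_list_with_non_nullable_items(rows: Vec<Option<Vec<u64>>>) -> ListArray {
    let iter = rows.into_iter().map(|opt| opt.map(|p| p.into_iter().map(Some)));
    // from_iter_primitive always produces a nullable "item" field...
    let res = ListArray::from_iter_primitive::<UInt64Type, _, _>(iter);
    // ...so rebuild the array with a non-nullable field, reusing the same
    // offsets, values, and row-level null buffer. Safe here because every
    // element fed in was Some, so the values array contains no nulls.
    let (_, offsets, values, nulls) = res.into_parts();
    let field = Arc::new(Field::new("item", DataType::UInt64, false));
    ListArray::new(field, offsets, values, nulls)
}

fn main() {
    let list = uint64_list_with_non_nullable_items(vec![Some(vec![1, 2]), None]);
    assert_eq!(list.len(), 2);
    assert!(list.is_null(1)); // row-level nulls are still representable
}

Note the asymmetry this keeps: whole rows may be null (a node with no dimension names, say), but elements inside a present list never are, which the non-nullable item field encodes in the schema.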
@@ -265,17 +244,11 @@ where
     T: IntoIterator<Item = Option<P>>,
     P: IntoIterator<Item = u64>,
 {
-    let iter = coll
-        .into_iter()
-        .map(|opt| opt.map(|p| p.into_iter().map(Some)));
+    let iter = coll.into_iter().map(|opt| opt.map(|p| p.into_iter().map(Some)));
     // I don't know how to create a ListArray that has not nullable elements
     let res = ListArray::from_iter_primitive::<UInt64Type, _, _>(iter);
     let (_, offsets, values, nulls) = res.into_parts();
-    let field = Arc::new(Field::new(
-        "item",
-        arrow::datatypes::DataType::UInt64,
-        false,
-    ));
+    let field = Arc::new(Field::new("item", arrow::datatypes::DataType::UInt64, false));
     ListArray::new(field, offsets, values, nulls)
 }
 
@@ -293,11 +266,7 @@ where
         .map(|opt| opt.map(|p| p.0.iter().map(|n| Some(n.get())).collect::<Vec<_>>()));
     let res = ListArray::from_iter_primitive::<UInt64Type, _, _>(iter);
     let (_, offsets, values, nulls) = res.into_parts();
-    let field = Arc::new(Field::new(
-        "item",
-        arrow::datatypes::DataType::UInt64,
-        false,
-    ));
+    let field = Arc::new(Field::new("item", arrow::datatypes::DataType::UInt64, false));
     ListArray::new(field, offsets, values, nulls)
 }
 
@@ -347,7 +316,9 @@ fn mk_user_attributes_ref_array<T: IntoIterator<Item = Option<ObjectId>>>(
         .expect("Bad ObjectId size")
 }
 
-fn mk_user_attributes_row_array<T: IntoIterator<Item = Option<u32>>>(coll: T) -> UInt32Array {
+fn mk_user_attributes_row_array<T: IntoIterator<Item = Option<u32>>>(
+    coll: T,
+) -> UInt32Array {
     UInt32Array::from_iter(coll)
 }
 
@@ -356,7 +327,8 @@ where
     T: IntoIterator<Item = Option<P>>,
     P: IntoIterator<Item = ManifestRef>,
 {
-    let mut ref_array = ListBuilder::new(FixedSizeBinaryBuilder::new(ObjectId::SIZE as i32));
+    let mut ref_array =
+        ListBuilder::new(FixedSizeBinaryBuilder::new(ObjectId::SIZE as i32));
     let mut from_row_array = ListBuilder::new(UInt32Builder::new());
     let mut to_row_array = ListBuilder::new(UInt32Builder::new());
 
@@ -396,19 +368,11 @@ where
     let ref_array = ListArray::new(field, offsets, values, nulls);
 
     let (_, offsets, values, nulls) = from_row_array.into_parts();
-    let field = Arc::new(Field::new(
-        "item",
-        arrow::datatypes::DataType::UInt32,
-        false,
-    ));
+    let field = Arc::new(Field::new("item", arrow::datatypes::DataType::UInt32, false));
     let from_row_array = ListArray::new(field, offsets, values, nulls);
 
     let (_, offsets, values, nulls) = to_row_array.into_parts();
-    let field = Arc::new(Field::new(
-        "item",
-        arrow::datatypes::DataType::UInt32,
-        false,
-    ));
+    let field = Arc::new(Field::new("item", arrow::datatypes::DataType::UInt32, false));
     let to_row_array = ListArray::new(field, offsets, values, nulls);
 
     StructArray::from(vec![
@@ -444,7 +408,9 @@ where
 }
 
 // For testing only
-pub fn mk_structure_table<T: IntoIterator<Item = NodeStructure>>(coll: T) -> StructureTable {
+pub fn mk_structure_table<T: IntoIterator<Item = NodeStructure>>(
+    coll: T,
+) -> StructureTable {
     let mut ids = Vec::new();
     let mut types = Vec::new();
     let mut paths = Vec::new();
@@ -557,19 +523,11 @@ pub fn mk_structure_table<T: IntoIterator<Item = NodeStructure>>(coll: T) -> Str
             Field::new("item", arrow::datatypes::DataType::UInt64, false),
             true,
         ),
-        Field::new(
-            "chunk_key_encoding",
-            arrow::datatypes::DataType::UInt8,
-            true,
-        ),
+        Field::new("chunk_key_encoding", arrow::datatypes::DataType::UInt8, true),
         // FIXME:
         //Field::new("fill_value", todo!(), true),
         Field::new("codecs", arrow::datatypes::DataType::Utf8, true),
-        Field::new(
-            "storage_transformers",
-            arrow::datatypes::DataType::Utf8,
-            true,
-        ),
+        Field::new("storage_transformers", arrow::datatypes::DataType::Utf8, true),
         Field::new_list(
             "dimension_names",
             Field::new("item", arrow::datatypes::DataType::Utf8, true),
@@ -581,11 +539,7 @@ pub fn mk_structure_table<T: IntoIterator<Item = NodeStructure>>(coll: T) -> Str
             arrow::datatypes::DataType::FixedSizeBinary(ObjectId::SIZE as i32),
             true,
         ),
-        Field::new(
-            "user_attributes_row",
-            arrow::datatypes::DataType::UInt32,
-            true,
-        ),
+        Field::new("user_attributes_row", arrow::datatypes::DataType::UInt32, true),
         Field::new(
             "manifest_references",
             arrow::datatypes::DataType::Struct(Fields::from(vec![
@@ -593,7 +547,9 @@ pub fn mk_structure_table<T: IntoIterator<Item = NodeStructure>>(coll: T) -> Str
                     "reference",
                     Field::new(
                         "item",
-                        arrow::datatypes::DataType::FixedSizeBinary(ObjectId::SIZE as i32),
+                        arrow::datatypes::DataType::FixedSizeBinary(
+                            ObjectId::SIZE as i32,
+                        ),
                         false,
                     ),
                     true,
@@ -612,7 +568,8 @@ pub fn mk_structure_table<T: IntoIterator<Item = NodeStructure>>(coll: T) -> Str
             true,
         ),
     ]));
-    let batch = RecordBatch::try_new(schema, columns).expect("Error creating record batch");
+    let batch =
+        RecordBatch::try_new(schema, columns).expect("Error creating record batch");
     StructureTable { batch }
 }
 
@@ -647,10 +604,8 @@ mod tests {
             dimension_names: Some(vec![None, None, Some("t".to_string())]),
             ..zarr_meta1.clone()
         };
-        let zarr_meta3 = ZarrArrayMetadata {
-            dimension_names: None,
-            ..zarr_meta2.clone()
-        };
+        let zarr_meta3 =
+            ZarrArrayMetadata { dimension_names: None, ..zarr_meta2.clone() };
         let man_ref1 = ManifestRef {
             object_id: ObjectId::random(),
             location: TableRegion(0, 1),
@@ -687,7 +642,9 @@ mod tests {
             NodeStructure {
                 path: "/b/c".into(),
                 id: 4,
-                user_attributes: Some(UserAttributesStructure::Inline("some inline".to_string())),
+                user_attributes: Some(UserAttributesStructure::Inline(
+                    "some inline".to_string(),
+                )),
                 node_data: NodeData::Group,
             },
             NodeStructure {
@@ -725,7 +682,9 @@ mod tests {
             Some(NodeStructure {
                 path: "/b/c".into(),
                 id: 4,
-                user_attributes: Some(UserAttributesStructure::Inline("some inline".to_string())),
+                user_attributes: Some(UserAttributesStructure::Inline(
+                    "some inline".to_string()
+                )),
                 node_data: NodeData::Group,
             }),
        );