diff --git a/Cargo.lock b/Cargo.lock index 551937a0bc..1a5227689b 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -7881,6 +7881,7 @@ dependencies = [ "subspace-core-primitives", "subspace-erasure-coding", "subspace-farmer-components", + "subspace-kzg", "subspace-proof-of-space", "subspace-runtime-primitives", "subspace-verification", @@ -9993,6 +9994,7 @@ dependencies = [ "subspace-archiving", "subspace-core-primitives", "subspace-erasure-coding", + "subspace-kzg", "subspace-proof-of-space", "subspace-verification", "thiserror", @@ -10026,6 +10028,7 @@ dependencies = [ "subspace-core-primitives", "subspace-erasure-coding", "subspace-farmer-components", + "subspace-kzg", "subspace-networking", "subspace-rpc-primitives", "thiserror", @@ -11520,6 +11523,7 @@ dependencies = [ "sp-std", "sp-timestamp", "subspace-core-primitives", + "subspace-kzg", "subspace-proof-of-space", "subspace-verification", "thiserror", @@ -12395,6 +12399,8 @@ dependencies = [ "serde", "subspace-core-primitives", "subspace-erasure-coding", + "subspace-kzg", + "subspace-verification", "thiserror", ] @@ -12404,22 +12410,15 @@ version = "0.1.0" dependencies = [ "blake3", "bytes", - "criterion", "derive_more 1.0.0", "hex", - "kzg", "num-traits", "parity-scale-codec", - "parking_lot 0.12.3", "rand", - "rand_core", "rayon", - "rust-kzg-blst", "scale-info", "serde", - "spin 0.9.8", "static_assertions", - "tracing", "uint 0.10.0", ] @@ -12448,6 +12447,7 @@ dependencies = [ "rand", "rust-kzg-blst", "subspace-core-primitives", + "subspace-kzg", ] [[package]] @@ -12520,15 +12520,16 @@ dependencies = [ "serde_json", "ss58-registry", "static_assertions", - "subspace-archiving", "subspace-core-primitives", "subspace-erasure-coding", "subspace-farmer-components", + "subspace-kzg", "subspace-metrics", "subspace-networking", "subspace-proof-of-space", "subspace-proof-of-space-gpu", "subspace-rpc-primitives", + "subspace-verification", "substrate-bip39", "supports-color", "tempfile", @@ -12565,6 +12566,7 @@ dependencies = [ "subspace-archiving", "subspace-core-primitives", "subspace-erasure-coding", + "subspace-kzg", "subspace-proof-of-space", "subspace-verification", "thiserror", @@ -12573,6 +12575,23 @@ dependencies = [ "winapi", ] +[[package]] +name = "subspace-kzg" +version = "0.1.0" +dependencies = [ + "criterion", + "derive_more 1.0.0", + "kzg", + "parking_lot 0.12.3", + "rand", + "rand_core", + "rust-kzg-blst", + "spin 0.9.8", + "static_assertions", + "subspace-core-primitives", + "tracing", +] + [[package]] name = "subspace-malicious-operator" version = "0.1.0" @@ -12787,6 +12806,7 @@ dependencies = [ "subspace-core-primitives", "subspace-erasure-coding", "subspace-farmer-components", + "subspace-kzg", "subspace-proof-of-space", ] @@ -12966,9 +12986,11 @@ dependencies = [ "subspace-archiving", "subspace-core-primitives", "subspace-erasure-coding", + "subspace-kzg", "subspace-networking", "subspace-proof-of-space", "subspace-runtime-primitives", + "subspace-verification", "substrate-frame-rpc-system", "substrate-prometheus-endpoint", "thiserror", @@ -13001,6 +13023,7 @@ dependencies = [ "subspace-core-primitives", "subspace-erasure-coding", "subspace-farmer-components", + "subspace-kzg", "subspace-proof-of-space", "subspace-runtime-primitives", "subspace-service", @@ -13140,8 +13163,8 @@ version = "0.1.0" dependencies = [ "parity-scale-codec", "schnorrkel", - "subspace-archiving", "subspace-core-primitives", + "subspace-kzg", "subspace-proof-of-space", "thiserror", ] diff --git a/crates/pallet-subspace/Cargo.toml 
b/crates/pallet-subspace/Cargo.toml index 9c644b701d..8b4bec2524 100644 --- a/crates/pallet-subspace/Cargo.toml +++ b/crates/pallet-subspace/Cargo.toml @@ -37,8 +37,9 @@ rand = { version = "0.8.5", features = ["min_const_gen"] } sp-io = { git = "https://github.com/subspace/polkadot-sdk", rev = "5871818e1d736f1843eb9078f886290695165c42" } subspace-archiving = { version = "0.1.0", path = "../subspace-archiving" } subspace-core-primitives = { version = "0.1.0", path = "../subspace-core-primitives" } -subspace-farmer-components = { version = "0.1.0", path = "../subspace-farmer-components" } subspace-erasure-coding = { version = "0.1.0", path = "../subspace-erasure-coding" } +subspace-farmer-components = { version = "0.1.0", path = "../subspace-farmer-components" } +subspace-kzg = { version = "0.1.0", path = "../../shared/subspace-kzg" } subspace-proof-of-space = { version = "0.1.0", path = "../subspace-proof-of-space" } [features] diff --git a/crates/pallet-subspace/src/lib.rs b/crates/pallet-subspace/src/lib.rs index fde51ff178..73f6cacef4 100644 --- a/crates/pallet-subspace/src/lib.rs +++ b/crates/pallet-subspace/src/lib.rs @@ -57,14 +57,14 @@ use sp_runtime::transaction_validity::{ }; use sp_std::collections::btree_map::BTreeMap; use sp_std::prelude::*; -use subspace_core_primitives::crypto::Scalar; use subspace_core_primitives::pieces::PieceOffset; use subspace_core_primitives::sectors::{SectorId, SectorIndex}; use subspace_core_primitives::segments::{ ArchivedHistorySegment, HistorySize, SegmentHeader, SegmentIndex, }; use subspace_core_primitives::{ - BlockHash, PublicKey, RewardSignature, SlotNumber, SolutionRange, REWARD_SIGNING_CONTEXT, + BlockHash, PublicKey, RewardSignature, ScalarBytes, SlotNumber, SolutionRange, + REWARD_SIGNING_CONTEXT, }; use subspace_verification::{ check_reward_signature, derive_next_solution_range, derive_pot_entropy, PieceCheckParams, @@ -112,13 +112,12 @@ pub mod pallet { use sp_std::collections::btree_map::BTreeMap; use sp_std::num::NonZeroU32; use sp_std::prelude::*; - use subspace_core_primitives::crypto::Scalar; use subspace_core_primitives::pieces::PieceOffset; use subspace_core_primitives::pot::PotCheckpoints; use subspace_core_primitives::sectors::SectorIndex; use subspace_core_primitives::segments::{HistorySize, SegmentHeader, SegmentIndex}; use subspace_core_primitives::{ - Blake3Hash, PublicKey, Randomness, RewardSignature, SolutionRange, + Blake3Hash, PublicKey, Randomness, RewardSignature, ScalarBytes, SolutionRange, }; pub(super) struct InitialSolutionRanges { @@ -437,7 +436,7 @@ pub mod pallet { /// Parent block author information. #[pallet::storage] pub(super) type ParentBlockAuthorInfo = - StorageValue<_, (PublicKey, SectorIndex, PieceOffset, Scalar, Slot)>; + StorageValue<_, (PublicKey, SectorIndex, PieceOffset, ScalarBytes, Slot)>; /// Enable rewards since specified block number. 
    #[pallet::storage]
@@ -455,7 +454,7 @@
             PublicKey,
             SectorIndex,
             PieceOffset,
-            Scalar,
+            ScalarBytes,
             Slot,
             Option<T::AccountId>,
         ),
@@ -466,7 +465,7 @@
     pub(super) type ParentBlockVoters<T: Config> = StorageValue<
         _,
         BTreeMap<
-            (PublicKey, SectorIndex, PieceOffset, Scalar, Slot),
+            (PublicKey, SectorIndex, PieceOffset, ScalarBytes, Slot),
             (Option<T::AccountId>, RewardSignature),
         >,
         ValueQuery,
@@ -477,7 +476,7 @@
     pub(super) type CurrentBlockVoters<T: Config> = StorageValue<
         _,
         BTreeMap<
-            (PublicKey, SectorIndex, PieceOffset, Scalar, Slot),
+            (PublicKey, SectorIndex, PieceOffset, ScalarBytes, Slot),
             (Option<T::AccountId>, RewardSignature),
         >,
     >;
@@ -846,7 +845,7 @@ impl<T: Config> Pallet<T> {
            }
        }
        CurrentBlockVoters::<T>::put(BTreeMap::<
-            (PublicKey, SectorIndex, PieceOffset, Scalar, Slot),
+            (PublicKey, SectorIndex, PieceOffset, ScalarBytes, Slot),
            (Option<T::AccountId>, RewardSignature),
        >::default());

@@ -920,7 +919,7 @@ impl<T: Config> Pallet<T> {

        if (block_number % pot_entropy_injection_interval).is_zero() {
            let current_block_entropy = derive_pot_entropy(
-                pre_digest.solution().chunk,
+                &pre_digest.solution().chunk,
                pre_digest.pot_info().proof_of_time(),
            );
            // Collect entropy every `T::PotEntropyInjectionInterval` blocks
diff --git a/crates/pallet-subspace/src/mock.rs b/crates/pallet-subspace/src/mock.rs
index 34e25c9ef2..bdc0280e81 100644
--- a/crates/pallet-subspace/src/mock.rs
+++ b/crates/pallet-subspace/src/mock.rs
@@ -35,7 +35,6 @@ use std::simd::Simd;
 use std::sync::{Once, OnceLock};
 use std::{iter, slice};
 use subspace_archiving::archiver::{Archiver, NewArchivedSegment};
-use subspace_core_primitives::crypto::kzg::{embedded_kzg_settings, Kzg};
 use subspace_core_primitives::pieces::{Piece, PieceOffset, Record};
 use subspace_core_primitives::pos::PosSeed;
 use subspace_core_primitives::pot::PotOutput;
@@ -53,6 +52,7 @@ use subspace_farmer_components::auditing::audit_sector_sync;
 use subspace_farmer_components::plotting::{plot_sector, CpuRecordsEncoder, PlotSectorOptions};
 use subspace_farmer_components::reading::ReadSectorRecordChunksMode;
 use subspace_farmer_components::FarmerProtocolInfo;
+use subspace_kzg::Kzg;
 use subspace_proof_of_space::shim::ShimTable;
 use subspace_proof_of_space::{Table, TableGenerator};
 use subspace_verification::is_within_solution_range;
@@ -67,7 +67,7 @@ const MAX_PIECES_IN_SECTOR: u16 = 1;

 fn kzg_instance() -> &'static Kzg {
     static KZG: OnceLock<Kzg> = OnceLock::new();
-    KZG.get_or_init(|| Kzg::new(embedded_kzg_settings()))
+    KZG.get_or_init(Kzg::new)
 }

 fn erasure_coding_instance() -> &'static ErasureCoding {
@@ -388,9 +388,8 @@ pub fn create_signed_vote(
        solution.sector_index,
    );
    let sector_slot_challenge = sector_id.derive_sector_slot_challenge(&global_challenge);
-    let masked_chunk = (Simd::from(solution.chunk.to_bytes())
-        ^ Simd::from(*solution.proof_of_space.hash()))
-    .to_array();
+    let masked_chunk =
+        (Simd::from(*solution.chunk) ^ Simd::from(*solution.proof_of_space.hash())).to_array();

    // Check that solution quality is not too high
    if is_within_solution_range(
diff --git a/crates/pallet-subspace/src/tests.rs b/crates/pallet-subspace/src/tests.rs
index e061e43ba4..57dc03e9fb 100644
--- a/crates/pallet-subspace/src/tests.rs
+++ b/crates/pallet-subspace/src/tests.rs
@@ -41,11 +41,10 @@ use std::assert_matches::assert_matches;
 use std::collections::BTreeMap;
 use std::num::NonZeroU32;
 use std::sync::{Arc, Mutex};
-use subspace_core_primitives::crypto::Scalar;
 use subspace_core_primitives::pieces::PieceOffset;
 use subspace_core_primitives::pot::PotOutput;
 use
subspace_core_primitives::segments::SegmentIndex; -use subspace_core_primitives::{PublicKey, RewardSignature, SolutionRange}; +use subspace_core_primitives::{PublicKey, RewardSignature, ScalarBytes, SolutionRange}; use subspace_runtime_primitives::{FindBlockRewardAddress, FindVotingRewardAddresses}; #[test] @@ -1277,7 +1276,7 @@ fn enabling_block_rewards_works() { PublicKey::from(Keypair::generate().public.to_bytes()), 0, PieceOffset::ZERO, - Scalar::default(), + ScalarBytes::default(), Subspace::current_slot(), Some(1), )); @@ -1288,7 +1287,7 @@ fn enabling_block_rewards_works() { PublicKey::from(Keypair::generate().public.to_bytes()), 0, PieceOffset::ZERO, - Scalar::default(), + ScalarBytes::default(), Subspace::current_slot(), ), (Some(2), RewardSignature::from([0; 64])), diff --git a/crates/sc-consensus-subspace-rpc/Cargo.toml b/crates/sc-consensus-subspace-rpc/Cargo.toml index d2f2a0f6c9..17e245872a 100644 --- a/crates/sc-consensus-subspace-rpc/Cargo.toml +++ b/crates/sc-consensus-subspace-rpc/Cargo.toml @@ -35,6 +35,7 @@ subspace-archiving = { version = "0.1.0", path = "../subspace-archiving" } subspace-core-primitives = { version = "0.1.0", path = "../subspace-core-primitives" } subspace-erasure-coding = { version = "0.1.0", path = "../subspace-erasure-coding" } subspace-farmer-components = { version = "0.1.0", path = "../subspace-farmer-components" } +subspace-kzg = { version = "0.1.0", path = "../../shared/subspace-kzg" } subspace-networking = { version = "0.1.0", path = "../subspace-networking" } subspace-rpc-primitives = { version = "0.1.0", path = "../subspace-rpc-primitives" } thiserror = "1.0.64" diff --git a/crates/sc-consensus-subspace-rpc/src/lib.rs b/crates/sc-consensus-subspace-rpc/src/lib.rs index 1dc2b1fd47..a717559c88 100644 --- a/crates/sc-consensus-subspace-rpc/src/lib.rs +++ b/crates/sc-consensus-subspace-rpc/src/lib.rs @@ -53,13 +53,13 @@ use std::sync::atomic::{AtomicU64, Ordering}; use std::sync::{Arc, Weak}; use std::time::Duration; use subspace_archiving::archiver::NewArchivedSegment; -use subspace_core_primitives::crypto::kzg::Kzg; use subspace_core_primitives::objects::GlobalObjectMapping; use subspace_core_primitives::pieces::{Piece, PieceIndex}; use subspace_core_primitives::segments::{HistorySize, SegmentHeader, SegmentIndex}; use subspace_core_primitives::{Blake3Hash, BlockHash, PublicKey, SlotNumber, Solution}; use subspace_erasure_coding::ErasureCoding; use subspace_farmer_components::FarmerProtocolInfo; +use subspace_kzg::Kzg; use subspace_networking::libp2p::Multiaddr; use subspace_rpc_primitives::{ FarmerAppInfo, RewardSignatureResponse, RewardSigningInfo, SlotInfo, SolutionResponse, diff --git a/crates/sc-consensus-subspace/Cargo.toml b/crates/sc-consensus-subspace/Cargo.toml index 742f729f78..c1ba02ada4 100644 --- a/crates/sc-consensus-subspace/Cargo.toml +++ b/crates/sc-consensus-subspace/Cargo.toml @@ -42,6 +42,7 @@ sp-runtime = { git = "https://github.com/subspace/polkadot-sdk", rev = "5871818e subspace-archiving = { version = "0.1.0", path = "../subspace-archiving" } subspace-core-primitives = { version = "0.1.0", path = "../subspace-core-primitives" } subspace-erasure-coding = { version = "0.1.0", path = "../subspace-erasure-coding" } +subspace-kzg = { version = "0.1.0", path = "../../shared/subspace-kzg" } subspace-proof-of-space = { version = "0.1.0", path = "../subspace-proof-of-space" } subspace-verification = { version = "0.1.0", path = "../subspace-verification" } thiserror = "1.0.64" diff --git 
a/crates/sc-consensus-subspace/src/archiver.rs b/crates/sc-consensus-subspace/src/archiver.rs index 0f46456d25..14286b04d1 100644 --- a/crates/sc-consensus-subspace/src/archiver.rs +++ b/crates/sc-consensus-subspace/src/archiver.rs @@ -76,11 +76,11 @@ use std::sync::atomic::{AtomicU16, Ordering}; use std::sync::Arc; use std::time::Duration; use subspace_archiving::archiver::{Archiver, NewArchivedSegment}; -use subspace_core_primitives::crypto::kzg::Kzg; use subspace_core_primitives::objects::BlockObjectMapping; use subspace_core_primitives::segments::{RecordedHistorySegment, SegmentHeader, SegmentIndex}; use subspace_core_primitives::{BlockNumber, PublicKey}; use subspace_erasure_coding::ErasureCoding; +use subspace_kzg::Kzg; use tracing::{debug, info, trace, warn}; /// Number of WASM instances is 8, this is a bit lower to avoid warnings exceeding number of diff --git a/crates/sc-consensus-subspace/src/block_import.rs b/crates/sc-consensus-subspace/src/block_import.rs index e2792d9886..a6038f4908 100644 --- a/crates/sc-consensus-subspace/src/block_import.rs +++ b/crates/sc-consensus-subspace/src/block_import.rs @@ -132,6 +132,9 @@ pub enum Error { /// Invalid audit chunk offset #[error("Invalid audit chunk offset")] InvalidAuditChunkOffset, + /// Invalid chunk + #[error("Invalid chunk: {0}")] + InvalidChunk(String), /// Invalid chunk witness #[error("Invalid chunk witness")] InvalidChunkWitness, @@ -242,6 +245,7 @@ where VerificationPrimitiveError::InvalidAuditChunkOffset => { Error::InvalidAuditChunkOffset } + VerificationPrimitiveError::InvalidChunk(error) => Error::InvalidChunk(error), VerificationPrimitiveError::InvalidChunkWitness => Error::InvalidChunkWitness, VerificationPrimitiveError::SectorExpired { expiration_history_size, diff --git a/crates/sc-consensus-subspace/src/lib.rs b/crates/sc-consensus-subspace/src/lib.rs index 01bca2c342..744174c44f 100644 --- a/crates/sc-consensus-subspace/src/lib.rs +++ b/crates/sc-consensus-subspace/src/lib.rs @@ -42,8 +42,8 @@ use crate::notification::{SubspaceNotificationSender, SubspaceNotificationStream use crate::slot_worker::{NewSlotNotification, RewardSigningNotification}; use sp_consensus_subspace::ChainConstants; use sp_runtime::traits::Block as BlockT; -use subspace_core_primitives::crypto::kzg::Kzg; use subspace_erasure_coding::ErasureCoding; +use subspace_kzg::Kzg; /// State that must be shared between various consensus components. 
#[derive(Clone)] diff --git a/crates/sc-consensus-subspace/src/tests.rs b/crates/sc-consensus-subspace/src/tests.rs index a1f1695230..168d4d07d7 100644 --- a/crates/sc-consensus-subspace/src/tests.rs +++ b/crates/sc-consensus-subspace/src/tests.rs @@ -72,8 +72,8 @@ // use std::task::Poll; // use std::time::Duration; // use subspace_archiving::archiver::Archiver; -// use subspace_core_primitives::crypto::kzg; -// use subspace_core_primitives::crypto::kzg::{embedded_kzg_settings, Kzg}; +// use subspace_kzg; +// use subspace_kzg::{Kzg}; // use subspace_core_primitives::objects::BlockObjectMapping; // use subspace_core_primitives::{ // ArchivedHistorySegment, FlatPieces, HistorySize, Piece, PieceIndex, PieceOffset, Solution, diff --git a/crates/sc-consensus-subspace/src/verifier.rs b/crates/sc-consensus-subspace/src/verifier.rs index 7b66028158..0ca47d227b 100644 --- a/crates/sc-consensus-subspace/src/verifier.rs +++ b/crates/sc-consensus-subspace/src/verifier.rs @@ -40,8 +40,8 @@ use std::num::NonZeroUsize; use std::sync::atomic::{AtomicU32, Ordering}; use std::sync::Arc; use std::thread::available_parallelism; -use subspace_core_primitives::crypto::kzg::Kzg; use subspace_core_primitives::{BlockNumber, PublicKey}; +use subspace_kzg::Kzg; use subspace_proof_of_space::Table; use subspace_verification::{check_reward_signature, verify_solution, VerifySolutionParams}; use tokio::runtime::Handle; diff --git a/crates/sp-consensus-subspace/Cargo.toml b/crates/sp-consensus-subspace/Cargo.toml index 4bc2ac90e4..35d5df2276 100644 --- a/crates/sp-consensus-subspace/Cargo.toml +++ b/crates/sp-consensus-subspace/Cargo.toml @@ -30,6 +30,7 @@ sp-runtime-interface = { default-features = false, git = "https://github.com/sub sp-std = { default-features = false, git = "https://github.com/subspace/polkadot-sdk", rev = "5871818e1d736f1843eb9078f886290695165c42" } sp-timestamp = { git = "https://github.com/subspace/polkadot-sdk", rev = "5871818e1d736f1843eb9078f886290695165c42", default-features = false } subspace-core-primitives = { version = "0.1.0", path = "../subspace-core-primitives", default-features = false } +subspace-kzg = { version = "0.1.0", path = "../../shared/subspace-kzg", optional = true } subspace-proof-of-space = { version = "0.1.0", path = "../subspace-proof-of-space", default-features = false } subspace-verification = { version = "0.1.0", path = "../subspace-verification", default-features = false } thiserror = { version = "1.0.64", optional = true } @@ -54,7 +55,9 @@ std = [ "sp-std/std", "sp-timestamp/std", "subspace-core-primitives/std", + "subspace-kzg/std", "subspace-proof-of-space/std", + "subspace-verification/kzg", "subspace-verification/std", "thiserror", ] diff --git a/crates/sp-consensus-subspace/src/lib.rs b/crates/sp-consensus-subspace/src/lib.rs index b831f402fa..4ec32ecc83 100644 --- a/crates/sp-consensus-subspace/src/lib.rs +++ b/crates/sp-consensus-subspace/src/lib.rs @@ -40,8 +40,6 @@ use sp_runtime::{ConsensusEngineId, Justification}; use sp_runtime_interface::pass_by::PassBy; use sp_runtime_interface::{pass_by, runtime_interface}; use sp_std::num::NonZeroU32; -#[cfg(feature = "std")] -use subspace_core_primitives::crypto::kzg::Kzg; use subspace_core_primitives::pot::{PotCheckpoints, PotOutput, PotSeed}; use subspace_core_primitives::segments::{ HistorySize, SegmentCommitment, SegmentHeader, SegmentIndex, @@ -51,6 +49,8 @@ use subspace_core_primitives::{ SolutionRange, }; #[cfg(feature = "std")] +use subspace_kzg::Kzg; +#[cfg(feature = "std")] use 
subspace_proof_of_space::chia::ChiaTable; #[cfg(feature = "std")] use subspace_proof_of_space::shim::ShimTable; diff --git a/crates/subspace-archiving/Cargo.toml b/crates/subspace-archiving/Cargo.toml index d4513abe48..726250fd5d 100644 --- a/crates/subspace-archiving/Cargo.toml +++ b/crates/subspace-archiving/Cargo.toml @@ -22,12 +22,14 @@ rayon = { version = "1.10.0", optional = true } serde = { version = "1.0.110", optional = true, features = ["derive"] } subspace-core-primitives = { version = "0.1.0", path = "../subspace-core-primitives", default-features = false } subspace-erasure-coding = { version = "0.1.0", path = "../subspace-erasure-coding", default-features = false } +subspace-kzg = { version = "0.1.0", path = "../../shared/subspace-kzg", default-features = false } thiserror = { version = "1.0.64", optional = true } [dev-dependencies] criterion = "0.5.1" rand = { version = "0.8.5", features = ["min_const_gen"] } subspace-core-primitives = { version = "0.1.0", path = "../subspace-core-primitives" } +subspace-verification = { version = "0.1.0", path = "../subspace-verification" } [features] default = ["std"] @@ -45,6 +47,7 @@ std = [ "serde", "subspace-core-primitives/std", "subspace-erasure-coding/std", + "subspace-kzg/std", "thiserror", ] diff --git a/crates/subspace-archiving/benches/archiving.rs b/crates/subspace-archiving/benches/archiving.rs index 7d53ffe382..97a9423f91 100644 --- a/crates/subspace-archiving/benches/archiving.rs +++ b/crates/subspace-archiving/benches/archiving.rs @@ -2,10 +2,9 @@ use criterion::{black_box, criterion_group, criterion_main, Criterion}; use rand::{thread_rng, Rng}; use std::num::NonZeroUsize; use subspace_archiving::archiver::Archiver; -use subspace_core_primitives::crypto::kzg; -use subspace_core_primitives::crypto::kzg::Kzg; use subspace_core_primitives::pieces::Record; use subspace_erasure_coding::ErasureCoding; +use subspace_kzg::Kzg; const AMOUNT_OF_DATA: usize = 5 * 1024 * 1024; const SMALL_BLOCK_SIZE: usize = 500; @@ -13,7 +12,7 @@ const SMALL_BLOCK_SIZE: usize = 500; fn criterion_benchmark(c: &mut Criterion) { let mut input = vec![0u8; AMOUNT_OF_DATA]; thread_rng().fill(input.as_mut_slice()); - let kzg = Kzg::new(kzg::embedded_kzg_settings()); + let kzg = Kzg::new(); let erasure_coding = ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), diff --git a/crates/subspace-archiving/src/archiver.rs b/crates/subspace-archiving/src/archiver.rs index b6ecd5411b..70bd601d62 100644 --- a/crates/subspace-archiving/src/archiver.rs +++ b/crates/subspace-archiving/src/archiver.rs @@ -29,18 +29,18 @@ use core::cmp::Ordering; use parity_scale_codec::{Compact, CompactLen, Decode, Encode, Input, Output}; #[cfg(feature = "parallel")] use rayon::prelude::*; -use subspace_core_primitives::crypto::kzg::{Commitment, Kzg, Witness}; -use subspace_core_primitives::crypto::{blake3_254_hash_to_scalar, Scalar}; +use subspace_core_primitives::crypto::blake3_254_hash_to_scalar; use subspace_core_primitives::objects::{ BlockObject, BlockObjectMapping, GlobalObject, PieceObject, PieceObjectMapping, }; -use subspace_core_primitives::pieces::{PieceArray, RawRecord, RecordWitness}; +use subspace_core_primitives::pieces::RawRecord; use subspace_core_primitives::segments::{ ArchivedBlockProgress, ArchivedHistorySegment, LastArchivedBlock, RecordedHistorySegment, SegmentCommitment, SegmentHeader, SegmentIndex, }; -use subspace_core_primitives::{Blake3Hash, BlockNumber}; +use 
subspace_core_primitives::{Blake3Hash, BlockNumber, ScalarBytes}; use subspace_erasure_coding::ErasureCoding; +use subspace_kzg::{Kzg, Scalar}; const INITIAL_LAST_ARCHIVED_BLOCK: LastArchivedBlock = LastArchivedBlock { number: 0, @@ -683,14 +683,14 @@ impl Archiver { // Scratch buffer to avoid re-allocation let mut tmp_source_shards_scalars = Vec::::with_capacity(RecordedHistorySegment::NUM_RAW_RECORDS); - // Iterate over the chunks of `Scalar::SAFE_BYTES` bytes of all records + // Iterate over the chunks of `ScalarBytes::SAFE_BYTES` bytes of all records for record_offset in 0..RawRecord::NUM_CHUNKS { // Collect chunks of each record at the same offset raw_record_shards .array_chunks::<{ RawRecord::SIZE }>() .map(|record_bytes| { record_bytes - .array_chunks::<{ Scalar::SAFE_BYTES }>() + .array_chunks::<{ ScalarBytes::SAFE_BYTES }>() .nth(record_offset) .expect("Statically known to exist in a record; qed") }) @@ -779,7 +779,10 @@ impl Archiver { .poly( &record_commitments .iter() - .map(|commitment| blake3_254_hash_to_scalar(&commitment.to_bytes())) + .map(|commitment| { + Scalar::try_from(blake3_254_hash_to_scalar(&commitment.to_bytes())) + .expect("Create correctly by dedicated hash function; qed") + }) .collect::>(), ) .expect("Internally produced values must never fail; qed"); @@ -837,89 +840,3 @@ impl Archiver { } } } - -/// Validate witness embedded within a piece produced by archiver -pub fn is_piece_valid( - kzg: &Kzg, - piece: &PieceArray, - segment_commitment: &SegmentCommitment, - position: u32, -) -> bool { - let (record, commitment, witness) = piece.split(); - let witness = match Witness::try_from_bytes(witness) { - Ok(witness) => witness, - _ => { - return false; - } - }; - - let mut scalars = Vec::with_capacity(record.len().next_power_of_two()); - - for record_chunk in record.iter() { - match Scalar::try_from(record_chunk) { - Ok(scalar) => { - scalars.push(scalar); - } - _ => { - return false; - } - } - } - - // Number of scalars for KZG must be a power of two elements - scalars.resize(scalars.capacity(), Scalar::default()); - - let polynomial = match kzg.poly(&scalars) { - Ok(polynomial) => polynomial, - _ => { - return false; - } - }; - - if kzg - .commit(&polynomial) - .map(|commitment| commitment.to_bytes()) - .as_ref() - != Ok(commitment) - { - return false; - } - - let Ok(segment_commitment) = Commitment::try_from(segment_commitment) else { - return false; - }; - - let commitment_hash = blake3_254_hash_to_scalar(commitment.as_ref()); - - kzg.verify( - &segment_commitment, - ArchivedHistorySegment::NUM_PIECES, - position, - &commitment_hash, - &witness, - ) -} - -/// Validate witness for record commitment hash produced by archiver -pub fn is_record_commitment_hash_valid( - kzg: &Kzg, - record_commitment_hash: &Scalar, - commitment: &SegmentCommitment, - witness: &RecordWitness, - position: u32, -) -> bool { - let Ok(commitment) = Commitment::try_from(commitment) else { - return false; - }; - let Ok(witness) = Witness::try_from(witness) else { - return false; - }; - - kzg.verify( - &commitment, - ArchivedHistorySegment::NUM_PIECES, - position, - record_commitment_hash, - &witness, - ) -} diff --git a/crates/subspace-archiving/src/archiver/incremental_record_commitments.rs b/crates/subspace-archiving/src/archiver/incremental_record_commitments.rs index 6e1f7e4a6c..1b70f35e11 100644 --- a/crates/subspace-archiving/src/archiver/incremental_record_commitments.rs +++ b/crates/subspace-archiving/src/archiver/incremental_record_commitments.rs @@ -8,9 +8,9 @@ use 
core::ops::{Deref, DerefMut}; use parity_scale_codec::{Encode, Output}; #[cfg(feature = "parallel")] use rayon::prelude::*; -use subspace_core_primitives::crypto::kzg::{Commitment, Kzg}; -use subspace_core_primitives::crypto::Scalar; use subspace_core_primitives::pieces::RawRecord; +use subspace_core_primitives::ScalarBytes; +use subspace_kzg::{Commitment, Kzg, Scalar}; /// State of incremental record commitments, encapsulated to hide implementation details and /// encapsulate tricky logic @@ -88,7 +88,7 @@ impl<'a> Drop for IncrementalRecordCommitmentsProcessor<'a> { let iter = raw_records_bytes .map(|raw_record_bytes| { raw_record_bytes - .array_chunks::<{ Scalar::SAFE_BYTES }>() + .array_chunks::<{ ScalarBytes::SAFE_BYTES }>() .map(Scalar::from) }) .map(|record_chunks| { diff --git a/crates/subspace-archiving/src/piece_reconstructor.rs b/crates/subspace-archiving/src/piece_reconstructor.rs index e88d65565f..d018314bba 100644 --- a/crates/subspace-archiving/src/piece_reconstructor.rs +++ b/crates/subspace-archiving/src/piece_reconstructor.rs @@ -7,11 +7,11 @@ use alloc::string::String; use alloc::vec::Vec; #[cfg(feature = "parallel")] use rayon::prelude::*; -use subspace_core_primitives::crypto::kzg::{Commitment, Kzg, Polynomial}; -use subspace_core_primitives::crypto::{blake3_254_hash_to_scalar, Scalar}; +use subspace_core_primitives::crypto::blake3_254_hash_to_scalar; use subspace_core_primitives::pieces::{Piece, RawRecord}; use subspace_core_primitives::segments::ArchivedHistorySegment; use subspace_erasure_coding::ErasureCoding; +use subspace_kzg::{Commitment, Kzg, Polynomial, Scalar}; /// Reconstructor-related instantiation error #[derive(Debug, Clone, PartialEq)] @@ -62,7 +62,7 @@ impl PiecesReconstructor { // Scratch buffer to avoid re-allocation let mut tmp_shards_scalars = Vec::>::with_capacity(ArchivedHistorySegment::NUM_PIECES); - // Iterate over the chunks of `Scalar::SAFE_BYTES` bytes of all records + // Iterate over the chunks of `ScalarBytes::SAFE_BYTES` bytes of all records for record_offset in 0..RawRecord::NUM_CHUNKS { // Collect chunks of each record at the same offset for maybe_piece in input_pieces.iter() { @@ -157,7 +157,8 @@ impl PiecesReconstructor { reconstructed_piece .commitment_mut() .copy_from_slice(&commitment_bytes); - blake3_254_hash_to_scalar(&commitment_bytes) + Scalar::try_from(blake3_254_hash_to_scalar(&commitment_bytes)) + .expect("Create correctly by dedicated hash function; qed") }) .collect::>(); diff --git a/crates/subspace-archiving/src/reconstructor.rs b/crates/subspace-archiving/src/reconstructor.rs index c20da90caf..a16625e385 100644 --- a/crates/subspace-archiving/src/reconstructor.rs +++ b/crates/subspace-archiving/src/reconstructor.rs @@ -8,7 +8,6 @@ use alloc::string::String; use alloc::vec::Vec; use core::mem; use parity_scale_codec::Decode; -use subspace_core_primitives::crypto::Scalar; use subspace_core_primitives::pieces::{Piece, RawRecord}; use subspace_core_primitives::segments::{ ArchivedBlockProgress, ArchivedHistorySegment, LastArchivedBlock, RecordedHistorySegment, @@ -16,6 +15,7 @@ use subspace_core_primitives::segments::{ }; use subspace_core_primitives::BlockNumber; use subspace_erasure_coding::ErasureCoding; +use subspace_kzg::Scalar; /// Reconstructor-related instantiation error #[derive(Debug, Clone, PartialEq)] @@ -109,7 +109,7 @@ impl Reconstructor { // Scratch buffer to avoid re-allocation let mut tmp_shards_scalars = Vec::>::with_capacity(ArchivedHistorySegment::NUM_PIECES); - // Iterate over the chunks of 
`Scalar::SAFE_BYTES` bytes of all records + // Iterate over the chunks of `ScalarBytes::SAFE_BYTES` bytes of all records for record_offset in 0..RawRecord::NUM_CHUNKS { // Collect chunks of each record at the same offset for maybe_piece in segment_pieces.iter() { diff --git a/crates/subspace-archiving/tests/integration/archiver.rs b/crates/subspace-archiving/tests/integration/archiver.rs index 5998da9104..003f56f050 100644 --- a/crates/subspace-archiving/tests/integration/archiver.rs +++ b/crates/subspace-archiving/tests/integration/archiver.rs @@ -6,9 +6,7 @@ use std::assert_matches::assert_matches; use std::io::Write; use std::iter; use std::num::NonZeroUsize; -use subspace_archiving::archiver; use subspace_archiving::archiver::{Archiver, ArchiverInstantiationError, SegmentItem}; -use subspace_core_primitives::crypto::kzg::{embedded_kzg_settings, Kzg}; use subspace_core_primitives::objects::{BlockObject, BlockObjectMapping, PieceObject}; use subspace_core_primitives::pieces::{Piece, Record}; use subspace_core_primitives::segments::{ @@ -17,6 +15,8 @@ use subspace_core_primitives::segments::{ }; use subspace_core_primitives::Blake3Hash; use subspace_erasure_coding::ErasureCoding; +use subspace_kzg::Kzg; +use subspace_verification::is_piece_valid; fn extract_data>(data: &[u8], offset: O) -> &[u8] { let offset: u64 = offset.into(); @@ -63,7 +63,7 @@ fn compare_block_objects_to_piece_objects<'a>( #[test] fn archiver() { - let kzg = Kzg::new(embedded_kzg_settings()); + let kzg = Kzg::new(); let erasure_coding = ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), @@ -197,7 +197,7 @@ fn archiver() { .map(|(position, piece)| { ( position, - archiver::is_piece_valid( + is_piece_valid( &kzg, piece, &first_archived_segment.segment_header.segment_commitment(), @@ -320,7 +320,7 @@ fn archiver() { .map(|(position, piece)| { ( position, - archiver::is_piece_valid( + is_piece_valid( &kzg, piece, &archived_segment.segment_header.segment_commitment(), @@ -380,7 +380,7 @@ fn archiver() { .map(|(position, piece)| { ( position, - archiver::is_piece_valid( + is_piece_valid( &kzg, piece, &archived_segment.segment_header.segment_commitment(), @@ -397,7 +397,7 @@ fn archiver() { #[test] fn invalid_usage() { - let kzg = Kzg::new(embedded_kzg_settings()); + let kzg = Kzg::new(); let erasure_coding = ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), @@ -469,7 +469,7 @@ fn invalid_usage() { #[test] fn one_byte_smaller_segment() { - let kzg = Kzg::new(embedded_kzg_settings()); + let kzg = Kzg::new(); let erasure_coding = ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), @@ -508,7 +508,7 @@ fn one_byte_smaller_segment() { #[test] fn spill_over_edge_case() { - let kzg = Kzg::new(embedded_kzg_settings()); + let kzg = Kzg::new(); let erasure_coding = ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), @@ -570,7 +570,7 @@ fn spill_over_edge_case() { #[test] fn object_on_the_edge_of_segment() { - let kzg = Kzg::new(embedded_kzg_settings()); + let kzg = Kzg::new(); let erasure_coding = ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), diff --git a/crates/subspace-archiving/tests/integration/piece_reconstruction.rs 
b/crates/subspace-archiving/tests/integration/piece_reconstruction.rs index 1617fe9bd7..69bc2d7e7e 100644 --- a/crates/subspace-archiving/tests/integration/piece_reconstruction.rs +++ b/crates/subspace-archiving/tests/integration/piece_reconstruction.rs @@ -4,11 +4,11 @@ use rayon::prelude::*; use std::num::NonZeroUsize; use subspace_archiving::archiver::Archiver; use subspace_archiving::piece_reconstructor::{PiecesReconstructor, ReconstructorError}; -use subspace_core_primitives::crypto::kzg::{embedded_kzg_settings, Kzg}; use subspace_core_primitives::objects::BlockObjectMapping; use subspace_core_primitives::pieces::{FlatPieces, Piece, Record}; use subspace_core_primitives::segments::{ArchivedHistorySegment, RecordedHistorySegment}; use subspace_erasure_coding::ErasureCoding; +use subspace_kzg::Kzg; fn pieces_to_option_of_pieces(pieces: &FlatPieces) -> Vec> { pieces.pieces().map(Some).collect() @@ -23,7 +23,7 @@ fn get_random_block() -> Vec { #[test] fn segment_reconstruction_works() { - let kzg = Kzg::new(embedded_kzg_settings()); + let kzg = Kzg::new(); let erasure_coding = ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), @@ -69,7 +69,7 @@ fn segment_reconstruction_works() { #[test] fn piece_reconstruction_works() { - let kzg = Kzg::new(embedded_kzg_settings()); + let kzg = Kzg::new(); let erasure_coding = ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), @@ -119,7 +119,7 @@ fn piece_reconstruction_works() { #[test] fn segment_reconstruction_fails() { - let kzg = Kzg::new(embedded_kzg_settings()); + let kzg = Kzg::new(); let erasure_coding = ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), @@ -160,7 +160,7 @@ fn segment_reconstruction_fails() { #[test] fn piece_reconstruction_fails() { - let kzg = Kzg::new(embedded_kzg_settings()); + let kzg = Kzg::new(); let erasure_coding = ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), diff --git a/crates/subspace-archiving/tests/integration/reconstructor.rs b/crates/subspace-archiving/tests/integration/reconstructor.rs index 054d0770ca..05af807097 100644 --- a/crates/subspace-archiving/tests/integration/reconstructor.rs +++ b/crates/subspace-archiving/tests/integration/reconstructor.rs @@ -4,7 +4,6 @@ use std::iter; use std::num::NonZeroUsize; use subspace_archiving::archiver::Archiver; use subspace_archiving::reconstructor::{Reconstructor, ReconstructorError}; -use subspace_core_primitives::crypto::kzg::{embedded_kzg_settings, Kzg}; use subspace_core_primitives::objects::BlockObjectMapping; use subspace_core_primitives::pieces::{FlatPieces, Piece, Record}; use subspace_core_primitives::segments::{ @@ -12,6 +11,7 @@ use subspace_core_primitives::segments::{ SegmentIndex, }; use subspace_erasure_coding::ErasureCoding; +use subspace_kzg::Kzg; fn pieces_to_option_of_pieces(pieces: &FlatPieces) -> Vec> { pieces.pieces().map(Some).collect() @@ -19,7 +19,7 @@ fn pieces_to_option_of_pieces(pieces: &FlatPieces) -> Vec> { #[test] fn basic() { - let kzg = Kzg::new(embedded_kzg_settings()); + let kzg = Kzg::new(); let erasure_coding = ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), @@ -250,7 +250,7 @@ fn basic() { #[test] fn partial_data() { - let kzg = Kzg::new(embedded_kzg_settings()); + 
let kzg = Kzg::new(); let erasure_coding = ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), @@ -333,7 +333,7 @@ fn partial_data() { #[test] fn invalid_usage() { - let kzg = Kzg::new(embedded_kzg_settings()); + let kzg = Kzg::new(); let erasure_coding = ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), diff --git a/crates/subspace-core-primitives/Cargo.toml b/crates/subspace-core-primitives/Cargo.toml index 1746a34445..dd4340cbb4 100644 --- a/crates/subspace-core-primitives/Cargo.toml +++ b/crates/subspace-core-primitives/Cargo.toml @@ -8,49 +8,33 @@ edition = "2021" include = [ "/src", "/Cargo.toml", - "/README.md", ] -[lib] -# Necessary for CLI options to work on benches -bench = false - [dependencies] blake3 = { version = "1.5.4", default-features = false } bytes = { version = "1.7.1", default-features = false } derive_more = { version = "1.0.0", default-features = false, features = ["full"] } hex = { version = "0.4.3", default-features = false, features = ["alloc"] } -kzg = { git = "https://github.com/grandinetech/rust-kzg", rev = "6c8fcc623df3d7e8c0f30951a49bfea764f90bf4", default-features = false } num-traits = { version = "0.2.18", default-features = false } parity-scale-codec = { version = "3.6.12", default-features = false, features = ["bytes", "derive", "max-encoded-len"] } -parking_lot = { version = "0.12.2", optional = true } rayon = { version = "1.10.0", optional = true } -rust-kzg-blst = { git = "https://github.com/grandinetech/rust-kzg", rev = "6c8fcc623df3d7e8c0f30951a49bfea764f90bf4", default-features = false } scale-info = { version = "2.11.2", default-features = false, features = ["derive"] } serde = { version = "1.0.110", optional = true, default-features = false, features = ["alloc", "derive"] } -# Replacement for `parking_lot` in `no_std` environment -spin = "0.9.7" static_assertions = "1.1.0" -tracing = { version = "0.1.40", default-features = false } uint = { version = "0.10.0", default-features = false } [dev-dependencies] -criterion = "0.5.1" rand = { version = "0.8.5", features = ["min_const_gen"] } -rand_core = "0.6.4" [features] default = [ - "embedded-kzg-settings", "serde", "std", "parallel", ] -embedded-kzg-settings = [] -# Enables some APIs and internal parallelism for KZG +# Enables some APIs parallel = [ "blake3/rayon", - "rust-kzg-blst/parallel", "dep:rayon", ] serde = [ @@ -61,20 +45,10 @@ std = [ "blake3/std", "bytes/std", "derive_more/std", - "rust-kzg-blst/std", "hex/std", - "kzg/std", "num-traits/std", "parity-scale-codec/std", - # In no-std environment we use `spin` - "parking_lot", "scale-info/std", "serde?/std", - "tracing/std", "uint/std", ] - -[[bench]] -name = "kzg" -harness = false - diff --git a/crates/subspace-core-primitives/README.md b/crates/subspace-core-primitives/README.md deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/crates/subspace-core-primitives/src/crypto.rs b/crates/subspace-core-primitives/src/crypto.rs index 4433d0eba1..627ceb5213 100644 --- a/crates/subspace-core-primitives/src/crypto.rs +++ b/crates/subspace-core-primitives/src/crypto.rs @@ -15,26 +15,7 @@ //! Various cryptographic utilities used across Subspace Network. 
-#[cfg(not(feature = "std"))]
-extern crate alloc;
-
-pub mod kzg;
-
-use crate::Blake3Hash;
-use ::kzg::Fr;
-#[cfg(not(feature = "std"))]
-use alloc::format;
-#[cfg(not(feature = "std"))]
-use alloc::string::String;
-#[cfg(not(feature = "std"))]
-use alloc::vec::Vec;
-use core::cmp::Ordering;
-use core::hash::{Hash, Hasher};
-use core::mem;
-use derive_more::{AsMut, AsRef, Deref, DerefMut, From, Into};
-use parity_scale_codec::{Decode, Encode, EncodeLike, Input, MaxEncodedLen};
-use rust_kzg_blst::types::fr::FsFr;
-use scale_info::{Type, TypeInfo};
+use crate::{Blake3Hash, ScalarBytes};

 /// BLAKE3 hashing of a single value.
 pub fn blake3_hash(data: &[u8]) -> Blake3Hash {
@@ -43,6 +24,7 @@ pub fn blake3_hash(data: &[u8]) -> Blake3Hash {

 /// BLAKE3 hashing of a single value in parallel (only useful for large values well above 128kiB).
 #[cfg(feature = "parallel")]
+#[inline]
 pub fn blake3_hash_parallel(data: &[u8]) -> Blake3Hash {
     let mut state = blake3::Hasher::new();
     state.update_rayon(data);
@@ -50,11 +32,13 @@
 }

 /// BLAKE3 keyed hashing of a single value.
+#[inline]
 pub fn blake3_hash_with_key(key: &[u8; 32], data: &[u8]) -> Blake3Hash {
     blake3::keyed_hash(key, data).as_bytes().into()
 }

 /// BLAKE3 hashing of a list of values.
+#[inline]
 pub fn blake3_hash_list(data: &[&[u8]]) -> Blake3Hash {
     let mut state = blake3::Hasher::new();
     for d in data {
@@ -64,311 +48,10 @@
 }

 /// BLAKE3 hashing of a single value truncated to 254 bits as Scalar for usage with KZG.
-pub fn blake3_254_hash_to_scalar(data: &[u8]) -> Scalar {
+#[inline]
+pub fn blake3_254_hash_to_scalar(data: &[u8]) -> ScalarBytes {
     let mut hash = blake3_hash(data);
     // Erase first 2 bits to effectively truncate the hash (number is interpreted as big-endian)
     hash[0] &= 0b00111111;
-    Scalar::try_from(*hash)
-        .expect("Last bit erased, thus hash is guaranteed to fit into scalar; qed")
-}
-
-/// Representation of a single BLS12-381 scalar value.
-#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, From, Into, AsRef, AsMut, Deref, DerefMut)] -#[repr(transparent)] -pub struct Scalar(FsFr); - -impl Hash for Scalar { - fn hash(&self, state: &mut H) { - self.to_bytes().hash(state) - } -} - -impl PartialOrd for Scalar { - fn partial_cmp(&self, other: &Self) -> Option { - Some(self.cmp(other)) - } -} - -impl Ord for Scalar { - fn cmp(&self, other: &Self) -> Ordering { - self.to_bytes().cmp(&other.to_bytes()) - } -} - -impl Encode for Scalar { - fn size_hint(&self) -> usize { - Self::FULL_BYTES - } - - fn using_encoded R>(&self, f: F) -> R { - f(&self.to_bytes()) - } - - #[inline] - fn encoded_size(&self) -> usize { - Self::FULL_BYTES - } -} - -impl EncodeLike for Scalar {} - -impl Decode for Scalar { - fn decode(input: &mut I) -> Result { - Self::try_from(&<[u8; Self::FULL_BYTES]>::decode(input)?).map_err(|error_code| { - parity_scale_codec::Error::from("Failed to create scalar from bytes") - .chain(format!("Error code: {error_code}")) - }) - } - - #[inline] - fn encoded_fixed_size() -> Option { - Some(Self::FULL_BYTES) - } -} - -impl TypeInfo for Scalar { - type Identity = Self; - - fn type_info() -> Type { - Type::builder() - .path(scale_info::Path::new(stringify!(Scalar), module_path!())) - .docs(&["BLS12-381 scalar"]) - .composite(scale_info::build::Fields::named().field(|f| { - f.ty::<[u8; Self::FULL_BYTES]>() - .name(stringify!(inner)) - .type_name("FsFr") - })) - } -} - -impl MaxEncodedLen for Scalar { - #[inline] - fn max_encoded_len() -> usize { - Self::FULL_BYTES - } -} - -#[cfg(feature = "serde")] -mod scalar_serde { - use serde::de::Error; - use serde::{Deserialize, Deserializer, Serialize, Serializer}; - - // Custom wrapper so we don't have to write serialization/deserialization code manually - #[derive(Serialize, Deserialize)] - struct Scalar(#[serde(with = "hex")] pub(super) [u8; super::Scalar::FULL_BYTES]); - - impl Serialize for super::Scalar { - fn serialize(&self, serializer: S) -> Result - where - S: Serializer, - { - Scalar(self.to_bytes()).serialize(serializer) - } - } - - impl<'de> Deserialize<'de> for super::Scalar { - fn deserialize(deserializer: D) -> Result - where - D: Deserializer<'de>, - { - let Scalar(bytes) = Scalar::deserialize(deserializer)?; - Self::try_from(bytes).map_err(D::Error::custom) - } - } -} - -impl From<&[u8; Self::SAFE_BYTES]> for Scalar { - #[inline] - fn from(value: &[u8; Self::SAFE_BYTES]) -> Self { - let mut bytes = [0u8; Self::FULL_BYTES]; - bytes[1..].copy_from_slice(value); - Self::try_from(bytes).expect("Safe bytes always fit into scalar and thus succeed; qed") - } -} - -impl From<[u8; Self::SAFE_BYTES]> for Scalar { - #[inline] - fn from(value: [u8; Self::SAFE_BYTES]) -> Self { - Self::from(&value) - } -} - -impl TryFrom<&[u8; Self::FULL_BYTES]> for Scalar { - type Error = String; - - #[inline] - fn try_from(value: &[u8; Self::FULL_BYTES]) -> Result { - Self::try_from(*value) - } -} - -impl TryFrom<[u8; Self::FULL_BYTES]> for Scalar { - type Error = String; - - #[inline] - fn try_from(value: [u8; Self::FULL_BYTES]) -> Result { - FsFr::from_bytes(&value).map(Scalar) - } -} - -impl From<&Scalar> for [u8; Scalar::FULL_BYTES] { - #[inline] - fn from(value: &Scalar) -> Self { - value.0.to_bytes() - } -} - -impl From for [u8; Scalar::FULL_BYTES] { - #[inline] - fn from(value: Scalar) -> Self { - Self::from(&value) - } -} - -impl Scalar { - /// How many full bytes can be stored in BLS12-381 scalar (for instance before encoding). 
It is - /// actually 254 bits, but bits are mut harder to work with and likely not worth it. - /// - /// NOTE: After encoding more bytes can be used, so don't rely on this as the max number of - /// bytes stored within at all times! - pub const SAFE_BYTES: usize = 31; - /// How many bytes Scalar contains physically, use [`Self::SAFE_BYTES`] for the amount of data - /// that you can put into it safely (for instance before encoding). - pub const FULL_BYTES: usize = 32; - - /// Convert scalar into bytes - pub fn to_bytes(&self) -> [u8; Scalar::FULL_BYTES] { - self.into() - } - - /// Convert scalar into safe bytes, returns `None` if not possible to convert due to larger - /// internal value - pub fn try_to_safe_bytes(&self) -> Option<[u8; Scalar::SAFE_BYTES]> { - let bytes = self.to_bytes(); - if bytes[0] == 0 { - Some(bytes[1..].try_into().expect("Correct length; qed")) - } else { - None - } - } - - /// Convenient conversion from slice of scalar to underlying representation for efficiency - /// purposes. - #[inline] - pub fn slice_to_repr(value: &[Self]) -> &[FsFr] { - // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory layout - unsafe { mem::transmute(value) } - } - - /// Convenient conversion from slice of underlying representation to scalar for efficiency - /// purposes. - #[inline] - pub fn slice_from_repr(value: &[FsFr]) -> &[Self] { - // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory layout - unsafe { mem::transmute(value) } - } - - /// Convenient conversion from slice of optional scalar to underlying representation for efficiency - /// purposes. - pub fn slice_option_to_repr(value: &[Option]) -> &[Option] { - // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory layout - unsafe { mem::transmute(value) } - } - - /// Convenient conversion from slice of optional underlying representation to scalar for efficiency - /// purposes. - pub fn slice_option_from_repr(value: &[Option]) -> &[Option] { - // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory layout - unsafe { mem::transmute(value) } - } - - /// Convenient conversion from mutable slice of scalar to underlying representation for - /// efficiency purposes. - #[inline] - pub fn slice_mut_to_repr(value: &mut [Self]) -> &mut [FsFr] { - // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory layout - unsafe { mem::transmute(value) } - } - - /// Convenient conversion from mutable slice of underlying representation to scalar for - /// efficiency purposes. - #[inline] - pub fn slice_mut_from_repr(value: &mut [FsFr]) -> &mut [Self] { - // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory layout - unsafe { mem::transmute(value) } - } - - /// Convenient conversion from optional mutable slice of scalar to underlying representation for - /// efficiency purposes. - pub fn slice_option_mut_to_repr(value: &mut [Option]) -> &mut [Option] { - // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory layout - unsafe { mem::transmute(value) } - } - - /// Convenient conversion from optional mutable slice of underlying representation to scalar for - /// efficiency purposes. 
- pub fn slice_option_mut_from_repr(value: &mut [Option]) -> &mut [Option] { - // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory layout - unsafe { mem::transmute(value) } - } - - /// Convenient conversion from vector of scalar to underlying representation for efficiency - /// purposes. - pub fn vec_to_repr(value: Vec) -> Vec { - // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory - // layout, original vector is not dropped - unsafe { - let mut value = mem::ManuallyDrop::new(value); - Vec::from_raw_parts( - value.as_mut_ptr() as *mut FsFr, - value.len(), - value.capacity(), - ) - } - } - - /// Convenient conversion from vector of underlying representation to scalar for efficiency - /// purposes. - pub fn vec_from_repr(value: Vec) -> Vec { - // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory - // layout, original vector is not dropped - unsafe { - let mut value = mem::ManuallyDrop::new(value); - Vec::from_raw_parts( - value.as_mut_ptr() as *mut Self, - value.len(), - value.capacity(), - ) - } - } - - /// Convenient conversion from vector of optional scalar to underlying representation for - /// efficiency purposes. - pub fn vec_option_to_repr(value: Vec>) -> Vec> { - // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory - // layout, original vector is not dropped - unsafe { - let mut value = mem::ManuallyDrop::new(value); - Vec::from_raw_parts( - value.as_mut_ptr() as *mut Option, - value.len(), - value.capacity(), - ) - } - } - - /// Convenient conversion from vector of optional underlying representation to scalar for - /// efficiency purposes. - pub fn vec_option_from_repr(value: Vec>) -> Vec> { - // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory - // layout, original vector is not dropped - unsafe { - let mut value = mem::ManuallyDrop::new(value); - Vec::from_raw_parts( - value.as_mut_ptr() as *mut Option, - value.len(), - value.capacity(), - ) - } - } + ScalarBytes(*hash) } diff --git a/crates/subspace-core-primitives/src/crypto/kzg/tests.rs b/crates/subspace-core-primitives/src/crypto/kzg/tests.rs deleted file mode 100644 index 7e13616d2b..0000000000 --- a/crates/subspace-core-primitives/src/crypto/kzg/tests.rs +++ /dev/null @@ -1,26 +0,0 @@ -use crate::crypto::kzg::{embedded_kzg_settings, Kzg}; -use crate::crypto::Scalar; - -#[test] -fn basic() { - let values = (0..8) - .map(|_| Scalar::from(rand::random::<[u8; Scalar::SAFE_BYTES]>())) - .collect::>(); - - let kzg = Kzg::new(embedded_kzg_settings()); - let polynomial = kzg.poly(&values).unwrap(); - let commitment = kzg.commit(&polynomial).unwrap(); - - let num_values = values.len(); - - for (index, value) in values.iter().enumerate() { - let index = index.try_into().unwrap(); - - let witness = kzg.create_witness(&polynomial, num_values, index).unwrap(); - - assert!( - kzg.verify(&commitment, num_values, index, value, &witness), - "failed on index {index}" - ); - } -} diff --git a/crates/subspace-core-primitives/src/lib.rs b/crates/subspace-core-primitives/src/lib.rs index 779753c135..417a484c60 100644 --- a/crates/subspace-core-primitives/src/lib.rs +++ b/crates/subspace-core-primitives/src/lib.rs @@ -39,19 +39,11 @@ pub mod segments; #[cfg(test)] mod tests; -#[cfg(not(feature = "std"))] -extern crate alloc; - -use crate::crypto::kzg::Witness; -use crate::crypto::{blake3_hash, blake3_hash_list, Scalar}; +use crate::crypto::{blake3_hash, blake3_hash_list}; use 
crate::pieces::{PieceOffset, Record, RecordCommitment, RecordWitness};
 use crate::pos::PosProof;
 use crate::sectors::SectorIndex;
 use crate::segments::{HistorySize, SegmentIndex};
-#[cfg(feature = "serde")]
-use ::serde::{Deserialize, Serialize};
-#[cfg(not(feature = "std"))]
-use alloc::string::String;
 use core::array::TryFromSliceError;
 use core::fmt;
 use derive_more::{Add, AsMut, AsRef, Deref, DerefMut, Display, Div, From, Into, Mul, Rem, Sub};
@@ -59,6 +51,8 @@ use hex::FromHex;
 use num_traits::{WrappingAdd, WrappingSub};
 use parity_scale_codec::{Decode, Encode, MaxEncodedLen};
 use scale_info::TypeInfo;
+#[cfg(feature = "serde")]
+use serde::{Deserialize, Serialize};
 use static_assertions::const_assert;

 // Refuse to compile on lower than 32-bit platforms
@@ -329,6 +323,43 @@ impl RewardSignature {
     pub const SIZE: usize = 64;
 }

+/// Single BLS12-381 scalar with big-endian representation, not guaranteed to be valid
+#[derive(
+    Debug,
+    Default,
+    Copy,
+    Clone,
+    Eq,
+    PartialEq,
+    Ord,
+    PartialOrd,
+    Hash,
+    From,
+    Into,
+    AsRef,
+    AsMut,
+    Deref,
+    DerefMut,
+    Encode,
+    Decode,
+    TypeInfo,
+)]
+#[cfg_attr(feature = "serde", derive(Serialize, Deserialize))]
+#[cfg_attr(feature = "serde", serde(transparent))]
+pub struct ScalarBytes([u8; ScalarBytes::FULL_BYTES]);
+
+impl ScalarBytes {
+    /// How many full bytes can be stored in BLS12-381 scalar (for instance before encoding). It is
+    /// actually 254 bits, but bits are much harder to work with and likely not worth it.
+    ///
+    /// NOTE: After encoding more bytes can be used, so don't rely on this as the max number of
+    /// bytes stored within at all times!
+    pub const SAFE_BYTES: usize = 31;
+    /// How many bytes ScalarBytes contains physically, use [`Self::SAFE_BYTES`] for the amount of data
+    /// that you can put into it safely (for instance before encoding).
+    pub const FULL_BYTES: usize = 32;
+}
+
 /// Witness for chunk contained within a record.
 #[derive(
     Debug,
@@ -387,31 +418,6 @@ impl ChunkWitness {
     pub const SIZE: usize = 48;
 }

-impl From<Witness> for ChunkWitness {
-    #[inline]
-    fn from(witness: Witness) -> Self {
-        Self(witness.to_bytes())
-    }
-}
-
-impl TryFrom<&ChunkWitness> for Witness {
-    type Error = String;
-
-    #[inline]
-    fn try_from(witness: &ChunkWitness) -> Result<Self, Self::Error> {
-        Witness::try_from(&witness.0)
-    }
-}
-
-impl TryFrom<ChunkWitness> for Witness {
-    type Error = String;
-
-    #[inline]
-    fn try_from(witness: ChunkWitness) -> Result<Self, Self::Error> {
-        Witness::try_from(witness.0)
-    }
-}
-
 /// Farmer solution for slot challenge.
#[derive(Clone, Debug, Eq, PartialEq, Encode, Decode, TypeInfo)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] @@ -432,7 +438,7 @@ pub struct Solution { /// Witness for above record commitment pub record_witness: RecordWitness, /// Chunk at above offset - pub chunk: Scalar, + pub chunk: ScalarBytes, /// Witness for above chunk pub chunk_witness: ChunkWitness, /// Proof of space for piece offset @@ -485,7 +491,7 @@ impl Solution { piece_offset: PieceOffset::default(), record_commitment: RecordCommitment::default(), record_witness: RecordWitness::default(), - chunk: Scalar::default(), + chunk: ScalarBytes::default(), chunk_witness: ChunkWitness::default(), proof_of_space: PosProof::default(), } diff --git a/crates/subspace-core-primitives/src/pieces.rs b/crates/subspace-core-primitives/src/pieces.rs index be84f10c3e..dcb6efe9bc 100644 --- a/crates/subspace-core-primitives/src/pieces.rs +++ b/crates/subspace-core-primitives/src/pieces.rs @@ -5,9 +5,8 @@ mod serde; extern crate alloc; -use crate::crypto::kzg::{Commitment, Witness}; -use crate::crypto::Scalar; use crate::segments::{ArchivedHistorySegment, RecordedHistorySegment, SegmentIndex}; +use crate::ScalarBytes; #[cfg(feature = "serde")] use ::serde::{Deserialize, Serialize}; #[cfg(not(feature = "std"))] @@ -16,8 +15,6 @@ use alloc::fmt; #[cfg(not(feature = "std"))] use alloc::format; #[cfg(not(feature = "std"))] -use alloc::string::String; -#[cfg(not(feature = "std"))] use alloc::vec::Vec; use bytes::{Bytes, BytesMut}; use core::array::TryFromSliceError; @@ -248,7 +245,7 @@ impl PieceOffset { /// NOTE: This is a stack-allocated data structure and can cause stack overflow! #[derive(Debug, Copy, Clone, Eq, PartialEq, Deref, DerefMut)] #[repr(transparent)] -pub struct RawRecord([[u8; Scalar::SAFE_BYTES]; Self::NUM_CHUNKS]); +pub struct RawRecord([[u8; ScalarBytes::SAFE_BYTES]; Self::NUM_CHUNKS]); impl Default for RawRecord { #[inline] @@ -271,7 +268,7 @@ impl AsMut<[u8]> for RawRecord { } } -impl From<&RawRecord> for &[[u8; Scalar::SAFE_BYTES]; RawRecord::NUM_CHUNKS] { +impl From<&RawRecord> for &[[u8; ScalarBytes::SAFE_BYTES]; RawRecord::NUM_CHUNKS] { #[inline] fn from(value: &RawRecord) -> Self { // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory @@ -280,16 +277,16 @@ impl From<&RawRecord> for &[[u8; Scalar::SAFE_BYTES]; RawRecord::NUM_CHUNKS] { } } -impl From<&[[u8; Scalar::SAFE_BYTES]; RawRecord::NUM_CHUNKS]> for &RawRecord { +impl From<&[[u8; ScalarBytes::SAFE_BYTES]; RawRecord::NUM_CHUNKS]> for &RawRecord { #[inline] - fn from(value: &[[u8; Scalar::SAFE_BYTES]; RawRecord::NUM_CHUNKS]) -> Self { + fn from(value: &[[u8; ScalarBytes::SAFE_BYTES]; RawRecord::NUM_CHUNKS]) -> Self { // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory // layout unsafe { mem::transmute(value) } } } -impl From<&mut RawRecord> for &mut [[u8; Scalar::SAFE_BYTES]; RawRecord::NUM_CHUNKS] { +impl From<&mut RawRecord> for &mut [[u8; ScalarBytes::SAFE_BYTES]; RawRecord::NUM_CHUNKS] { #[inline] fn from(value: &mut RawRecord) -> Self { // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory @@ -298,16 +295,16 @@ impl From<&mut RawRecord> for &mut [[u8; Scalar::SAFE_BYTES]; RawRecord::NUM_CHU } } -impl From<&mut [[u8; Scalar::SAFE_BYTES]; RawRecord::NUM_CHUNKS]> for &mut RawRecord { +impl From<&mut [[u8; ScalarBytes::SAFE_BYTES]; RawRecord::NUM_CHUNKS]> for &mut RawRecord { #[inline] - fn from(value: &mut [[u8; Scalar::SAFE_BYTES]; 
RawRecord::NUM_CHUNKS]) -> Self { + fn from(value: &mut [[u8; ScalarBytes::SAFE_BYTES]; RawRecord::NUM_CHUNKS]) -> Self { // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory // layout unsafe { mem::transmute(value) } } } -impl From<&RawRecord> for &[u8; Scalar::SAFE_BYTES * RawRecord::NUM_CHUNKS] { +impl From<&RawRecord> for &[u8; ScalarBytes::SAFE_BYTES * RawRecord::NUM_CHUNKS] { #[inline] fn from(value: &RawRecord) -> Self { // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory @@ -316,16 +313,16 @@ impl From<&RawRecord> for &[u8; Scalar::SAFE_BYTES * RawRecord::NUM_CHUNKS] { } } -impl From<&[u8; Scalar::SAFE_BYTES * RawRecord::NUM_CHUNKS]> for &RawRecord { +impl From<&[u8; ScalarBytes::SAFE_BYTES * RawRecord::NUM_CHUNKS]> for &RawRecord { #[inline] - fn from(value: &[u8; Scalar::SAFE_BYTES * RawRecord::NUM_CHUNKS]) -> Self { + fn from(value: &[u8; ScalarBytes::SAFE_BYTES * RawRecord::NUM_CHUNKS]) -> Self { // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory // layout as inner array, while array of byte arrays has the same alignment as a single byte unsafe { mem::transmute(value) } } } -impl From<&mut RawRecord> for &mut [u8; Scalar::SAFE_BYTES * RawRecord::NUM_CHUNKS] { +impl From<&mut RawRecord> for &mut [u8; ScalarBytes::SAFE_BYTES * RawRecord::NUM_CHUNKS] { #[inline] fn from(value: &mut RawRecord) -> Self { // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory @@ -334,9 +331,9 @@ impl From<&mut RawRecord> for &mut [u8; Scalar::SAFE_BYTES * RawRecord::NUM_CHUN } } -impl From<&mut [u8; Scalar::SAFE_BYTES * RawRecord::NUM_CHUNKS]> for &mut RawRecord { +impl From<&mut [u8; ScalarBytes::SAFE_BYTES * RawRecord::NUM_CHUNKS]> for &mut RawRecord { #[inline] - fn from(value: &mut [u8; Scalar::SAFE_BYTES * RawRecord::NUM_CHUNKS]) -> Self { + fn from(value: &mut [u8; ScalarBytes::SAFE_BYTES * RawRecord::NUM_CHUNKS]) -> Self { // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory // layout as inner array, while array of byte arrays has the same alignment as a single byte unsafe { mem::transmute(value) } @@ -346,8 +343,8 @@ impl From<&mut [u8; Scalar::SAFE_BYTES * RawRecord::NUM_CHUNKS]> for &mut RawRec impl RawRecord { /// Number of chunks (scalars) within one raw record. pub const NUM_CHUNKS: usize = 2_usize.pow(15); - /// Size of raw record in bytes, is guaranteed to be a multiple of [`Scalar::SAFE_BYTES`]. - pub const SIZE: usize = Scalar::SAFE_BYTES * Self::NUM_CHUNKS; + /// Size of raw record in bytes, is guaranteed to be a multiple of [`ScalarBytes::SAFE_BYTES`]. + pub const SIZE: usize = ScalarBytes::SAFE_BYTES * Self::NUM_CHUNKS; /// Create boxed value without hitting stack overflow #[inline] @@ -360,7 +357,7 @@ impl RawRecord { /// Convenient conversion from slice of record to underlying representation for efficiency /// purposes. #[inline] - pub fn slice_to_repr(value: &[Self]) -> &[[[u8; Scalar::SAFE_BYTES]; Self::NUM_CHUNKS]] { + pub fn slice_to_repr(value: &[Self]) -> &[[[u8; ScalarBytes::SAFE_BYTES]; Self::NUM_CHUNKS]] { // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory // layout unsafe { mem::transmute(value) } @@ -369,7 +366,7 @@ impl RawRecord { /// Convenient conversion from slice of underlying representation to record for efficiency /// purposes. 
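
As a quick cross-check of the constants above: `RawRecord::NUM_CHUNKS` is 2^15 = 32,768 and each chunk carries `ScalarBytes::SAFE_BYTES` = 31 bytes, so `RawRecord::SIZE` works out to 1,015,808 bytes. An illustrative compile-time check (not part of the diff) could look like this:

use static_assertions::const_assert_eq;
use subspace_core_primitives::pieces::RawRecord;
use subspace_core_primitives::ScalarBytes;

// 2^15 chunks of 31 safe bytes each.
const_assert_eq!(RawRecord::SIZE, ScalarBytes::SAFE_BYTES * (1 << 15)); // = 1_015_808
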
#[inline] - pub fn slice_from_repr(value: &[[[u8; Scalar::SAFE_BYTES]; Self::NUM_CHUNKS]]) -> &[Self] { + pub fn slice_from_repr(value: &[[[u8; ScalarBytes::SAFE_BYTES]; Self::NUM_CHUNKS]]) -> &[Self] { // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory // layout unsafe { mem::transmute(value) } @@ -380,7 +377,7 @@ impl RawRecord { #[inline] pub fn slice_mut_to_repr( value: &mut [Self], - ) -> &mut [[[u8; Scalar::SAFE_BYTES]; Self::NUM_CHUNKS]] { + ) -> &mut [[[u8; ScalarBytes::SAFE_BYTES]; Self::NUM_CHUNKS]] { // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory // layout unsafe { mem::transmute(value) } @@ -390,7 +387,7 @@ impl RawRecord { /// efficiency purposes. #[inline] pub fn slice_mut_from_repr( - value: &mut [[[u8; Scalar::SAFE_BYTES]; Self::NUM_CHUNKS]], + value: &mut [[[u8; ScalarBytes::SAFE_BYTES]; Self::NUM_CHUNKS]], ) -> &mut [Self] { // SAFETY: `RawRecord` is `#[repr(transparent)]` and guaranteed to have the same memory // layout @@ -403,7 +400,7 @@ impl RawRecord { /// NOTE: This is a stack-allocated data structure and can cause stack overflow! #[derive(Debug, Copy, Clone, Eq, PartialEq, Deref, DerefMut)] #[repr(transparent)] -pub struct Record([[u8; Scalar::FULL_BYTES]; Self::NUM_CHUNKS]); +pub struct Record([[u8; ScalarBytes::FULL_BYTES]; Self::NUM_CHUNKS]); impl Default for Record { #[inline] @@ -426,7 +423,7 @@ impl AsMut<[u8]> for Record { } } -impl From<&Record> for &[[u8; Scalar::FULL_BYTES]; Record::NUM_CHUNKS] { +impl From<&Record> for &[[u8; ScalarBytes::FULL_BYTES]; Record::NUM_CHUNKS] { #[inline] fn from(value: &Record) -> Self { // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout @@ -434,15 +431,15 @@ impl From<&Record> for &[[u8; Scalar::FULL_BYTES]; Record::NUM_CHUNKS] { } } -impl From<&[[u8; Scalar::FULL_BYTES]; Record::NUM_CHUNKS]> for &Record { +impl From<&[[u8; ScalarBytes::FULL_BYTES]; Record::NUM_CHUNKS]> for &Record { #[inline] - fn from(value: &[[u8; Scalar::FULL_BYTES]; Record::NUM_CHUNKS]) -> Self { + fn from(value: &[[u8; ScalarBytes::FULL_BYTES]; Record::NUM_CHUNKS]) -> Self { // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout unsafe { mem::transmute(value) } } } -impl From<&mut Record> for &mut [[u8; Scalar::FULL_BYTES]; Record::NUM_CHUNKS] { +impl From<&mut Record> for &mut [[u8; ScalarBytes::FULL_BYTES]; Record::NUM_CHUNKS] { #[inline] fn from(value: &mut Record) -> Self { // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout @@ -450,15 +447,15 @@ impl From<&mut Record> for &mut [[u8; Scalar::FULL_BYTES]; Record::NUM_CHUNKS] { } } -impl From<&mut [[u8; Scalar::FULL_BYTES]; Record::NUM_CHUNKS]> for &mut Record { +impl From<&mut [[u8; ScalarBytes::FULL_BYTES]; Record::NUM_CHUNKS]> for &mut Record { #[inline] - fn from(value: &mut [[u8; Scalar::FULL_BYTES]; Record::NUM_CHUNKS]) -> Self { + fn from(value: &mut [[u8; ScalarBytes::FULL_BYTES]; Record::NUM_CHUNKS]) -> Self { // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout unsafe { mem::transmute(value) } } } -impl From<&Record> for &[u8; Scalar::FULL_BYTES * Record::NUM_CHUNKS] { +impl From<&Record> for &[u8; ScalarBytes::FULL_BYTES * Record::NUM_CHUNKS] { #[inline] fn from(value: &Record) -> Self { // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout @@ -467,16 +464,16 @@ impl From<&Record> for &[u8; Scalar::FULL_BYTES * 
Record::NUM_CHUNKS] { } } -impl From<&[u8; Scalar::FULL_BYTES * Record::NUM_CHUNKS]> for &Record { +impl From<&[u8; ScalarBytes::FULL_BYTES * Record::NUM_CHUNKS]> for &Record { #[inline] - fn from(value: &[u8; Scalar::FULL_BYTES * Record::NUM_CHUNKS]) -> Self { + fn from(value: &[u8; ScalarBytes::FULL_BYTES * Record::NUM_CHUNKS]) -> Self { // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout // as inner array, while array of byte arrays has the same alignment as a single byte unsafe { mem::transmute(value) } } } -impl From<&mut Record> for &mut [u8; Scalar::FULL_BYTES * Record::NUM_CHUNKS] { +impl From<&mut Record> for &mut [u8; ScalarBytes::FULL_BYTES * Record::NUM_CHUNKS] { #[inline] fn from(value: &mut Record) -> Self { // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout @@ -485,9 +482,9 @@ impl From<&mut Record> for &mut [u8; Scalar::FULL_BYTES * Record::NUM_CHUNKS] { } } -impl From<&mut [u8; Scalar::FULL_BYTES * Record::NUM_CHUNKS]> for &mut Record { +impl From<&mut [u8; ScalarBytes::FULL_BYTES * Record::NUM_CHUNKS]> for &mut Record { #[inline] - fn from(value: &mut [u8; Scalar::FULL_BYTES * Record::NUM_CHUNKS]) -> Self { + fn from(value: &mut [u8; ScalarBytes::FULL_BYTES * Record::NUM_CHUNKS]) -> Self { // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout // as inner array, while array of byte arrays has the same alignment as a single byte unsafe { mem::transmute(value) } @@ -504,8 +501,8 @@ impl Record { * RecordedHistorySegment::ERASURE_CODING_RATE.1 / RecordedHistorySegment::ERASURE_CODING_RATE.0; /// Size of a segment record given the global piece size (in bytes) after erasure coding - /// [`RawRecord`], is guaranteed to be a multiple of [`Scalar::FULL_BYTES`]. - pub const SIZE: usize = Scalar::FULL_BYTES * Self::NUM_CHUNKS; + /// [`RawRecord`], is guaranteed to be a multiple of [`ScalarBytes::FULL_BYTES`]. + pub const SIZE: usize = ScalarBytes::FULL_BYTES * Self::NUM_CHUNKS; /// Create boxed value without hitting stack overflow #[inline] @@ -528,7 +525,7 @@ impl Record { let slice = unsafe { slice::from_raw_parts_mut( slice.as_mut_ptr() - as *mut [[mem::MaybeUninit; Scalar::FULL_BYTES]; Self::NUM_CHUNKS], + as *mut [[mem::MaybeUninit; ScalarBytes::FULL_BYTES]; Self::NUM_CHUNKS], length, ) }; @@ -547,7 +544,7 @@ impl Record { /// Convenient conversion from slice of record to underlying representation for efficiency /// purposes. #[inline] - pub fn slice_to_repr(value: &[Self]) -> &[[[u8; Scalar::FULL_BYTES]; Self::NUM_CHUNKS]] { + pub fn slice_to_repr(value: &[Self]) -> &[[[u8; ScalarBytes::FULL_BYTES]; Self::NUM_CHUNKS]] { // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout unsafe { mem::transmute(value) } } @@ -555,7 +552,7 @@ impl Record { /// Convenient conversion from slice of underlying representation to record for efficiency /// purposes. 
#[inline] - pub fn slice_from_repr(value: &[[[u8; Scalar::FULL_BYTES]; Self::NUM_CHUNKS]]) -> &[Self] { + pub fn slice_from_repr(value: &[[[u8; ScalarBytes::FULL_BYTES]; Self::NUM_CHUNKS]]) -> &[Self] { // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout unsafe { mem::transmute(value) } } @@ -565,7 +562,7 @@ impl Record { #[inline] pub fn slice_mut_to_repr( value: &mut [Self], - ) -> &mut [[[u8; Scalar::FULL_BYTES]; Self::NUM_CHUNKS]] { + ) -> &mut [[[u8; ScalarBytes::FULL_BYTES]; Self::NUM_CHUNKS]] { // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout unsafe { mem::transmute(value) } } @@ -574,7 +571,7 @@ impl Record { /// efficiency purposes. #[inline] pub fn slice_mut_from_repr( - value: &mut [[[u8; Scalar::FULL_BYTES]; Self::NUM_CHUNKS]], + value: &mut [[[u8; ScalarBytes::FULL_BYTES]; Self::NUM_CHUNKS]], ) -> &mut [Self] { // SAFETY: `Record` is `#[repr(transparent)]` and guaranteed to have the same memory layout unsafe { mem::transmute(value) } @@ -583,8 +580,10 @@ impl Record { /// Convert from a record to its raw bytes, assumes dealing with source record that only stores /// safe bytes in its chunks. #[inline] - pub fn to_raw_record_chunks(&self) -> impl Iterator + '_ { - // We have zero byte padding from [`Scalar::SAFE_BYTES`] to [`Scalar::FULL_BYTES`] that we need + pub fn to_raw_record_chunks( + &self, + ) -> impl Iterator + '_ { + // We have zero byte padding from [`ScalarBytes::SAFE_BYTES`] to [`ScalarBytes::FULL_BYTES`] that we need // to skip self.iter() .map(|bytes| bytes[1..].try_into().expect("Correct length; qed")) @@ -685,31 +684,6 @@ impl RecordCommitment { pub const SIZE: usize = 48; } -impl From for RecordCommitment { - #[inline] - fn from(commitment: Commitment) -> Self { - Self(commitment.to_bytes()) - } -} - -impl TryFrom<&RecordCommitment> for Commitment { - type Error = String; - - #[inline] - fn try_from(commitment: &RecordCommitment) -> Result { - Commitment::try_from(&commitment.0) - } -} - -impl TryFrom for Commitment { - type Error = String; - - #[inline] - fn try_from(commitment: RecordCommitment) -> Result { - Commitment::try_from(commitment.0) - } -} - /// Record witness contained within a piece. 
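
All of the reference conversions above (for both `RawRecord` and `Record`) rely on the same trick: the wrapper is `#[repr(transparent)]`, so a slice of wrappers and a slice of the inner arrays have identical memory layout and can be reinterpreted in place, which is why the doc comments describe them as conversions "for efficiency purposes". A reduced sketch of the pattern, with illustrative names and sizes:

// Illustrative stand-in for `RawRecord`/`Record`: a transparent wrapper over a fixed-size array.
#[repr(transparent)]
struct Chunks([[u8; 32]; 4]);

fn slice_to_repr(value: &[Chunks]) -> &[[[u8; 32]; 4]] {
    // SAFETY: `Chunks` is `#[repr(transparent)]`, so it has exactly the same layout as the
    // inner array; the slice metadata (length) is unchanged by the cast.
    unsafe { core::mem::transmute(value) }
}
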
#[derive( Debug, @@ -804,31 +778,6 @@ impl RecordWitness { pub const SIZE: usize = 48; } -impl From for RecordWitness { - #[inline] - fn from(witness: Witness) -> Self { - Self(witness.to_bytes()) - } -} - -impl TryFrom<&RecordWitness> for Witness { - type Error = String; - - #[inline] - fn try_from(witness: &RecordWitness) -> Result { - Witness::try_from(&witness.0) - } -} - -impl TryFrom for Witness { - type Error = String; - - #[inline] - fn try_from(witness: RecordWitness) -> Result { - Witness::try_from(witness.0) - } -} - #[derive(Debug)] enum CowBytes { Shared(Bytes), diff --git a/crates/subspace-core-primitives/src/pot.rs b/crates/subspace-core-primitives/src/pot.rs index 020789559b..8616cb5c6d 100644 --- a/crates/subspace-core-primitives/src/pot.rs +++ b/crates/subspace-core-primitives/src/pot.rs @@ -2,14 +2,14 @@ use crate::crypto::{blake3_hash, blake3_hash_list}; use crate::{Blake3Hash, Randomness}; -#[cfg(feature = "serde")] -use ::serde::{Deserialize, Serialize}; use core::fmt; use core::num::NonZeroU8; use core::str::FromStr; use derive_more::{AsMut, AsRef, Deref, DerefMut, From}; use parity_scale_codec::{Decode, Encode, MaxEncodedLen}; use scale_info::TypeInfo; +#[cfg(feature = "serde")] +use serde::{Deserialize, Serialize}; /// Proof of time key(input to the encryption). #[derive( diff --git a/crates/subspace-core-primitives/src/sectors.rs b/crates/subspace-core-primitives/src/sectors.rs index 4bc36ce371..fe8577df7d 100644 --- a/crates/subspace-core-primitives/src/sectors.rs +++ b/crates/subspace-core-primitives/src/sectors.rs @@ -8,8 +8,6 @@ use crate::pieces::{PieceIndex, PieceOffset, Record}; use crate::pos::PosSeed; use crate::segments::{HistorySize, SegmentCommitment}; use crate::{Blake3Hash, U256}; -#[cfg(feature = "serde")] -use ::serde::{Deserialize, Serialize}; use core::hash::Hash; use core::iter::Step; use core::num::{NonZeroU64, TryFromIntError}; @@ -20,6 +18,8 @@ use derive_more::{ }; use parity_scale_codec::{Decode, Encode, MaxEncodedLen}; use scale_info::TypeInfo; +#[cfg(feature = "serde")] +use serde::{Deserialize, Serialize}; use static_assertions::const_assert_eq; /// Sector index in consensus diff --git a/crates/subspace-core-primitives/src/segments.rs b/crates/subspace-core-primitives/src/segments.rs index 406ed31bb3..26c15a94a9 100644 --- a/crates/subspace-core-primitives/src/segments.rs +++ b/crates/subspace-core-primitives/src/segments.rs @@ -1,13 +1,13 @@ //! Segments-related data structures. +#[cfg(not(feature = "std"))] +extern crate alloc; + use crate::crypto::blake3_hash; -use crate::crypto::kzg::Commitment; use crate::pieces::{FlatPieces, Piece, PieceIndex, RawRecord}; use crate::{Blake3Hash, BlockNumber}; #[cfg(not(feature = "std"))] use alloc::boxed::Box; -#[cfg(not(feature = "std"))] -use alloc::string::String; use core::array::TryFromSliceError; use core::iter::Step; use core::num::NonZeroU64; @@ -186,31 +186,6 @@ impl SegmentCommitment { pub const SIZE: usize = 48; } -impl From for SegmentCommitment { - #[inline] - fn from(commitment: Commitment) -> Self { - Self(commitment.to_bytes()) - } -} - -impl TryFrom<&SegmentCommitment> for Commitment { - type Error = String; - - #[inline] - fn try_from(commitment: &SegmentCommitment) -> Result { - Commitment::try_from(&commitment.0) - } -} - -impl TryFrom for Commitment { - type Error = String; - - #[inline] - fn try_from(commitment: SegmentCommitment) -> Result { - Commitment::try_from(commitment.0) - } -} - /// Size of blockchain history in segments. 
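
The `From`/`TryFrom` conversions deleted above (for `ChunkWitness`, `RecordCommitment`, `RecordWitness` and `SegmentCommitment`) are not lost: the KZG-typed side of each conversion now lives in the new `subspace-kzg` crate introduced later in this diff, so the core primitives stay plain byte wrappers. A sketch of what a caller does instead, using the same `Commitment::try_from` call that `is_piece_valid` uses further down:

use subspace_core_primitives::segments::SegmentCommitment;
use subspace_kzg::Commitment;

// Illustrative helper: parse the 48-byte wire-format segment commitment into a KZG
// commitment, discarding the error message on failure.
fn decode_commitment(segment_commitment: &SegmentCommitment) -> Option<Commitment> {
    Commitment::try_from(segment_commitment).ok()
}
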
#[derive( Debug, diff --git a/crates/subspace-core-primitives/src/tests.rs b/crates/subspace-core-primitives/src/tests.rs index 4f27b0c378..5e86aae379 100644 --- a/crates/subspace-core-primitives/src/tests.rs +++ b/crates/subspace-core-primitives/src/tests.rs @@ -1,65 +1,6 @@ -use crate::crypto::Scalar; use crate::U256; -use rand::thread_rng; -use rand_core::RngCore; #[test] fn piece_distance_middle() { assert_eq!(U256::MIDDLE, U256::MAX / 2); } - -#[test] -fn bytes_scalars_conversion() { - { - let mut bytes = vec![0u8; Scalar::SAFE_BYTES * 16]; - thread_rng().fill_bytes(&mut bytes); - - let scalars = bytes - .chunks_exact(Scalar::SAFE_BYTES) - .map(|bytes| { - Scalar::from( - <&[u8; Scalar::SAFE_BYTES]>::try_from(bytes) - .expect("Chunked into correct size; qed"), - ) - }) - .collect::>(); - - { - let mut decoded_bytes = vec![0u8; bytes.len()]; - decoded_bytes - .chunks_exact_mut(Scalar::SAFE_BYTES) - .zip(scalars.iter()) - .for_each(|(bytes, scalar)| { - bytes.copy_from_slice(&scalar.to_bytes()[1..]); - }); - - assert_eq!(bytes, decoded_bytes); - } - - { - let mut decoded_bytes = vec![0u8; bytes.len()]; - decoded_bytes - .chunks_exact_mut(Scalar::SAFE_BYTES) - .zip(scalars.iter()) - .for_each(|(bytes, scalar)| { - bytes.copy_from_slice(&scalar.to_bytes()[1..]); - }); - - assert_eq!(bytes, decoded_bytes); - } - } - - { - let bytes = { - let mut bytes = [0u8; Scalar::FULL_BYTES]; - bytes[1..].copy_from_slice(&rand::random::<[u8; Scalar::SAFE_BYTES]>()); - bytes - }; - - { - let scalar = Scalar::try_from(&bytes).unwrap(); - - assert_eq!(bytes, scalar.to_bytes()); - } - } -} diff --git a/crates/subspace-erasure-coding/Cargo.toml b/crates/subspace-erasure-coding/Cargo.toml index 1a16abbf17..39416f1c34 100644 --- a/crates/subspace-erasure-coding/Cargo.toml +++ b/crates/subspace-erasure-coding/Cargo.toml @@ -18,6 +18,7 @@ bench = false kzg = { git = "https://github.com/grandinetech/rust-kzg", rev = "6c8fcc623df3d7e8c0f30951a49bfea764f90bf4", default-features = false } rust-kzg-blst = { git = "https://github.com/grandinetech/rust-kzg", rev = "6c8fcc623df3d7e8c0f30951a49bfea764f90bf4", default-features = false } subspace-core-primitives = { version = "0.1.0", path = "../subspace-core-primitives", default-features = false } +subspace-kzg = { version = "0.1.0", path = "../../shared/subspace-kzg", default-features = false } [dev-dependencies] rust-kzg-blst = { git = "https://github.com/grandinetech/rust-kzg", rev = "6c8fcc623df3d7e8c0f30951a49bfea764f90bf4" } @@ -30,6 +31,7 @@ std = [ "kzg/std", "rust-kzg-blst/std", "subspace-core-primitives/std", + "subspace-kzg/std", ] parallel = ["rust-kzg-blst/parallel"] diff --git a/crates/subspace-erasure-coding/benches/commitments.rs b/crates/subspace-erasure-coding/benches/commitments.rs index 6283bf0e64..1d8c5244cf 100644 --- a/crates/subspace-erasure-coding/benches/commitments.rs +++ b/crates/subspace-erasure-coding/benches/commitments.rs @@ -2,9 +2,9 @@ use criterion::{black_box, criterion_group, criterion_main, Criterion}; use kzg::G1; use rust_kzg_blst::types::g1::FsG1; use std::num::NonZeroUsize; -use subspace_core_primitives::crypto::kzg::Commitment; use subspace_core_primitives::segments::ArchivedHistorySegment; use subspace_erasure_coding::ErasureCoding; +use subspace_kzg::Commitment; fn criterion_benchmark(c: &mut Criterion) { let num_shards = ArchivedHistorySegment::NUM_PIECES; diff --git a/crates/subspace-erasure-coding/src/lib.rs b/crates/subspace-erasure-coding/src/lib.rs index 04f541c881..7ea884974e 100644 --- 
a/crates/subspace-erasure-coding/src/lib.rs +++ b/crates/subspace-erasure-coding/src/lib.rs @@ -15,8 +15,7 @@ use kzg::{FFTSettings, PolyRecover, DAS, FFTG1, G1}; use rust_kzg_blst::types::fft_settings::FsFFTSettings; use rust_kzg_blst::types::g1::FsG1; use rust_kzg_blst::types::poly::FsPoly; -use subspace_core_primitives::crypto::kzg::{Commitment, Polynomial}; -use subspace_core_primitives::crypto::Scalar; +use subspace_kzg::{Commitment, Polynomial, Scalar}; /// Erasure coding abstraction. /// diff --git a/crates/subspace-erasure-coding/src/tests.rs b/crates/subspace-erasure-coding/src/tests.rs index 933b9f7cfe..7a22cd29d9 100644 --- a/crates/subspace-erasure-coding/src/tests.rs +++ b/crates/subspace-erasure-coding/src/tests.rs @@ -3,8 +3,8 @@ use kzg::G1; use rust_kzg_blst::types::g1::FsG1; use std::iter; use std::num::NonZeroUsize; -use subspace_core_primitives::crypto::kzg::Commitment; -use subspace_core_primitives::crypto::Scalar; +use subspace_core_primitives::ScalarBytes; +use subspace_kzg::{Commitment, Scalar}; // TODO: This could have been done in-place, once implemented can be exposed as a utility fn concatenated_to_interleaved(input: Vec) -> Vec @@ -43,7 +43,7 @@ fn basic_data() { let ec = ErasureCoding::new(scale).unwrap(); let source_shards = (0..num_shards / 2) - .map(|_| rand::random::<[u8; Scalar::SAFE_BYTES]>()) + .map(|_| rand::random::<[u8; ScalarBytes::SAFE_BYTES]>()) .map(Scalar::from) .collect::>(); diff --git a/crates/subspace-farmer-components/Cargo.toml b/crates/subspace-farmer-components/Cargo.toml index 0e4bd6ed65..6fa1335404 100644 --- a/crates/subspace-farmer-components/Cargo.toml +++ b/crates/subspace-farmer-components/Cargo.toml @@ -35,6 +35,7 @@ static_assertions = "1.1.0" subspace-archiving = { version = "0.1.0", path = "../subspace-archiving" } subspace-core-primitives = { version = "0.1.0", path = "../subspace-core-primitives" } subspace-erasure-coding = { version = "0.1.0", path = "../subspace-erasure-coding" } +subspace-kzg = { version = "0.1.0", path = "../../shared/subspace-kzg" } subspace-proof-of-space = { version = "0.1.0", path = "../subspace-proof-of-space", features = ["parallel"] } subspace-verification = { version = "0.1.0", path = "../subspace-verification" } thiserror = "1.0.64" diff --git a/crates/subspace-farmer-components/benches/auditing.rs b/crates/subspace-farmer-components/benches/auditing.rs index 5baee4ce55..e5ed09ad20 100644 --- a/crates/subspace-farmer-components/benches/auditing.rs +++ b/crates/subspace-farmer-components/benches/auditing.rs @@ -7,8 +7,6 @@ use std::io::Write; use std::num::{NonZeroU64, NonZeroUsize}; use std::{env, fs, slice}; use subspace_archiving::archiver::Archiver; -use subspace_core_primitives::crypto::kzg; -use subspace_core_primitives::crypto::kzg::Kzg; use subspace_core_primitives::pieces::Record; use subspace_core_primitives::sectors::SectorId; use subspace_core_primitives::segments::{HistorySize, RecordedHistorySegment}; @@ -23,6 +21,7 @@ use subspace_farmer_components::sector::{ sector_size, SectorContentsMap, SectorMetadata, SectorMetadataChecksummed, }; use subspace_farmer_components::FarmerProtocolInfo; +use subspace_kzg::Kzg; use subspace_proof_of_space::chia::ChiaTable; use subspace_proof_of_space::Table; @@ -49,7 +48,7 @@ pub fn criterion_benchmark(c: &mut Criterion) { let sector_index = 0; let mut input = RecordedHistorySegment::new_boxed(); StdRng::seed_from_u64(42).fill(AsMut::<[u8]>::as_mut(input.as_mut())); - let kzg = Kzg::new(kzg::embedded_kzg_settings()); + let kzg = Kzg::new(); let 
erasure_coding = ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), diff --git a/crates/subspace-farmer-components/benches/plotting.rs b/crates/subspace-farmer-components/benches/plotting.rs index 14eff2d522..afcb5ed1ae 100644 --- a/crates/subspace-farmer-components/benches/plotting.rs +++ b/crates/subspace-farmer-components/benches/plotting.rs @@ -4,8 +4,6 @@ use rand::prelude::*; use std::env; use std::num::{NonZeroU64, NonZeroUsize}; use subspace_archiving::archiver::Archiver; -use subspace_core_primitives::crypto::kzg; -use subspace_core_primitives::crypto::kzg::Kzg; use subspace_core_primitives::pieces::Record; use subspace_core_primitives::segments::{HistorySize, RecordedHistorySegment}; use subspace_core_primitives::PublicKey; @@ -13,6 +11,7 @@ use subspace_erasure_coding::ErasureCoding; use subspace_farmer_components::plotting::{plot_sector, CpuRecordsEncoder, PlotSectorOptions}; use subspace_farmer_components::sector::sector_size; use subspace_farmer_components::FarmerProtocolInfo; +use subspace_kzg::Kzg; use subspace_proof_of_space::chia::ChiaTable; use subspace_proof_of_space::Table; @@ -30,7 +29,7 @@ fn criterion_benchmark(c: &mut Criterion) { let sector_index = 0; let mut input = RecordedHistorySegment::new_boxed(); StdRng::seed_from_u64(42).fill(AsMut::<[u8]>::as_mut(input.as_mut())); - let kzg = Kzg::new(kzg::embedded_kzg_settings()); + let kzg = Kzg::new(); let erasure_coding = ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), diff --git a/crates/subspace-farmer-components/benches/proving.rs b/crates/subspace-farmer-components/benches/proving.rs index 06fa60a96f..bac02d1b68 100644 --- a/crates/subspace-farmer-components/benches/proving.rs +++ b/crates/subspace-farmer-components/benches/proving.rs @@ -11,8 +11,6 @@ use std::io::Write; use std::num::{NonZeroU64, NonZeroUsize}; use std::{env, fs, slice}; use subspace_archiving::archiver::Archiver; -use subspace_core_primitives::crypto::kzg; -use subspace_core_primitives::crypto::kzg::Kzg; use subspace_core_primitives::pieces::Record; use subspace_core_primitives::pos::PosSeed; use subspace_core_primitives::sectors::SectorId; @@ -29,6 +27,7 @@ use subspace_farmer_components::sector::{ sector_size, SectorContentsMap, SectorMetadata, SectorMetadataChecksummed, }; use subspace_farmer_components::FarmerProtocolInfo; +use subspace_kzg::Kzg; use subspace_proof_of_space::chia::ChiaTable; use subspace_proof_of_space::{Table, TableGenerator}; @@ -57,7 +56,7 @@ pub fn criterion_benchmark(c: &mut Criterion) { let mut input = RecordedHistorySegment::new_boxed(); let mut rng = StdRng::seed_from_u64(42); rng.fill(AsMut::<[u8]>::as_mut(input.as_mut())); - let kzg = &Kzg::new(kzg::embedded_kzg_settings()); + let kzg = &Kzg::new(); let erasure_coding = &ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), diff --git a/crates/subspace-farmer-components/benches/reading.rs b/crates/subspace-farmer-components/benches/reading.rs index 7908b94123..034d485383 100644 --- a/crates/subspace-farmer-components/benches/reading.rs +++ b/crates/subspace-farmer-components/benches/reading.rs @@ -8,8 +8,6 @@ use std::io::Write; use std::num::{NonZeroU64, NonZeroUsize}; use std::{env, fs, slice}; use subspace_archiving::archiver::Archiver; -use subspace_core_primitives::crypto::kzg; -use subspace_core_primitives::crypto::kzg::Kzg; use 
subspace_core_primitives::pieces::{PieceOffset, Record}; use subspace_core_primitives::sectors::SectorId; use subspace_core_primitives::segments::{HistorySize, RecordedHistorySegment}; @@ -24,6 +22,7 @@ use subspace_farmer_components::sector::{ sector_size, SectorContentsMap, SectorMetadata, SectorMetadataChecksummed, }; use subspace_farmer_components::{FarmerProtocolInfo, ReadAt, ReadAtSync}; +use subspace_kzg::Kzg; use subspace_proof_of_space::chia::ChiaTable; use subspace_proof_of_space::Table; @@ -50,7 +49,7 @@ pub fn criterion_benchmark(c: &mut Criterion) { let sector_index = 0; let mut input = RecordedHistorySegment::new_boxed(); StdRng::seed_from_u64(42).fill(AsMut::<[u8]>::as_mut(input.as_mut())); - let kzg = Kzg::new(kzg::embedded_kzg_settings()); + let kzg = Kzg::new(); let erasure_coding = ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), diff --git a/crates/subspace-farmer-components/src/auditing.rs b/crates/subspace-farmer-components/src/auditing.rs index 8b7de1c0be..1a01620f60 100644 --- a/crates/subspace-farmer-components/src/auditing.rs +++ b/crates/subspace-farmer-components/src/auditing.rs @@ -10,9 +10,8 @@ use crate::{ReadAtOffset, ReadAtSync}; use rayon::prelude::*; use std::collections::HashSet; use std::io; -use subspace_core_primitives::crypto::Scalar; use subspace_core_primitives::sectors::{SBucket, SectorId, SectorIndex, SectorSlotChallenge}; -use subspace_core_primitives::{Blake3Hash, PublicKey, SolutionRange}; +use subspace_core_primitives::{Blake3Hash, PublicKey, ScalarBytes, SolutionRange}; use subspace_verification::is_within_solution_range; use thiserror::Error; @@ -203,9 +202,9 @@ fn collect_sector_auditing_details( let sector_slot_challenge = sector_id.derive_sector_slot_challenge(global_challenge); let s_bucket_audit_index = sector_slot_challenge.s_bucket_audit_index(); - let s_bucket_audit_size = Scalar::FULL_BYTES + let s_bucket_audit_size = ScalarBytes::FULL_BYTES * usize::from(sector_metadata.s_bucket_sizes[usize::from(s_bucket_audit_index)]); - let s_bucket_audit_offset = Scalar::FULL_BYTES as u64 + let s_bucket_audit_offset = ScalarBytes::FULL_BYTES as u64 * sector_metadata .s_bucket_sizes .iter() @@ -237,7 +236,7 @@ fn map_winning_chunks( ) -> Option> { // Map all winning chunks let mut chunk_candidates = s_bucket - .array_chunks::<{ Scalar::FULL_BYTES }>() + .array_chunks::<{ ScalarBytes::FULL_BYTES }>() .enumerate() .filter_map(|(chunk_offset, chunk)| { is_within_solution_range( diff --git a/crates/subspace-farmer-components/src/plotting.rs b/crates/subspace-farmer-components/src/plotting.rs index 354399327c..db63eb8c5a 100644 --- a/crates/subspace-farmer-components/src/plotting.rs +++ b/crates/subspace-farmer-components/src/plotting.rs @@ -24,14 +24,14 @@ use std::simd::Simd; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; use std::time::Duration; -use subspace_core_primitives::crypto::kzg::Kzg; -use subspace_core_primitives::crypto::{blake3_hash, blake3_hash_parallel, Scalar}; +use subspace_core_primitives::crypto::{blake3_hash, blake3_hash_parallel}; use subspace_core_primitives::pieces::{PieceIndex, PieceOffset, Record}; use subspace_core_primitives::pos::PosSeed; use subspace_core_primitives::sectors::{SBucket, SectorId, SectorIndex}; use subspace_core_primitives::segments::HistorySize; -use subspace_core_primitives::{Blake3Hash, PublicKey}; +use subspace_core_primitives::{Blake3Hash, PublicKey, ScalarBytes}; use 
subspace_erasure_coding::ErasureCoding; +use subspace_kzg::{Kzg, Scalar}; use subspace_proof_of_space::{Table, TableGenerator}; use thiserror::Error; use tokio::sync::{AcquireError, Semaphore}; @@ -584,7 +584,7 @@ pub fn write_sector( .iter_s_bucket_records(s_bucket) .expect("S-bucket guaranteed to be in range; qed") }) - .zip(s_buckets_region.array_chunks_mut::<{ Scalar::FULL_BYTES }>()) + .zip(s_buckets_region.array_chunks_mut::<{ ScalarBytes::FULL_BYTES }>()) { let num_encoded_record_chunks = usize::from(num_encoded_record_chunks[usize::from(piece_offset)]); @@ -629,7 +629,7 @@ fn record_encoding( mut encoded_chunks_used: EncodedChunksUsed<'_>, table_generator: &mut PosTable::Generator, erasure_coding: &ErasureCoding, - chunks_scratch: &mut Vec<[u8; Scalar::FULL_BYTES]>, + chunks_scratch: &mut Vec<[u8; ScalarBytes::FULL_BYTES]>, ) where PosTable: Table, { @@ -651,7 +651,7 @@ fn record_encoding( ) .expect("Instance was verified to be able to work with this many values earlier; qed") .into_iter() - .map(<[u8; Scalar::FULL_BYTES]>::from) + .map(<[u8; ScalarBytes::FULL_BYTES]>::from) .collect::>(); let source_record_chunks = record.to_vec(); @@ -671,7 +671,7 @@ fn record_encoding( (Simd::from(*record_chunk) ^ Simd::from(*proof.hash())).to_array() } else { // Dummy value indicating no proof - [0; Scalar::FULL_BYTES] + [0; ScalarBytes::FULL_BYTES] } }) .collect_into_vec(chunks_scratch); @@ -680,7 +680,7 @@ fn record_encoding( .zip(encoded_chunks_used.iter_mut()) .filter_map(|(maybe_encoded_chunk, mut encoded_chunk_used)| { // No proof, see above - if maybe_encoded_chunk == [0; Scalar::FULL_BYTES] { + if maybe_encoded_chunk == [0; ScalarBytes::FULL_BYTES] { None } else { *encoded_chunk_used = true; diff --git a/crates/subspace-farmer-components/src/proving.rs b/crates/subspace-farmer-components/src/proving.rs index 4530e58e6c..498bffd5cb 100644 --- a/crates/subspace-farmer-components/src/proving.rs +++ b/crates/subspace-farmer-components/src/proving.rs @@ -14,12 +14,12 @@ use crate::{ReadAt, ReadAtSync}; use futures::FutureExt; use std::collections::VecDeque; use std::io; -use subspace_core_primitives::crypto::kzg::Kzg; use subspace_core_primitives::pieces::{PieceOffset, Record}; use subspace_core_primitives::pos::PosSeed; use subspace_core_primitives::sectors::{SBucket, SectorId}; -use subspace_core_primitives::{ChunkWitness, PublicKey, Solution, SolutionRange}; +use subspace_core_primitives::{ChunkWitness, PublicKey, ScalarBytes, Solution, SolutionRange}; use subspace_erasure_coding::ErasureCoding; +use subspace_kzg::Kzg; use subspace_proof_of_space::Table; use thiserror::Error; @@ -263,10 +263,12 @@ where .now_or_never() .expect("Sync reader; qed")?; - let chunk = sector_record_chunks - .get(usize::from(self.s_bucket)) - .expect("Within s-bucket range; qed") - .expect("Winning chunk was plotted; qed"); + let chunk = ScalarBytes::from( + sector_record_chunks + .get(usize::from(self.s_bucket)) + .expect("Within s-bucket range; qed") + .expect("Winning chunk was plotted; qed"), + ); let source_chunks_polynomial = self .erasure_coding diff --git a/crates/subspace-farmer-components/src/reading.rs b/crates/subspace-farmer-components/src/reading.rs index efd5f85d73..1b867a479f 100644 --- a/crates/subspace-farmer-components/src/reading.rs +++ b/crates/subspace-farmer-components/src/reading.rs @@ -17,10 +17,12 @@ use std::mem::ManuallyDrop; use std::simd::Simd; use std::str::FromStr; use std::{fmt, io}; -use subspace_core_primitives::crypto::{blake3_hash, Scalar}; +use 
subspace_core_primitives::crypto::blake3_hash; use subspace_core_primitives::pieces::{Piece, PieceOffset, Record}; use subspace_core_primitives::sectors::{SBucket, SectorId}; +use subspace_core_primitives::ScalarBytes; use subspace_erasure_coding::ErasureCoding; +use subspace_kzg::Scalar; use subspace_proof_of_space::{Table, TableGenerator}; use thiserror::Error; use tracing::debug; @@ -112,7 +114,7 @@ impl ReadingError { /// unfortunately. #[derive(Debug, Copy, Clone)] pub enum ReadSectorRecordChunksMode { - /// Read individual chunks ([`Scalar::FULL_BYTES`] in size) concurrently, which results in lower + /// Read individual chunks ([`ScalarBytes::FULL_BYTES`] in size) concurrently, which results in lower /// total data transfer, but requires for SSD to support high concurrency and low latency ConcurrentChunks, /// Read the whole sector at once and extract chunks from in-memory buffer, which uses more @@ -212,19 +214,19 @@ where }; read_chunks_inputs.into_par_iter().flatten().try_for_each( |(maybe_record_chunk, chunk_location, encoded_chunk_used, s_bucket)| { - let mut record_chunk = [0; Scalar::FULL_BYTES]; + let mut record_chunk = [0; ScalarBytes::FULL_BYTES]; if let Some(sector_bytes) = §or_bytes { record_chunk.copy_from_slice( §or_bytes[sector_contents_map_size as usize - + chunk_location as usize * Scalar::FULL_BYTES..] - [..Scalar::FULL_BYTES], + + chunk_location as usize * ScalarBytes::FULL_BYTES..] + [..ScalarBytes::FULL_BYTES], ); } else { sector .read_at( &mut record_chunk, sector_contents_map_size - + chunk_location * Scalar::FULL_BYTES as u64, + + chunk_location * ScalarBytes::FULL_BYTES as u64, ) .map_err(|error| ReadingError::FailedToReadChunk { chunk_location, @@ -268,19 +270,19 @@ where .flatten() .map( |(maybe_record_chunk, chunk_location, encoded_chunk_used, s_bucket)| async move { - let mut record_chunk = [0; Scalar::FULL_BYTES]; + let mut record_chunk = [0; ScalarBytes::FULL_BYTES]; if let Some(sector_bytes) = §or_bytes { record_chunk.copy_from_slice( §or_bytes[sector_contents_map_size as usize - + chunk_location as usize * Scalar::FULL_BYTES..] - [..Scalar::FULL_BYTES], + + chunk_location as usize * ScalarBytes::FULL_BYTES..] 
+ [..ScalarBytes::FULL_BYTES], ); } else { record_chunk.copy_from_slice( §or .read_at( - vec![0; Scalar::FULL_BYTES], - sector_contents_map_size + chunk_location * Scalar::FULL_BYTES as u64, + vec![0; ScalarBytes::FULL_BYTES], + sector_contents_map_size + chunk_location * ScalarBytes::FULL_BYTES as u64, ) .await .map_err(|error| ReadingError::FailedToReadChunk { diff --git a/crates/subspace-farmer-components/src/segment_reconstruction.rs b/crates/subspace-farmer-components/src/segment_reconstruction.rs index 84439f542f..06beef8cb7 100644 --- a/crates/subspace-farmer-components/src/segment_reconstruction.rs +++ b/crates/subspace-farmer-components/src/segment_reconstruction.rs @@ -3,10 +3,10 @@ use futures::stream::FuturesOrdered; use futures::StreamExt; use std::sync::atomic::{AtomicUsize, Ordering}; use subspace_archiving::piece_reconstructor::{PiecesReconstructor, ReconstructorError}; -use subspace_core_primitives::crypto::kzg::Kzg; use subspace_core_primitives::pieces::{Piece, PieceIndex}; use subspace_core_primitives::segments::{ArchivedHistorySegment, RecordedHistorySegment}; use subspace_erasure_coding::ErasureCoding; +use subspace_kzg::Kzg; use thiserror::Error; use tokio::sync::Semaphore; use tokio::task::JoinError; diff --git a/crates/subspace-farmer/Cargo.toml b/crates/subspace-farmer/Cargo.toml index 46ecd975bd..5a0b4ef17f 100644 --- a/crates/subspace-farmer/Cargo.toml +++ b/crates/subspace-farmer/Cargo.toml @@ -51,15 +51,16 @@ serde = { version = "1.0.110", features = ["derive"] } serde_json = "1.0.128" static_assertions = "1.1.0" ss58-registry = "1.50.0" -subspace-archiving = { version = "0.1.0", path = "../subspace-archiving" } subspace-erasure-coding = { version = "0.1.0", path = "../subspace-erasure-coding" } subspace-farmer-components = { version = "0.1.0", path = "../subspace-farmer-components" } subspace-core-primitives = { version = "0.1.0", path = "../subspace-core-primitives" } +subspace-kzg = { version = "0.1.0", path = "../../shared/subspace-kzg" } subspace-metrics = { version = "0.1.0", path = "../../shared/subspace-metrics", optional = true } subspace-networking = { version = "0.1.0", path = "../subspace-networking" } subspace-proof-of-space = { version = "0.1.0", path = "../subspace-proof-of-space" } subspace-proof-of-space-gpu = { version = "0.1.0", path = "../../shared/subspace-proof-of-space-gpu", optional = true } subspace-rpc-primitives = { version = "0.1.0", path = "../subspace-rpc-primitives" } +subspace-verification = { version = "0.1.0", path = "../subspace-verification" } substrate-bip39 = "0.6.0" supports-color = { version = "3.0.1", optional = true } tempfile = "3.13.0" diff --git a/crates/subspace-farmer/src/bin/subspace-farmer/commands/benchmark.rs b/crates/subspace-farmer/src/bin/subspace-farmer/commands/benchmark.rs index 81ef277678..a951ca4677 100644 --- a/crates/subspace-farmer/src/bin/subspace-farmer/commands/benchmark.rs +++ b/crates/subspace-farmer/src/bin/subspace-farmer/commands/benchmark.rs @@ -8,7 +8,6 @@ use std::collections::HashSet; use std::fs::OpenOptions; use std::num::NonZeroUsize; use std::path::PathBuf; -use subspace_core_primitives::crypto::kzg::{embedded_kzg_settings, Kzg}; use subspace_core_primitives::pieces::Record; use subspace_core_primitives::{Blake3Hash, SolutionRange}; use subspace_erasure_coding::ErasureCoding; @@ -21,6 +20,7 @@ use subspace_farmer::single_disk_farm::{ use subspace_farmer::utils::{recommended_number_of_farming_threads, tokio_rayon_spawn_handler}; use 
subspace_farmer_components::reading::ReadSectorRecordChunksMode; use subspace_farmer_components::sector::sector_size; +use subspace_kzg::Kzg; use subspace_proof_of_space::Table; use subspace_rpc_primitives::SlotInfo; @@ -153,7 +153,7 @@ where } = audit_options; let sector_size = sector_size(single_disk_farm_info.pieces_in_sector()); - let kzg = Kzg::new(embedded_kzg_settings()); + let kzg = Kzg::new(); let erasure_coding = ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), @@ -333,7 +333,7 @@ where limit_sector_count, } = prove_options; - let kzg = Kzg::new(embedded_kzg_settings()); + let kzg = Kzg::new(); let erasure_coding = ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), diff --git a/crates/subspace-farmer/src/bin/subspace-farmer/commands/cluster/controller.rs b/crates/subspace-farmer/src/bin/subspace-farmer/commands/cluster/controller.rs index 6f40a5be03..371063ea00 100644 --- a/crates/subspace-farmer/src/bin/subspace-farmer/commands/cluster/controller.rs +++ b/crates/subspace-farmer/src/bin/subspace-farmer/commands/cluster/controller.rs @@ -18,7 +18,6 @@ use std::path::PathBuf; use std::pin::{pin, Pin}; use std::sync::Arc; use std::time::Duration; -use subspace_core_primitives::crypto::kzg::{embedded_kzg_settings, Kzg}; use subspace_farmer::cluster::controller::controller_service; use subspace_farmer::cluster::nats_client::NatsClient; use subspace_farmer::farm::plotted_pieces::PlottedPieces; @@ -30,6 +29,7 @@ use subspace_farmer::node_client::rpc_node_client::RpcNodeClient; use subspace_farmer::node_client::NodeClient; use subspace_farmer::single_disk_farm::identity::Identity; use subspace_farmer::utils::{run_future_in_dedicated_thread, AsyncJoinOnDrop}; +use subspace_kzg::Kzg; use subspace_networking::utils::piece_provider::PieceProvider; use tracing::info; @@ -164,7 +164,7 @@ pub(super) async fn controller( .map_err(|error| anyhow!("Failed to configure networking: {error}"))? 
}; - let kzg = Kzg::new(embedded_kzg_settings()); + let kzg = Kzg::new(); let validator = Some(SegmentCommitmentPieceValidator::new( node.clone(), node_client.clone(), diff --git a/crates/subspace-farmer/src/bin/subspace-farmer/commands/cluster/farmer.rs b/crates/subspace-farmer/src/bin/subspace-farmer/commands/cluster/farmer.rs index b72322f920..9ca65ddbee 100644 --- a/crates/subspace-farmer/src/bin/subspace-farmer/commands/cluster/farmer.rs +++ b/crates/subspace-farmer/src/bin/subspace-farmer/commands/cluster/farmer.rs @@ -16,7 +16,6 @@ use std::num::NonZeroUsize; use std::pin::{pin, Pin}; use std::sync::Arc; use std::time::Duration; -use subspace_core_primitives::crypto::kzg::{embedded_kzg_settings, Kzg}; use subspace_core_primitives::pieces::Record; use subspace_core_primitives::PublicKey; use subspace_erasure_coding::ErasureCoding; @@ -35,6 +34,7 @@ use subspace_farmer::utils::{ recommended_number_of_farming_threads, run_future_in_dedicated_thread, AsyncJoinOnDrop, }; use subspace_farmer_components::reading::ReadSectorRecordChunksMode; +use subspace_kzg::Kzg; use subspace_proof_of_space::Table; use tokio::sync::{Barrier, Semaphore}; use tracing::{error, info, info_span, warn, Instrument}; @@ -182,7 +182,7 @@ where .await .map_err(|error| anyhow!("Failed to get farmer app info: {error}"))?; - let kzg = Kzg::new(embedded_kzg_settings()); + let kzg = Kzg::new(); let erasure_coding = ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), diff --git a/crates/subspace-farmer/src/bin/subspace-farmer/commands/cluster/plotter.rs b/crates/subspace-farmer/src/bin/subspace-farmer/commands/cluster/plotter.rs index e59a2d5792..48c7812a8b 100644 --- a/crates/subspace-farmer/src/bin/subspace-farmer/commands/cluster/plotter.rs +++ b/crates/subspace-farmer/src/bin/subspace-farmer/commands/cluster/plotter.rs @@ -8,7 +8,6 @@ use std::num::NonZeroUsize; use std::pin::Pin; use std::sync::Arc; use std::time::Duration; -use subspace_core_primitives::crypto::kzg::{embedded_kzg_settings, Kzg}; use subspace_core_primitives::pieces::Record; use subspace_erasure_coding::ErasureCoding; use subspace_farmer::cluster::controller::ClusterPieceGetter; @@ -25,6 +24,7 @@ use subspace_farmer::utils::{ create_plotting_thread_pool_manager, parse_cpu_cores_sets, thread_pool_core_indices, }; use subspace_farmer_components::PieceGetter; +use subspace_kzg::Kzg; use subspace_proof_of_space::Table; use tokio::sync::Semaphore; use tracing::info; @@ -137,7 +137,7 @@ where additional_components: _, } = plotter_args; - let kzg = Kzg::new(embedded_kzg_settings()); + let kzg = Kzg::new(); let erasure_coding = ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), diff --git a/crates/subspace-farmer/src/bin/subspace-farmer/commands/farm.rs b/crates/subspace-farmer/src/bin/subspace-farmer/commands/farm.rs index dbb85e93b6..4c61943eb6 100644 --- a/crates/subspace-farmer/src/bin/subspace-farmer/commands/farm.rs +++ b/crates/subspace-farmer/src/bin/subspace-farmer/commands/farm.rs @@ -18,7 +18,6 @@ use std::pin::pin; use std::str::FromStr; use std::sync::Arc; use std::time::Duration; -use subspace_core_primitives::crypto::kzg::{embedded_kzg_settings, Kzg}; use subspace_core_primitives::pieces::Record; use subspace_core_primitives::PublicKey; use subspace_erasure_coding::ErasureCoding; @@ -49,6 +48,7 @@ use subspace_farmer::utils::{ }; use subspace_farmer_components::reading::ReadSectorRecordChunksMode; 
use subspace_farmer_components::PieceGetter; +use subspace_kzg::Kzg; use subspace_metrics::{start_prometheus_metrics_server, RegistryAdapter}; use subspace_networking::utils::piece_provider::PieceProvider; use subspace_proof_of_space::Table; @@ -404,7 +404,7 @@ where .map_err(|error| anyhow!("Failed to configure networking: {error}"))? }; - let kzg = Kzg::new(embedded_kzg_settings()); + let kzg = Kzg::new(); let erasure_coding = ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), diff --git a/crates/subspace-farmer/src/farmer_piece_getter/piece_validator.rs b/crates/subspace-farmer/src/farmer_piece_getter/piece_validator.rs index b99bbc3ff7..7427827955 100644 --- a/crates/subspace-farmer/src/farmer_piece_getter/piece_validator.rs +++ b/crates/subspace-farmer/src/farmer_piece_getter/piece_validator.rs @@ -2,12 +2,12 @@ use crate::node_client::NodeClient; use async_trait::async_trait; -use subspace_archiving::archiver::is_piece_valid; -use subspace_core_primitives::crypto::kzg::Kzg; use subspace_core_primitives::pieces::{Piece, PieceIndex}; +use subspace_kzg::Kzg; use subspace_networking::libp2p::PeerId; use subspace_networking::utils::piece_provider::PieceValidator; use subspace_networking::Node; +use subspace_verification::is_piece_valid; use tracing::{error, warn}; /// Farmer-specific validator for pieces retrieved from the network. diff --git a/crates/subspace-farmer/src/plotter/cpu.rs b/crates/subspace-farmer/src/plotter/cpu.rs index 5929c0b951..d1b860f97c 100644 --- a/crates/subspace-farmer/src/plotter/cpu.rs +++ b/crates/subspace-farmer/src/plotter/cpu.rs @@ -23,7 +23,6 @@ use std::pin::pin; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; use std::time::Instant; -use subspace_core_primitives::crypto::kzg::Kzg; use subspace_core_primitives::sectors::SectorIndex; use subspace_core_primitives::PublicKey; use subspace_erasure_coding::ErasureCoding; @@ -32,6 +31,7 @@ use subspace_farmer_components::plotting::{ EncodeSectorOptions, PlottingError, }; use subspace_farmer_components::{FarmerProtocolInfo, PieceGetter}; +use subspace_kzg::Kzg; use subspace_proof_of_space::Table; use tokio::sync::{OwnedSemaphorePermit, Semaphore}; use tokio::task::yield_now; diff --git a/crates/subspace-farmer/src/plotter/gpu.rs b/crates/subspace-farmer/src/plotter/gpu.rs index d7a48f778a..8b0cf2ccb9 100644 --- a/crates/subspace-farmer/src/plotter/gpu.rs +++ b/crates/subspace-farmer/src/plotter/gpu.rs @@ -24,7 +24,6 @@ use std::pin::pin; use std::sync::atomic::{AtomicBool, Ordering}; use std::sync::Arc; use std::time::Instant; -use subspace_core_primitives::crypto::kzg::Kzg; use subspace_core_primitives::sectors::SectorIndex; use subspace_core_primitives::PublicKey; use subspace_erasure_coding::ErasureCoding; @@ -33,6 +32,7 @@ use subspace_farmer_components::plotting::{ PlottingError, RecordsEncoder, }; use subspace_farmer_components::{FarmerProtocolInfo, PieceGetter}; +use subspace_kzg::Kzg; use tokio::sync::{OwnedSemaphorePermit, Semaphore}; use tokio::task::yield_now; use tracing::{warn, Instrument}; diff --git a/crates/subspace-farmer/src/single_disk_farm.rs b/crates/subspace-farmer/src/single_disk_farm.rs index 77400b485a..0a0a27b31a 100644 --- a/crates/subspace-farmer/src/single_disk_farm.rs +++ b/crates/subspace-farmer/src/single_disk_farm.rs @@ -66,17 +66,17 @@ use std::sync::atomic::{AtomicUsize, Ordering}; use std::sync::Arc; use std::time::{Duration, Instant}; use std::{fmt, fs, io, mem}; -use 
subspace_core_primitives::crypto::kzg::Kzg; -use subspace_core_primitives::crypto::{blake3_hash, Scalar}; +use subspace_core_primitives::crypto::blake3_hash; use subspace_core_primitives::pieces::Record; use subspace_core_primitives::sectors::SectorIndex; use subspace_core_primitives::segments::{HistorySize, SegmentIndex}; -use subspace_core_primitives::{Blake3Hash, PublicKey}; +use subspace_core_primitives::{Blake3Hash, PublicKey, ScalarBytes}; use subspace_erasure_coding::ErasureCoding; use subspace_farmer_components::file_ext::FileExt; use subspace_farmer_components::reading::ReadSectorRecordChunksMode; use subspace_farmer_components::sector::{sector_size, SectorMetadata, SectorMetadataChecksummed}; use subspace_farmer_components::{FarmerProtocolInfo, ReadAtSync}; +use subspace_kzg::Kzg; use subspace_networking::KnownPeersManager; use subspace_proof_of_space::Table; use subspace_rpc_primitives::{FarmerAppInfo, SolutionResponse}; @@ -2464,9 +2464,9 @@ where { let start = Instant::now(); (0..Record::NUM_CHUNKS).into_par_iter().try_for_each(|_| { - let offset = thread_rng().gen_range(0_usize..sector_size / Scalar::FULL_BYTES) - * Scalar::FULL_BYTES; - farming_plot.read_at(&mut [0; Scalar::FULL_BYTES], offset as u64) + let offset = thread_rng().gen_range(0_usize..sector_size / ScalarBytes::FULL_BYTES) + * ScalarBytes::FULL_BYTES; + farming_plot.read_at(&mut [0; ScalarBytes::FULL_BYTES], offset as u64) })?; let elapsed = start.elapsed(); diff --git a/crates/subspace-farmer/src/single_disk_farm/farming.rs b/crates/subspace-farmer/src/single_disk_farm/farming.rs index 728c969288..1adb04b19f 100644 --- a/crates/subspace-farmer/src/single_disk_farm/farming.rs +++ b/crates/subspace-farmer/src/single_disk_farm/farming.rs @@ -19,7 +19,6 @@ use rayon::ThreadPool; use std::collections::HashSet; use std::sync::Arc; use std::time::Instant; -use subspace_core_primitives::crypto::kzg::Kzg; use subspace_core_primitives::pieces::Record; use subspace_core_primitives::pos::PosSeed; use subspace_core_primitives::sectors::SectorIndex; @@ -31,6 +30,7 @@ use subspace_farmer_components::proving::{ProvableSolutions, ProvingError}; use subspace_farmer_components::reading::ReadSectorRecordChunksMode; use subspace_farmer_components::sector::{SectorMetadata, SectorMetadataChecksummed}; use subspace_farmer_components::ReadAtSync; +use subspace_kzg::Kzg; use subspace_proof_of_space::{Table, TableGenerator}; use subspace_rpc_primitives::{SlotInfo, SolutionResponse}; use tracing::{debug, error, info, trace, warn, Span}; diff --git a/crates/subspace-service/Cargo.toml b/crates/subspace-service/Cargo.toml index 911a4e1521..30479a35c1 100644 --- a/crates/subspace-service/Cargo.toml +++ b/crates/subspace-service/Cargo.toml @@ -84,9 +84,11 @@ static_assertions = "1.1.0" subspace-archiving = { version = "0.1.0", path = "../subspace-archiving" } subspace-core-primitives = { version = "0.1.0", path = "../subspace-core-primitives" } subspace-erasure-coding = { version = "0.1.0", path = "../subspace-erasure-coding" } +subspace-kzg = { version = "0.1.0", path = "../../shared/subspace-kzg" } subspace-networking = { version = "0.1.0", path = "../subspace-networking" } subspace-proof-of-space = { version = "0.1.0", path = "../subspace-proof-of-space" } subspace-runtime-primitives = { version = "0.1.0", path = "../subspace-runtime-primitives" } +subspace-verification = { version = "0.1.0", path = "../subspace-verification" } substrate-frame-rpc-system = { git = "https://github.com/subspace/polkadot-sdk", rev = 
"5871818e1d736f1843eb9078f886290695165c42" } substrate-prometheus-endpoint = { git = "https://github.com/subspace/polkadot-sdk", rev = "5871818e1d736f1843eb9078f886290695165c42" } thiserror = "1.0.64" diff --git a/crates/subspace-service/src/lib.rs b/crates/subspace-service/src/lib.rs index e8b461153e..dc86f7dd6e 100644 --- a/crates/subspace-service/src/lib.rs +++ b/crates/subspace-service/src/lib.rs @@ -125,11 +125,11 @@ use std::marker::PhantomData; use std::num::NonZeroUsize; use std::sync::Arc; use std::time::Duration; -use subspace_core_primitives::crypto::kzg::{embedded_kzg_settings, Kzg}; use subspace_core_primitives::pieces::Record; use subspace_core_primitives::pot::PotSeed; use subspace_core_primitives::{BlockNumber, PublicKey, REWARD_SIGNING_CONTEXT}; use subspace_erasure_coding::ErasureCoding; +use subspace_kzg::Kzg; use subspace_networking::libp2p::multiaddr::Protocol; use subspace_networking::utils::piece_provider::PieceProvider; use subspace_proof_of_space::Table; @@ -535,16 +535,13 @@ where // TODO: Make these explicit arguments we no longer use Substate's `Configuration` let (kzg, maybe_erasure_coding) = tokio::task::block_in_place(|| { - rayon::join( - || Kzg::new(embedded_kzg_settings()), - || { - ErasureCoding::new( - NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) - .expect("Not zero; qed"), - ) - .map_err(|error| format!("Failed to instantiate erasure coding: {error}")) - }, - ) + rayon::join(Kzg::new, || { + ErasureCoding::new( + NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) + .expect("Not zero; qed"), + ) + .map_err(|error| format!("Failed to instantiate erasure coding: {error}")) + }) }); let erasure_coding = maybe_erasure_coding?; diff --git a/crates/subspace-service/src/rpc.rs b/crates/subspace-service/src/rpc.rs index 8f85891010..e5b17a3edd 100644 --- a/crates/subspace-service/src/rpc.rs +++ b/crates/subspace-service/src/rpc.rs @@ -41,9 +41,9 @@ use sp_consensus::SyncOracle; use sp_consensus_subspace::SubspaceApi; use sp_objects::ObjectsApi; use std::sync::Arc; -use subspace_core_primitives::crypto::kzg::Kzg; use subspace_core_primitives::{BlockNumber, PublicKey}; use subspace_erasure_coding::ErasureCoding; +use subspace_kzg::Kzg; use subspace_networking::libp2p::Multiaddr; use subspace_runtime_primitives::opaque::Block; use subspace_runtime_primitives::{AccountId, Balance, Nonce}; diff --git a/crates/subspace-service/src/sync_from_dsn/piece_validator.rs b/crates/subspace-service/src/sync_from_dsn/piece_validator.rs index d19b7d61a0..1bc44d6397 100644 --- a/crates/subspace-service/src/sync_from_dsn/piece_validator.rs +++ b/crates/subspace-service/src/sync_from_dsn/piece_validator.rs @@ -1,12 +1,12 @@ use async_trait::async_trait; use sc_client_api::AuxStore; use sc_consensus_subspace::archiver::SegmentHeadersStore; -use subspace_archiving::archiver::is_piece_valid; -use subspace_core_primitives::crypto::kzg::Kzg; use subspace_core_primitives::pieces::{Piece, PieceIndex}; +use subspace_kzg::Kzg; use subspace_networking::libp2p::PeerId; use subspace_networking::utils::piece_provider::PieceValidator; use subspace_networking::Node; +use subspace_verification::is_piece_valid; use tracing::{error, warn}; pub(crate) struct SegmentCommitmentPieceValidator { diff --git a/crates/subspace-verification/Cargo.toml b/crates/subspace-verification/Cargo.toml index d5f3bed5f8..30a133445b 100644 --- a/crates/subspace-verification/Cargo.toml +++ b/crates/subspace-verification/Cargo.toml @@ -18,17 +18,18 @@ include = [ 
[dependencies] codec = { package = "parity-scale-codec", version = "3.6.12", default-features = false } schnorrkel = { version = "0.11.4", default-features = false } -subspace-archiving = { version = "0.1.0", path = "../subspace-archiving", default-features = false } subspace-core-primitives = { version = "0.1.0", path = "../subspace-core-primitives", default-features = false } +subspace-kzg = { version = "0.1.0", path = "../../shared/subspace-kzg", default-features = false, optional = true } subspace-proof-of-space = { version = "0.1.0", path = "../subspace-proof-of-space", default-features = false } thiserror = { version = "1.0.64", optional = true } [features] -default = ["std"] +default = ["std", "kzg"] +kzg = ["subspace-kzg"] std = [ "codec/std", "schnorrkel/std", - "subspace-archiving/std", "subspace-core-primitives/std", + "subspace-kzg?/std", "thiserror" ] diff --git a/crates/subspace-verification/src/lib.rs b/crates/subspace-verification/src/lib.rs index b1457753b6..343f16508d 100644 --- a/crates/subspace-verification/src/lib.rs +++ b/crates/subspace-verification/src/lib.rs @@ -20,24 +20,38 @@ #![feature(array_chunks, portable_simd)] #![cfg_attr(not(feature = "std"), no_std)] +#[cfg(not(feature = "std"))] +extern crate alloc; + +#[cfg(not(feature = "std"))] +use alloc::string::String; use codec::{Decode, Encode, MaxEncodedLen}; use core::mem; +#[cfg(feature = "kzg")] use core::simd::Simd; use schnorrkel::context::SigningContext; use schnorrkel::SignatureError; -use subspace_archiving::archiver; -use subspace_core_primitives::crypto::kzg::{Commitment, Kzg, Witness}; -use subspace_core_primitives::crypto::{ - blake3_254_hash_to_scalar, blake3_hash_list, blake3_hash_with_key, Scalar, -}; -use subspace_core_primitives::pieces::Record; +#[cfg(feature = "kzg")] +use subspace_core_primitives::crypto::blake3_254_hash_to_scalar; +use subspace_core_primitives::crypto::{blake3_hash_list, blake3_hash_with_key}; +#[cfg(feature = "kzg")] +use subspace_core_primitives::pieces::{PieceArray, Record, RecordWitness}; use subspace_core_primitives::pot::PotOutput; -use subspace_core_primitives::sectors::{SectorId, SectorSlotChallenge}; +#[cfg(feature = "kzg")] +use subspace_core_primitives::sectors::SectorId; +use subspace_core_primitives::sectors::SectorSlotChallenge; +#[cfg(feature = "kzg")] +use subspace_core_primitives::segments::ArchivedHistorySegment; use subspace_core_primitives::segments::{HistorySize, SegmentCommitment}; +#[cfg(feature = "kzg")] +use subspace_core_primitives::Solution; use subspace_core_primitives::{ - Blake3Hash, BlockNumber, BlockWeight, PublicKey, RewardSignature, SlotNumber, Solution, + Blake3Hash, BlockNumber, BlockWeight, PublicKey, RewardSignature, ScalarBytes, SlotNumber, SolutionRange, }; +#[cfg(feature = "kzg")] +use subspace_kzg::{Commitment, Kzg, Scalar, Witness}; +#[cfg(feature = "kzg")] use subspace_proof_of_space::Table; /// Errors encountered by the Subspace consensus primitives. 
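
With the new optional `kzg` cargo feature above, `subspace-verification` can now be built without `subspace-kzg` at all; only the KZG-dependent items are compiled when the feature is enabled, mirroring the `#[cfg(feature = "kzg")]` attributes on the imports. A minimal sketch of that gating pattern (function names here are placeholders):

// Compiled only when the `kzg` feature (and therefore `subspace-kzg`) is enabled.
#[cfg(feature = "kzg")]
pub fn kzg_backed_check(kzg: &subspace_kzg::Kzg) -> bool {
    // KZG-backed verification would go here.
    let _ = kzg;
    true
}

// Always available, including in `no_std` builds without the `kzg` feature.
pub fn feature_independent_check() -> bool {
    true
}
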
@@ -83,6 +97,9 @@ pub enum Error { /// Invalid audit chunk offset #[cfg_attr(feature = "thiserror", error("Invalid audit chunk offset"))] InvalidAuditChunkOffset, + /// Invalid chunk + #[cfg_attr(feature = "thiserror", error("Invalid chunk: {0}"))] + InvalidChunk(String), /// Invalid chunk witness #[cfg_attr(feature = "thiserror", error("Invalid chunk witness"))] InvalidChunkWitness, @@ -181,6 +198,7 @@ pub fn calculate_block_weight(solution_range: SolutionRange) -> BlockWeight { /// Verify whether solution is valid, returns solution distance that is `<= solution_range/2` on /// success. +#[cfg(feature = "kzg")] pub fn verify_solution<'a, PosTable, RewardAddress>( solution: &'a Solution, slot: SlotNumber, @@ -212,9 +230,8 @@ where return Err(Error::InvalidProofOfSpace); }; - let masked_chunk = (Simd::from(solution.chunk.to_bytes()) - ^ Simd::from(*solution.proof_of_space.hash())) - .to_array(); + let masked_chunk = + (Simd::from(*solution.chunk) ^ Simd::from(*solution.proof_of_space.hash())).to_array(); let solution_distance = calculate_solution_distance(&global_challenge, &masked_chunk, &sector_slot_challenge); @@ -233,7 +250,7 @@ where .map_err(|_error| Error::InvalidChunkWitness)?, Record::NUM_S_BUCKETS, s_bucket_audit_index.into(), - &solution.chunk, + &Scalar::try_from(solution.chunk).map_err(Error::InvalidChunk)?, &Witness::try_from(solution.chunk_witness).map_err(|_error| Error::InvalidChunkWitness)?, ) { return Err(Error::InvalidChunkWitness); @@ -288,9 +305,12 @@ where .position(); // Check that piece is part of the blockchain history - if !archiver::is_record_commitment_hash_valid( + if !is_record_commitment_hash_valid( kzg, - &blake3_254_hash_to_scalar(solution.record_commitment.as_ref()), + &Scalar::try_from(blake3_254_hash_to_scalar( + solution.record_commitment.as_ref(), + )) + .expect("Created correctly by dedicated hash function; qed"), segment_commitment, &solution.record_witness, position, @@ -302,9 +322,99 @@ where Ok(solution_distance) } +/// Validate witness embedded within a piece produced by archiver +#[cfg(feature = "kzg")] +pub fn is_piece_valid( + kzg: &Kzg, + piece: &PieceArray, + segment_commitment: &SegmentCommitment, + position: u32, +) -> bool { + let (record, commitment, witness) = piece.split(); + let witness = match Witness::try_from_bytes(witness) { + Ok(witness) => witness, + _ => { + return false; + } + }; + + let mut scalars = Vec::with_capacity(record.len().next_power_of_two()); + + for record_chunk in record.iter() { + match Scalar::try_from(record_chunk) { + Ok(scalar) => { + scalars.push(scalar); + } + _ => { + return false; + } + } + } + + // Number of scalars for KZG must be a power of two + scalars.resize(scalars.capacity(), Scalar::default()); + + let polynomial = match kzg.poly(&scalars) { + Ok(polynomial) => polynomial, + _ => { + return false; + } + }; + + if kzg + .commit(&polynomial) + .map(|commitment| commitment.to_bytes()) + .as_ref() + != Ok(commitment) + { + return false; + } + + let Ok(segment_commitment) = Commitment::try_from(segment_commitment) else { + return false; + }; + + let commitment_hash = Scalar::try_from(blake3_254_hash_to_scalar(commitment.as_ref())) + .expect("Created correctly by dedicated hash function; qed"); + + kzg.verify( + &segment_commitment, + ArchivedHistorySegment::NUM_PIECES, + position, + &commitment_hash, + &witness, + ) +} + +/// Validate witness for record commitment hash produced by archiver +#[cfg(feature = "kzg")] +pub fn is_record_commitment_hash_valid( + kzg: &Kzg, + record_commitment_hash: 
&Scalar, + commitment: &SegmentCommitment, + witness: &RecordWitness, + position: u32, +) -> bool { + let Ok(commitment) = Commitment::try_from(commitment) else { + return false; + }; + let Ok(witness) = Witness::try_from(witness) else { + return false; + }; + + kzg.verify( + &commitment, + ArchivedHistorySegment::NUM_PIECES, + position, + record_commitment_hash, + &witness, + ) +} + /// Derive proof of time entropy from chunk and proof of time for injection purposes. -pub fn derive_pot_entropy(chunk: Scalar, proof_of_time: PotOutput) -> Blake3Hash { - blake3_hash_list(&[&chunk.to_bytes(), proof_of_time.as_ref()]) +#[inline] +pub fn derive_pot_entropy(chunk: &ScalarBytes, proof_of_time: PotOutput) -> Blake3Hash { + blake3_hash_list(&[chunk.as_ref(), proof_of_time.as_ref()]) } /// Derives next solution range based on the total era slots and slot probability diff --git a/shared/subspace-kzg/Cargo.toml b/shared/subspace-kzg/Cargo.toml new file mode 100644 index 0000000000..35f4d3e88f --- /dev/null +++ b/shared/subspace-kzg/Cargo.toml @@ -0,0 +1,55 @@ +[package] +name = "subspace-kzg" +description = "KZG primitives for Subspace Network" +license = "Apache-2.0" +version = "0.1.0" +authors = ["Nazar Mokrynskyi "] +edition = "2021" +include = [ + "/src", + "/Cargo.toml", +] + +[lib] +# Necessary for CLI options to work on benches +bench = false + +[[bench]] +name = "kzg" +harness = false + +[dependencies] +derive_more = { version = "1.0.0", default-features = false, features = ["full"] } +kzg = { git = "https://github.com/grandinetech/rust-kzg", rev = "6c8fcc623df3d7e8c0f30951a49bfea764f90bf4", default-features = false } +parking_lot = { version = "0.12.2", optional = true } +rust-kzg-blst = { git = "https://github.com/grandinetech/rust-kzg", rev = "6c8fcc623df3d7e8c0f30951a49bfea764f90bf4", default-features = false } +# Replacement for `parking_lot` in `no_std` environment +spin = "0.9.7" +static_assertions = "1.1.0" +subspace-core-primitives = { version = "0.1.0", path = "../../crates/subspace-core-primitives", default-features = false } +tracing = { version = "0.1.40", default-features = false } + +[dev-dependencies] +criterion = "0.5.1" +rand = { version = "0.8.5", features = ["min_const_gen"] } +rand_core = "0.6.4" + +[features] +default = [ + "std", + "parallel", +] +# Enables internal parallelism for KZG +parallel = [ + "rust-kzg-blst/parallel", +] +std = [ + "derive_more/std", + "kzg/std", + # In no-std environment we use `spin` + "parking_lot", + "rust-kzg-blst/std", + "subspace-core-primitives/std", + "tracing/std", +] + diff --git a/crates/subspace-core-primitives/benches/kzg.rs b/shared/subspace-kzg/benches/kzg.rs similarity index 87% rename from crates/subspace-core-primitives/benches/kzg.rs rename to shared/subspace-kzg/benches/kzg.rs index 5956de3dcc..351faed3ab 100644 --- a/crates/subspace-core-primitives/benches/kzg.rs +++ b/shared/subspace-kzg/benches/kzg.rs @@ -1,14 +1,14 @@ use criterion::{black_box, criterion_group, criterion_main, Criterion}; -use subspace_core_primitives::crypto::kzg::{embedded_kzg_settings, Kzg}; -use subspace_core_primitives::crypto::Scalar; use subspace_core_primitives::pieces::RawRecord; +use subspace_core_primitives::ScalarBytes; +use subspace_kzg::{Kzg, Scalar}; fn criterion_benchmark(c: &mut Criterion) { let values = (0..RawRecord::NUM_CHUNKS) - .map(|_| Scalar::from(rand::random::<[u8; Scalar::SAFE_BYTES]>())) + .map(|_| Scalar::from(rand::random::<[u8; ScalarBytes::SAFE_BYTES]>())) .collect::>(); - let kzg = Kzg::new(embedded_kzg_settings()); + let 
kzg = Kzg::new(); c.bench_function("create-polynomial", |b| { b.iter(|| { diff --git a/crates/subspace-core-primitives/src/crypto/kzg/eth-public-parameters.bin b/shared/subspace-kzg/src/eth-public-parameters.bin similarity index 100% rename from crates/subspace-core-primitives/src/crypto/kzg/eth-public-parameters.bin rename to shared/subspace-kzg/src/eth-public-parameters.bin diff --git a/crates/subspace-core-primitives/src/crypto/kzg.rs b/shared/subspace-kzg/src/lib.rs similarity index 50% rename from crates/subspace-core-primitives/src/crypto/kzg.rs rename to shared/subspace-kzg/src/lib.rs index 41a332bcb4..fd95740114 100644 --- a/crates/subspace-core-primitives/src/crypto/kzg.rs +++ b/shared/subspace-kzg/src/lib.rs @@ -1,11 +1,10 @@ -//! Tools for KZG commitment scheme +//! KZG primitives for Subspace Network #[cfg(test)] mod tests; extern crate alloc; -use crate::crypto::Scalar; use alloc::collections::btree_map::Entry; use alloc::collections::BTreeMap; #[cfg(not(feature = "std"))] @@ -20,12 +19,17 @@ use kzg::{FFTFr, FFTSettings, Fr, KZGSettings, G1, G2}; #[cfg(feature = "std")] use parking_lot::Mutex; use rust_kzg_blst::types::fft_settings::FsFFTSettings; +use rust_kzg_blst::types::fr::FsFr; use rust_kzg_blst::types::g1::FsG1; use rust_kzg_blst::types::g2::FsG2; use rust_kzg_blst::types::kzg_settings::FsKZGSettings; use rust_kzg_blst::types::poly::FsPoly; #[cfg(not(feature = "std"))] use spin::Mutex; +use static_assertions::const_assert_eq; +use subspace_core_primitives::pieces::{RecordCommitment, RecordWitness}; +use subspace_core_primitives::segments::SegmentCommitment; +use subspace_core_primitives::{ChunkWitness, ScalarBytes}; use tracing::debug; /// Embedded KZG settings as bytes, too big for `no_std` in most cases @@ -33,8 +37,7 @@ use tracing::debug; /// ```bash /// curl -s https://seq.ceremony.ethereum.org/info/current_state | jq '.transcripts[3].powersOfTau' | jq -r '.G1Powers + .G2Powers | map(.[2:]) | join("")' | xxd -r -p - eth-public-parameters.bin /// ``` -#[cfg(feature = "embedded-kzg-settings")] -pub const EMBEDDED_KZG_SETTINGS_BYTES: &[u8] = include_bytes!("kzg/eth-public-parameters.bin"); +pub const EMBEDDED_KZG_SETTINGS_BYTES: &[u8] = include_bytes!("eth-public-parameters.bin"); /// Number of G1 powers stored in [`EMBEDDED_KZG_SETTINGS_BYTES`] pub const NUM_G1_POWERS: usize = 32_768; /// Number of G2 powers stored in [`EMBEDDED_KZG_SETTINGS_BYTES`] @@ -43,7 +46,7 @@ pub const NUM_G2_POWERS: usize = 65; // Symmetric function is present in tests /// Function turns bytes into `FsKZGSettings`, it is up to the user to ensure that bytes make sense, /// otherwise result can be very wrong (but will not panic). -pub fn bytes_to_kzg_settings( +fn bytes_to_kzg_settings( bytes: &[u8], num_g1_powers: usize, num_g2_powers: usize, @@ -82,13 +85,6 @@ pub fn bytes_to_kzg_settings( }) } -/// Embedded KZG settings -#[cfg(feature = "embedded-kzg-settings")] -pub fn embedded_kzg_settings() -> FsKZGSettings { - bytes_to_kzg_settings(EMBEDDED_KZG_SETTINGS_BYTES, NUM_G1_POWERS, NUM_G2_POWERS) - .expect("Static bytes are correct, there is a test for this; qed") -} - /// Commitment to polynomial #[derive(Debug, Clone, From)] pub struct Polynomial(FsPoly); @@ -109,11 +105,270 @@ impl Polynomial { } } +/// Representation of a single BLS12-381 scalar value. 
+#[derive(Debug, Default, Copy, Clone, Eq, PartialEq, Deref, DerefMut)] +#[repr(transparent)] +pub struct Scalar(FsFr); + +const_assert_eq!( + mem::size_of::<Option<Scalar>>(), + mem::size_of::<Option<FsFr>>() +); +const_assert_eq!( + mem::align_of::<Option<Scalar>>(), + mem::align_of::<Option<FsFr>>() +); + +impl From<&[u8; ScalarBytes::SAFE_BYTES]> for Scalar { + #[inline] + fn from(value: &[u8; ScalarBytes::SAFE_BYTES]) -> Self { + let mut bytes = [0u8; ScalarBytes::FULL_BYTES]; + bytes[1..].copy_from_slice(value); + Self::try_from(bytes).expect("Safe bytes always fit into scalar and thus succeed; qed") + } +} + +impl From<[u8; ScalarBytes::SAFE_BYTES]> for Scalar { + #[inline] + fn from(value: [u8; ScalarBytes::SAFE_BYTES]) -> Self { + Self::from(&value) + } +} + +impl TryFrom<&[u8; ScalarBytes::FULL_BYTES]> for Scalar { + type Error = String; + + #[inline] + fn try_from(value: &[u8; ScalarBytes::FULL_BYTES]) -> Result<Self, Self::Error> { + Self::try_from(*value) + } +} + +impl TryFrom<[u8; ScalarBytes::FULL_BYTES]> for Scalar { + type Error = String; + + #[inline] + fn try_from(value: [u8; ScalarBytes::FULL_BYTES]) -> Result<Self, Self::Error> { + FsFr::from_bytes(&value).map(Scalar) + } +} + +impl TryFrom<&ScalarBytes> for Scalar { + type Error = String; + + #[inline] + fn try_from(value: &ScalarBytes) -> Result<Self, Self::Error> { + FsFr::from_bytes(value.as_ref()).map(Scalar) + } +} + +impl TryFrom<ScalarBytes> for Scalar { + type Error = String; + + #[inline] + fn try_from(value: ScalarBytes) -> Result<Self, Self::Error> { + Self::try_from(&value) + } +} + +impl From<&Scalar> for [u8; ScalarBytes::FULL_BYTES] { + #[inline] + fn from(value: &Scalar) -> Self { + value.0.to_bytes() + } +} + +impl From<Scalar> for [u8; ScalarBytes::FULL_BYTES] { + #[inline] + fn from(value: Scalar) -> Self { + Self::from(&value) + } +} + +impl From<&Scalar> for ScalarBytes { + #[inline] + fn from(value: &Scalar) -> Self { + ScalarBytes::from(value.0.to_bytes()) + } +} + +impl From<Scalar> for ScalarBytes { + #[inline] + fn from(value: Scalar) -> Self { + Self::from(&value) + } +} + +impl Scalar { + /// Convert scalar into bytes + pub fn to_bytes(&self) -> [u8; ScalarBytes::FULL_BYTES] { + self.into() + } + + /// Convert scalar into safe bytes, returns `None` if not possible to convert due to larger + /// internal value + pub fn try_to_safe_bytes(&self) -> Option<[u8; ScalarBytes::SAFE_BYTES]> { + let bytes = self.to_bytes(); + if bytes[0] == 0 { + Some(bytes[1..].try_into().expect("Correct length; qed")) + } else { + None + } + } + + /// Convenient conversion from slice of scalar to underlying representation for efficiency + /// purposes. + #[inline] + pub fn slice_to_repr(value: &[Self]) -> &[FsFr] { + // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory layout + unsafe { mem::transmute(value) } + } + + /// Convenient conversion from slice of underlying representation to scalar for efficiency + /// purposes. + #[inline] + pub fn slice_from_repr(value: &[FsFr]) -> &[Self] { + // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory layout + unsafe { mem::transmute(value) } + } + + /// Convenient conversion from slice of optional scalar to underlying representation for efficiency + /// purposes. 
+ #[inline] + pub fn slice_option_to_repr(value: &[Option<Self>]) -> &[Option<FsFr>] { + // SAFETY: `Scalar` is `#[repr(transparent)]` containing `#[repr(C)]` and we assume the + // compiler lays out optional `repr(C)` plain old data arrays the same as their optional + // transparent wrappers + unsafe { mem::transmute(value) } + } + + /// Convenient conversion from slice of optional underlying representation to scalar for efficiency + /// purposes. + #[inline] + pub fn slice_option_from_repr(value: &[Option<FsFr>]) -> &[Option<Self>] { + // SAFETY: `Scalar` is `#[repr(transparent)]` containing `#[repr(C)]` and we assume the + // compiler lays out optional `repr(C)` plain old data arrays the same as their optional + // transparent wrappers + unsafe { mem::transmute(value) } + } + + /// Convenient conversion from mutable slice of scalar to underlying representation for + /// efficiency purposes. + #[inline] + pub fn slice_mut_to_repr(value: &mut [Self]) -> &mut [FsFr] { + // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory layout + unsafe { mem::transmute(value) } + } + + /// Convenient conversion from mutable slice of underlying representation to scalar for + /// efficiency purposes. + #[inline] + pub fn slice_mut_from_repr(value: &mut [FsFr]) -> &mut [Self] { + // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory layout + unsafe { mem::transmute(value) } + } + + /// Convenient conversion from optional mutable slice of scalar to underlying representation for + /// efficiency purposes. + #[inline] + pub fn slice_option_mut_to_repr(value: &mut [Option<Self>]) -> &mut [Option<FsFr>] { + // SAFETY: `Scalar` is `#[repr(transparent)]` containing `#[repr(C)]` and we assume the + // compiler lays out optional `repr(C)` plain old data arrays the same as their optional + // transparent wrappers + unsafe { mem::transmute(value) } + } + + /// Convenient conversion from optional mutable slice of underlying representation to scalar for + /// efficiency purposes. + #[inline] + pub fn slice_option_mut_from_repr(value: &mut [Option<FsFr>]) -> &mut [Option<Self>] { + // SAFETY: `Scalar` is `#[repr(transparent)]` containing `#[repr(C)]` and we assume the + // compiler lays out optional `repr(C)` plain old data arrays the same as their optional + // transparent wrappers + unsafe { mem::transmute(value) } + } + + /// Convenient conversion from vector of scalar to underlying representation for efficiency + /// purposes. + #[inline] + pub fn vec_to_repr(value: Vec<Self>) -> Vec<FsFr> { + // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory + // layout, original vector is not dropped + unsafe { + let mut value = mem::ManuallyDrop::new(value); + Vec::from_raw_parts( + value.as_mut_ptr() as *mut FsFr, + value.len(), + value.capacity(), + ) + } + } + + /// Convenient conversion from vector of underlying representation to scalar for efficiency + /// purposes. + #[inline] + pub fn vec_from_repr(value: Vec<FsFr>) -> Vec<Self> { + // SAFETY: `Scalar` is `#[repr(transparent)]` and guaranteed to have the same memory + // layout, original vector is not dropped + unsafe { + let mut value = mem::ManuallyDrop::new(value); + Vec::from_raw_parts( + value.as_mut_ptr() as *mut Self, + value.len(), + value.capacity(), + ) + } + } + + /// Convenient conversion from vector of optional scalar to underlying representation for + /// efficiency purposes. 
+ #[inline] + pub fn vec_option_to_repr(value: Vec<Option<Self>>) -> Vec<Option<FsFr>> { + // SAFETY: `Scalar` is `#[repr(transparent)]` containing `#[repr(C)]` and we assume the + // compiler lays out optional `repr(C)` plain old data arrays the same as their optional + // transparent wrappers, original vector is not dropped + unsafe { + let mut value = mem::ManuallyDrop::new(value); + Vec::from_raw_parts( + value.as_mut_ptr() as *mut Option<FsFr>, + value.len(), + value.capacity(), + ) + } + } + + /// Convenient conversion from vector of optional underlying representation to scalar for + /// efficiency purposes. + #[inline] + pub fn vec_option_from_repr(value: Vec<Option<FsFr>>) -> Vec<Option<Self>> { + // SAFETY: `Scalar` is `#[repr(transparent)]` containing `#[repr(C)]` and we assume the + // compiler lays out optional `repr(C)` plain old data arrays the same as their optional + // transparent wrappers, original vector is not dropped + unsafe { + let mut value = mem::ManuallyDrop::new(value); + Vec::from_raw_parts( + value.as_mut_ptr() as *mut Option<Self>, + value.len(), + value.capacity(), + ) + } + } +} + /// Commitment to polynomial #[derive(Debug, Default, Copy, Clone, PartialEq, Eq, From, Into, AsRef, AsMut, Deref, DerefMut)] #[repr(transparent)] pub struct Commitment(FsG1); +const_assert_eq!( + mem::size_of::<Option<Commitment>>(), + mem::size_of::<Option<FsG1>>() +); +const_assert_eq!( + mem::align_of::<Option<Commitment>>(), + mem::align_of::<Option<FsG1>>() +); + impl Commitment { /// Commitment size in bytes. const SIZE: usize = 48; @@ -152,8 +407,9 @@ impl Commitment { /// efficiency purposes. #[inline] pub fn slice_option_to_repr(value: &[Option<Self>]) -> &[Option<FsG1>] { - // SAFETY: `Commitment` is `#[repr(transparent)]` and guaranteed to have the same memory - // layout + // SAFETY: `Commitment` is `#[repr(transparent)]` containing `#[repr(C)]` and we assume the + // compiler lays out optional `repr(C)` plain old data arrays the same as their optional + // transparent wrappers unsafe { mem::transmute(value) } } @@ -161,8 +417,9 @@ impl Commitment { /// efficiency purposes. #[inline] pub fn slice_option_from_repr(value: &[Option<FsG1>]) -> &[Option<Self>] { - // SAFETY: `Commitment` is `#[repr(transparent)]` and guaranteed to have the same memory - // layout + // SAFETY: `Commitment` is `#[repr(transparent)]` containing `#[repr(C)]` and we assume the + // compiler lays out optional `repr(C)` plain old data arrays the same as their optional + // transparent wrappers unsafe { mem::transmute(value) } } @@ -188,8 +445,9 @@ impl Commitment { /// for efficiency purposes. #[inline] pub fn slice_option_mut_to_repr(value: &mut [Option<Self>]) -> &mut [Option<FsG1>] { - // SAFETY: `Commitment` is `#[repr(transparent)]` and guaranteed to have the same memory - // layout + // SAFETY: `Commitment` is `#[repr(transparent)]` containing `#[repr(C)]` and we assume the + // compiler lays out optional `repr(C)` plain old data arrays the same as their optional + // transparent wrappers unsafe { mem::transmute(value) } } @@ -197,8 +455,9 @@ impl Commitment { /// for efficiency purposes. #[inline] pub fn slice_option_mut_from_repr(value: &mut [Option<FsG1>]) -> &mut [Option<Self>] { - // SAFETY: `Commitment` is `#[repr(transparent)]` and guaranteed to have the same memory - // layout + // SAFETY: `Commitment` is `#[repr(transparent)]` containing `#[repr(C)]` and we assume the + // compiler lays out optional `repr(C)` plain old data arrays the same as their optional + // transparent wrappers unsafe { mem::transmute(value) } } @@ -238,8 +497,9 @@ impl Commitment { /// efficiency purposes. 
#[inline] pub fn vec_option_to_repr(value: Vec<Option<Self>>) -> Vec<Option<FsG1>> { - // SAFETY: `Commitment` is `#[repr(transparent)]` and guaranteed to have the same memory - // layout, original vector is not dropped + // SAFETY: `Commitment` is `#[repr(transparent)]` containing `#[repr(C)]` and we assume the + // compiler lays out optional `repr(C)` plain old data arrays the same as their optional + // transparent wrappers, original vector is not dropped unsafe { let mut value = mem::ManuallyDrop::new(value); Vec::from_raw_parts( @@ -254,8 +514,9 @@ impl Commitment { /// efficiency purposes. #[inline] pub fn vec_option_from_repr(value: Vec<Option<FsG1>>) -> Vec<Option<Self>> { - // SAFETY: `Commitment` is `#[repr(transparent)]` and guaranteed to have the same memory - // layout, original vector is not dropped + // SAFETY: `Commitment` is `#[repr(transparent)]` containing `#[repr(C)]` and we assume the + // compiler lays out optional `repr(C)` plain old data arrays the same as their optional + // transparent wrappers, original vector is not dropped unsafe { let mut value = mem::ManuallyDrop::new(value); Vec::from_raw_parts( @@ -267,6 +528,56 @@ impl Commitment { } } +impl From<Commitment> for RecordCommitment { + #[inline] + fn from(commitment: Commitment) -> Self { + RecordCommitment::from(commitment.to_bytes()) + } +} + +impl TryFrom<&RecordCommitment> for Commitment { + type Error = String; + + #[inline] + fn try_from(commitment: &RecordCommitment) -> Result<Self, Self::Error> { + Commitment::try_from(**commitment) + } +} + +impl TryFrom<RecordCommitment> for Commitment { + type Error = String; + + #[inline] + fn try_from(commitment: RecordCommitment) -> Result<Self, Self::Error> { + Commitment::try_from(&commitment) + } +} + +impl From<Commitment> for SegmentCommitment { + #[inline] + fn from(commitment: Commitment) -> Self { + SegmentCommitment::from(commitment.to_bytes()) + } +} + +impl TryFrom<&SegmentCommitment> for Commitment { + type Error = String; + + #[inline] + fn try_from(commitment: &SegmentCommitment) -> Result<Self, Self::Error> { + Commitment::try_from(**commitment) + } +} + +impl TryFrom<SegmentCommitment> for Commitment { + type Error = String; + + #[inline] + fn try_from(commitment: SegmentCommitment) -> Result<Self, Self::Error> { + Commitment::try_from(&commitment) + } +} + impl From<Commitment> for [u8; Commitment::SIZE] { #[inline] fn from(commitment: Commitment) -> Self { @@ -319,6 +630,56 @@ impl Witness { } } +impl From<Witness> for RecordWitness { + #[inline] + fn from(witness: Witness) -> Self { + RecordWitness::from(witness.to_bytes()) + } +} + +impl TryFrom<&RecordWitness> for Witness { + type Error = String; + + #[inline] + fn try_from(witness: &RecordWitness) -> Result<Self, Self::Error> { + Witness::try_from(**witness) + } +} + +impl TryFrom<RecordWitness> for Witness { + type Error = String; + + #[inline] + fn try_from(witness: RecordWitness) -> Result<Self, Self::Error> { + Witness::try_from(&witness) + } +} + +impl From<Witness> for ChunkWitness { + #[inline] + fn from(witness: Witness) -> Self { + ChunkWitness::from(witness.to_bytes()) + } +} + +impl TryFrom<&ChunkWitness> for Witness { + type Error = String; + + #[inline] + fn try_from(witness: &ChunkWitness) -> Result<Self, Self::Error> { + Witness::try_from(**witness) + } +} + +impl TryFrom<ChunkWitness> for Witness { + type Error = String; + + #[inline] + fn try_from(witness: ChunkWitness) -> Result<Self, Self::Error> { + Witness::try_from(&witness) + } +} + impl From<Witness> for [u8; Witness::SIZE] { #[inline] fn from(witness: Witness) -> Self { @@ -364,11 +725,18 @@ pub struct Kzg { } impl Kzg { - /// Create new instance with given KZG settings. + /// Create new instance with embedded KZG settings. 
/// - /// Canonical KZG settings can be obtained using `embedded_kzg_settings()` function that becomes - /// available with `embedded-kzg-settings` feature (enabled by default). - pub fn new(kzg_settings: FsKZGSettings) -> Self { + /// NOTE: Prefer cloning to instantiation since cloning is cheap and instantiation is not! + #[expect( + clippy::new_without_default, + reason = "Default must not be implemented, because Kzg should be cloned instead. Cloning is cheap and instantiation is not." + )] + pub fn new() -> Self { + let kzg_settings = + bytes_to_kzg_settings(EMBEDDED_KZG_SETTINGS_BYTES, NUM_G1_POWERS, NUM_G2_POWERS) + .expect("Static bytes are correct, there is a test for this; qed"); + let inner = Arc::new(Inner { kzg_settings, fft_settings_cache: Mutex::default(), diff --git a/shared/subspace-kzg/src/tests.rs b/shared/subspace-kzg/src/tests.rs new file mode 100644 index 0000000000..6442885dd4 --- /dev/null +++ b/shared/subspace-kzg/src/tests.rs @@ -0,0 +1,84 @@ +use crate::{Kzg, Scalar}; +use rand::thread_rng; +use rand_core::RngCore; +use subspace_core_primitives::ScalarBytes; + +#[test] +fn basic() { + let values = (0..8) + .map(|_| Scalar::from(rand::random::<[u8; ScalarBytes::SAFE_BYTES]>())) + .collect::>(); + + let kzg = Kzg::new(); + let polynomial = kzg.poly(&values).unwrap(); + let commitment = kzg.commit(&polynomial).unwrap(); + + let num_values = values.len(); + + for (index, value) in values.iter().enumerate() { + let index = index.try_into().unwrap(); + + let witness = kzg.create_witness(&polynomial, num_values, index).unwrap(); + + assert!( + kzg.verify(&commitment, num_values, index, value, &witness), + "failed on index {index}" + ); + } +} + +#[test] +fn bytes_scalars_conversion() { + { + let mut bytes = vec![0u8; ScalarBytes::SAFE_BYTES * 16]; + thread_rng().fill_bytes(&mut bytes); + + let scalars = bytes + .chunks_exact(ScalarBytes::SAFE_BYTES) + .map(|bytes| { + Scalar::from( + <&[u8; ScalarBytes::SAFE_BYTES]>::try_from(bytes) + .expect("Chunked into correct size; qed"), + ) + }) + .collect::>(); + + { + let mut decoded_bytes = vec![0u8; bytes.len()]; + decoded_bytes + .chunks_exact_mut(ScalarBytes::SAFE_BYTES) + .zip(scalars.iter()) + .for_each(|(bytes, scalar)| { + bytes.copy_from_slice(&scalar.to_bytes()[1..]); + }); + + assert_eq!(bytes, decoded_bytes); + } + + { + let mut decoded_bytes = vec![0u8; bytes.len()]; + decoded_bytes + .chunks_exact_mut(ScalarBytes::SAFE_BYTES) + .zip(scalars.iter()) + .for_each(|(bytes, scalar)| { + bytes.copy_from_slice(&scalar.to_bytes()[1..]); + }); + + assert_eq!(bytes, decoded_bytes); + } + } + + { + let bytes = { + let mut bytes = [0u8; ScalarBytes::FULL_BYTES]; + bytes[1..].copy_from_slice(&rand::random::<[u8; ScalarBytes::SAFE_BYTES]>()); + bytes + }; + + { + let scalar = Scalar::try_from(&bytes).unwrap(); + + assert_eq!(bytes, scalar.to_bytes()); + } + } +} diff --git a/shared/subspace-proof-of-space-gpu/Cargo.toml b/shared/subspace-proof-of-space-gpu/Cargo.toml index 852ae6246d..3888584b7d 100644 --- a/shared/subspace-proof-of-space-gpu/Cargo.toml +++ b/shared/subspace-proof-of-space-gpu/Cargo.toml @@ -19,6 +19,7 @@ rust-kzg-blst = { git = "https://github.com/grandinetech/rust-kzg", rev = "6c8fc #sppark = { git = "https://github.com/dot-asm/sppark", rev = "8eeafe0f6cc0ca8211b1be93922df1b5a118bbd2", optional = true } sppark = { version = "0.1.8", optional = true } subspace-core-primitives = { version = "0.1.0", path = "../../crates/subspace-core-primitives", default-features = false, optional = true } +subspace-kzg = { 
version = "0.1.0", path = "../subspace-kzg", optional = true } [dev-dependencies] subspace-erasure-coding = { version = "0.1.0", path = "../../crates/subspace-erasure-coding" } @@ -39,4 +40,5 @@ _gpu = [ "dep:rust-kzg-blst", "dep:sppark", "dep:subspace-core-primitives", + "dep:subspace-kzg", ] diff --git a/shared/subspace-proof-of-space-gpu/src/cuda.rs b/shared/subspace-proof-of-space-gpu/src/cuda.rs index 43c599d9d0..4927415c1b 100644 --- a/shared/subspace-proof-of-space-gpu/src/cuda.rs +++ b/shared/subspace-proof-of-space-gpu/src/cuda.rs @@ -7,9 +7,10 @@ mod tests; use rust_kzg_blst::types::fr::FsFr; use std::ops::DerefMut; -use subspace_core_primitives::crypto::Scalar; use subspace_core_primitives::pieces::Record; use subspace_core_primitives::pos::{PosProof, PosSeed}; +use subspace_core_primitives::ScalarBytes; +use subspace_kzg::Scalar; extern "C" { /// # Returns @@ -109,7 +110,8 @@ impl CudaDevice { } let mut proof_count = 0u32; - let mut chunks_scratch_gpu = Vec::<[u8; Scalar::FULL_BYTES]>::with_capacity(challenge_len); + let mut chunks_scratch_gpu = + Vec::<[u8; ScalarBytes::FULL_BYTES]>::with_capacity(challenge_len); let mut challenge_index_gpu = Vec::::with_capacity(challenge_len); let mut parity_record_chunks = Vec::::with_capacity(Record::NUM_CHUNKS); diff --git a/shared/subspace-proof-of-space-gpu/src/cuda/tests.rs b/shared/subspace-proof-of-space-gpu/src/cuda/tests.rs index 1633764345..9cf23e4a05 100644 --- a/shared/subspace-proof-of-space-gpu/src/cuda/tests.rs +++ b/shared/subspace-proof-of-space-gpu/src/cuda/tests.rs @@ -36,9 +36,10 @@ fn basic() { let sector_id = SectorId::new(blake3_hash(b"hello"), 500); let history_size = HistorySize::ONE; let mut record = Record::new_boxed(); - record.iter_mut().enumerate().for_each(|(index, chunk)| { - *chunk = blake3_254_hash_to_scalar(&index.to_le_bytes()).to_bytes() - }); + record + .iter_mut() + .enumerate() + .for_each(|(index, chunk)| *chunk = *blake3_254_hash_to_scalar(&index.to_le_bytes())); let mut cpu_encoded_records = Record::new_zero_vec(2); for cpu_encoded_record in &mut cpu_encoded_records { diff --git a/test/subspace-test-client/Cargo.toml b/test/subspace-test-client/Cargo.toml index 369422e369..aea3ba2d82 100644 --- a/test/subspace-test-client/Cargo.toml +++ b/test/subspace-test-client/Cargo.toml @@ -36,6 +36,7 @@ subspace-archiving = { path = "../../crates/subspace-archiving" } subspace-core-primitives = { path = "../../crates/subspace-core-primitives" } subspace-erasure-coding = { path = "../../crates/subspace-erasure-coding" } subspace-farmer-components = { path = "../../crates/subspace-farmer-components" } +subspace-kzg = { version = "0.1.0", path = "../../shared/subspace-kzg" } subspace-proof-of-space = { path = "../../crates/subspace-proof-of-space" } subspace-runtime-primitives = { path = "../../crates/subspace-runtime-primitives" } subspace-service = { path = "../../crates/subspace-service" } diff --git a/test/subspace-test-client/src/lib.rs b/test/subspace-test-client/src/lib.rs index 916b9c8ccc..45cf607082 100644 --- a/test/subspace-test-client/src/lib.rs +++ b/test/subspace-test-client/src/lib.rs @@ -34,7 +34,6 @@ use sp_core::{Decode, Encode}; use std::num::{NonZeroU64, NonZeroUsize}; use std::slice; use std::sync::Arc; -use subspace_core_primitives::crypto::kzg::{embedded_kzg_settings, Kzg}; use subspace_core_primitives::objects::BlockObjectMapping; use subspace_core_primitives::pieces::Record; use subspace_core_primitives::pos::PosSeed; @@ -47,6 +46,7 @@ use subspace_farmer_components::plotting::{ }; use 
subspace_farmer_components::reading::ReadSectorRecordChunksMode; use subspace_farmer_components::FarmerProtocolInfo; +use subspace_kzg::Kzg; use subspace_proof_of_space::{Table, TableGenerator}; use subspace_runtime_primitives::opaque::Block; use subspace_service::{FullClient, NewFull}; @@ -121,7 +121,7 @@ async fn start_farming<PosTable, Client>( { let (plotting_result_sender, plotting_result_receiver) = futures::channel::oneshot::channel(); - let kzg = Kzg::new(embedded_kzg_settings()); + let kzg = Kzg::new(); let erasure_coding = ErasureCoding::new( NonZeroUsize::new(Record::NUM_S_BUCKETS.next_power_of_two().ilog2() as usize) .expect("Not zero; qed"), @@ -205,7 +205,7 @@ where PosTable: Table, Client: BlockBackend<Block> + HeaderBackend<Block>, { - let kzg = Kzg::new(embedded_kzg_settings()); + let kzg = Kzg::new(); let mut archiver = subspace_archiving::archiver::Archiver::new(kzg.clone(), erasure_coding.clone());
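For quick reference, the commit/witness round trip exposed by the relocated crate, mirroring its `basic` test above (the wrapper function is illustrative; `rand` with the `min_const_gen` feature is assumed, as in the crate's dev-dependencies):

```rust
use subspace_core_primitives::ScalarBytes;
use subspace_kzg::{Kzg, Scalar};

fn commit_and_verify() {
    // 31 "safe" bytes always convert into a scalar infallibly.
    let values = (0..8)
        .map(|_| Scalar::from(rand::random::<[u8; ScalarBytes::SAFE_BYTES]>()))
        .collect::<Vec<_>>();

    // Instantiation decodes the embedded public parameters and is comparatively
    // expensive; clone the instance wherever it is needed more than once.
    let kzg = Kzg::new();
    let polynomial = kzg.poly(&values).unwrap();
    let commitment = kzg.commit(&polynomial).unwrap();

    // Prove and check the value at index 0, exactly as the crate's own test does.
    let witness = kzg.create_witness(&polynomial, values.len(), 0).unwrap();
    assert!(kzg.verify(&commitment, values.len(), 0, &values[0], &witness));
}
```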