
Commit

Code-review changes
jasagredo committed Dec 9, 2024
1 parent 589b852 commit f4b81e5
Showing 48 changed files with 967 additions and 1,005 deletions.
6 changes: 3 additions & 3 deletions flake.lock

Some generated files are not rendered by default.

8 changes: 4 additions & 4 deletions ouroboros-consensus-cardano/app/DBAnalyser/Parsers.hs
@@ -1,5 +1,5 @@
{-# LANGUAGE ApplicativeDo #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE LambdaCase #-}

module DBAnalyser.Parsers (
BlockType (..)
@@ -82,7 +82,7 @@ parseValidationPolicy =
_ -> Nothing

parseAnalysis :: Parser AnalysisName
parseAnalysis = asum [
parseAnalysis = Foldable.asum [
flag' ShowSlotBlockNo $ mconcat [
long "show-slot-block-no"
, help "Show slot and block number and hash of all blocks"
@@ -150,7 +150,7 @@ checkNoThunksParser = CheckNoThunksEvery <$> option auto
<> help "Check the ledger state for thunks every n blocks" )

parseLimit :: Parser Limit
parseLimit = asum [
parseLimit = Foldable.asum [
Limit <$> option auto (mconcat [
long "num-blocks-to-process"
, help "Maximum number of blocks we want to process"
@@ -244,7 +244,7 @@ parseShelleyArgs = ShelleyBlockArgs
, help "Path to config file"
, metavar "PATH"
])
<*> asum [ Nonce <$> parseNonce
<*> Foldable.asum [ Nonce <$> parseNonce
, pure NeutralNonce]
where
parseNonce = strOption (mconcat [
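These hunks only qualify the existing asum calls as Foldable.asum. For reference, a minimal standalone sketch of the same idiom (combining alternative optparse-applicative parsers with asum) could look like the following; this is illustrative code, not from the repository:

import qualified Data.Foldable as Foldable
import Options.Applicative

data Mode = Fast | Slow deriving Show

-- Try each alternative parser in order; default to Slow when no flag is given.
parseMode :: Parser Mode
parseMode = Foldable.asum
  [ flag' Fast (long "fast" <> help "Run the quick analysis")
  , flag' Slow (long "slow" <> help "Run the thorough analysis")
  , pure Slow
  ]

main :: IO ()
main = print =<< execParser (info (parseMode <**> helper) fullDesc)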
47 changes: 23 additions & 24 deletions ouroboros-consensus-cardano/app/snapshot-converter.hs
@@ -2,7 +2,6 @@
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications #-}
@@ -159,20 +158,20 @@ load ::
=> Config
-> CodecConfig blk
-> IO (ExtLedgerState blk ValuesMK)
load Config{from = Legacy, inpath = pathToFS -> (fs, inpath)} ccfg = do
checkSnapshot Legacy inpath fs
load Config{from = Legacy, inpath = pathToFS -> (fs, path)} ccfg = do
checkSnapshot Legacy path fs
eSt <- fmap unstowLedgerTables
<$> runExceptT (readExtLedgerState fs (decodeDiskExtLedgerState ccfg) decode inpath)
<$> runExceptT (readExtLedgerState fs (decodeDiskExtLedgerState ccfg) decode path)
case eSt of
Left err -> throwIO $ SnapshotError err
Right st -> pure st
load Config{from = Mem, inpath = pathToFS -> (fs@(SomeHasFS hasFS), inpath)} ccfg = do
checkSnapshot Mem inpath fs
eExtLedgerSt <- runExceptT $ readExtLedgerState fs (decodeDiskExtLedgerState ccfg) decode (inpath </> mkFsPath ["state"])
load Config{from = Mem, inpath = pathToFS -> (fs@(SomeHasFS hasFS), path)} ccfg = do
checkSnapshot Mem path fs
eExtLedgerSt <- runExceptT $ readExtLedgerState fs (decodeDiskExtLedgerState ccfg) decode (path </> mkFsPath ["state"])
case eExtLedgerSt of
Left err -> throwIO $ SnapshotError err
Right extLedgerSt -> do
values <- withFile hasFS (inpath </> mkFsPath ["tables", "tvar"]) ReadMode $ \h -> do
values <- withFile hasFS (path </> mkFsPath ["tables", "tvar"]) ReadMode $ \h -> do
bs <- hGetAll hasFS h
case CBOR.deserialiseFromBytes valuesMKDecoder bs of
Left err -> throwIO $ TablesCantDeserializeError err
@@ -181,14 +180,14 @@ load Config{from = Mem, inpath = pathToFS -> (fs@(SomeHasFS hasFS), inpath)} ccf
then pure x
else throwIO TablesTrailingBytes
pure (extLedgerSt `withLedgerTables` values)
load Config{from = LMDB, inpath = pathToFS -> (fs, inpath)} ccfg = do
checkSnapshot LMDB inpath fs
eExtLedgerSt <- runExceptT $ readExtLedgerState fs (decodeDiskExtLedgerState ccfg) decode (inpath </> mkFsPath ["state"])
load Config{from = LMDB, inpath = pathToFS -> (fs, path)} ccfg = do
checkSnapshot LMDB path fs
eExtLedgerSt <- runExceptT $ readExtLedgerState fs (decodeDiskExtLedgerState ccfg) decode (path </> mkFsPath ["state"])
case eExtLedgerSt of
Left err -> throwIO $ SnapshotError err
Right extLedgerSt -> do
values <- do
dbEnv <- LMDB.openEnvironment (fsToFilePath (MountPoint ".") (inpath </> mkFsPath ["tables"])) defaultLMDBLimits
dbEnv <- LMDB.openEnvironment (fsToFilePath (MountPoint ".") (path </> mkFsPath ["tables"])) defaultLMDBLimits
Disk.LMDBMK _ dbBackingTables <- LMDB.readWriteTransaction dbEnv (Disk.getDb (K2 "utxo"))
catch (LMDB.readOnlyTransaction dbEnv $
LMDB.Cursor.runCursorAsTransaction'
@@ -209,26 +208,26 @@ store ::
-> CodecConfig blk
-> ExtLedgerState blk ValuesMK
-> IO ()
store Config{to = Legacy, outpath = pathToFS -> (fs, outpath)} ccfg state =
writeExtLedgerState fs (encodeDiskExtLedgerState ccfg) outpath (stowLedgerTables state)
store Config{to = Mem, outpath = pathToFS -> (fs@(SomeHasFS hasFS), outpath)} ccfg state = do
store Config{to = Legacy, outpath = pathToFS -> (fs, path)} ccfg state =
writeExtLedgerState fs (encodeDiskExtLedgerState ccfg) path (stowLedgerTables state)
store Config{to = Mem, outpath = pathToFS -> (fs@(SomeHasFS hasFS), path)} ccfg state = do
-- write state
createDirectoryIfMissing hasFS True outpath
writeExtLedgerState fs (encodeDiskExtLedgerState ccfg) (outpath </> mkFsPath ["state"]) (forgetLedgerTables state)
createDirectoryIfMissing hasFS True path
writeExtLedgerState fs (encodeDiskExtLedgerState ccfg) (path </> mkFsPath ["state"]) (forgetLedgerTables state)
-- write tables
createDirectoryIfMissing hasFS True $ outpath </> mkFsPath ["tables"]
withFile hasFS (outpath </> mkFsPath ["tables", "tvar"]) (WriteMode MustBeNew) $ \hf ->
createDirectoryIfMissing hasFS True $ path </> mkFsPath ["tables"]
withFile hasFS (path </> mkFsPath ["tables", "tvar"]) (WriteMode MustBeNew) $ \hf ->
void $
hPutAll hasFS hf $
CBOR.toLazyByteString $
valuesMKEncoder (projectLedgerTables state)
store Config{to = LMDB, outpath = pathToFS -> (fs@(SomeHasFS hasFS), outpath)} ccfg state = do
store Config{to = LMDB, outpath = pathToFS -> (fs@(SomeHasFS hasFS), path)} ccfg state = do
-- write state
createDirectoryIfMissing hasFS True outpath
writeExtLedgerState fs (encodeDiskExtLedgerState ccfg) (outpath </> mkFsPath ["state"]) (forgetLedgerTables state)
createDirectoryIfMissing hasFS True path
writeExtLedgerState fs (encodeDiskExtLedgerState ccfg) (path </> mkFsPath ["state"]) (forgetLedgerTables state)
-- write tables
createDirectoryIfMissing hasFS True $ outpath </> mkFsPath ["tables"]
dbEnv <- LMDB.openEnvironment (fsToFilePath (MountPoint ".") $ outpath </> mkFsPath ["tables"]) defaultLMDBLimits
createDirectoryIfMissing hasFS True $ path </> mkFsPath ["tables"]
dbEnv <- LMDB.openEnvironment (fsToFilePath (MountPoint ".") $ path </> mkFsPath ["tables"]) defaultLMDBLimits
dbState <- LMDB.readWriteTransaction dbEnv $ LMDB.getDatabase (Just "_dbstate")
dbBackingTables <-
LMDB.readWriteTransaction dbEnv $
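In the Mem branches above, the tables are written and read back as a single CBOR blob, and any leftover input is rejected (TablesTrailingBytes). A rough standalone sketch of that decode-then-reject-trailing-bytes step, using a plain FilePath and a caller-supplied decoder instead of the converter's fs-api handles, could be:

{-# LANGUAGE RankNTypes #-}

import           Codec.CBOR.Decoding (Decoder)
import qualified Codec.CBOR.Read as CBOR
import qualified Data.ByteString.Lazy as LBS

-- Decode exactly one CBOR value from a file; fail if any bytes remain afterwards.
readCborExactly :: (forall s. Decoder s a) -> FilePath -> IO (Either String a)
readCborExactly dec fp = do
  bs <- LBS.readFile fp
  pure $ case CBOR.deserialiseFromBytes dec bs of
    Left err -> Left (show err)
    Right (trailing, x)
      | LBS.null trailing -> Right x
      | otherwise         -> Left "trailing bytes after CBOR value"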
@@ -163,6 +163,7 @@ library
ouroboros-consensus-protocol ^>=0.9,
ouroboros-network-api ^>=0.11,
serialise ^>=0.2,
singletons ^>=3.0,
small-steps,
sop-core ^>=0.5,
sop-extras ^>=0.2,
@@ -19,6 +19,13 @@

{-# OPTIONS_GHC -Wno-orphans #-}

#if __GLASGOW_HASKELL__ <= 906
{-# OPTIONS_GHC -Wno-incomplete-patterns
-Wno-incomplete-uni-patterns
-Wno-incomplete-record-updates
-Wno-overlapping-patterns #-}
#endif

module Ouroboros.Consensus.Cardano.Ledger (
CardanoTxOut (..)
, eliminateCardanoTxOut
@@ -27,7 +27,7 @@
module Ouroboros.Consensus.Cardano.QueryHF () where

import Data.Functor.Product
import Data.Proxy
import Data.Singletons
import Data.SOP.BasicFunctors
import Data.SOP.Constraint
import Data.SOP.Index
@@ -54,10 +54,12 @@ newtype FlipBlockQuery footprint result x =
FlipBlockQuery (BlockQuery x footprint result)

answerCardanoQueryHF ::
( xs ~ CardanoEras c
, CardanoHardForkConstraints c
, All (Compose NoThunks WrapTxOut) xs
)
forall x xs c footprint result m.
( xs ~ CardanoEras c
, CardanoHardForkConstraints c
, All (Compose NoThunks WrapTxOut) xs
, SingI footprint
)
=> ( forall blk.
IsShelleyBlock blk
=> Index xs blk
@@ -72,15 +74,18 @@ answerCardanoQueryHF ::
-> ReadOnlyForker' m (HardForkBlock xs)
-> m result
answerCardanoQueryHF f idx cfg q dlv =
hcollapse $
hap
( (Fn $ \(Pair _ (FlipBlockQuery q')) -> case q' of {})
:* hcmap
(Proxy @(IsShelleyBlock))
(\idx' -> Fn $ \(Pair cfg' (FlipBlockQuery q')) -> K $ f (IS idx') cfg' q' dlv)
indices
)
(injectNS idx (Pair cfg (FlipBlockQuery q)))
case sing :: Sing footprint of
SQFNoTables ->
error "answerCardanoQueryHF: unreachable, this was called with a QFNoTables query"
_ -> hcollapse $
hap
( (Fn $ \(Pair _ (FlipBlockQuery q')) -> case q' of {})
:* hcmap
(Proxy @(IsShelleyBlock))
(\idx' -> Fn $ \(Pair cfg' (FlipBlockQuery q')) -> K $ f (IS idx') cfg' q' dlv)
indices
)
(injectNS idx (Pair cfg (FlipBlockQuery q)))

shelleyCardanoFilter ::
forall proto era c result.
@@ -111,11 +116,18 @@ instance CardanoHardForkConstraints c => BlockSupportsHFLedgerQuery (CardanoEras

queryLedgerGetTraversingFilter idx q = case idx of
-- Byron
IZ -> case q of {}
IZ -> byronCardanoFilter q
-- Shelley based
IS IZ -> shelleyCardanoFilter q
IS (IS IZ) -> shelleyCardanoFilter q
IS (IS (IS IZ)) -> shelleyCardanoFilter q
IS (IS (IS (IS IZ))) -> shelleyCardanoFilter q
IS (IS (IS (IS (IS IZ)))) -> shelleyCardanoFilter q
IS (IS (IS (IS (IS (IS IZ))))) -> shelleyCardanoFilter q
IS IZ -> shelleyCardanoFilter q
IS (IS IZ) -> shelleyCardanoFilter q
IS (IS (IS IZ)) -> shelleyCardanoFilter q
IS (IS (IS (IS IZ))) -> shelleyCardanoFilter q
IS (IS (IS (IS (IS IZ)))) -> shelleyCardanoFilter q
IS (IS (IS (IS (IS (IS IZ))))) -> shelleyCardanoFilter q
IS (IS (IS (IS (IS (IS (IS idx')))))) -> case idx' of {}

byronCardanoFilter ::
BlockQuery ByronBlock QFTraverseTables result
-> TxOut (LedgerState (HardForkBlock (CardanoEras c)))
-> Bool
byronCardanoFilter = \case {}
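The new SingI footprint constraint lets answerCardanoQueryHF pattern match on sing :: Sing footprint and rule out the QFNoTables case explicitly. A hand-rolled standalone sketch of that singleton-dispatch idiom, using a made-up Footprint kind rather than the actual QueryFootprint type from the codebase, is:

{-# LANGUAGE DataKinds           #-}
{-# LANGUAGE GADTs               #-}
{-# LANGUAGE KindSignatures      #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE TypeApplications    #-}

import Data.Proxy (Proxy (..))

data Footprint = NoTables | TraverseTables

-- A singleton type: one constructor per promoted Footprint, carrying the index.
data SFootprint (fp :: Footprint) where
  SNoTables       :: SFootprint 'NoTables
  STraverseTables :: SFootprint 'TraverseTables

class KnownFootprint (fp :: Footprint) where
  footprintSing :: SFootprint fp

instance KnownFootprint 'NoTables       where footprintSing = SNoTables
instance KnownFootprint 'TraverseTables where footprintSing = STraverseTables

-- Dispatch on the type-level tag at runtime, analogous to matching on
-- sing :: Sing footprint in answerCardanoQueryHF.
describe :: forall fp. KnownFootprint fp => Proxy fp -> String
describe _ = case footprintSing @fp of
  SNoTables       -> "query that needs no ledger tables"
  STraverseTables -> "query that traverses the ledger tables"

main :: IO ()
main = putStrLn (describe (Proxy :: Proxy 'NoTables))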
@@ -1,3 +1,4 @@
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
@@ -18,6 +19,12 @@
{-# LANGUAGE UndecidableInstances #-}

{-# OPTIONS_GHC -Wno-orphans #-}
#if __GLASGOW_HASKELL__ <= 906
{-# OPTIONS_GHC -Wno-incomplete-patterns
-Wno-incomplete-uni-patterns
-Wno-incomplete-record-updates
-Wno-overlapping-patterns #-}
#endif

module Ouroboros.Consensus.Shelley.Ledger.Ledger (
LedgerState (..)
@@ -1,4 +1,5 @@
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingStrategies #-}
@@ -19,6 +20,12 @@
{-# LANGUAGE UndecidableInstances #-}

{-# OPTIONS_GHC -Wno-orphans #-}
#if __GLASGOW_HASKELL__ <= 906
{-# OPTIONS_GHC -Wno-incomplete-patterns
-Wno-incomplete-uni-patterns
-Wno-incomplete-record-updates
-Wno-overlapping-patterns #-}
#endif

module Ouroboros.Consensus.Shelley.Ledger.Query (
BlockQuery (..)
@@ -6,7 +6,6 @@
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE MultiParamTypeClasses #-}
{-# LANGUAGE RecordWildCards #-}
{-# LANGUAGE ScopedTypeVariables #-}
@@ -1,7 +1,6 @@
{-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingVia #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE TypeFamilies #-}

{-# OPTIONS_GHC -Wno-orphans #-}
@@ -400,7 +400,7 @@ storeLedgerStateAt slotNo ledgerAppMode env = do
when (blockSlot blk >= slotNo) $ storeLedgerState newLedger
when (blockSlot blk > slotNo) $ issueWarning blk
when ((unBlockNo $ blockNo blk) `mod` 1000 == 0) $ reportProgress blk
LedgerDB.reapplyThenPushNOW internal blk
LedgerDB.push internal newLedger
LedgerDB.tryFlush initLedgerDB
return (continue blk, ())
Left err -> do
@@ -490,7 +490,7 @@ checkNoThunksEvery
-- should catch any additional thunks in the values tables.
IOLike.evaluate (ledgerState newLedger') >>= checkNoThunks bn

LedgerDB.reapplyThenPushNOW internal blk
LedgerDB.push internal newLedger
LedgerDB.tryFlush ldb
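For reference, the thunk check forced above (IOLike.evaluate ... >>= checkNoThunks bn) builds on the nothunks package. A rough sketch of such a check for any NoThunks value, with a made-up helper name and error reporting, might be:

import Control.Exception (evaluate)
import NoThunks.Class (NoThunks, noThunks)

-- Force the value to WHNF, then ask nothunks whether unexpected thunks remain.
assertNoThunks :: NoThunks a => String -> a -> IO ()
assertNoThunks label x = do
  x' <- evaluate x
  mThunk <- noThunks [label] x'
  case mThunk of
    Nothing   -> pure ()
    Just info -> fail ("unexpected thunk at " ++ label ++ ": " ++ show info)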


Expand All @@ -515,18 +515,17 @@ traceLedgerProcessing ::
Analysis blk StartFromLedgerState
traceLedgerProcessing
(AnalysisEnv {db, registry, startFrom, cfg, limit}) = do
void $ processAll db registry GetBlock startFrom limit () (process initLedger internal)
void $ processAll db registry GetBlock startFrom limit () (process initLedger)
pure Nothing
where
FromLedgerState initLedger internal = startFrom

process
:: LedgerDB.LedgerDB' IO blk
-> LedgerDB.TestInternals' IO blk
-> ()
-> blk
-> IO ()
process ledgerDB intLedgerDB _ blk = do
process ledgerDB _ blk = do
frk <- LedgerDB.getForkerAtTarget ledgerDB registry VolatileTip >>= \case
Left {} -> error "Unreachable, volatile tip MUST be in the LedgerDB"
Right f -> pure f
@@ -544,7 +543,7 @@ traceLedgerProcessing
HasAnalysis.WithLedgerState blk (ledgerState oldLedger) (ledgerState newLedger'))
mapM_ Debug.traceMarkerIO traces

LedgerDB.reapplyThenPushNOW intLedgerDB blk
LedgerDB.push internal newLedger
LedgerDB.tryFlush ledgerDB

{-------------------------------------------------------------------------------
@@ -626,10 +625,10 @@ benchmarkLedgerOps mOutfile ledgerAppMode AnalysisEnv {db, registry, startFrom,
-- 'time' takes care of forcing the evaluation of its argument's result.
(ldgrView, tForecast) <- time $ forecast slot prevLedgerState
(tkHdrSt, tHdrTick) <- time $ tickTheHeaderState slot prevLedgerState ldgrView
(!_, tHdrApp) <- time $ applyTheHeader ldgrView tkHdrSt
(!newHeader, tHdrApp) <- time $ applyTheHeader ldgrView tkHdrSt
(tkLdgrSt, tBlkTick) <- time $ tickTheLedgerState slot prevLedgerState
let !tkLdgrSt' = applyDiffs (prevLedgerState `withLedgerTables` tables) tkLdgrSt
(!_, tBlkApp) <- time $ applyTheBlock tkLdgrSt'
(!newLedger, tBlkApp) <- time $ applyTheBlock tkLdgrSt'

currentRtsStats <- GC.getRTSStats
let
@@ -661,7 +660,7 @@

F.writeDataPoint outFileHandle outFormat slotDataPoint

LedgerDB.reapplyThenPushNOW intLedgerDB blk
LedgerDB.push intLedgerDB $ ExtLedgerState newLedger newHeader
LedgerDB.tryFlush ledgerDB
where
rp = blockRealPoint blk
@@ -772,7 +771,7 @@ getBlockApplicationMetrics (NumberOfBlocks nrBlocks) mOutFile env = do

IO.hFlush outFileHandle

LedgerDB.reapplyThenPushNOW intLedgerDB blk
LedgerDB.push intLedgerDB nextLedgerSt
LedgerDB.tryFlush ledgerDB

pure ()
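The benchmarking hunks above now bind the results of applyTheHeader and applyTheBlock, rather than discarding them, so they can be pushed into the LedgerDB afterwards; time still forces each result while measuring it. The repository's actual helper may differ, but a minimal sketch of a time combinator with that behaviour (assumed shape, not taken from the patch) is:

import Control.Exception (evaluate)
import Data.Time.Clock.POSIX (getPOSIXTime)

-- Run an action, force its result to WHNF, and return it together with the
-- elapsed wall-clock time in seconds.
time :: IO a -> IO (a, Double)
time act = do
  start <- getPOSIXTime
  x     <- act >>= evaluate
  end   <- getPOSIXTime
  pure (x, realToFrac (end - start))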
