diff --git a/bench/locli/locli.cabal b/bench/locli/locli.cabal index 94a299e1cf1..611ee1c8ba0 100644 --- a/bench/locli/locli.cabal +++ b/bench/locli/locli.cabal @@ -63,17 +63,23 @@ library Cardano.TopHandler Cardano.Util - Cardano.Analysis.Run - Cardano.Analysis.API + Cardano.Analysis.API.Chain + Cardano.Analysis.API.ChainFilter + Cardano.Analysis.API.Context + Cardano.Analysis.API.Dictionary + Cardano.Analysis.API.Field + Cardano.Analysis.API.Ground + Cardano.Analysis.API.Metrics + Cardano.Analysis.API.Run + Cardano.Analysis.API.Types + Cardano.Analysis.API.LocliVersion + Cardano.Analysis.BlockProp - Cardano.Analysis.Chain - Cardano.Analysis.ChainFilter - Cardano.Analysis.Context - Cardano.Analysis.Ground Cardano.Analysis.MachPerf - Cardano.Analysis.Version + Cardano.Analysis.Summary + Cardano.JSON Cardano.Org Cardano.Render @@ -101,6 +107,7 @@ library , extra , file-embed , filepath + , fingertree , ghc , gnuplot , iohk-monitoring diff --git a/bench/locli/src/Cardano/Analysis/API.hs b/bench/locli/src/Cardano/Analysis/API.hs index 97bdb353f37..d89b7f771be 100644 --- a/bench/locli/src/Cardano/Analysis/API.hs +++ b/bench/locli/src/Cardano/Analysis/API.hs @@ -1,608 +1,26 @@ -{-# LANGUAGE DeriveAnyClass #-} -{-# LANGUAGE EmptyDataDeriving #-} -{-# LANGUAGE GeneralisedNewtypeDeriving #-} -{-# LANGUAGE UndecidableInstances #-} -{-# OPTIONS_GHC -Wno-name-shadowing -Wno-orphans #-} -{- HLINT ignore "Use head" -} module Cardano.Analysis.API - ( module Cardano.Analysis.API - , module Cardano.Util) + ( module Data.CDF + , module Cardano.Analysis.API.Chain + , module Cardano.Analysis.API.ChainFilter + , module Cardano.Analysis.API.Context + , module Cardano.Analysis.API.Dictionary + , module Cardano.Analysis.API.Field + , module Cardano.Analysis.API.Ground + , module Cardano.Analysis.API.Metrics + , module Cardano.Analysis.API.Run + , module Cardano.Analysis.API.Types + , module Cardano.Analysis.API.LocliVersion + ) where -import Prelude ((!!)) -import Util (count) -import Cardano.Prelude hiding (head) - -import Data.Aeson (ToJSON(..), FromJSON(..)) -import Data.Text qualified as T -import Data.Text.Short (toText) -import Data.Time.Clock (NominalDiffTime) -import Options.Applicative qualified as Opt -import Text.Printf (PrintfArg) - import Data.CDF - -import Cardano.Analysis.Chain -import Cardano.Analysis.ChainFilter -import Cardano.Analysis.Context -import Cardano.Analysis.Ground -import Cardano.Analysis.Version -import Cardano.Logging.Resources.Types -import Cardano.Render -import Cardano.Unlog.LogObject hiding (Text) -import Cardano.Util - --- --- * API types --- - --- | Results of block propagation analysis. 
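The `BlockProp` record deleted just below (apparently relocated, with renamed `cdf*` fields, into the new `Cardano.Analysis.API.Types` re-exported above) abstracts every metric over an arity functor `f`, so one declaration serves both a single-run analysis (`f ~ I`) and a cross-run aggregate (`f ~ CDF I`). A minimal self-contained sketch of that pattern, using illustrative stand-in names rather than locli's `Data.CDF` API:

```haskell
import Data.Functor.Identity (Identity (..))

-- Stand-in for locli's CDF: a distribution whose points are wrapped in f.
newtype Dist f a = Dist { distPoints :: [f a] }

-- One record shape, reused at two arities.
data Prop f = Prop
  { propForges :: Dist f Double
  , propSizes  :: Dist f Int
  }

type PropOne   = Prop Identity         -- single run   (locli: BlockProp I)
type MultiProp = Prop (Dist Identity)  -- run aggregate (locli: BlockProp (CDF I))
```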
-data BlockProp f - = BlockProp - { bpVersion :: !Cardano.Analysis.Version.Version - , bpDomainSlots :: !(DataDomain SlotNo) - , bpDomainBlocks :: !(DataDomain BlockNo) - , bpForgerStarts :: !(CDF f NominalDiffTime) - , bpForgerBlkCtx :: !(CDF f NominalDiffTime) - , bpForgerLgrState :: !(CDF f NominalDiffTime) - , bpForgerLgrView :: !(CDF f NominalDiffTime) - , bpForgerLeads :: !(CDF f NominalDiffTime) - , bpForgerForges :: !(CDF f NominalDiffTime) - , bpForgerAnnouncements :: !(CDF f NominalDiffTime) - , bpForgerAdoptions :: !(CDF f NominalDiffTime) - , bpForgerSends :: !(CDF f NominalDiffTime) - , bpPeerNotices :: !(CDF f NominalDiffTime) - , bpPeerRequests :: !(CDF f NominalDiffTime) - , bpPeerFetches :: !(CDF f NominalDiffTime) - , bpPeerAnnouncements :: !(CDF f NominalDiffTime) - , bpPeerAdoptions :: !(CDF f NominalDiffTime) - , bpPeerSends :: !(CDF f NominalDiffTime) - , bpPropagation :: ![(Double, CDF f NominalDiffTime)] - , bpSizes :: !(CDF f Int) - } - deriving (Generic) -deriving instance (Show (f NominalDiffTime), Show (f Int)) => Show (BlockProp f) -deriving instance (FromJSON (f NominalDiffTime), FromJSON (f Int)) => FromJSON (BlockProp f) -deriving instance (ToJSON (f NominalDiffTime), ToJSON (f Int)) => ToJSON (BlockProp f) - -type BlockPropOne = BlockProp I -type MultiBlockProp = BlockProp (CDF I) - --- | All events related to a block. -data BlockEvents - = BlockEvents - { beBlock :: !Hash - , beBlockPrev :: !Hash - , beBlockNo :: !BlockNo - , beSlotNo :: !SlotNo - , beEpochNo :: !EpochNo - , beEpochSafeInt :: !EpochSafeInt - , beForge :: !BlockForge - , beObservations :: [BlockObservation] - , bePropagation :: !(CDF I NominalDiffTime) - -- ^ CDF of slot-start-to-adoptions on cluster - , beOtherBlocks :: [Hash] - , beErrors :: [BPError] - , beNegAcceptance :: [ChainFilter] -- ^ List of negative acceptance conditions, - -- preventing block's consideration for analysis. 
- } - deriving (Generic, FromJSON, ToJSON, Show) - -data BlockForge - = BlockForge - { bfForger :: !Host - , bfSlotStart :: !SlotStart - , bfBlockGap :: !NominalDiffTime -- ^ Since previous forge event - , bfBlockSize :: !Int -- ^ Bytes - , bfStarted :: !NominalDiffTime -- ^ Since slot start - , bfBlkCtx :: !(Maybe NominalDiffTime) -- ^ Since forge loop start - , bfLgrState :: !(Maybe NominalDiffTime) -- ^ Since block context - , bfLgrView :: !(Maybe NominalDiffTime) -- ^ Since ledger state - , bfLeading :: !NominalDiffTime -- ^ Since ledger view - , bfForged :: !NominalDiffTime -- ^ Since leading - , bfAnnounced :: !NominalDiffTime -- ^ Since forging - , bfSending :: !NominalDiffTime -- ^ Since announcement - , bfAdopted :: !NominalDiffTime -- ^ Since announcement - , bfChainDelta :: !Int -- ^ ChainDelta during adoption - } - deriving (Generic, FromJSON, ToJSON, Show) - -data BlockObservation - = BlockObservation - { boObserver :: !Host - , boSlotStart :: !SlotStart - , boNoticed :: !NominalDiffTime -- ^ Since slot start - , boRequested :: !NominalDiffTime -- ^ Since noticing - , boFetched :: !NominalDiffTime -- ^ Since requesting - , boAnnounced :: !(Maybe NominalDiffTime) -- ^ Since fetching - , boSending :: !(Maybe NominalDiffTime) -- ^ Since announcement - , boAdopted :: !(Maybe NominalDiffTime) -- ^ Since announcement - , boChainDelta :: !Int -- ^ ChainDelta during adoption - , boErrorsCrit :: [BPError] - , boErrorsSoft :: [BPError] - } - deriving (Generic, FromJSON, ToJSON, Show) - -data BPError - = BPError - { eHost :: !Host - , eBlock :: !Hash - , eLO :: !(Maybe LogObject) - , eDesc :: !BPErrorKind - } - deriving (FromJSON, Generic, NFData, Show, ToJSON) - -data Phase - = Notice - | Request - | Fetch - | Forge - | Acquire - | Announce - | Adopt - | Send - deriving (FromJSON, Eq, Generic, NFData, Ord, Show, ToJSON) - -data BPErrorKind - = BPEBefore !Phase !Phase - | BPEUnexpectedForObserver !Phase - | BPEUnexpectedForForger !Phase - | BPEUnexpectedAsFirst !Phase - | BPENoBlocks - | BPEDuplicateForge - | BPEMissingPhase !Phase - | BPENegativePhase !Phase !NominalDiffTime - | BPEFork !Hash - deriving (FromJSON, Generic, NFData, Show, ToJSON) - --- | The top-level representation of the machine timeline analysis results. -data MachPerf f - = MachPerf - { sVersion :: !Cardano.Analysis.Version.Version - , sDomainSlots :: !(DataDomain SlotNo) - -- distributions - , sMissCDF :: !(CDF f Double) - , sLeadsCDF :: !(CDF f Word64) - , sUtxoCDF :: !(CDF f Word64) - , sDensityCDF :: !(CDF f Double) - , sStartedCDF :: !(CDF f NominalDiffTime) - , sBlkCtxCDF :: !(CDF f NominalDiffTime) - , sLgrStateCDF :: !(CDF f NominalDiffTime) - , sLgrViewCDF :: !(CDF f NominalDiffTime) - , sLeadingCDF :: !(CDF f NominalDiffTime) - , sForgedCDF :: !(CDF f NominalDiffTime) - , sBlockGapCDF :: !(CDF f Word64) - , sSpanLensCpuCDF :: !(CDF f Int) - , sSpanLensCpuEpochCDF :: !(CDF f Int) - , sSpanLensCpuRwdCDF :: !(CDF f Int) - , sResourceCDFs :: !(Resources (CDF f Word64)) - } - deriving (Generic) - --- | One machine's performance -type MachPerfOne = MachPerf I - --- | Bunch'a machines performances -type ClusterPerf = MachPerf (CDF I) - --- | Bunch'a bunches'a machine performances. --- Same as above, since we collapse [CDF I] into CDF I -- just with more statistical confidence. 
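The comment above describes collapsing `[CDF I]` into `CDF I`. A hedged sketch of the idea, assuming the combinator simply pools per-machine samples behind a distribution of the same shape (locli's real combinator lives in `Data.CDF`; the names here are illustrative):

```haskell
import Data.List (sort)

-- Stand-in for a distribution that keeps its raw samples around.
newtype Samples a = Samples { unSamples :: [a] }

-- Pooling per-machine sample sets: the result has the same type as each
-- input, just with more observations behind every statistic.
pool :: Ord a => [Samples a] -> Samples a
pool = Samples . sort . concatMap unSamples
```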
-newtype MultiClusterPerf - = MultiClusterPerf { unMultiClusterPerf :: ClusterPerf } - deriving newtype (ToJSON, FromJSON) - -deriving newtype instance FromJSON a => FromJSON (I a) -deriving newtype instance ToJSON a => ToJSON (I a) -deriving instance (FromJSON (a Double), FromJSON (a Int), FromJSON (a NominalDiffTime), FromJSON (a Word64)) => FromJSON (MachPerf a) -deriving instance (NFData (a Double), NFData (a Int), NFData (a NominalDiffTime), NFData (a Word64)) => NFData (MachPerf a) -deriving instance (Show (a Double), Show (a Int), Show (a NominalDiffTime), Show (a Word64)) => Show (MachPerf a) -deriving instance (ToJSON (a Double), ToJSON (a Int), ToJSON (a NominalDiffTime), ToJSON (a Word64)) => ToJSON (MachPerf a) - -data SlotStats a - = SlotStats - { slSlot :: !SlotNo - , slEpoch :: !EpochNo - , slEpochSlot :: !EpochSlot - , slEpochSafeInt :: !EpochSafeInt - , slStart :: !SlotStart - , slCountStarts :: !Word64 - , slCountBlkCtx :: !Word64 - , slCountLgrState :: !Word64 - , slCountLgrView :: !Word64 - , slCountLeads :: !Word64 - , slCountForges :: !Word64 - , slChainDBSnap :: !Word64 - , slRejectedTx :: !Word64 - , slBlockNo :: !BlockNo - , slBlockGap :: !Word64 - , slStarted :: !(SMaybe a) - , slBlkCtx :: !(SMaybe a) - , slLgrState :: !(SMaybe a) - , slLgrView :: !(SMaybe a) - , slLeading :: !(SMaybe a) - , slForged :: !(SMaybe a) - , slMempoolTxs :: !Word64 - , slSpanTxsMem :: !(SMaybe NominalDiffTime) - , slTxsCollected :: !Word64 - , slTxsAccepted :: !Word64 - , slTxsRejected :: !Word64 - , slUtxoSize :: !Word64 - , slDensity :: !Double - , slResources :: !(Resources (Maybe Word64)) - } - deriving (Generic, Show, ToJSON) - deriving anyclass NFData - --- --- * Key properties --- -testBlockEvents :: Genesis -> BlockEvents -> ChainFilter -> Bool -testBlockEvents g@Genesis{..} - BlockEvents{beForge=BlockForge{..} - ,beObservations=seen - ,..} = \case - CBlock flt -> case flt of - BUnitaryChainDelta -> bfChainDelta == 1 - BFullnessGEq f -> - bfBlockSize > floor ((fromIntegral (maxBlockBodySize protocolParams) :: Double) * f) - BFullnessLEq f -> - bfBlockSize < floor ((fromIntegral (maxBlockBodySize protocolParams) :: Double) * f) - BSizeGEq x -> bfBlockSize >= fromIntegral x - BSizeLEq x -> bfBlockSize <= fromIntegral x - BMinimumAdoptions x -> count (isJust . boAdopted) seen >= fromIntegral x - CSlot flt -> case flt of - SlotGEq s -> beSlotNo >= s - SlotLEq s -> beSlotNo <= s - EpochGEq e -> beEpochNo >= e - EpochLEq e -> beEpochNo <= e - SlotHasLeaders -> True - EpochSafeIntGEq i -> beEpochSafeInt >= i - EpochSafeIntLEq i -> beEpochSafeInt <= i - EpSlotGEq s -> snd (g `unsafeParseSlot` beSlotNo) >= s - EpSlotLEq s -> snd (g `unsafeParseSlot` beSlotNo) <= s - -isValidBlockEvent :: Genesis -> [ChainFilter] -> BlockEvents -> Bool -isValidBlockEvent g criteria be = - all (testBlockEvents g be) criteria - -isValidBlockObservation :: BlockObservation -> Bool -isValidBlockObservation BlockObservation{..} = - -- 1. All phases are present - null boErrorsCrit - && - -- 2. 
All timings account for processing of a single block - boChainDelta == 1 - -testSlotStats :: Genesis -> SlotStats a -> SlotCond -> Bool -testSlotStats g SlotStats{..} = \case - SlotGEq s -> slSlot >= s - SlotLEq s -> slSlot <= s - EpochGEq s -> fromIntegral (unEpochNo slEpoch) >= s - EpochLEq s -> fromIntegral (unEpochNo slEpoch) <= s - SlotHasLeaders -> slCountLeads > 0 - EpochSafeIntGEq i -> slEpochSafeInt >= i - EpochSafeIntLEq i -> slEpochSafeInt <= i - EpSlotGEq s -> snd (g `unsafeParseSlot` slSlot) >= s - EpSlotLEq s -> snd (g `unsafeParseSlot` slSlot) <= s - --- --- * Block propagation report subsetting --- -data PropSubset - = PropFull - | PropForger - | PropPeers - | PropEndToEnd - | PropEndToEndBrief - deriving Show - -bpFieldSelectForger :: Field DSelect p a -> Bool -bpFieldSelectForger Field{fId} = elem fId - [ "fStarted", "fLeading", "fForged", "fAnnounced", "fAdopted", "fSendStart" ] - -bpFieldSelectPeers :: Field DSelect p a -> Bool -bpFieldSelectPeers Field{fId} = elem fId - [ "pNoticed", "pRequested", "pFetched", "pAnnounced", "pAdopted", "pSendStart" ] - -bpFieldSelectEndToEnd :: Field DSelect p a -> Bool -bpFieldSelectEndToEnd Field{fHead2} = elem fHead2 adoptionCentilesRendered - where - adoptionCentilesRendered :: [Text] - adoptionCentilesRendered = adoptionCentiles <&> T.drop 4 . renderAdoptionCentile - -bpFieldSelectEndToEndBrief :: Field DSelect p a -> Bool -bpFieldSelectEndToEndBrief Field{fHead2} = elem fHead2 adoptionCentilesRendered - where - adoptionCentilesRendered :: [Text] - adoptionCentilesRendered = adoptionCentilesBrief <&> T.drop 4 . renderAdoptionCentile - -propSubsetFn :: PropSubset -> (Field DSelect p a -> Bool) -propSubsetFn = \case - PropFull -> const True - PropForger -> bpFieldSelectForger - PropPeers -> bpFieldSelectPeers - PropEndToEnd -> bpFieldSelectEndToEnd - PropEndToEndBrief -> bpFieldSelectEndToEndBrief - -parsePropSubset :: Opt.Parser PropSubset -parsePropSubset = - [ Opt.flag' PropFull (Opt.long "full" <> Opt.help "Complete propagation data") - , Opt.flag' PropForger (Opt.long "forger" <> Opt.help "Only forger propagation") - , Opt.flag' PropPeers (Opt.long "peers" <> Opt.help "Only peer propagation") - , Opt.flag' PropEndToEnd (Opt.long "end-to-end" <> Opt.help "Only end-to-end propagation") - , Opt.flag' PropEndToEndBrief (Opt.long "e2e-brief" <> Opt.help "Only brief end-to-end propagation") - ] & \case - (x:xs) -> foldl (<|>) x xs - [] -> error "Crazy world, begone. 0" - -parseCDF2Aspect :: Opt.Parser CDF2Aspect -parseCDF2Aspect = - [ Opt.flag' OfOverallDataset (Opt.long "overall" <> Opt.help "Overall dataset statistical summary") - , Opt.flag' OfInterCDF (Opt.long "inter-cdf" <> Opt.help "Inter-sample (i.e. inter-CDF) stats") - ] & \case - (x:xs) -> foldl (<|>) x xs - [] -> error "Crazy world, begone. 1" - --- --- * Timeline rendering instances --- -renderAdoptionCentile :: Centile -> Text -renderAdoptionCentile = T.pack . printf "prop%0.2f" . 
unCentile - -adoptionCentiles :: [Centile] -adoptionCentiles = - [ Centile 0.5, Centile 0.8, Centile 0.9 - , Centile 0.92, Centile 0.94, Centile 0.96, Centile 0.98, Centile 1.0 ] - -adoptionCentilesBrief :: [Centile] -adoptionCentilesBrief = - [ Centile 0.5, Centile 0.9, Centile 0.96 ] - -instance RenderCDFs BlockProp p where - rdFields = - -- Width LeftPad - [ Field 4 0 "fStarted" (f!!0) "Loop" (DDeltaT bpForgerStarts) "Started forge loop iteration" - , Field 4 0 "fBlkCtx" (f!!1) "BkCt" (DDeltaT bpForgerBlkCtx) "Acquired block context" - , Field 4 0 "fLgrState" (f!!2) "LgSt" (DDeltaT bpForgerLgrState) "Acquired ledger state" - , Field 4 0 "fLgrView" (f!!3) "LgVi" (DDeltaT bpForgerLgrView) "Acquired ledger view" - , Field 4 0 "fLeading" (f!!4) "Lead" (DDeltaT bpForgerLeads) "Leadership check duration" - , Field 4 0 "fForged" (f!!5) "Forg" (DDeltaT bpForgerForges) "Leadership to forged" - , Field 4 0 "fAnnounced" (f!!6) "Anno" (DDeltaT bpForgerAnnouncements) "Forged to announced" - , Field 4 0 "fSendStart" (f!!7) "Send" (DDeltaT bpForgerSends) "Announced to sending" - , Field 4 0 "fAdopted" (f!!8) "Adop" (DDeltaT bpForgerAdoptions) "Announced to self-adopted" - , Field 4 0 "pNoticed" (p!!0) "Noti" (DDeltaT bpPeerNotices) "First peer notice" - , Field 4 0 "pRequested" (p!!1) "Requ" (DDeltaT bpPeerRequests) "Notice to fetch request" - , Field 4 0 "pFetched" (p!!2) "Fetc" (DDeltaT bpPeerFetches) "Fetch duration" - , Field 4 0 "pAnnounced" (p!!3) "Anno" (DDeltaT bpPeerAnnouncements) "Fetched to announced" - , Field 4 0 "pSendStart" (p!!4) "Send" (DDeltaT bpPeerSends) "Announced to sending" - , Field 4 0 "pAdopted" (p!!5) "Adop" (DDeltaT bpPeerAdoptions) "Announced to adopted" - ] ++ - [ Field 4 0 (renderAdoptionCentile ct) - (r!!i) - (T.take 4 $ T.pack $ printf "%.04f" centi) - (DDeltaT ((\(centi', d) -> - if centi' == centi then d - else error $ printf "Centile mismatch: [%d]: exp=%f act=%f" - i centi centi') - . fromMaybe - (error $ printf "No centile %d/%f in bpPropagation." i centi) - . flip atMay i . bpPropagation)) - (T.pack $ printf "%.2f adoption" centi) - | (i, ct@(Centile centi)) <- zip [0::Int ..] adoptionCentiles ] ++ - [ Field 9 0 "sizes" "Size" "bytes" (DInt bpSizes) "" - ] - where - f = nChunksEachOf 9 5 ",-------------------- Forger event Δt: --------------------." - p = nChunksEachOf 6 5 ",------- Peer event Δt: -------." - r = nChunksEachOf aLen 5 ",---- Slot-rel. Δt to adoption centile: ----." - aLen = length adoptionCentiles - -instance RenderTimeline BlockEvents where - rtFields _ = - -- Width LeftPad - [ Field 5 0 "block" "block" "no." (IWord64 (unBlockNo . beBlockNo)) "" - , Field 5 0 "abs.slot" "abs." "slot#" (IWord64 (unSlotNo . beSlotNo)) "" - , Field 6 0 "hash" "block" "hash" (IText (shortHash . beBlock)) "" - , Field 6 0 "hashPrev" "prev" "hash" (IText (shortHash . beBlockPrev)) "" - , Field 7 0 "forger" "forger" "host" (IText (toText . unHost . bfForger . beForge)) "" - , Field 6 0 "blockSize" "size" "bytes" (IInt (bfBlockSize . beForge)) "" - , Field 5 0 "blockGap" "block" "gap" (IDeltaT (bfBlockGap . beForge)) "" - , Field 3 0 "forks" "for" "-ks" (IInt (count bpeIsFork . beErrors)) "" - , Field 4 0 "fStarted" (f!!0) "Start" (IDeltaT (bfStarted . beForge)) "" - , Field 4 0 "fBlkCtx" (f!!1) "BlkCtx" (IText (maybe "?" show.bfBlkCtx .beForge)) "" - , Field 4 0 "fLgrState" (f!!2) "LgrSta" (IText (maybe "?" show.bfLgrState.beForge)) "" - , Field 4 0 "fLgrView" (f!!3) "LgrVie" (IText (maybe "?" 
show.bfLgrView .beForge)) "" - , Field 4 0 "fLeading" (f!!4) "Lead" (IDeltaT (bfLeading . beForge)) "" - , Field 4 0 "fForged" (f!!5) "Forge" (IDeltaT (bfForged . beForge)) "" - , Field 4 0 "fAnnounced" (f!!6) "Announ" (IDeltaT (bfAnnounced . beForge)) "" - , Field 4 0 "fSendStart" (f!!7) "Sendin" (IDeltaT (bfSending . beForge)) "" - , Field 4 0 "fAdopted" (f!!8) "Adopt" (IDeltaT (bfAdopted . beForge)) "" - , Field 4 0 "noticedVal" (p!!0) "Notic" (IDeltaT (af boNoticed . valids)) "" - , Field 4 0 "requestedVal" (p!!1) "Requd" (IDeltaT (af boRequested . valids)) "" - , Field 4 0 "fetchedVal" (p!!2) "Fetch" (IDeltaT (af boFetched . valids)) "" - , Field 4 0 "pAnnouncedVal" (p!!3) "Annou" (IDeltaT (af' boAnnounced . valids)) "" - , Field 4 0 "pSendStartVal" (p!!4) "Send" (IDeltaT (af' boSending . valids)) "" - , Field 4 0 "pAdoptedVal" (p!!5) "Adopt" (IDeltaT (af' boAdopted . valids)) "" - , Field 4 0 "pPropag0.5" (r!!0) "0.5" (IDeltaT (percSpec 0.5 . bePropagation)) "" - , Field 4 0 "pPropag0.8" (r!!1) "0.8" (IDeltaT (percSpec 0.8 . bePropagation)) "" - , Field 4 0 "pPropag0.96" (r!!2) "0.96" (IDeltaT (percSpec 0.96 . bePropagation)) "" - , Field 4 0 "pPropag1.0" (r!!3) "1.0" (IDeltaT (snd . cdfRange . bePropagation)) "" - , Field 3 0 "valid" "va-" "lid" (IText (bool "-" "+" . (== 0) . length . beNegAcceptance)) "" - , Field 3 0 "valid.observ" "good" "obsv" (IInt (length . valids)) "" - , Field 5 0 "errors" "all" "errs" (IInt (length . beErrors)) "" - , Field 3 0 "missNotic" (m!!0) "ntc" (IInt (count (bpeIsMissing Notice) . beErrors)) "" - , Field 3 0 "missReque" (m!!1) "req" (IInt (count (bpeIsMissing Request) . beErrors)) "" - , Field 3 0 "missFetch" (m!!2) "fch" (IInt (count (bpeIsMissing Fetch) . beErrors)) "" - , Field 3 0 "missAnnou" (m!!3) "ann" (IInt (count (bpeIsMissing Announce) . beErrors)) "" - , Field 3 0 "missAdopt" (m!!4) "ado" (IInt (count (bpeIsMissing Adopt) . beErrors)) "" - , Field 3 0 "missSend" (m!!5) "snd" (IInt (count (bpeIsMissing Send) . beErrors)) "" - , Field 3 0 "negAnnou" (n!!0) "ann" (IInt (count (bpeIsNegative Announce) . beErrors)) "" - , Field 3 0 "negSend" (n!!1) "snd" (IInt (count (bpeIsNegative Send) . beErrors)) "" - ] - where - valids = filter isValidBlockObservation . beObservations - f = nChunksEachOf 9 5 "--------- Forger event Δt: ---------" - p = nChunksEachOf 6 5 "-- Peer event Δt averages: --" - r = nChunksEachOf 4 5 "Propagation Δt:" - m = nChunksEachOf 6 4 "Missing" - n = nChunksEachOf 2 4 "Negative" - - percSpec :: Double -> CDF I NominalDiffTime -> NominalDiffTime - percSpec ps d = unI $ Centile ps `projectCDF` d - & fromMaybe (error $ printf "No centile %f in distribution." ps) - af f = avg . fmap f - af' f = avg . mapMaybe f - avg :: [NominalDiffTime] -> NominalDiffTime - avg [] = 0 - avg xs = (/ fromInteger (fromIntegral $ length xs)) $ sum xs - count :: (a -> Bool) -> [a] -> Int - count f = length . filter f - - bpeIsFork :: BPError -> Bool - bpeIsFork BPError{eDesc=BPEFork{}} = True - bpeIsFork _ = False - - bpeIsMissing, bpeIsNegative :: Phase -> BPError -> Bool - bpeIsMissing p BPError{eDesc=BPEMissingPhase p'} = p == p' - bpeIsMissing _ _ = False - bpeIsNegative p BPError{eDesc=BPENegativePhase p' _} = p == p' - bpeIsNegative _ _ = False - - data RTComments BlockEvents - = BEErrors - | BEFilterOuts - deriving Show - - rtCommentary BlockEvents{..} = - \case - BEErrors -> (" " <>) . show <$> beErrors - BEFilterOuts -> (" " <>) . 
show <$> beNegAcceptance - --- --- * Machine performance report subsetting --- -data PerfSubset - = PerfFull - | PerfSummary - deriving Show - -mtFieldsReport :: Field DSelect p a -> Bool -mtFieldsReport Field{fId} = elem fId - [ "cpuProcess", "cpuGC", "cpuMutator", "cpuSpanLenAll", "memRSS", "rtsHeap", "rtsLive", "rtsAllocation" ] - -perfSubsetFn :: PerfSubset -> (Field DSelect p a -> Bool) -perfSubsetFn = \case - PerfFull -> const True - PerfSummary -> mtFieldsReport - -parsePerfSubset :: Opt.Parser PerfSubset -parsePerfSubset = - [ Opt.flag' PerfFull (Opt.long "full" <> Opt.help "Complete performance data") - , Opt.flag' PerfSummary (Opt.long "summary" <> Opt.help "Only report-relevant perf data") - ] & \case - (x:xs) -> foldl (<|>) x xs - [] -> error "Crazy world." - -instance RenderCDFs MachPerf p where - rdFields = - -- Width LeftPad - [ Field 4 0 "missRatio" "Miss" "ratio" (DFloat sMissCDF) "Leadership checks miss ratio" - , Field 4 0 "checkΔ" (d!!0) "Start" (DDeltaT sStartedCDF) "Forge loop tardiness" - , Field 4 0 "blkCtΔ" (d!!1) "BlkCt" (DDeltaT sBlkCtxCDF) "Block context acquisition delay" - , Field 4 0 "lgrStΔ" (d!!2) "LgrSt" (DDeltaT sLgrStateCDF) "Ledger state acquisition delay" - , Field 4 0 "lgrViΔ" (d!!3) "LgrVi" (DDeltaT sLgrViewCDF) "Ledger view acquisition delay" - , Field 4 0 "leadΔ" (d!!4) "Lead" (DDeltaT sLeadingCDF) "Leadership check duration" - , Field 4 0 "forgeΔ" (d!!5) "Forge" (DDeltaT sForgedCDF) "Leading to block forged" - , Field 4 0 "blockGap" "Block" "gap" (DWord64 sBlockGapCDF) "Interblock gap" - , Field 5 0 "chainDensity" "Dens" "ity" (DFloat sDensityCDF) "Chain density" - , Field 3 0 "cpuProcess" "CPU" "%" (DWord64 (rCentiCpu.sResourceCDFs)) "Process CPU usage pct" - , Field 3 0 "cpuGC" "GC" "%" (DWord64 (rCentiGC .sResourceCDFs)) "RTS GC CPU usage pct" - , Field 3 0 "cpuMutator" "MUT" "%" (DWord64 (rCentiMut.sResourceCDFs)) "RTS Mutator CPU usage pct" - , Field 3 0 "gcMajor" "GC " "Maj" (DWord64 (rGcsMajor.sResourceCDFs)) "Major GCs Hz" - , Field 3 0 "gcMinor" "flt " "Min" (DWord64 (rGcsMinor.sResourceCDFs)) "Minor GCs Hz" - , Field 5 0 "memRSS" (m!!0) "RSS" (DWord64 (rRSS .sResourceCDFs)) "Kernel RSS MB" - , Field 5 0 "rtsHeap" (m!!1) "Heap" (DWord64 (rHeap .sResourceCDFs)) "RTS heap size MB" - , Field 5 0 "rtsLiveBytes" (m!!2) "Live" (DWord64 (rLive .sResourceCDFs)) "RTS GC live bytes MB" - , Field 5 0 "rtsAllocation" "Alloc" "MB" (DWord64 (rAlloc .sResourceCDFs)) "RTS alloc rate MB sec" - , Field 5 0 "cpuSpanLenAll" (c!!0) "All" (DInt sSpanLensCpuCDF) "CPU 85pct spans" - , Field 5 0 "cpuSpanLenEp" (c!!1) "Epoch" (DInt sSpanLensCpuEpochCDF) "CPU spans at Ep boundary" - ] - where - d = nChunksEachOf 6 5 "----------- Δt -----------" - m = nChunksEachOf 3 6 "Memory usage, MB" - c = nChunksEachOf 2 6 "CPU% spans" - -instance RenderTimeline (SlotStats NominalDiffTime) where - data RTComments (SlotStats NominalDiffTime) - deriving Show - - rtFields _ = - -- Width LeftPad - [ Field 5 0 "abs.slot" "abs." "slot#" (IWord64 (unSlotNo .slSlot)) "" - , Field 4 0 "slot" " epo" "slot" (IWord64 (unEpochSlot .slEpochSlot)) "" - , Field 2 0 "epoch" "ch " "#" (IWord64 (unEpochNo .slEpoch)) "" - , Field 3 0 "safetyInt" "safe" "int" (IWord64 (unEpochSafeInt.slEpochSafeInt)) "" - , Field 5 0 "block" "block" "no." 
(IWord64 (unBlockNo.slBlockNo)) "" - , Field 5 0 "blockGap" "block" "gap" (IWord64 slBlockGap) "" - , Field 3 0 "forgeLoop" "forg" "loo" (IWord64 slCountStarts) "" - , Field 3 0 "blockCtx" "blok" "ctx" (IWord64 slCountBlkCtx) "" - , Field 3 0 "ledgerState" "ledg" "sta" (IWord64 slCountLgrState) "" - , Field 3 0 "ledgerView" "ledg" "viw" (IWord64 slCountLgrView) "" - , Field 3 0 "leadShips" "ship" "win" (IWord64 slCountLeads) "" - , Field 3 0 "forges" "For" "ge" (IWord64 slCountForges) "" - , Field 4 0 "CDBSnap" "CDB" "snap" (IWord64 slChainDBSnap) "" - , Field 3 0 "rejTxs" "rej" "txs" (IWord64 slRejectedTx) "" - , Field 7 0 "startDelay" "loop" "start" (IText (smaybe "" show.slStarted)) "" - , Field 5 0 "blkCtx" "block" "ctx" (IText (smaybe "" show.slBlkCtx)) "" - , Field 5 0 "lgrState" "ledgr" "state" (IText (smaybe "" show.slLgrState)) "" - , Field 5 0 "lgrView" "ledgr" "view" (IText (smaybe "" show.slLgrView)) "" - , Field 5 0 "leadChecked" "ledsh" "chekd" (IText (smaybe "" show.slLeading)) "" - , Field 5 0 "forge" "forge" "done" (IText (smaybe "" show.slForged)) "" - , Field 4 0 "mempoolTxSpan" (t 4!!0) "span" (IText (smaybe "" show.slSpanTxsMem)) "" - , Field 4 0 "txsColl" (t 4!!1) "cold" (IWord64 slTxsCollected) "" - , Field 4 0 "txsAcc" (t 4!!2) "accd" (IWord64 slTxsAccepted) "" - , Field 4 0 "txsRej" (t 4!!3) "rejd" (IWord64 slTxsRejected) "" - , Field 5 1 "chDensity" "chain" "dens." (IFloat slDensity) "" - , Field 3 0 "CPU%" (c 3!!0) "all" (IText (d 3.rCentiCpu.slResources)) "" - , Field 3 0 "GC%" (c 3!!1) "GC" (IText (d 3.fmap (min 999).rCentiGC.slResources)) "" - , Field 3 0 "MUT%" (c 3!!2) "mut" (IText (d 3.fmap (min 999).rCentiMut.slResources)) "" - , Field 3 0 "majFlt" (g 3!!0) "maj" (IText (d 3.rGcsMajor.slResources)) "" - , Field 3 0 "minFlt" (g 3!!1) "min" (IText (d 3.rGcsMinor.slResources)) "" - , Field 6 0 "productiv" "Produc" "tivity" (IText - (\SlotStats{..}-> - f 4 $ calcProd <$> (min 6 . -- workaround for ghc-8.10.2 - fromIntegral <$> rCentiMut slResources :: Maybe Double) - <*> (fromIntegral <$> rCentiCpu slResources))) "" - , Field 5 0 "rssMB" (m 5!!0) "RSS" (IText (d 5.rRSS .slResources)) "" - , Field 5 0 "heapMB" (m 5!!1) "Heap" (IText (d 5.rHeap .slResources)) "" - , Field 5 0 "liveMB" (m 5!!2) "Live" (IText (d 5.rLive .slResources)) "" - , Field 5 0 "allocatedMB" "Allocd" "MB" (IText (d 5.rAlloc.slResources)) "" - , Field 6 0 "allocMut" "Alloc/" "mutSec" (IText - (\SlotStats{..}-> - d 5 $ - (ceiling :: Double -> Int) - <$> ((/) <$> (fromIntegral . (100 *) <$> rAlloc slResources) - <*> (fromIntegral . max 1 . (1024 *) <$> rCentiMut slResources)))) "" - , Field 7 0 "mempoolTxs" "Mempool" "txs" (IWord64 slMempoolTxs) "" - , Field 9 0 "utxoEntries" "UTxO" "entries" (IWord64 slUtxoSize) "" - -- , Field 10 0 "absSlotTime" "Absolute" "slot time" $ IText - -- (\SlotStats{..}-> - -- T.pack $ " " `splitOn` show slStart !! 
1) - ] - where - t w = nChunksEachOf 4 (w + 1) "mempool tx" - c w = nChunksEachOf 3 (w + 1) "%CPU" - g w = nChunksEachOf 2 (w + 1) "GCs" - m w = nChunksEachOf 3 (w + 1) "Memory use, MB" - - d, f :: PrintfArg a => Int -> Maybe a -> Text - d width = \case - Just x -> T.pack $ printf ("%"<>"" --(if exportMode then "0" else "") - <>show width<>"d") x - Nothing -> mconcat (replicate width "-") - f width = \case - Just x -> T.pack $ printf ("%0."<>show width<>"f") x - Nothing -> mconcat (replicate width "-") - - calcProd :: Double -> Double -> Double - calcProd mut' cpu' = if cpu' == 0 then 1 else mut' / cpu' +import Cardano.Analysis.API.Chain +import Cardano.Analysis.API.ChainFilter +import Cardano.Analysis.API.Context +import Cardano.Analysis.API.Dictionary +import Cardano.Analysis.API.Field +import Cardano.Analysis.API.Ground +import Cardano.Analysis.API.Metrics +import Cardano.Analysis.API.Run +import Cardano.Analysis.API.Types +import Cardano.Analysis.API.LocliVersion diff --git a/bench/locli/src/Cardano/Analysis/Chain.hs b/bench/locli/src/Cardano/Analysis/API/Chain.hs similarity index 89% rename from bench/locli/src/Cardano/Analysis/Chain.hs rename to bench/locli/src/Cardano/Analysis/API/Chain.hs index 7b0a31db693..e15de28c001 100644 --- a/bench/locli/src/Cardano/Analysis/Chain.hs +++ b/bench/locli/src/Cardano/Analysis/API/Chain.hs @@ -1,19 +1,15 @@ {-# LANGUAGE GeneralizedNewtypeDeriving#-} - {-# OPTIONS_GHC -Wno-orphans #-} - {- HLINT ignore "Use head" -} - -module Cardano.Analysis.Chain (module Cardano.Analysis.Chain) where +module Cardano.Analysis.API.Chain (module Cardano.Analysis.API.Chain) where import Cardano.Prelude hiding (head) import Data.Aeson -import Data.Time.Clock (UTCTime, NominalDiffTime) import Data.Time.Clock qualified as Time -import Cardano.Analysis.Context -import Cardano.Analysis.Ground +import Cardano.Analysis.API.Context +import Cardano.Analysis.API.Ground -- | A pretty obvious (and dangerously assumptious) interpretation of an absolute slot number. diff --git a/bench/locli/src/Cardano/Analysis/ChainFilter.hs b/bench/locli/src/Cardano/Analysis/API/ChainFilter.hs similarity index 87% rename from bench/locli/src/Cardano/Analysis/ChainFilter.hs rename to bench/locli/src/Cardano/Analysis/API/ChainFilter.hs index fea3a237f1e..71906fbc9fe 100644 --- a/bench/locli/src/Cardano/Analysis/ChainFilter.hs +++ b/bench/locli/src/Cardano/Analysis/API/ChainFilter.hs @@ -1,9 +1,6 @@ {-# LANGUAGE DeriveAnyClass #-} {-# LANGUAGE StrictData #-} - -{- HLINT ignore "Use head" -} - -module Cardano.Analysis.ChainFilter (module Cardano.Analysis.ChainFilter) where +module Cardano.Analysis.API.ChainFilter (module Cardano.Analysis.API.ChainFilter) where import Cardano.Prelude hiding (head) @@ -14,10 +11,8 @@ import Options.Applicative import Options.Applicative qualified as Opt import System.FilePath.Posix (takeBaseName) -import Cardano.Analysis.Ground import Cardano.Util - --- import Cardano.Analysis.Chain +import Cardano.Analysis.API.Ground newtype JsonFilterFile @@ -25,12 +20,13 @@ newtype JsonFilterFile deriving (Show, Eq) newtype FilterName = FilterName { unFilterName :: Text } + deriving (Eq, FromJSON, Generic, NFData, Show, ToJSON) -- | Conditions for chain subsetting data ChainFilter = CBlock BlockCond | CSlot SlotCond - deriving (FromJSON, Generic, NFData, Show, ToJSON) + deriving (Eq, FromJSON, Generic, NFData, Ord, Show, ToJSON) -- | Block classification -- primary for validity as subjects of analysis. 
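Chain filters compose as plain lists of `ChainFilter` values, applied conjunctively to every block (`isValidBlockEvent`, in the code removed above, `all`-applies them); for instance, using the `BlockCond` and `SlotCond` constructors defined just below:

```haskell
-- An illustrative filter set; a JsonFilterFile would carry the same list
-- in its JSON encoding.
standardFilters :: [ChainFilter]
standardFilters =
  [ CBlock BUnitaryChainDelta       -- block adopted with ChainDelta == 1
  , CBlock (BMinimumAdoptions 50)   -- adopted by at least 50 observers
  , CSlot  (SlotGEq (SlotNo 1000))  -- skip early warm-up slots
  ]
```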
data BlockCond @@ -41,7 +37,8 @@ data BlockCond | BSizeGEq Word64 | BSizeLEq Word64 | BMinimumAdoptions Word64 -- ^ At least this many adoptions - deriving (FromJSON, Generic, NFData, Show, ToJSON) + | BNonNegatives -- ^ Non-negative timings only + deriving (Eq, FromJSON, Generic, NFData, Ord, Show, ToJSON) data SlotCond = SlotGEq SlotNo @@ -53,7 +50,7 @@ data SlotCond | EpSlotGEq EpochSlot | EpSlotLEq EpochSlot | SlotHasLeaders - deriving (FromJSON, Generic, NFData, Show, ToJSON) + deriving (Eq, FromJSON, Generic, NFData, Ord, Show, ToJSON) cfIsSlotCond, cfIsBlockCond :: ChainFilter -> Bool cfIsSlotCond = \case { CSlot{} -> True; _ -> False; } diff --git a/bench/locli/src/Cardano/Analysis/Context.hs b/bench/locli/src/Cardano/Analysis/API/Context.hs similarity index 92% rename from bench/locli/src/Cardano/Analysis/Context.hs rename to bench/locli/src/Cardano/Analysis/API/Context.hs index b9bdaf4b047..24b79f7b60c 100644 --- a/bench/locli/src/Cardano/Analysis/Context.hs +++ b/bench/locli/src/Cardano/Analysis/API/Context.hs @@ -1,7 +1,7 @@ {-# LANGUAGE DeriveAnyClass #-} {-# LANGUAGE GeneralisedNewtypeDeriving #-} {-# LANGUAGE StrictData #-} -module Cardano.Analysis.Context (module Cardano.Analysis.Context) where +module Cardano.Analysis.API.Context (module Cardano.Analysis.API.Context) where import Cardano.Prelude @@ -46,11 +46,13 @@ data PParams data GeneratorProfile = GeneratorProfile - { add_tx_size :: Word64 - , inputs_per_tx :: Word64 - , outputs_per_tx :: Word64 - , tps :: Double - , tx_count :: Word64 + { add_tx_size :: Word64 + , inputs_per_tx :: Word64 + , outputs_per_tx :: Word64 + , tps :: Double + , tx_count :: Word64 + , plutusMode :: Maybe Bool + , plutusLoopScript :: Maybe FilePath } deriving (Generic, Show, FromJSON, ToJSON) diff --git a/bench/locli/src/Cardano/Analysis/API/Dictionary.hs b/bench/locli/src/Cardano/Analysis/API/Dictionary.hs new file mode 100644 index 00000000000..4f9e7d7218f --- /dev/null +++ b/bench/locli/src/Cardano/Analysis/API/Dictionary.hs @@ -0,0 +1,41 @@ +{-# LANGUAGE DeriveAnyClass #-} +module Cardano.Analysis.API.Dictionary (module Cardano.Analysis.API.Dictionary) where + +import Cardano.Prelude + +import Data.Aeson +import Data.Map.Strict qualified as M + +import Cardano.Analysis.API.Field +import Cardano.Analysis.API.Types +import Cardano.Analysis.API.Metrics () + + +data DictEntry where + DictEntry :: + { deField :: !Text + , deShortDesc :: !Text + , deDescription :: !Text + } -> DictEntry + deriving (Generic, FromJSON, ToJSON, Show) + +data Dictionary where + Dictionary :: + { dBlockProp :: !(Map Text DictEntry) + , dClusterPerf :: !(Map Text DictEntry) + } -> Dictionary + deriving (Generic, FromJSON, ToJSON, Show) + +metricDictionary :: Dictionary +metricDictionary = + Dictionary + { dBlockProp = cdfFields @BlockProp <&> extractEntry & M.fromList + , dClusterPerf = cdfFields @MachPerf <&> extractEntry & M.fromList + } + where extractEntry :: Field DSelect p a -> (Text, DictEntry) + extractEntry Field{..} = (fId,) $ + DictEntry + { deField = fId + , deShortDesc = fShortDesc + , deDescription = fDescription + } diff --git a/bench/locli/src/Cardano/Analysis/API/Field.hs b/bench/locli/src/Cardano/Analysis/API/Field.hs new file mode 100644 index 00000000000..935d1053de5 --- /dev/null +++ b/bench/locli/src/Cardano/Analysis/API/Field.hs @@ -0,0 +1,224 @@ +{-# LANGUAGE TypeInType #-} +{-# LANGUAGE ScopedTypeVariables #-} +module Cardano.Analysis.API.Field (module Cardano.Analysis.API.Field) where + +import Cardano.Prelude hiding (head, show) + +import 
Data.CDF
+import Data.String (fromString)
+import Data.Text (unpack)
+
+import Cardano.JSON
+import Cardano.Util
+import Cardano.Analysis.API.Ground
+
+
+data Scale
+  = Lin
+  | Log
+  deriving (Eq, Show)
+
+data Range
+  = Free   -- No range restriction
+  | Z0 Int -- 0-based range
+  | Z1 Int -- 1-based range
+  | R01    -- Ratio range: [0;1]
+  deriving (Eq, Show)
+
+data Unit
+  = Sec  -- Second
+  | Hz   -- Hertz
+  | B    -- Byte
+  | KB   -- Kibibyte: 2^10
+  | MB   -- Mebibyte: 2^20
+  | KBs  -- Kibibyte/s
+  | MBs  -- Mebibyte/s
+  | Era  -- Era
+  | Epo  -- Epoch
+  | Slo  -- Slots
+  | Blk  -- Blocks
+  | Hsh  -- Hash
+  | Hos  -- Host
+  | Sig  -- Sign: +/-
+  | Pct  -- Unspecified ratio, percents
+  | Ev   -- Events
+  | KEv  -- Events: 10^3
+  | Dat  -- Date
+  | Tim  -- Time
+  | Ver  -- Version
+  | Ix   -- Unspecified index
+  | Len  -- Unspecified length
+  | Cnt  -- Unspecified count
+  | Rto  -- Unspecified ratio
+  | Uni  -- Unspecified unit
+  | Id   -- Unspecified identifier
+  deriving (Eq, Show)
+
+renderUnit :: Unit -> Text
+renderUnit = \case
+  Sec -> "sec"
+  Hz  -> "Hz"
+  B   -> "B"
+  KB  -> "KB"
+  MB  -> "MB"
+  KBs -> "KB/s"
+  MBs -> "MB/s"
+  Era -> "era"
+  Epo -> "epoch"
+  Slo -> "slots"
+  Blk -> "blocks"
+  Hsh -> "hash"
+  Hos -> "host"
+  Sig -> "+/-"
+  Pct -> "%"
+  Ev  -> "#"
+  KEv -> "#"
+  Dat -> "on"
+  Tim -> "at"
+  Ver -> "v"
+  Ix  -> "[]"
+  Len -> "#"
+  Cnt -> "#"
+  Rto -> "/"
+  Uni -> "#"
+  Id  -> ""
+
+data Width
+  = Wno
+  | W1
+  | W2
+  | W3
+  | W4
+  | W5
+  | W6
+  | W7
+  | W8
+  | W9
+  | W10
+  | W11
+  | W12
+  | W13
+  | W14
+  | W15
+  | W16
+  | W17
+  | W18
+  | W19
+  | W20
+  | W21
+  deriving (Eq, Enum, Ord, Show)
+
+data Precision
+  = P0
+  | P1
+  | P2
+  | P3
+  deriving (Eq, Enum, Ord, Show)
+
+{-# INLINE width #-}
+width :: Width -> Int
+width Wno = 80
+width x   = fromEnum x
+
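For orientation before the `Field` machinery: the comment below documents the record's three type parameters. The aliases in this sketch are hypothetical, added only to show the two instantiations used throughout locli (aggregate CDF fields vs. single-run timeline fields):

```haskell
-- Hypothetical aliases, not part of the module:
type ACdfField      = Field DSelect (CDF I) BlockProp   -- one instantiation behind cdfFields
type ATimelineField = Field ISelect I       BlockEvents -- as used by timelineFields
```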
+-- | Encapsulate all metadata about a metric (a projection) of
+-- a certain projectible (a kind of analysis results):
+-- - first parameter encapsulates the projection descriptor
+-- - second parameter sets the arity (I vs. CDF I)
+-- - third parameter is the projectible indexed by arity
+data Field (s :: (Type -> Type) -> k -> Type) (p :: Type -> Type) (a :: k)
+  = Field
+  { fId          :: Text
+  , fHead1       :: Text
+  , fHead2       :: Text
+  , fWidth       :: Width
+  , fUnit        :: Unit
+  , fPrecision   :: Precision
+  , fScale       :: Scale
+  , fRange       :: Range
+  , fSelect      :: s p a
+  , fShortDesc   :: Text
+  , fDescription :: Text
+  }
+
+class CDFFields a p where
+  cdfFields        :: [Field DSelect p a]
+  fieldJSONOverlay :: Field DSelect p a -> Object -> [Maybe Object]
+
+class TimelineFields a where
+  data TimelineComments a :: Type
+  timelineFields :: [Field ISelect I a]
+  rtCommentary   :: a -> TimelineComments a -> [Text]
+  rtCommentary _ _ = []
+
+data FSelect where
+  ISel :: TimelineFields a => (Field ISelect I a -> Bool) -> FSelect
+  DSel :: CDFFields a p   => (Field DSelect p a -> Bool) -> FSelect
+
+data DSelect p a
+  = DInt    (a p -> CDF p Int)
+  | DWord64 (a p -> CDF p Word64)
+  | DFloat  (a p -> CDF p Double)
+  | DDeltaT (a p -> CDF p NominalDiffTime)
+
+data ISelect p a
+  = IInt     (a -> Int)
+  | IWord64  (a -> Word64)
+  | IWord64M (a -> SMaybe Word64)
+  | IFloat   (a -> Double)
+  | IDeltaT  (a -> NominalDiffTime)
+  | IDeltaTM (a -> SMaybe NominalDiffTime)
+  | IDate    (a -> UTCTime)
+  | ITime    (a -> UTCTime)
+  | IText    (a -> Text)
+
+dFields :: [FieldName] -> Field DSelect p a -> Bool
+dFields fs Field{fId} = FieldName fId `elem` fs
+
+iFields :: [FieldName] -> Field ISelect I a -> Bool
+iFields fs Field{fId} = FieldName fId `elem` fs
+
+filterFields :: CDFFields a p
+             => (Field DSelect p a -> Bool) -> [Field DSelect p a]
+filterFields f = filter f cdfFields
+
+mapField :: a p -> (forall v. Divisible v => CDF p v -> b) -> Field DSelect p a -> b
+mapField x cdfProj Field{..} =
+  case fSelect of
+    DInt    (cdfProj . ($x) ->r) -> r
+    DWord64 (cdfProj . ($x) ->r) -> r
+    DFloat  (cdfProj . ($x) ->r) -> r
+    DDeltaT (cdfProj . ($x) ->r) -> r
+
+mapFieldWithKey :: a p -> (forall v. Divisible v => Field DSelect p a -> CDF p v -> b) -> Field DSelect p a -> b
+mapFieldWithKey x cdfProj f@Field{..} =
+  case fSelect of
+    DInt    (cdfProj f . ($x) ->r) -> r
+    DWord64 (cdfProj f . ($x) ->r) -> r
+    DFloat  (cdfProj f . ($x) ->r) -> r
+    DDeltaT (cdfProj f . ($x) ->r) -> r
+
+tryOverlayFieldDescription :: Field DSelect p a -> Object -> Maybe Object
+tryOverlayFieldDescription Field{..} =
+  alterSubObject (Just . overlayJSON [ ("description", String fDescription)
+                                     , ("shortDesc",   String fShortDesc)
+                                     ])
+                 (fromString $ unpack fId)
+
+processFieldOverlays :: forall a p. CDFFields a p => a p -> Object -> Object
+processFieldOverlays _ o =
+  foldr' (\f -> handleMiss f . fieldJSONOverlay @a @p f) o cdfFields
+ where
+   handleMiss Field{..} =
+     fromMaybe (error $ "fieldJSONOverlay: failed to handle field " <> unpack fId)
+     . getFirst
+     . mconcat
+     . fmap First
+
+
+mapSomeFieldCDF :: forall p c a. (forall b.
Divisible b => CDF p b -> c) -> a p -> DSelect p a -> c +mapSomeFieldCDF f a = \case + DInt s -> f (s a) + DWord64 s -> f (s a) + DFloat s -> f (s a) + DDeltaT s -> f (s a) + diff --git a/bench/locli/src/Cardano/Analysis/Ground.hs b/bench/locli/src/Cardano/Analysis/API/Ground.hs similarity index 61% rename from bench/locli/src/Cardano/Analysis/Ground.hs rename to bench/locli/src/Cardano/Analysis/API/Ground.hs index 419c78bc5de..829dc2465ff 100644 --- a/bench/locli/src/Cardano/Analysis/Ground.hs +++ b/bench/locli/src/Cardano/Analysis/API/Ground.hs @@ -1,34 +1,45 @@ {-# LANGUAGE DeriveAnyClass #-} {-# LANGUAGE GeneralisedNewtypeDeriving #-} {-# OPTIONS_GHC -Wno-orphans #-} -module Cardano.Analysis.Ground - ( module Cardano.Analysis.Ground +module Cardano.Analysis.API.Ground + ( module Cardano.Analysis.API.Ground , module Data.DataDomain + , module Data.Time.Clock , BlockNo (..), EpochNo (..), SlotNo (..) ) where -import Prelude (String, fail, show) +import Prelude (fail, show) import Cardano.Prelude hiding (head) +import Unsafe.Coerce qualified as Unsafe import Data.Aeson import Data.Aeson.Types (toJSONKeyText) import Data.Attoparsec.Text qualified as Atto import Data.Attoparsec.Time qualified as Iso8601 +import Data.ByteString.Lazy.Char8 qualified as LBS import Data.Text qualified as T import Data.Text.Short qualified as SText import Data.Text.Short (ShortText, fromText, toText) -import Data.Time.Clock (UTCTime, NominalDiffTime) +import Data.Time.Clock (UTCTime, NominalDiffTime) import Options.Applicative import Options.Applicative qualified as Opt import Quiet (Quiet (..)) +import System.FilePath qualified as F import Cardano.Slotting.Slot (EpochNo(..), SlotNo(..)) import Ouroboros.Network.Block (BlockNo(..)) import Data.DataDomain +import Cardano.Util +newtype FieldName = FieldName { unFieldName :: Text } + deriving (Eq, Generic, Ord) + deriving newtype (FromJSON, IsString, ToJSON) + deriving anyclass NFData + deriving Show via Quiet FieldName + newtype TId = TId { unTId :: ShortText } deriving (Eq, Generic, Ord) deriving newtype (FromJSON, ToJSON) @@ -50,6 +61,26 @@ instance ToJSONKey Hash where instance FromJSONKey Hash where fromJSONKey = FromJSONKeyText (Hash . fromText) +newtype Count a = Count { unCount :: Int } + deriving (Eq, Generic, Ord, Show) + deriving newtype (FromJSON, Num, ToJSON) + deriving anyclass NFData + +countList :: (a -> Bool) -> [a] -> Count a +countList f = Count . fromIntegral . count f + +countLists :: (a -> Bool) -> [[a]] -> Count a +countLists f = Count . fromIntegral . sum . fmap (count f) + +countListAll :: [a] -> Count a +countListAll = Count . fromIntegral . length + +countListsAll :: [[a]] -> Count a +countListsAll = Count . fromIntegral . sum . fmap length + +unsafeCoerceCount :: Count a -> Count b +unsafeCoerceCount = Unsafe.unsafeCoerce + newtype Host = Host { unHost :: ShortText } deriving (Eq, Generic, Ord) deriving newtype (IsString, FromJSON, ToJSON) @@ -124,6 +155,23 @@ newtype OutputFile = OutputFile { unOutputFile :: FilePath } deriving (Show, Eq) +--- +--- Readers +--- +readJsonData :: FromJSON a => JsonInputFile a -> (Text -> b) -> ExceptT b IO a +readJsonData f err = + unJsonInputFile f + & LBS.readFile + & fmap eitherDecode + & newExceptT + & firstExceptT (err . 
T.pack) + +readJsonDataIO :: FromJSON a => JsonInputFile a -> IO (Either String a) +readJsonDataIO f = + unJsonInputFile f + & LBS.readFile + & fmap eitherDecode + --- --- Parsers --- @@ -240,3 +288,62 @@ optWord optname desc def = <> metavar "INT" <> help desc <> value def +-- /path/to/logs-HOSTNAME.some.ext -> HOSTNAME +hostFromLogfilename :: JsonLogfile -> Host +hostFromLogfilename (JsonLogfile f) = + Host $ fromText . stripPrefixHard "logs-" . T.pack . F.dropExtensions . F.takeFileName $ f + where + stripPrefixHard :: Text -> Text -> Text + stripPrefixHard p s = fromMaybe s $ T.stripPrefix p s + +hostDeduction :: HostDeduction -> (JsonLogfile -> Host) +hostDeduction = \case + HostFromLogfilename -> hostFromLogfilename + +dumpObject :: ToJSON a => String -> a -> JsonOutputFile a -> ExceptT Text IO () +dumpObject ident x (JsonOutputFile f) = liftIO $ do + progress ident (Q f) + withFile f WriteMode $ \hnd -> LBS.hPutStrLn hnd $ encode x + +dumpObjects :: ToJSON a => String -> [a] -> JsonOutputFile [a] -> ExceptT Text IO () +dumpObjects ident xs (JsonOutputFile f) = liftIO $ do + progress ident (Q f) + withFile f WriteMode $ \hnd -> do + forM_ xs $ LBS.hPutStrLn hnd . encode + +dumpAssociatedObjects :: ToJSON a => String -> [(JsonLogfile, a)] -> ExceptT Text IO () +dumpAssociatedObjects ident xs = liftIO $ + flip mapConcurrently_ xs $ + \(JsonLogfile f, x) -> do + progress ident (Q f) + withFile (replaceExtension f $ ident <> ".json") WriteMode $ \hnd -> + LBS.hPutStrLn hnd $ encode x + +readAssociatedObjects :: forall a. + FromJSON a => String -> [JsonLogfile] -> ExceptT Text IO [(JsonLogfile, a)] +readAssociatedObjects ident fs = firstExceptT T.pack . newExceptT . fmap sequence . fmap (fmap sequence) $ + flip mapConcurrently fs $ + \jf@(JsonLogfile f) -> do + x <- eitherDecode @a <$> LBS.readFile (replaceExtension f $ ident <> ".json") + progress ident (Q f) + pure (jf, x) + +dumpAssociatedObjectStreams :: ToJSON a => String -> [(JsonLogfile, [a])] -> ExceptT Text IO () +dumpAssociatedObjectStreams ident xss = liftIO $ + flip mapConcurrently_ xss $ + \(JsonLogfile f, xs) -> do + withFile (replaceExtension f $ ident <> ".json") WriteMode $ \hnd -> do + forM_ xs $ LBS.hPutStrLn hnd . 
encode + +dumpText :: String -> [Text] -> TextOutputFile -> ExceptT Text IO () +dumpText ident xs (TextOutputFile f) = liftIO $ do + progress ident (Q f) + withFile f WriteMode $ \hnd -> do + forM_ xs $ hPutStrLn hnd + +dumpAssociatedTextStreams :: String -> [(JsonLogfile, [Text])] -> ExceptT Text IO () +dumpAssociatedTextStreams ident xss = liftIO $ + flip mapConcurrently_ xss $ + \(JsonLogfile f, xs) -> do + withFile (replaceExtension f $ ident <> ".txt") WriteMode $ \hnd -> do + forM_ xs $ hPutStrLn hnd diff --git a/bench/locli/src/Cardano/Analysis/Version.hs b/bench/locli/src/Cardano/Analysis/API/LocliVersion.hs similarity index 67% rename from bench/locli/src/Cardano/Analysis/Version.hs rename to bench/locli/src/Cardano/Analysis/API/LocliVersion.hs index aa85af89694..1d7729543b8 100644 --- a/bench/locli/src/Cardano/Analysis/Version.hs +++ b/bench/locli/src/Cardano/Analysis/API/LocliVersion.hs @@ -1,7 +1,5 @@ {-# LANGUAGE DeriveAnyClass #-} -module Cardano.Analysis.Version - (Version (..), getVersion, renderProgramAndVersion) -where +module Cardano.Analysis.API.LocliVersion (module Cardano.Analysis.API.LocliVersion) where import Cardano.Prelude (NFData, mconcat) import Cardano.Git.Rev qualified (gitRev) @@ -13,23 +11,23 @@ import Paths_locli (version) import Prelude (Show) -data Version = - Version +data LocliVersion = + LocliVersion { gitRev :: Text , version :: Text } deriving (Generic, FromJSON, Show, ToJSON) deriving anyclass NFData -getVersion :: Version -getVersion = - Version +getLocliVersion :: LocliVersion +getLocliVersion = + LocliVersion Cardano.Git.Rev.gitRev (pack (showVersion Paths_locli.version)) -renderProgramAndVersion :: Version -> Text +renderProgramAndVersion :: LocliVersion -> Text renderProgramAndVersion v = mconcat [ "locli " - , Cardano.Analysis.Version.version v + , Cardano.Analysis.API.LocliVersion.version v , " (", take 6 (gitRev v), ")" ] diff --git a/bench/locli/src/Cardano/Analysis/API/Metrics.hs b/bench/locli/src/Cardano/Analysis/API/Metrics.hs new file mode 100644 index 00000000000..baf95a8d3bc --- /dev/null +++ b/bench/locli/src/Cardano/Analysis/API/Metrics.hs @@ -0,0 +1,647 @@ +{-# LANGUAGE EmptyDataDeriving #-} +{-# LANGUAGE GeneralisedNewtypeDeriving #-} +{-# LANGUAGE UndecidableInstances #-} +{-# OPTIONS_GHC -Wno-orphans #-} +{- HLINT ignore "Use head" -} +{- HLINT ignore "Use null" -} +module Cardano.Analysis.API.Metrics (module Cardano.Analysis.API.Metrics) where + +-- Prelude: +import Prelude ((!!)) +import Cardano.Prelude + +-- Global, non-prelude: +import Data.Aeson +import Data.Aeson qualified as AE +import Data.ByteString.Lazy.Char8 qualified as LBS +import Data.Text qualified as T +import Data.Text.Short qualified as ST +import Data.Time.Clock +import Data.Map.Strict qualified as M + +-- Repo-local: +import Cardano.Logging.Resources.Types + +-- Package-local: +import Data.CDF + +import Cardano.JSON +import Cardano.Util +import Cardano.Analysis.API.Context +import Cardano.Analysis.API.Field +import Cardano.Analysis.API.Ground +import Cardano.Analysis.API.Types + + +sumFieldsReport :: [FieldName] +sumFieldsReport = + [ "date.systemStart", "time.systemStart", "sumAnalysisTime" + , "batch" + , "cardano-node", "ouroboros-network" , "cardano-ledger", "plutus", "cardano-crypto", "cardano-base" + , "era" + , "delegators", "utxo" + , "add_tx_size", "inputs_per_tx", "outputs_per_tx" , "tps", "tx_count" + , "plutusScript" + , "sumLogStreams", "sumLogObjectsTotal" + , "sumFilters" + , "ddRawCount.sumDomainTime", "ddFilteredCount.sumDomainTime", 
"dataDomainFilterRatio.sumDomainTime" + , "ddRaw.sumStartSpread", "ddRaw.sumStopSpread" + , "ddFiltered.sumStartSpread", "ddFiltered.sumStopSpread" + , "sumDomainSlots", "sumDomainBlocks", "sumBlocksRejected" ] + +instance TimelineFields SummaryOne where + data TimelineComments SummaryOne + deriving Show + + timelineFields = + fScalar "sumAnalysisTime" W10 Dat (IText $ showText.roundUTCTimeDay.sumAnalysisTime) + "Analysis date" + "Date of analysis" + + <> fScalar "date.systemStart" W10 Dat (IDate $ systemStart.sumGenesis) + "Cluster system start date" + "Date of cluster genesis systemStart" + + <> fScalar "time.systemStart" W8 Tim (ITime $ systemStart.sumGenesis) + "Cluster system start date" + "Date of cluster genesis systemStart" + + <> fScalar "batch" Wno Id (IText $ batch.sumMeta) + "Run batch" + "" + + <> fScalar "cardano-node" W5 Ver (IText $ unCommit.mNode.manifest.sumMeta) + "cardano-node version" + "" + + <> fScalar "ouroboros-network" W5 Ver (IText $ unCommit.mNetwork.manifest.sumMeta) + "ouroboros-network version" + "" + + <> fScalar "cardano-ledger" W5 Ver (IText $ unCommit.mLedger.manifest.sumMeta) + "cardano-ledger version" + "" + + <> fScalar "plutus" W5 Ver (IText $ unCommit.mPlutus.manifest.sumMeta) + "plutus version" + "" + + <> fScalar "cardano-crypto" W5 Ver (IText $ unCommit.mCrypto.manifest.sumMeta) + "cardano-crypto version" + "" + + <> fScalar "cardano-base" W5 Ver (IText $ unCommit.mBase.manifest.sumMeta) + "cardano-base version" + "" + + <> fScalar "era" Wno Era (IText $ era.sumMeta) + "Era" + "Benchmark era" + + <> fScalar "delegators" Wno Cnt (IWord64 $ delegators.sumGenesisSpec) + "Delegation map size" + "" + + <> fScalar "utxo" Wno Cnt (IWord64 $ utxo.sumGenesisSpec) + "Starting UTxO set size" + "Extra UTxO set size at the beginning of the benchmark" + + <> fScalar "add_tx_size" Wno B (IWord64 $ add_tx_size.sumGenerator) + "Extra tx payload" + "" + + <> fScalar "inputs_per_tx" Wno Cnt (IWord64 $ inputs_per_tx.sumGenerator) + "Tx inputs" + "" + + <> fScalar "outputs_per_tx" Wno Cnt (IWord64 $ outputs_per_tx.sumGenerator) + "Tx Outputs" + "" + + <> fScalar "tps" Wno Hz (IFloat $ tps.sumGenerator) + "TPS" + "Offered load, transactions per second" + + <> fScalar "tx_count" Wno Cnt (IWord64 $ tx_count.sumGenerator) + "Transaction count" + "Number of transactions prepared for submission, but not necessarily submitted" + + <> fScalar "plutusScript" Wno Id (IText $ T.pack.fromMaybe "---".plutusLoopScript.sumGenerator) + "Plutus script" + "Name of th Plutus script used for smart contract workload generation, if any" + + <> fScalar "sumLogStreams" Wno Cnt (IInt $ unCount.sumLogStreams) + "Machines" + "Number of machines under analysis" + + <> fScalar "sumLogObjectsTotal" Wno Cnt (IInt $ unCount.sumLogObjectsTotal) + "Total log objects analysed" + "" + + <> fScalar "sumFilters" Wno Cnt (IInt $ length.snd.sumFilters) + "Number of filters applied" + "" + + <> fScalar "ddRawCount.sumDomainTime" Wno Sec (IInt $ ddRawCount.sumDomainTime) + "Run time, s" + "" + + <> fScalar "ddFilteredCount.sumDomainTime" Wno Sec (IInt $ ddFilteredCount.sumDomainTime) + "Analysed run duration, s" + "" + + <> fScalar "dataDomainFilterRatio.sumDomainTime" W4 Rto (IFloat $ dataDomainFilterRatio.sumDomainTime) + "Run time efficiency" + "" + + <> fScalar "ddRaw.sumStartSpread" Wno Sec (IDeltaT$ intvDurationSec.ddRaw.sumStartSpread) + "Node start spread, s" + "" + + <> fScalar "ddRaw.sumStopSpread" Wno Sec (IDeltaT$ intvDurationSec.ddRaw.sumStopSpread) + "Node stop spread, s" + "" + + <> fScalar 
"ddFiltered.sumStartSpread" Wno Sec (IDeltaT$ maybe 0 intvDurationSec.ddFiltered.sumStartSpread) + "Perf analysis start spread, s" + "" + + <> fScalar "ddFiltered.sumStopSpread" Wno Sec (IDeltaT$ maybe 0 intvDurationSec.ddFiltered.sumStopSpread) + "Perf analysis stop spread, s" + "" + + <> fScalar "sumDomainSlots" Wno Slo (IInt $ ddFilteredCount.sumDomainSlots) + "Slots analysed" + "" + + <> fScalar "sumDomainBlocks" Wno Blk (IInt $ ddFilteredCount.sumDomainBlocks) + "Blocks analysed" + "" + + <> fScalar "sumBlocksRejected" Wno Cnt (IInt $ unCount . sumBlocksRejected) + "Blocks rejected" + "" + -- fieldJSONOverlay f = (:[]) . tryOverlayFieldDescription f + +propSubsetFn :: PropSubset -> (Field DSelect p a -> Bool) +propSubsetFn = \case + PropFull -> const True + PropControl -> dFields bpFieldsControl + PropForger -> dFields bpFieldsForger + PropPeers -> dFields bpFieldsPeers + PropEndToEnd -> dFields bpFieldsEndToEnd + PropEndToEndBrief -> dFields bpFieldsEndToEndBrief + +bpFieldsControl, bpFieldsForger, bpFieldsPeers, bpFieldsEndToEnd, bpFieldsEndToEndBrief :: [FieldName] +bpFieldsControl = + [ "cdfBlocksPerHost", "cdfBlocksFilteredRatio", "cdfBlocksChainedRatio", "cdfBlockBattles", "cdfBlockSizes" ] +bpFieldsForger = + [ "cdfForgerStarts", "cdfForgerBlkCtx", "cdfForgerLgrState", "cdfForgerLgrView", "cdfForgerLeads", "cdfForgerForges", "cdfForgerAnnouncements", "cdfForgerSends", "cdfForgerAdoptions" ] +bpFieldsPeers = + [ "cdfPeerNotices", "cdfPeerRequests", "cdfPeerFetches", "cdfPeerAnnouncements", "cdfPeerSends", "cdfPeerAdoptions" ] +bpFieldsEndToEnd = + adoptionCentiles <&> FieldName . renderAdoptionCentile +bpFieldsEndToEndBrief = + adoptionCentilesBrief <&> FieldName . renderAdoptionCentile + +instance CDFFields BlockProp p where + cdfFields = + fGrp ",------------- Forger event Δt: -------------." 
+      W4 Sec P3 Log Free
+    [ fGrp' "cdfForgerStarts" "Loop" (DDeltaT cdfForgerStarts)
+      "Started forge loop iteration"
+      "Forge loop iteration delay (TraceStartLeadershipCheck), relative to slot start"
+    , fGrp' "cdfForgerBlkCtx" "BkCt" (DDeltaT cdfForgerBlkCtx)
+      "Acquired block context"
+      "Block context acquired (TraceBlockContext), relative to forge loop beginning"
+
+    , fGrp' "cdfForgerLgrState" "LgSt" (DDeltaT cdfForgerLgrState)
+      "Acquired ledger state"
+      "Ledger state acquired (TraceLedgerState), relative to block context acquisition"
+
+    , fGrp' "cdfForgerLgrView" "LgVi" (DDeltaT cdfForgerLgrView)
+      "Acquired ledger view"
+      "Ledger view acquired (TraceLedgerView), relative to ledger state acquisition"
+
+    , fGrp' "cdfForgerLeads" "Lead" (DDeltaT cdfForgerLeads)
+      "Leadership check duration"
+      "Leadership check duration (TraceNodeIsNotLeader, TraceNodeIsLeader), relative to ledger view acquisition"
+
+    , fGrp' "cdfForgerForges" "Forg" (DDeltaT cdfForgerForges)
+      "Leadership to forged"
+      "Time spent forging the block (TraceForgedBlock), relative to positive leadership decision"
+
+    , fGrp' "cdfForgerAnnouncements" "Anno" (DDeltaT cdfForgerAnnouncements)
+      "Forged to announced"
+      "Time until block was announced (ChainSyncServerEvent.TraceChainSyncServerRead.AddBlock), since block forging completion"
+
+    , fGrp' "cdfForgerSends" "Send" (DDeltaT cdfForgerSends)
+      "Forged to sending"
+      "Time until block sending was initiated (TraceBlockFetchServerSendBlock), since block forging completion"
+
+    , fGrp' "cdfForgerAdoptions" "Adop" (DDeltaT cdfForgerAdoptions)
+      "Forged to self-adopted"
+      "Time it took to adopt the block (TraceAdoptedBlock), since block forging completion"
+    ]
+   <> fGrp ",------- Peer event Δt: -------."
+      W4 Sec P3 Log Free
+    [ fGrp' "cdfPeerNotices" "Noti" (DDeltaT cdfPeerNotices)
+      "First peer notice"
+      "Time it took for the fastest peer to notice the block (ChainSyncClientEvent.TraceDownloadedHeader), since block's slot start"
+
+    , fGrp' "cdfPeerRequests" "Requ" (DDeltaT cdfPeerRequests)
+      "Notice to fetch request"
+      "Time it took the peer to request the block body (BlockFetchClient.SendFetchRequest), after it had seen its header"
+
+    , fGrp' "cdfPeerFetches" "Fetc" (DDeltaT cdfPeerFetches)
+      "Fetch duration"
+      "Time it took the peer to complete fetching the block (BlockFetchClient.CompletedBlockFetch), after having requested it"
+
+    , fGrp' "cdfPeerAnnouncements" "Anno" (DDeltaT cdfPeerAnnouncements)
+      "Fetched to announced"
+      "Time it took a peer to announce the block (ChainSyncServerEvent.TraceChainSyncServerUpdate), since it was fetched"
+
+    , fGrp' "cdfPeerSends" "Send" (DDeltaT cdfPeerSends)
+      "Fetched to sending"
+      "Time until the peer started sending the block (BlockFetchServer.SendBlock), since it was fetched"
+
+    , fGrp' "cdfPeerAdoptions" "Adop" (DDeltaT cdfPeerAdoptions)
+      "Fetched to adopted"
+      "Time until the peer adopts the block (TraceAddBlockEvent.AddedToCurrentChain), since it was fetched"
+    ]
+   <> [ Field (renderAdoptionCentile ct) (r!!i)
+              (T.take 4 $ T.pack $ printf "%.04f" centi)
+              W4 Rto P3 Log Free
+              (DDeltaT $
+                 checkCentile i centi
+                 . fromMaybe (error $ printf "No centile %d/%f in bpPropagation."
+                              i centi)
+                 . flip atMay i
+                 . M.toList
+                 . bpPropagation)
+              (T.pack $ printf "%.2f adoption" centi)
+              (T.pack $ printf "Time since slot start to block's adoption by %d%% of the cluster." (ceiling $ centi * 100 :: Int))
+        -- (T.pack $ printf "Block adopted by %.2f fraction of the entire cluster." centi)
+      | (i, ct@(Centile centi)) <- zip [0::Int ..]
adoptionCentiles + ] + + <> fBoth "cdfBlocksPerHost" "Host" "blks" W4 Blk P0 Lin Free (DInt cdfBlocksPerHost) + "Blocks per host" + "For a given host, number of blocks made during the entire observation period" + + <> fBoth "cdfBlocksFilteredRatio" "Filtr" "blks" W4 Rto P3 Lin R01 (DFloat cdfBlocksFilteredRatio) + "Filtered to chained block ratio" + "For a given host, ratio of blocks that passed filtering / all on chain" + + <> fBoth "cdfBlocksChainedRatio" "Chain" "blks" W4 Rto P3 Lin R01 (DFloat cdfBlocksChainedRatio) + "Chained to forged block ratio" + "For a given host, ratio of blocks that made into chain / all forged" + + <> fBoth "cdfBlockBattles" "Battl" " #" W4 Blk P0 Lin Free (DInt cdfBlockBattles) + "Height & slot battles" + "For a given block, number of all abandoned blocks at its block height. Sum of height and slot battles" + + <> fBoth "cdfBlockSizes" "Size" "bytes" W9 B P0 Lin Free (DInt cdfBlockSizes) + "Block size" + "Block size, in bytes" + where r = nChunksEachOf (length adoptionCentiles) 5 + ",-- Slot-rel. Δt to adoption centile: -." + checkCentile i centi (centi', d) = + if T.unpack centi' == printf "cdf%.2f" centi then d + else error $ printf "Centile mismatch: [%d]: exp=%f act=%s" + i centi (T.unpack centi') + + fieldJSONOverlay f kvs = + [ overlay kvs + , alterSubObject overlay "bpPropagation" kvs + ] + where overlay = tryOverlayFieldDescription f + +instance TimelineFields BlockEvents where + timelineFields = + fBoth' "beBlockNo" "block" "no." W5 Blk P0 Lin Free (IWord64 (unBlockNo.beBlockNo)) + <> fBoth' "beSlotNo" "abs." "slot#" W5 Slo P0 Lin Free (IWord64 (unSlotNo .beSlotNo)) + <> fBoth' "beBlock" "block" "hash" W6 Hsh P0 Lin Free (IText (shortHash.beBlock)) + <> fBoth' "beBlockPrev" "prev" "hash" W6 Hsh P0 Lin Free (IText (shortHash.beBlockPrev)) + <> fBoth' "bfForger" "forger" "host" W7 Hos P0 Lin Free (IText (ST.toText.unHost.bfForger . beForge)) + <> fBoth' "bfBlockSize" "size" "bytes" W6 B P0 Lin Free (IInt (bfBlockSize.beForge)) + <> fBoth' "bfBlockGap" "block" "gap" W5 Len P0 Lin Free (IDeltaT (bfBlockGap .beForge)) + <> fBoth' "bpeIsFork" "for" "-ks" W3 Blk P0 Lin Free (IInt (count bpeIsFork.beErrors)) + <> fGrp "--------- Forger event Δt: ---------" + W4 Sec P3 Log Free + [ fGrp' "bfStarted" "Start" (IDeltaT (bfStarted .beForge)) "" "" + , fGrp' "bfBlkCtx" "BlkCtx" (IDeltaTM (bfBlkCtx .beForge)) "" "" + , fGrp' "bfLgrState" "LgrSta" (IDeltaTM (bfLgrState.beForge)) "" "" + , fGrp' "bfLgrView" "LgrVie" (IDeltaTM (bfLgrView .beForge)) "" "" + , fGrp' "bfLeading" "Lead" (IDeltaT (bfLeading .beForge)) "" "" + , fGrp' "bfForged" "Forge" (IDeltaT (bfForged .beForge)) "" "" + , fGrp' "bfAnnounced" "Announ" (IDeltaT (bfAnnounced.beForge)) "" "" + , fGrp' "bfSending" "Sendin" (IDeltaT (bfSending .beForge)) "" "" + , fGrp' "bfAdopted" "Adopt" (IDeltaT (bfAdopted .beForge)) "" "" + ] + <> fGrp "-- Peer event Δt averages: --" + W4 Sec P3 Log Free + [ fGrp' "boNoticed" "Notic" (IDeltaT (af boNoticed . valids)) "" "" + , fGrp' "boRequested" "Requd" (IDeltaT (af boRequested . valids)) "" "" + , fGrp' "boFetched" "Fetch" (IDeltaT (af boFetched . valids)) "" "" + , fGrp' "boAnnounced" "Annou" (IDeltaT (af' boAnnounced . valids)) "" "" + , fGrp' "boSending" "Send" (IDeltaT (af' boSending . valids)) "" "" + , fGrp' "pAdopted" "Adopt" (IDeltaT (af' boAdopted . valids)) "" "" + ] + <> fGrp "Propagation Δt:" + W4 Sec P3 Log Free + [ fGrp' "0.50" "0.5" (IDeltaT (percSpec 0.50 . bePropagation)) "" "" + , fGrp' "0.80" "0.8" (IDeltaT (percSpec 0.80 . 
bePropagation)) "" "" + , fGrp' "0.96" "0.96" (IDeltaT (percSpec 0.96 . bePropagation)) "" "" + , fGrp' "1.00" "1.0" (IDeltaT (high .cdfRange. bePropagation)) "" "" + ] + <> fBoth' "beAcceptance" "va-" "lid" W3 Sig P0 Lin Free (IText (bool "-" "+" . (== 0) . length + . filter (not . snd) . beAcceptance)) + <> fBoth' "valids" "good" "obsv" W3 Ev P0 Lin Free (IInt (length.valids)) + <> fBoth' "beErrors" "all" "errs" W5 Ev P0 Lin Free (IInt (length.beErrors)) + + <> fGrp "Missing" + W3 Ev P0 Lin Free + [ fGrp' "missNotic" "ntc" (IInt (count (bpeIsMissing Notice) .beErrors)) "" "" + , fGrp' "missReque" "req" (IInt (count (bpeIsMissing Request) .beErrors)) "" "" + , fGrp' "missFetch" "fch" (IInt (count (bpeIsMissing Fetch) .beErrors)) "" "" + , fGrp' "missAnnou" "ann" (IInt (count (bpeIsMissing Announce).beErrors)) "" "" + , fGrp' "missAdopt" "ado" (IInt (count (bpeIsMissing Adopt) .beErrors)) "" "" + , fGrp' "missSend" "snd" (IInt (count (bpeIsMissing Send) .beErrors)) "" "" + ] + <> fGrp "Negative" + W3 Ev P0 Lin Free + [ fGrp' "negAnnou" "ann" (IInt (count (bpeIsNegative Announce).beErrors)) "" "" + , fGrp' "negSend" "snd" (IInt (count (bpeIsNegative Send) .beErrors)) "" "" + ] + where + valids = filter isValidBlockObservation . beObservations + + percSpec :: Double -> CDF I NominalDiffTime -> NominalDiffTime + percSpec ps d = unI $ Centile ps `projectCDF` d + & fromMaybe (error $ printf "No centile %f in distribution." ps) + af f = avg . fmap f + af' f = avg . mapSMaybe f + avg :: [NominalDiffTime] -> NominalDiffTime + avg [] = 0 + avg xs = (/ fromInteger (fromIntegral $ length xs)) $ sum xs + + bpeIsFork :: BPError -> Bool + bpeIsFork BPError{eDesc=BPEFork{}} = True + bpeIsFork _ = False + + bpeIsMissing, bpeIsNegative :: Phase -> BPError -> Bool + bpeIsMissing p BPError{eDesc=BPEMissingPhase p'} = p == p' + bpeIsMissing _ _ = False + bpeIsNegative p BPError{eDesc=BPENegativePhase p' _} = p == p' + bpeIsNegative _ _ = False + + data TimelineComments BlockEvents + = BEErrors + | BEFilterOuts + deriving Show + + rtCommentary BlockEvents{..} = + \case + BEErrors -> (" " <>) . show <$> beErrors + BEFilterOuts -> (" " <>) . show <$> filter (not . 
snd) beAcceptance + +perfSubsetFn :: PerfSubset -> (Field DSelect p a -> Bool) +perfSubsetFn = \case + PerfFull -> const True + PerfReport -> dFields mtFieldsReport + +mtFieldsReport :: [FieldName] +mtFieldsReport = + [ "CentiCpu", "CentiGC", "CentiMut", "cdfSpanLensCpu", "RSS", "Heap", "Live", "Alloc", "GcsMinor", "GcsMajor", "NetRd", "NetWr", "FsRd", "FsWr", "cdfStarts" ] + +instance CDFFields MachPerf p where + cdfFields = + fBoth "cdfStarts" "Loop" "starts" W4 Uni P0 Lin (Z0 1) (DWord64 cdfStarts) + "Forge loop starts" + "For any given slot, how many forging loop starts were registered" + + <> fGrp "----------- Δt -----------" W4 Sec P3 Log Free + [ fGrp' "cdfStarted" "Start" (DDeltaT cdfStarted) + "Forge loop tardiness" + "Forge loop iteration start delay (TraceStartLeadershipCheck), relative to slot start" + + , fGrp' "cdfBlkCtx" "BlkCt" (DDeltaT cdfBlkCtx) + "Block context acquisition delay" + "Block context acquired (TraceBlockContext), relative to forge loop beginning" + + , fGrp' "cdfLgrState" "LgrSt" (DDeltaT cdfLgrState) + "Ledger state acquisition delay" + "Ledger state acquired (TraceLedgerState), relative to block context acquisition" + + , fGrp' "cdfLgrView" "LgrVi" (DDeltaT cdfLgrView) + "Ledger view acquisition delay" + "Ledger view acquired (TraceLedgerView), relative to ledger state acquisition" + + , fGrp' "cdfLeading" "Lead" (DDeltaT cdfLeading) + "Leadership check duration" + "Leadership check duration (TraceNodeIsNotLeader, TraceNodeIsLeader), relative to ledger view acquisition" + + , fGrp' "cdfForged" " Forge" (DDeltaT cdfForged) + "Leading to block forged" + "Time spent forging the block (TraceForgedBlock), relative to positive leadership decision" + ] + <> fBoth "cdfBlockGap" "Block" "gap" W4 Sec P2 Lin Free (DWord64 cdfBlockGap) + "Interblock gap, s" + "Time between blocks" + + <> fBoth "cdfDensity" "Dens" "ity" W5 Rto P2 Lin Free (DFloat cdfDensity) + "Chain density" + "Chain density, for the last 'k' slots" + + <> fPct "CentiCpu" "CPU" (Z1 200) (DWord64 (rCentiCpu.mpResourceCDFs)) + "Process CPU usage, %" + "Kernel-reported CPU process usage, of a single core" + + <> fPct "CentiGC" "GC" (Z1 200) (DWord64 (rCentiGC .mpResourceCDFs)) + "RTS GC CPU usage, %" + "RTS-reported GC CPU usage, of a single core" + + <> fPct "CentiMut" "MUT" (Z1 200) (DWord64 (rCentiMut.mpResourceCDFs)) + "RTS Mutator CPU usage, %" + "RTS-reported mutator CPU usage, of a single core" + <> fW64 "GcsMajor" "GC" "Maj" W3 Ev (DWord64 (rGcsMajor.mpResourceCDFs)) + "Major GCs" + "Major GC events" + + <> fW64 "GcsMinor" "GC" "Min" W3 Ev (DWord64 (rGcsMinor.mpResourceCDFs)) + "Minor GCs" + "Minor GC events" + + <> fGrp "Memory usage, MB" W5 MB P0 Lin Free + [ fGrp' "RSS" "RSS" (DWord64 (rRSS.mpResourceCDFs)) + "Kernel RSS, MB" + "Kernel-reported RSS (Resident Set Size) of the process, MB" + + , fGrp' "Heap" "Heap" (DWord64 (rHeap.mpResourceCDFs)) + "RTS heap size, MB" + "RTS-reported heap size, MB" + + , fGrp' "Live" "Live" (DWord64 (rLive.mpResourceCDFs)) + "RTS GC live bytes, MB" + "RTS-reported GC live data size, MB" + ] + <> fBoth "Alloc" "Alloc" "MB" W5 MB P0 Lin (Z0 5000) (DWord64 (rAlloc.mpResourceCDFs)) + "RTS alloc rate MB/s" + "RTS-reported allocation rate, MB/sec" + + <> fGrp "NetIO, kB/s" W5 KBs P0 Lin Free + [ fGrp' "NetRd" "recv" (DWord64 (rNetRd.mpResourceCDFs)) + "Network reads kB/s" + "Network reads, kB/sec" + + , fGrp' "NetWr" "send" (DWord64 (rNetWr.mpResourceCDFs)) + "Network writes kB/s" + "Network writes, kB/sec" + ] + <> fGrp "FS IO, kB/s" W5 KBs P0 Lin Free + [ fGrp' 
"FsRd" "read" (DWord64 (rFsRd.mpResourceCDFs)) + "Filesystem reads, kB/s" + "Number of bytes which this process really did cause to be fetched from the storage layer, per second" + + , fGrp' "FsWr" "write" (DWord64 (rFsWr.mpResourceCDFs)) + "Filesystem writes, kB/s" + "Number of bytes which this process caused to be sent to the storage layer, modulo truncate(), per second" + ] + + <> fGrp "CPU% spans" W5 Len P0 Lin Free + [ fGrp' "cdfSpanLensCpu" "All" (DInt cdfSpanLensCpu) + "CPU 85% spans" + "Length of over-85% CPU usage peaks" + + , fGrp' "cdfSpanLensCpuEpoch" "Epoch" (DInt cdfSpanLensCpuEpoch) + "CPU spans at Ep boundary" + "Length of over-85% CPU usage peaks, starting at epoch boundary" + ] + + fieldJSONOverlay f kvs = + [ overlay kvs + , alterSubObject overlay "mpResourceCDFs" kvs + ] + where overlay = tryOverlayFieldDescription f + _dumpJSON _x = error $ "kvs:\n" <> (T.unpack . ST.toText . fromMaybe "" . ST.fromByteString . LBS.toStrict . AE.encode $ _x) + +instance TimelineFields (SlotStats NominalDiffTime) where + data TimelineComments (SlotStats NominalDiffTime) + deriving Show + + timelineFields = + + fW64' "slot" "abs." "slot#" W5 Slo (IWord64 (unSlotNo .slSlot)) + <> fW64' "epochSlot" "ep." "slot" W4 Slo (IWord64 (unEpochSlot .slEpochSlot)) + <> fW64' "epoch" "ep." "#" W2 Epo (IWord64 (unEpochNo .slEpoch)) + <> fW64' "safetyInt" "safe" "int" W3 Ix (IWord64 (unEpochSafeInt.slEpochSafeInt)) + <> fGrp "block" W5 Blk P0 Lin Free + [ fGrp' "block" "no." (IWord64 (unBlockNo.slBlockNo)) "" "" + , fGrp' "blockGap" "gap" (IWord64 slBlockGap) "" "" + ] + <> fGrp2 W3 Ev P0 Lin Free + [ fGrp2' "forgeLoop" "forg" "loo" (IWord64 slCountStarts) "" "" + , fGrp2' "blockCtx" "blok" "ctx" (IWord64 slCountBlkCtx) "" "" + , fGrp2' "ledgerState" "ledg" "sta" (IWord64 slCountLgrState) "" "" + , fGrp2' "ledgerView" "ledg" "viw" (IWord64 slCountLgrView) "" "" + , fGrp2' "leadShips" "ship" "win" (IWord64 slCountLeads) "" "" + , fGrp2' "forges" "For" "ge" (IWord64 slCountForges) "" "" + , fGrp2' "CDBSnap" "CDB" "snap" (IWord64 slChainDBSnap) "" "" + , fGrp2' "rejTxs" "rej" "txs" (IWord64 slRejectedTx) "" "" + ] + <> fGrp2 W5 Sec P3 Log Free + [ fGrp2' "startDelay" "loop" "start" (IDeltaTM slStarted) "" "" + , fGrp2' "blkCtx" "block" "ctx" (IDeltaTM slBlkCtx) "" "" + , fGrp2' "lgrState" "ledgr" "state" (IDeltaTM slLgrState) "" "" + , fGrp2' "lgrView" "ledgr" "view" (IDeltaTM slLgrView) "" "" + , fGrp2' "leadChecked" "ledsh" "chekd" (IDeltaTM slLeading) "" "" + , fGrp2' "forge" "forge" "done" (IDeltaTM slForged) "" "" + ] + <> fGrpF "mempool tx" W4 + [ fGrpF' "SpanTxsMem" "span" Sec P3 Log Free (IDeltaTM slSpanTxsMem) "" "" + , fGrpF' "TxsCollected" "cold" Uni P0 Lin Free (IWord64 slTxsCollected) "" "" + , fGrpF' "TxsAccepted" "accd" Uni P0 Lin Free (IWord64 slTxsAccepted) "" "" + , fGrpF' "TxsRejected" "rejd" Uni P0 Lin Free (IWord64 slTxsRejected) "" "" + ] + <> fBoth "chDensity" "chain" "dens." 
W5 Rto P2 Lin Free (IFloat slDensity) "" "" + + <> fGrp "%CPU" W4 Pct P1 Lin (Z1 200) + [ fGrp' "CPU%" "all" (IWord64M (fm rCentiCpu.slResources)) "" "" + , fGrp' "GC%" "GC" (IWord64M (fm rCentiGC .slResources)) "" "" + , fGrp' "MUT%" "mut" (IWord64M (fm rCentiMut.slResources)) "" "" + ] + <> fGrp "GCs" W4 Ev P0 Lin Free + [ fGrp' "majFlt" "maj" (IWord64M (fm rGcsMajor.slResources)) "" "" + , fGrp' "minFlt" "min" (IWord64M (fm rGcsMinor.slResources)) "" "" ] + <> fGrp "Memory use, MB" W5 Pct P0 Lin (Z1 200) + [ fGrp' "rssMB" "RSS" (IWord64M (fm rRSS .slResources)) "" "" + , fGrp' "heapMB" "Heap" (IWord64M (fm rHeap .slResources)) "" "" + , fGrp' "liveMB" "Live" (IWord64M (fm rLive .slResources)) "" "" + ] + <> fBoth "allocatedMB" "Allocd" "MB" W6 MBs P0 Lin Free (IWord64M (fm rAlloc.slResources)) "" "" + <> fBoth "mempoolTxs" "Mempool" "txs" W7 Uni P0 Lin Free (IWord64 slMempoolTxs) "" "" + <> fBoth "utxoSize" "UTxO" "entries" W9 Uni P0 Lin Free (IWord64 slUtxoSize) "" "" + where fm = fmap + +-- * Instances, depending on the metrics' instances: +-- +instance (ToJSON (f NominalDiffTime), ToJSON (f Int), ToJSON (f Double), ToJSON (f (Count BlockEvents)), ToJSON (f (DataDomain SlotNo)), ToJSON (f (DataDomain BlockNo))) => ToJSON (BlockProp f) where + toJSON x = AE.genericToJSON AE.defaultOptions x + & \case + Object o -> Object $ processFieldOverlays x o + _ -> error "Heh, serialised BlockProp to a non-Object." + +instance (ToJSON (a Double), ToJSON (a Int), ToJSON (a NominalDiffTime), ToJSON (a (DataDomain UTCTime)), ToJSON (a Word64), ToJSON (a (DataDomain SlotNo)), ToJSON (a (DataDomain BlockNo))) => ToJSON (MachPerf a) where + toJSON x = AE.genericToJSON AE.defaultOptions x + & \case + Object o -> Object $ processFieldOverlays x o + _ -> error "Heh, serialised MachPerf to a non-Object." 
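Editorial note on the centile-keyed propagation fields above: bpPropagation is now a Map Text (CDF f NominalDiffTime) keyed by rendered centiles, so the renderer and the parser must stay in sync. renderAdoptionCentile (Types.hs, below) produces the "cdfX.XX" keys, checkCentile (above) validates them, and summariseMultiBlockProp (BlockProp.hs, below) recovers the centile with read . T.unpack . T.drop 3. A minimal standalone sketch of that round trip; centileKey/keyCentile are illustrative names, not part of this diff, and only base and text are assumed:

import Data.Text (Text)
import Data.Text qualified as T
import Text.Printf (printf)

-- Rendering, as renderAdoptionCentile does: 0.8 becomes the key "cdf0.80".
centileKey :: Double -> Text
centileKey = T.pack . printf "cdf%0.2f"

-- Parsing, as summariseMultiBlockProp does: drop the "cdf" prefix, read back 0.8.
keyCentile :: Text -> Double
keyCentile = read . T.unpack . T.drop 3

Note that "%0.2f" here and "%.2f" in checkCentile render identically for these values, which is why the mismatch check stays quiet.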
+ +deriving newtype instance ToJSON MultiClusterPerf + +-- * Field definition auxiliaries: +-- +fScalar :: Text -> Width -> Unit -> s p a -> Text -> Text -> [Field s p a] +fScalar id w u sel sd d = [Field id "" "" w u P0 Lin Free sel sd d] + +fBoth :: Text -> Text -> Text -> Width -> Unit -> Precision -> Scale -> Range -> s p a -> Text -> Text -> [Field s p a] +fBoth id h1 h2 wi u p s r sel sd d = [Field id h1 h2 wi u p s r sel sd d] + +fBoth' :: Text -> Text -> Text -> Width -> Unit -> Precision -> Scale -> Range -> s p a -> [Field s p a] +fBoth' id h1 h2 wi u p s r sel = fBoth id h1 h2 wi u p s r sel "" "" + +fW64 :: Text -> Text -> Text -> Width -> Unit -> s p a -> Text -> Text -> [Field s p a] +fW64 id h1 h2 wi u = fBoth id h1 h2 wi u P0 Lin Free + +fW64' :: Text -> Text -> Text -> Width -> Unit -> s p a -> [Field s p a] +fW64' id h1 h2 wi u sel = fBoth id h1 h2 wi u P0 Lin Free sel "" "" + +fGrp :: Text -> Width -> Unit -> Precision -> Scale -> Range -> [Unit -> Precision -> Scale -> Range -> Width -> Text -> [Field s p a]] -> [Field s p a] +fGrp hTop w u p s r fs = mconcat $ + zip fs (nChunksEachOf (length fs) (width w + 1) hTop) + <&> \(f, chunk) -> f u p s r w chunk + +-- fUni :: Text -> Text -> Width -> Unit -> Precision -> Scale -> Range -> s p a -> Text -> Text -> [Field s p a] +-- fUni id h1 wi u p s r sel sd d = [Field id h1 (renderUnit u) wi u p s r sel sd d] + +fGrp' :: Text -> Text -> s p a -> Text -> Text -> Unit -> Precision -> Scale -> Range -> Width -> Text -> [Field s p a] +fGrp' id h2 sel sd d u p s r wi h1 = [Field id h1 h2 wi u p s r sel sd d] + +fGrp2 :: Width -> Unit -> Precision -> Scale -> Range -> [Unit -> Precision -> Scale -> Range -> Width -> [Field s p a]] -> [Field s p a] +fGrp2 w u p s r fs = mconcat $ + fs <&> \f -> f u p s r w + +fGrp2' :: Text -> Text -> Text -> s p a -> Text -> Text -> Unit -> Precision -> Scale -> Range -> Width -> [Field s p a] +fGrp2' id h1 h2 sel sd d u p s r wi = [Field id h1 h2 wi u p s r sel sd d] + +fGrpF :: Text -> Width -> [Width -> Text -> [Field s p a]] -> [Field s p a] +fGrpF hTop w fs = mconcat $ + zip fs (nChunksEachOf (length fs) (width w) hTop) + <&> \(f, chunk) -> f w chunk + +fGrpF' :: Text -> Text -> Unit -> Precision -> Scale -> Range -> s p a -> Text -> Text -> Width -> Text -> [Field s p a] +fGrpF' id h2 u p s r sel sd d wi h1 = [Field id h1 h2 wi u p s r sel sd d] + +fPct :: Text -> Text -> Range -> s p a -> Text -> Text -> [Field s p a] +fPct id h1 r sel sd d = [Field id h1 (renderUnit Pct) W3 Pct P0 Lin r sel sd d] diff --git a/bench/locli/src/Cardano/Analysis/API/Run.hs b/bench/locli/src/Cardano/Analysis/API/Run.hs new file mode 100644 index 00000000000..32080e9ff03 --- /dev/null +++ b/bench/locli/src/Cardano/Analysis/API/Run.hs @@ -0,0 +1,73 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE StrictData #-} +{-# OPTIONS_GHC -Wno-incomplete-patterns -Wno-name-shadowing -Wno-orphans #-} +module Cardano.Analysis.API.Run (module Cardano.Analysis.API.Run) where + +import Cardano.Prelude + +import Control.Monad (fail) +import Data.Aeson qualified as Aeson + +import Cardano.Util +import Cardano.Analysis.API.ChainFilter +import Cardano.Analysis.API.Context +import Cardano.Analysis.API.Ground + +data AnalysisCmdError + = AnalysisCmdError !Text + | MissingRunContext + | MissingLogfiles + | RunMetaParseError !(JsonInputFile RunPartial) !Text + | GenesisParseError !(JsonInputFile Genesis) !Text + | ChainFiltersParseError !JsonFilterFile !Text + deriving Show + +data ARunWith a + = Run + { genesisSpec :: GenesisSpec + , 
generatorProfile :: GeneratorProfile + , metadata :: Metadata + , genesis :: a + } + deriving (Generic, Show, ToJSON) + +type RunPartial = ARunWith () +type Run = ARunWith Genesis + +instance FromJSON RunPartial where + parseJSON = withObject "Run" $ \v -> do + meta :: Object <- v .: "meta" + profile_content <- meta .: "profile_content" + generator <- profile_content .: "generator" + -- + genesisSpec <- profile_content .: "genesis" + generatorProfile <- parseJSON $ Aeson.Object generator + -- + tag <- meta .: "tag" + profile <- meta .: "profile" + batch <- meta .: "batch" + manifest <- meta .: "manifest" + + eraGtor <- generator .:? "era" + eraTop <- profile_content .:? "era" + era <- case eraGtor <|> eraTop of + Just x -> pure x + Nothing -> fail "While parsing run metafile: missing era specification" + -- + let metadata = Metadata{..} + genesis = () + pure Run{..} + +readRun :: JsonInputFile Genesis -> JsonInputFile RunPartial -> ExceptT AnalysisCmdError IO Run +readRun shelleyGenesis runmeta = do + runPartial <- readJsonData runmeta (RunMetaParseError runmeta) + progress "meta" (Q $ unJsonInputFile runmeta) + run <- readJsonData shelleyGenesis (GenesisParseError shelleyGenesis) + <&> completeRun runPartial + progress "genesis" (Q $ unJsonInputFile shelleyGenesis) + progress "run" (J run) + pure run + + where + completeRun :: RunPartial -> Genesis -> Run + completeRun Run{..} g = Run { genesis = g, .. } diff --git a/bench/locli/src/Cardano/Analysis/API/Types.hs b/bench/locli/src/Cardano/Analysis/API/Types.hs new file mode 100644 index 00000000000..0aa22e604f0 --- /dev/null +++ b/bench/locli/src/Cardano/Analysis/API/Types.hs @@ -0,0 +1,451 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE GeneralisedNewtypeDeriving #-} +{-# LANGUAGE UndecidableInstances #-} +{-# OPTIONS_GHC -Wno-name-shadowing -Wno-orphans #-} +module Cardano.Analysis.API.Types (module Cardano.Analysis.API.Types) where + +import Cardano.Prelude hiding (head) + +import Data.Text qualified as T +import Options.Applicative qualified as Opt + +import Data.CDF + +import Cardano.Logging.Resources.Types + +import Cardano.Unlog.LogObject hiding (Text) +import Cardano.Util + +import Cardano.Analysis.API.Chain +import Cardano.Analysis.API.ChainFilter +import Cardano.Analysis.API.Context +import Cardano.Analysis.API.Ground +import Cardano.Analysis.API.LocliVersion + +-- +-- * API types +-- + +-- | Overall summary of all analyses. 
+data Summary f where + Summary :: + { sumAnalysisTime :: !UTCTime + , sumMeta :: !Metadata + , sumGenesis :: !Genesis + , sumGenesisSpec :: !GenesisSpec + , sumGenerator :: !GeneratorProfile + , sumLogStreams :: !(Count [LogObject]) + , sumLogObjectsTotal :: !(Count LogObject) + , sumFilters :: !([FilterName], [ChainFilter]) + , sumChainRejectionStats :: ![(ChainFilter, Int)] + , sumBlocksRejected :: !(Count BlockEvents) + , sumDomainTime :: !(DataDomain UTCTime) + , sumStartSpread :: !(DataDomain UTCTime) + , sumStopSpread :: !(DataDomain UTCTime) + , sumDomainSlots :: !(DataDomain SlotNo) + , sumDomainBlocks :: !(DataDomain BlockNo) + , cdfLogObjects :: !(CDF f Int) + , cdfLogObjectsEmitted :: !(CDF f Int) + } -> Summary f + deriving (Generic) + +type SummaryOne = Summary I +type MultiSummary = Summary (CDF I) + +deriving instance (FromJSON (f Int), FromJSON (f Double)) => FromJSON (Summary f) +deriving instance ( ToJSON (f Int), ToJSON (f Double)) => ToJSON (Summary f) +deriving instance ( Show (f Int), Show (f Double)) => Show (Summary f) + +data BlockStats + = BlockStats + { bsFiltered :: Count ForgerEvents + , bsRejected :: Count ForgerEvents + , bsUnchained :: Count ForgerEvents + } + deriving (Generic, FromJSON, ToJSON) + +bsTotal, bsChained :: BlockStats -> Count ForgerEvents +bsTotal BlockStats{..} = bsFiltered + bsRejected + bsUnchained +bsChained BlockStats{..} = bsFiltered + bsRejected + +-- | Results of block propagation analysis. +data BlockProp f + = BlockProp + { bpVersion :: !Cardano.Analysis.API.LocliVersion.LocliVersion + , bpDomainSlots :: ![DataDomain SlotNo] + , bpDomainBlocks :: ![DataDomain BlockNo] + , cdfForgerStarts :: !(CDF f NominalDiffTime) + , cdfForgerBlkCtx :: !(CDF f NominalDiffTime) + , cdfForgerLgrState :: !(CDF f NominalDiffTime) + , cdfForgerLgrView :: !(CDF f NominalDiffTime) + , cdfForgerLeads :: !(CDF f NominalDiffTime) + , cdfForgerForges :: !(CDF f NominalDiffTime) + , cdfForgerAnnouncements :: !(CDF f NominalDiffTime) + , cdfForgerAdoptions :: !(CDF f NominalDiffTime) + , cdfForgerSends :: !(CDF f NominalDiffTime) + , cdfPeerNotices :: !(CDF f NominalDiffTime) + , cdfPeerRequests :: !(CDF f NominalDiffTime) + , cdfPeerFetches :: !(CDF f NominalDiffTime) + , cdfPeerAnnouncements :: !(CDF f NominalDiffTime) + , cdfPeerAdoptions :: !(CDF f NominalDiffTime) + , cdfPeerSends :: !(CDF f NominalDiffTime) + , cdfBlocksPerHost :: !(CDF f Int) + , cdfBlocksFilteredRatio :: !(CDF f Double) + , cdfBlocksChainedRatio :: !(CDF f Double) + , cdfBlockBattles :: !(CDF f Int) + , cdfBlockSizes :: !(CDF f Int) + , bpPropagation :: !(Map Text (CDF f NominalDiffTime)) + } + deriving (Generic) +deriving instance (Show (f NominalDiffTime), Show (f Int), Show (f Double), Show (f (Count BlockEvents)), Show (f (DataDomain SlotNo)), Show (f (DataDomain BlockNo))) => Show (BlockProp f) +deriving instance (FromJSON (f NominalDiffTime), FromJSON (f Int), FromJSON (f Double), FromJSON (f (Count BlockEvents)), FromJSON (f (DataDomain SlotNo)), FromJSON (f (DataDomain BlockNo))) => FromJSON (BlockProp f) + +type BlockPropOne = BlockProp I +type MultiBlockProp = BlockProp (CDF I) + +-- | The top-level representation of the machine timeline analysis results. 
+data MachPerf f + = MachPerf + { mpVersion :: !Cardano.Analysis.API.LocliVersion.LocliVersion + , mpDomainSlots :: ![DataDomain SlotNo] + , cdfHostSlots :: !(CDF f Word64) + -- distributions + , cdfStarts :: !(CDF f Word64) + , cdfLeads :: !(CDF f Word64) + , cdfUtxo :: !(CDF f Word64) + , cdfDensity :: !(CDF f Double) + , cdfStarted :: !(CDF f NominalDiffTime) + , cdfBlkCtx :: !(CDF f NominalDiffTime) + , cdfLgrState :: !(CDF f NominalDiffTime) + , cdfLgrView :: !(CDF f NominalDiffTime) + , cdfLeading :: !(CDF f NominalDiffTime) + , cdfForged :: !(CDF f NominalDiffTime) + , cdfBlockGap :: !(CDF f Word64) + , cdfSpanLensCpu :: !(CDF f Int) + , cdfSpanLensCpuEpoch :: !(CDF f Int) + , cdfSpanLensCpuRwd :: !(CDF f Int) + , mpResourceCDFs :: !(Resources (CDF f Word64)) + } + deriving (Generic) + +-- | One machine's performance +type MachPerfOne = MachPerf I + +-- | Bunch'a machines performances +type ClusterPerf = MachPerf (CDF I) + +-- | Bunch'a bunches'a machine performances. +-- Same as above, since we collapse [CDF I] into CDF I -- just with more statistical confidence. +newtype MultiClusterPerf + = MultiClusterPerf { unMultiClusterPerf :: ClusterPerf } + deriving newtype (FromJSON) + +-- * BlockProp +-- +data Chain + = Chain + { cDomSlots :: !(DataDomain SlotNo) + , cDomBlocks :: !(DataDomain BlockNo) + , cRejecta :: ![BlockEvents] + , cMainChain :: ![BlockEvents] + , cBlockStats :: !(Map Host BlockStats) + } + +-- | Block's events, as seen by its forger. +data ForgerEvents a + = ForgerEvents + { bfeHost :: !Host + , bfeBlock :: !Hash + , bfeBlockPrev :: !Hash + , bfeBlockNo :: !BlockNo + , bfeSlotNo :: !SlotNo + , bfeSlotStart :: !SlotStart + , bfeEpochNo :: !EpochNo + , bfeBlockSize :: !(SMaybe Int) + , bfeStarted :: !(SMaybe a) + , bfeBlkCtx :: !(SMaybe a) + , bfeLgrState :: !(SMaybe a) + , bfeLgrView :: !(SMaybe a) + , bfeLeading :: !(SMaybe a) + , bfeForged :: !(SMaybe a) + , bfeAnnounced :: !(SMaybe a) + , bfeSending :: !(SMaybe a) + , bfeAdopted :: !(SMaybe a) + , bfeChainDelta :: !Int + , bfeErrs :: [BPError] + } + deriving (Generic, NFData, FromJSON, ToJSON, Show) + +-- | All events related to a block. +data BlockEvents + = BlockEvents + { beBlock :: !Hash + , beBlockPrev :: !Hash + , beBlockNo :: !BlockNo + , beSlotNo :: !SlotNo + , beEpochNo :: !EpochNo + , beEpochSafeInt :: !EpochSafeInt + , beForge :: !BlockForge + , beObservations :: ![BlockObservation] + , beForks :: !(Count BlockEvents) + , bePropagation :: !(CDF I NominalDiffTime) + -- ^ CDF of slot-start-to-adoptions on cluster + , beOtherBlocks :: ![Hash] + , beErrors :: ![BPError] + , beAcceptance :: ![(ChainFilter, Bool)] + -- ^ List of acceptance conditions, + -- affecting block's consideration for analysis. 
+ } + deriving (Generic, FromJSON, ToJSON, Show) + +data BlockForge + = BlockForge + { bfForger :: !Host + , bfSlotStart :: !SlotStart + , bfBlockGap :: !NominalDiffTime -- ^ Since previous forge event + , bfBlockSize :: !Int -- ^ Bytes + , bfStarted :: !NominalDiffTime -- ^ Since slot start + , bfBlkCtx :: !(SMaybe NominalDiffTime) -- ^ Since forge loop start + , bfLgrState :: !(SMaybe NominalDiffTime) -- ^ Since block context + , bfLgrView :: !(SMaybe NominalDiffTime) -- ^ Since ledger state + , bfLeading :: !NominalDiffTime -- ^ Since ledger view + , bfForged :: !NominalDiffTime -- ^ Since leading + , bfAnnounced :: !NominalDiffTime -- ^ Since forging + , bfSending :: !NominalDiffTime -- ^ Since announcement + , bfAdopted :: !NominalDiffTime -- ^ Since announcement + , bfChainDelta :: !Int -- ^ ChainDelta during adoption + } + deriving (Generic, FromJSON, ToJSON, Show) + +allBlockForgeTimes :: Monoid (f Text) => + (Text -> NominalDiffTime -> f Text) -> BlockForge -> f Text +allBlockForgeTimes f BlockForge{..} + = f "bfBlockGap" bfBlockGap + <> f "bfStarted" bfStarted + <> smaybe mempty (f "bfBlkCtx") bfBlkCtx + <> smaybe mempty (f "bfLgrState") bfLgrState + <> smaybe mempty (f "bfLgrView") bfLgrView + <> f "bfLeading" bfLeading + <> f "bfForged" bfForged + <> f "bfAnnounced" bfAnnounced + <> f "bfSending" bfSending + <> f "bfAdopted" bfAdopted + +data BlockObservation + = BlockObservation + { boObserver :: !Host + , boSlotStart :: !SlotStart + , boNoticed :: !NominalDiffTime -- ^ Since slot start + , boRequested :: !NominalDiffTime -- ^ Since noticing + , boFetched :: !NominalDiffTime -- ^ Since requesting + , boAnnounced :: !(SMaybe NominalDiffTime) -- ^ Since fetching + , boSending :: !(SMaybe NominalDiffTime) -- ^ Since announcement + , boAdopted :: !(SMaybe NominalDiffTime) -- ^ Since announcement + , boChainDelta :: !Int -- ^ ChainDelta during adoption + , boErrorsCrit :: ![BPError] + , boErrorsSoft :: ![BPError] + } + deriving (Generic, FromJSON, ToJSON, Show) + +allBlockObservationTimes :: (Monoid (f Text)) => + (Text -> NominalDiffTime -> f Text) -> BlockObservation -> f Text +allBlockObservationTimes f BlockObservation{..} + = f "boNoticed" boNoticed + <> f "boRequested" boRequested + <> f "boFetched" boFetched + <> smaybe mempty (f "boAnnounced") boAnnounced + <> smaybe mempty (f "boSending" ) boSending + <> smaybe mempty (f "boAdopted" ) boAdopted + +data BPError + = BPError + { eHost :: !Host + , eBlock :: !Hash + , eLO :: !(Maybe LogObject) + , eDesc :: !BPErrorKind + } + deriving (FromJSON, Generic, NFData, Show, ToJSON) + +data Phase + = Notice + | Request + | Fetch + | Forge + | Acquire + | Announce + | Adopt + | Send + deriving (FromJSON, Eq, Generic, NFData, Ord, Show, ToJSON) + +data BPErrorKind + = BPEBefore !Phase !Phase + | BPEUnexpectedForObserver !Phase + | BPEUnexpectedForForger !Phase + | BPEUnexpectedAsFirst !Phase + | BPENoBlocks + | BPEDuplicateForge + | BPEMissingPhase !Phase + | BPENegativePhase !Phase !NominalDiffTime + | BPEFork !Hash + deriving (FromJSON, Generic, NFData, Show, ToJSON) + +-- | From SlotStats collection: +data RunScalars + = RunScalars + { rsElapsed :: Maybe NominalDiffTime + , rsSubmitted :: Maybe Word64 + , rsThreadwiseTps :: Maybe [Double] + } + deriving stock Generic + deriving anyclass NFData + +-- * MachPerf / ClusterPef +-- +deriving newtype instance FromJSON a => FromJSON (I a) +deriving newtype instance ToJSON a => ToJSON (I a) +deriving instance (FromJSON (a Double), FromJSON (a Int), FromJSON (a NominalDiffTime), FromJSON (a 
Word64), FromJSON (a (DataDomain SlotNo)), FromJSON (a (DataDomain UTCTime))) => FromJSON (MachPerf a) +deriving instance (NFData (a Double), NFData (a Int), NFData (a NominalDiffTime), NFData (a Word64), NFData (a (DataDomain SlotNo)), NFData (a (DataDomain UTCTime))) => NFData (MachPerf a) +deriving instance (Show (a Double), Show (a Int), Show (a NominalDiffTime), Show (a Word64), Show (a (DataDomain SlotNo)), Show (a (DataDomain UTCTime))) => Show (MachPerf a) + +data SlotStats a + = SlotStats + { slSlot :: !SlotNo + , slEpoch :: !EpochNo + , slEpochSlot :: !EpochSlot + , slEpochSafeInt :: !EpochSafeInt + , slStart :: !SlotStart + , slCountStarts :: !Word64 + , slCountBlkCtx :: !Word64 + , slCountLgrState :: !Word64 + , slCountLgrView :: !Word64 + , slCountLeads :: !Word64 + , slCountForges :: !Word64 + , slChainDBSnap :: !Word64 + , slRejectedTx :: !Word64 + , slBlockNo :: !BlockNo + , slBlockGap :: !Word64 + , slStarted :: !(SMaybe a) + , slBlkCtx :: !(SMaybe a) + , slLgrState :: !(SMaybe a) + , slLgrView :: !(SMaybe a) + , slLeading :: !(SMaybe a) + , slForged :: !(SMaybe a) + , slMempoolTxs :: !Word64 + , slSpanTxsMem :: !(SMaybe NominalDiffTime) + , slTxsCollected :: !Word64 + , slTxsAccepted :: !Word64 + , slTxsRejected :: !Word64 + , slUtxoSize :: !Word64 + , slDensity :: !Double + , slResources :: !(SMaybe (Resources Word64)) + } + deriving (Generic, Show, ToJSON) + deriving anyclass NFData + +-- +-- * Key properties +-- +testBlockEvents :: Genesis -> BlockEvents -> ChainFilter -> Bool +testBlockEvents g@Genesis{..} + BlockEvents{beForge=forge@BlockForge{..} + ,beObservations=seen + ,..} = \case + CBlock flt -> case flt of + BUnitaryChainDelta -> bfChainDelta == 1 + BFullnessGEq f -> + bfBlockSize > floor ((fromIntegral (maxBlockBodySize protocolParams) :: Double) * f) + BFullnessLEq f -> + bfBlockSize < floor ((fromIntegral (maxBlockBodySize protocolParams) :: Double) * f) + BSizeGEq x -> bfBlockSize >= fromIntegral x + BSizeLEq x -> bfBlockSize <= fromIntegral x + BMinimumAdoptions x -> count (isSJust . boAdopted) seen >= fromIntegral x + BNonNegatives -> null $ + allBlockForgeTimes noteFieldIfNeg forge <> + concatMap (allBlockObservationTimes noteFieldIfNeg) seen + where noteFieldIfNeg :: Text -> NominalDiffTime -> [Text] + noteFieldIfNeg f x = [ f | x < 0 ] + CSlot flt -> case flt of + SlotGEq s -> beSlotNo >= s + SlotLEq s -> beSlotNo <= s + EpochGEq e -> beEpochNo >= e + EpochLEq e -> beEpochNo <= e + SlotHasLeaders -> True + EpochSafeIntGEq i -> beEpochSafeInt >= i + EpochSafeIntLEq i -> beEpochSafeInt <= i + EpSlotGEq s -> snd (g `unsafeParseSlot` beSlotNo) >= s + EpSlotLEq s -> snd (g `unsafeParseSlot` beSlotNo) <= s + +isValidBlockObservation :: BlockObservation -> Bool +isValidBlockObservation BlockObservation{..} = + -- 1. All phases are present + null boErrorsCrit + && + -- 2. 
All timings account for processing of a single block + boChainDelta == 1 + +testSlotStats :: Genesis -> SlotStats a -> SlotCond -> Bool +testSlotStats g SlotStats{..} = \case + SlotGEq s -> slSlot >= s + SlotLEq s -> slSlot <= s + EpochGEq s -> fromIntegral (unEpochNo slEpoch) >= s + EpochLEq s -> fromIntegral (unEpochNo slEpoch) <= s + SlotHasLeaders -> slCountLeads > 0 + EpochSafeIntGEq i -> slEpochSafeInt >= i + EpochSafeIntLEq i -> slEpochSafeInt <= i + EpSlotGEq s -> snd (g `unsafeParseSlot` slSlot) >= s + EpSlotLEq s -> snd (g `unsafeParseSlot` slSlot) <= s + +-- +-- * Block propagation report subsetting +-- +data PropSubset + = PropFull + | PropControl + | PropForger + | PropPeers + | PropEndToEnd + | PropEndToEndBrief + deriving Show + +parsePropSubset :: Opt.Parser PropSubset +parsePropSubset = + [ Opt.flag' PropFull (Opt.long "full" <> Opt.help "Complete propagation data") + , Opt.flag' PropControl (Opt.long "control" <> Opt.help "Only overall control data") + , Opt.flag' PropForger (Opt.long "forger" <> Opt.help "Only forger propagation") + , Opt.flag' PropPeers (Opt.long "peers" <> Opt.help "Only peer propagation") + , Opt.flag' PropEndToEnd (Opt.long "end-to-end" <> Opt.help "Only end-to-end propagation") + , Opt.flag' PropEndToEndBrief (Opt.long "e2e-brief" <> Opt.help "Only brief end-to-end propagation") + ] & \case + (x:xs) -> foldl (<|>) x xs + [] -> error "Crazy world, begone. 0" + +-- +-- * Timeline rendering instances +-- +renderAdoptionCentile :: Centile -> Text +renderAdoptionCentile = T.pack . printf "cdf%0.2f" . unCentile + +adoptionCentiles :: [Centile] +adoptionCentiles = + [ Centile 0.5, Centile 0.8, Centile 0.9 + , Centile 0.92, Centile 0.94, Centile 0.96, Centile 0.98, Centile 1.0 ] + +adoptionCentilesBrief :: [Centile] +adoptionCentilesBrief = + [ Centile 0.5, Centile 0.9, Centile 0.96 ] + +-- +-- * Machine performance report subsetting +-- +data PerfSubset + = PerfFull + | PerfReport + deriving Show + +parsePerfSubset :: Opt.Parser PerfSubset +parsePerfSubset = + [ Opt.flag' PerfFull (Opt.long "full" <> Opt.help "Complete performance data") + , Opt.flag' PerfReport (Opt.long "report" <> Opt.help "Only report-relevant perf data") + ] & \case + (x:xs) -> foldl (<|>) x xs + [] -> error "Crazy world." 
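Editorial note on the acceptance convention introduced above: the old beNegAcceptance list (only the filters that rejected a block) is replaced by beAcceptance, which pairs every ChainFilter with its verdict, and a block enters analysis only when all verdicts hold. A minimal sketch of the pattern; the stub Filt type and the function names stand in for ChainFilter and its test and are illustrative only:

import Control.Arrow ((&&&))
import Data.Functor ((<&>))
import Data.List (partition)

-- Stub standing in for ChainFilter; the real constructors live in
-- Cardano.Analysis.API.ChainFilter.
data Filt = UnitaryChainDelta | MinimumAdoptions Int
  deriving (Eq, Show)

-- Mirrors blockEventsAcceptance: pair every filter with its verdict for one block.
acceptance :: (Filt -> Bool) -> [Filt] -> [(Filt, Bool)]
acceptance test flts = flts <&> (id &&& test)

-- A block is analysable only when every verdict holds (cf. `all snd . beAcceptance`).
accepted :: [(Filt, Bool)] -> Bool
accepted = all snd

-- rebuildChain (BlockProp.hs, below) partitions the chain on exactly this
-- predicate, into the accepted blocks and cRejecta.
splitChain :: [[(Filt, Bool)]] -> ([[(Filt, Bool)]], [[(Filt, Bool)]])
splitChain = partition accepted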
diff --git a/bench/locli/src/Cardano/Analysis/BlockProp.hs b/bench/locli/src/Cardano/Analysis/BlockProp.hs index 8d29c8b0ddd..13711bc5313 100644 --- a/bench/locli/src/Cardano/Analysis/BlockProp.hs +++ b/bench/locli/src/Cardano/Analysis/BlockProp.hs @@ -12,14 +12,14 @@ module Cardano.Analysis.BlockProp , blockProp) where -import Prelude (String, (!!), error, head, last, id, show, tail) +import Prelude (String, (!!), error, head, last, id, show, tail, read) import Cardano.Prelude hiding (head, show) import Control.Arrow ((***), (&&&)) import Data.Aeson (ToJSON(..), FromJSON(..)) import Data.Bifunctor import Data.Function (on) -import Data.List (dropWhileEnd, intercalate) +import Data.List (dropWhileEnd, intercalate, partition) import Data.Map.Strict (Map) import Data.Map.Strict qualified as Map import Data.Maybe (catMaybes, mapMaybe, isNothing) @@ -41,79 +41,55 @@ import Ouroboros.Network.Block (BlockNo(..)) import Data.Accum import Data.CDF -import Cardano.Analysis.API -import Cardano.Analysis.Chain -import Cardano.Analysis.ChainFilter -import Cardano.Analysis.Ground -import Cardano.Analysis.Run -import Cardano.Analysis.Version import Cardano.Render import Cardano.Unlog.LogObject hiding (Text) import Cardano.Unlog.Resources import Cardano.Util +import Cardano.Analysis.API + summariseMultiBlockProp :: [Centile] -> [BlockPropOne] -> Either CDFError MultiBlockProp summariseMultiBlockProp _ [] = error "Asked to summarise empty list of BlockPropOne" summariseMultiBlockProp centiles bs@(headline:_) = do - bpForgerStarts <- cdf2OfCDFs comb $ bs <&> bpForgerStarts - bpForgerBlkCtx <- cdf2OfCDFs comb $ bs <&> bpForgerBlkCtx - bpForgerLgrState <- cdf2OfCDFs comb $ bs <&> bpForgerLgrState - bpForgerLgrView <- cdf2OfCDFs comb $ bs <&> bpForgerLgrView - bpForgerLeads <- cdf2OfCDFs comb $ bs <&> bpForgerLeads - bpForgerForges <- cdf2OfCDFs comb $ bs <&> bpForgerForges - bpForgerAdoptions <- cdf2OfCDFs comb $ bs <&> bpForgerAdoptions - bpForgerAnnouncements <- cdf2OfCDFs comb $ bs <&> bpForgerAnnouncements - bpForgerSends <- cdf2OfCDFs comb $ bs <&> bpForgerSends - bpPeerNotices <- cdf2OfCDFs comb $ bs <&> bpPeerNotices - bpPeerRequests <- cdf2OfCDFs comb $ bs <&> bpPeerRequests - bpPeerFetches <- cdf2OfCDFs comb $ bs <&> bpPeerFetches - bpPeerAdoptions <- cdf2OfCDFs comb $ bs <&> bpPeerAdoptions - bpPeerAnnouncements <- cdf2OfCDFs comb $ bs <&> bpPeerAnnouncements - bpPeerSends <- cdf2OfCDFs comb $ bs <&> bpPeerSends - bpSizes <- cdf2OfCDFs comb $ bs <&> bpSizes - bpPropagation <- sequence $ transpose (bs <&> bpPropagation) <&> + cdfForgerStarts <- cdf2OfCDFs comb $ bs <&> cdfForgerStarts + cdfForgerBlkCtx <- cdf2OfCDFs comb $ bs <&> cdfForgerBlkCtx + cdfForgerLgrState <- cdf2OfCDFs comb $ bs <&> cdfForgerLgrState + cdfForgerLgrView <- cdf2OfCDFs comb $ bs <&> cdfForgerLgrView + cdfForgerLeads <- cdf2OfCDFs comb $ bs <&> cdfForgerLeads + cdfForgerForges <- cdf2OfCDFs comb $ bs <&> cdfForgerForges + cdfForgerAdoptions <- cdf2OfCDFs comb $ bs <&> cdfForgerAdoptions + cdfForgerAnnouncements <- cdf2OfCDFs comb $ bs <&> cdfForgerAnnouncements + cdfForgerSends <- cdf2OfCDFs comb $ bs <&> cdfForgerSends + cdfPeerNotices <- cdf2OfCDFs comb $ bs <&> cdfPeerNotices + cdfPeerRequests <- cdf2OfCDFs comb $ bs <&> cdfPeerRequests + cdfPeerFetches <- cdf2OfCDFs comb $ bs <&> cdfPeerFetches + cdfPeerAdoptions <- cdf2OfCDFs comb $ bs <&> cdfPeerAdoptions + cdfPeerAnnouncements <- cdf2OfCDFs comb $ bs <&> cdfPeerAnnouncements + cdfPeerSends <- cdf2OfCDFs comb $ bs <&> cdfPeerSends + cdfBlockBattles <- cdf2OfCDFs comb $ bs 
<&> cdfBlockBattles + cdfBlockSizes <- cdf2OfCDFs comb $ bs <&> cdfBlockSizes + cdfBlocksPerHost <- cdf2OfCDFs comb $ bs <&> cdfBlocksPerHost + cdfBlocksFilteredRatio <- cdf2OfCDFs comb $ bs <&> cdfBlocksFilteredRatio + cdfBlocksChainedRatio <- cdf2OfCDFs comb $ bs <&> cdfBlocksChainedRatio + bpPropagation <- sequence $ transpose (bs <&> Map.toList . bpPropagation) <&> \case [] -> Left CDFEmptyDataset xs@((d,_):ds) -> do unless (all (d ==) $ fmap fst ds) $ - Left $ CDFIncoherentSamplingCentiles [Centile . fst <$> xs] + Left $ CDFIncoherentSamplingCentiles [Centile . read . T.unpack . T.drop 3 . fst <$> xs] (d,) <$> cdf2OfCDFs comb (snd <$> xs) pure $ BlockProp { bpVersion = bpVersion headline - , bpDomainSlots = dataDomainsMergeOuter $ bs <&> bpDomainSlots - , bpDomainBlocks = dataDomainsMergeOuter $ bs <&> bpDomainBlocks + , bpDomainSlots = concat $ bs <&> bpDomainSlots + , bpDomainBlocks = concat $ bs <&> bpDomainBlocks + , bpPropagation = Map.fromList bpPropagation , .. } where comb :: forall a. Divisible a => Combine I a comb = stdCombine1 centiles --- | Block's events, as seen by its forger. -data ForgerEvents a - = ForgerEvents - { bfeHost :: !Host - , bfeBlock :: !Hash - , bfeBlockPrev :: !Hash - , bfeBlockNo :: !BlockNo - , bfeSlotNo :: !SlotNo - , bfeSlotStart :: !SlotStart - , bfeEpochNo :: !EpochNo - , bfeBlockSize :: !(Maybe Int) - , bfeStarted :: !(Maybe a) - , bfeBlkCtx :: !(Maybe a) - , bfeLgrState :: !(Maybe a) - , bfeLgrView :: !(Maybe a) - , bfeLeading :: !(Maybe a) - , bfeForged :: !(Maybe a) - , bfeAnnounced :: !(Maybe a) - , bfeSending :: !(Maybe a) - , bfeAdopted :: !(Maybe a) - , bfeChainDelta :: !Int - , bfeErrs :: [BPError] - } - deriving (Generic, NFData, FromJSON, ToJSON, Show) - bfePrevBlock :: ForgerEvents a -> Maybe Hash bfePrevBlock x = case bfeBlockNo x of 0 -> Nothing @@ -127,19 +103,19 @@ data ObserverEvents a , boeBlockNo :: !BlockNo , boeSlotNo :: !SlotNo , boeSlotStart :: !SlotStart - , boeNoticed :: !(Maybe a) - , boeRequested :: !(Maybe a) - , boeFetched :: !(Maybe a) - , boeAnnounced :: !(Maybe a) - , boeSending :: !(Maybe a) - , boeAdopted :: !(Maybe a) + , boeNoticed :: !(SMaybe a) + , boeRequested :: !(SMaybe a) + , boeFetched :: !(SMaybe a) + , boeAnnounced :: !(SMaybe a) + , boeSending :: !(SMaybe a) + , boeAdopted :: !(SMaybe a) , boeChainDelta :: !Int , boeErrorsCrit :: [BPError] , boeErrorsSoft :: [BPError] } deriving (Generic, NFData, FromJSON, ToJSON, Show) -mbePhaseIndex :: Map Phase (MachBlockEvents a -> Maybe a) +mbePhaseIndex :: Map Phase (MachBlockEvents a -> SMaybe a) mbePhaseIndex = Map.fromList [ (Notice, mbeNoticed) , (Request, mbeRequested) @@ -151,7 +127,7 @@ mbePhaseIndex = Map.fromList , (Adopt, mbeAdopted) ] -mbeGetProjection :: Phase -> (MachBlockEvents a -> Maybe a) +mbeGetProjection :: Phase -> (MachBlockEvents a -> SMaybe a) mbeGetProjection k = Map.lookup k mbePhaseIndex & fromMaybe (error $ "Unknown phase: " <> show k) @@ -178,6 +154,9 @@ mapMbe f o e = \case MOE x -> o x MBE x -> e x +mbeForge :: MachBlockEvents a -> Maybe (ForgerEvents a) +mbeForge = mapMbe Just (const Nothing) (const Nothing) + partitionMbes :: [MachBlockEvents a] -> ([ForgerEvents a], [ObserverEvents a], [BPError]) partitionMbes = go [] [] [] where @@ -227,16 +206,16 @@ ordBlockEv l r | mbeObsvP r = LT | otherwise = EQ -mbeNoticed, mbeRequested, mbeAcquired, mbeAnnounced, mbeSending, mbeAdopted :: MachBlockEvents a -> Maybe a -mbeNoticed = mapMbe (const Nothing) boeNoticed (const Nothing) -mbeRequested = mapMbe (const Nothing) boeRequested (const 
Nothing) -mbeAcquired = mapMbe bfeForged boeFetched (const Nothing) -mbeAnnounced = mapMbe bfeAnnounced boeAnnounced (const Nothing) -mbeSending = mapMbe bfeSending boeSending (const Nothing) -mbeAdopted = mapMbe bfeAdopted boeAdopted (const Nothing) +mbeNoticed, mbeRequested, mbeAcquired, mbeAnnounced, mbeSending, mbeAdopted :: MachBlockEvents a -> SMaybe a +mbeNoticed = mapMbe (const SNothing) boeNoticed (const SNothing) +mbeRequested = mapMbe (const SNothing) boeRequested (const SNothing) +mbeAcquired = mapMbe bfeForged boeFetched (const SNothing) +mbeAnnounced = mapMbe bfeAnnounced boeAnnounced (const SNothing) +mbeSending = mapMbe bfeSending boeSending (const SNothing) +mbeAdopted = mapMbe bfeAdopted boeAdopted (const SNothing) -mbeBlockSize :: MachBlockEvents a -> Maybe Int -mbeBlockSize = mapMbe bfeBlockSize (const Nothing) (const Nothing) +mbeBlockSize :: MachBlockEvents a -> SMaybe Int +mbeBlockSize = mapMbe bfeBlockSize (const SNothing) (const SNothing) mbeHost :: MachBlockEvents a -> Host mbeHost = mapMbe bfeHost boeHost eHost @@ -248,21 +227,25 @@ mbeBlockNo :: MachBlockEvents a -> BlockNo mbeBlockNo = mapMbe bfeBlockNo boeBlockNo (const (-1)) -- | Machine's private view of all the blocks. -type MachBlockMap a +type MachHashBlockEvents a = Map.Map Hash (MachBlockEvents a) +-- An accumulator for: tip-block-events & the set of all block events data MachView = MachView { mvHost :: !Host - , mvBlocks :: !(MachBlockMap UTCTime) - , mvStarted :: !(Maybe UTCTime) - , mvBlkCtx :: !(Maybe UTCTime) - , mvLgrState :: !(Maybe UTCTime) - , mvLgrView :: !(Maybe UTCTime) - , mvLeading :: !(Maybe UTCTime) + , mvBlocks :: !(MachHashBlockEvents UTCTime) + , mvStarted :: !(SMaybe UTCTime) + , mvBlkCtx :: !(SMaybe UTCTime) + , mvLgrState :: !(SMaybe UTCTime) + , mvLgrView :: !(SMaybe UTCTime) + , mvLeading :: !(SMaybe UTCTime) } + deriving (FromJSON, Generic, NFData, ToJSON) +mvForges :: MachView -> [ForgerEvents UTCTime] +mvForges = mapMaybe (mbeForge . snd) . Map.toList . mvBlocks + machViewMaxBlock :: MachView -> MachBlockEvents UTCTime machViewMaxBlock MachView{..} = Map.elems mvBlocks @@ -277,34 +260,51 @@ beForgedAt BlockEvents{beForge=BlockForge{..}} = buildMachViews :: Run -> [(JsonLogfile, [LogObject])] -> IO [(JsonLogfile, MachView)] buildMachViews run = mapConcurrentlyPure (fst &&& blockEventMapsFromLogObjects run) -rebuildChain :: Run -> [ChainFilter] -> [FilterName] -> [(JsonLogfile, MachView)] -> IO (DataDomain SlotNo, DataDomain BlockNo, [BlockEvents]) -rebuildChain run@Run{genesis} flts fltNames xs@(fmap snd -> machViews) = do - progress "tip" $ Q $ show $ bfeBlock tipBlock - pure (domSlot, domBlock, chain) +blockEventsAcceptance :: Genesis -> [ChainFilter] -> BlockEvents -> [(ChainFilter, Bool)] +blockEventsAcceptance genesis flts be = flts <&> (id &&& testBlockEvents genesis be) + +rebuildChain :: Run -> [ChainFilter] -> [FilterName] -> [(JsonLogfile, MachView)] -> Chain +rebuildChain run@Run{genesis} flts fltNames xs@(fmap snd -> machViews) = + Chain + { cDomSlots = DataDomain + (Interval (blk0 & beSlotNo) (blkL & beSlotNo)) + (mFltDoms <&> fst3) + (beSlotNo blkL - beSlotNo blk0 & fromIntegral . unSlotNo) + (mFltDoms <&> thd3 & fromMaybe 0) + , cDomBlocks = DataDomain + (Interval (blk0 & beBlockNo) (blkL & beBlockNo)) + (mFltDoms <&> snd3) + (length cMainChain) + (length accepta) + , cBlockStats = Map.fromList $ machViews <&> (mvHost &&& mvBlockStats) + , .. + } where - (blk0, blkL) = (head chain, last chain) - mblkV = - liftA2 (,) (find (null . 
beNegAcceptance) chain) - (find (null . beNegAcceptance) (reverse chain)) - domSlot = DataDomain - (blk0 & beSlotNo) (blkL & beSlotNo) - (mblkV <&> beSlotNo . fst) - (mblkV <&> beSlotNo . snd) - (beSlotNo blkL - beSlotNo blk0 & fromIntegral . unSlotNo) - (mblkV & - maybe 0 (fromIntegral . unSlotNo . uncurry (on (-) beSlotNo))) - domBlock = DataDomain - (blk0 & beBlockNo) (blkL & beBlockNo) - (mblkV <&> beBlockNo . fst) - (mblkV <&> beBlockNo . snd) - (length chain) - (length acceptableChain) - - acceptableChain = filter (null . beNegAcceptance) chain - chain = computeChainBlockGaps $ - doRebuildChain (fmap deltifyEvents <$> eventMaps) tipHash - - eventMaps = mvBlocks <$> machViews + cMainChain = computeChainBlockGaps $ + doRebuildChain (fmap deltifyEvents <$> eventMaps) tipHash + (accepta, cRejecta) = partition (all snd . beAcceptance) cMainChain + + blkSets :: (Set Hash, Set Hash) + blkSets@(acceptaBlocks, rejectaBlocks) = + both (Set.fromList . fmap beBlock) (accepta, cRejecta) + mvBlockStats :: MachView -> BlockStats + mvBlockStats (fmap bfeBlock . mvForges -> fs) = BlockStats {..} + where bsUnchained = (countListAll fs & unsafeCoerceCount) + - bsFiltered - bsRejected + bsFiltered = countList (`Set.member` acceptaBlocks) fs & unsafeCoerceCount + bsRejected = countList (`Set.member` rejectaBlocks) fs & unsafeCoerceCount + + (blk0, blkL) = (head &&& last) cMainChain + mFltDoms :: Maybe (Interval SlotNo, Interval BlockNo, Int) + mFltDoms = + liftA2 (,) (find (all snd . beAcceptance) cMainChain) + (find (all snd . beAcceptance) (reverse cMainChain)) + <&> \firstLastBlk -> + (,,) (uncurry Interval $ both beSlotNo firstLastBlk) + (uncurry Interval $ both beBlockNo firstLastBlk) + (fromIntegral . unSlotNo . uncurry (on (flip (-)) beSlotNo) $ firstLastBlk) + + eventMaps = machViews <&> mvBlocks finalBlockEv = maximumBy ordBlockEv $ machViewMaxBlock <$> machViews @@ -320,13 +320,13 @@ rebuildChain run@Run{genesis} flts fltNames xs@(fmap snd -> machViews) = do step prevForge x@(beForgedAt -> at) = (at, x { beForge = (beForge x) { bfBlockGap = at `diffUTCTime` prevForge } }) - rewindChain :: [MachBlockMap a] -> Int -> Hash -> Hash + rewindChain :: [MachHashBlockEvents a] -> Int -> Hash -> Hash rewindChain eventMaps count tip = go tip count where go tip = \case 0 -> tip n -> go (bfeBlockPrev $ getBlockForge eventMaps tip) (n - 1) - getBlockForge :: [MachBlockMap a] -> Hash -> ForgerEvents a + getBlockForge :: [MachHashBlockEvents a] -> Hash -> ForgerEvents a getBlockForge xs h = mapMaybe (Map.lookup h) xs & find mbeForgP @@ -338,25 +338,32 @@ rebuildChain run@Run{genesis} flts fltNames xs@(fmap snd -> machViews) = do & mapMbe id (error "Silly invariant failed.") (error "Silly invariant failed.") adoptionMap :: [Map Hash UTCTime] - adoptionMap = Map.mapMaybe mbeAdopted <$> eventMaps - - heightMap :: Map BlockNo (Set Hash) - heightMap = foldr (\em acc -> - Map.foldr - (\mbe -> Map.alter - (maybe (Just $ Set.singleton (mbeBlock mbe)) - (Just . Set.insert (mbeBlock mbe))) - (mbeBlockNo mbe)) - acc em) - mempty eventMaps - - doRebuildChain :: [MachBlockMap NominalDiffTime] -> Hash -> [BlockEvents] - doRebuildChain machBlockMaps tip = go (Just tip) [] + adoptionMap = Map.mapMaybe (lazySMaybe . mbeAdopted) <$> eventMaps + + heightHostMap :: (Map BlockNo (Set Hash), Map Host (Set Hash)) + heightHostMap@(heightMap, hostMap) + = foldr (\MachView{..} (accHeight, accHost) -> + (,) + (Map.foldr + (\mbe -> Map.alter + (maybe (Just $ Set.singleton (mbeBlock mbe)) + (Just . 
Set.insert (mbeBlock mbe))) + (mbeBlockNo mbe)) + accHeight mvBlocks) + (Map.insert + mvHost + (Map.elems mvBlocks + & Set.fromList . fmap bfeBlock . mapMaybe mbeForge) + accHost)) + (mempty, mempty) machViews + + doRebuildChain :: [MachHashBlockEvents NominalDiffTime] -> Hash -> [BlockEvents] + doRebuildChain machBlockMaps chainTipHash = go (Just chainTipHash) [] where go Nothing acc = acc - go (Just h) acc = - case partitionMbes $ mapMaybe (Map.lookup h) machBlockMaps of + go (Just hash) acc = + case partitionMbes $ mapMaybe (Map.lookup hash) machBlockMaps of ([], _, ers) -> error $ mconcat - [ "No forger for hash ", show h + [ "No forger for hash ", show hash , "\nErrors:\n" ] ++ intercalate "\n" (show <$> ers) blkEvs@(forgerEv:_, oEvs, ers) -> @@ -391,122 +398,141 @@ rebuildChain run@Run{genesis} flts fltNames xs@(fmap snd -> machViews) = do -- that it has no impact on statistics, quite frankly , bfAnnounced = bfeAnnounced <|> (if True -- bfeBlockNo == 0 -- silliness - then Just 0.01 else Nothing) + then SJust 0.01 else SNothing) & handleMiss "Δt Announced (forger)" , bfSending = bfeSending <|> (if True -- bfeBlockNo == 0 -- silliness - then Just 0.01 else Nothing) + then SJust 0.01 else SNothing) & handleMiss "Δt Sending (forger)" , bfAdopted = bfeAdopted <|> (if True -- bfeBlockNo == 0 -- silliness - then Just 0.01 else Nothing) + then SJust 0.01 else SNothing) & handleMiss "Δt Adopted (forger)" , bfChainDelta = bfeChainDelta } + , beForks = unsafeCoerceCount $ countListAll otherBlocks , beObservations = - catMaybes $ + catSMaybes $ os <&> \ObserverEvents{..}-> BlockObservation - <$> Just boeHost - <*> Just bfeSlotStart + <$> SJust boeHost + <*> SJust bfeSlotStart <*> boeNoticed <*> boeRequested <*> boeFetched - <*> Just boeAnnounced - <*> Just boeSending - <*> Just boeAdopted - <*> Just boeChainDelta - <*> Just boeErrorsCrit - <*> Just boeErrorsSoft + <*> SJust boeAnnounced + <*> SJust boeSending + <*> SJust boeAdopted + <*> SJust boeChainDelta + <*> SJust boeErrorsCrit + <*> SJust boeErrorsSoft , bePropagation = cdf adoptionCentiles adoptions - , beOtherBlocks = otherBlocks + , beOtherBlocks = otherBlocks <&> + \(ForgerEvents{bfeBlock}, _) -> bfeBlock , beErrors = errs - <> (otherBlocks <&> - \blk -> - fail' (findForger blk) bfeBlock $ BPEFork blk) + <> (otherBlocks <&> snd) <> bfeErrs <> concatMap boeErrorsCrit os <> concatMap boeErrorsSoft os - , beNegAcceptance = - -- Again, here we find out filters which reject this block: - filter (not . testBlockEvents genesis blockEvents) flts + , beAcceptance = blockEventsAcceptance genesis flts blockEvents } adoptions = (fmap (`sinceSlot` bfeSlotStart) . 
Map.lookup bfeBlock) `mapMaybe` adoptionMap - otherBlocks = Map.lookup bfeBlockNo heightMap - & handleMiss "height map" - & Set.delete bfeBlock - & Set.toList - - findForger :: Hash -> Host + otherBlocks = otherBlockHashes <&> + \blk -> + let forger = findForger blk in + (forger, + fail' (bfeHost forger) bfeBlock (BPEFork blk)) + otherBlockHashes = Map.lookup bfeBlockNo heightMap + & strictMaybe + & handleMiss "height map" + & Set.delete bfeBlock + & Set.toList + + findForger :: Hash -> ForgerEvents UTCTime findForger hash = maybe - (Host "?") - (mapMbe bfeHost (error "Invariant failed") (error "Invariant failed")) + (error $ "Unknown host for block " <> show hash) + (mapMbe id (error "Invariant failed") (error "Invariant failed")) (mapMaybe (Map.lookup hash) eventMaps & find mbeForgP) fail' :: Host -> Hash -> BPErrorKind -> BPError fail' host hash desc = BPError host hash Nothing desc - handleMiss :: String -> Maybe a -> a - handleMiss slotDesc = fromMaybe $ error $ mconcat + handleMiss :: String -> SMaybe a -> a + handleMiss slotDesc = fromSMaybe $ error $ mconcat [ "While processing ", show bfeBlockNo, " hash ", show bfeBlock , " forged by ", show (unHost host) , " -- missing: ", slotDesc ] -blockProp :: Run -> [BlockEvents] -> DataDomain SlotNo -> DataDomain BlockNo -> IO BlockPropOne -blockProp run@Run{genesis} fullChain domSlot domBlock = do - progress "block-propagation" $ J (domSlot, domBlock) +blockProp :: Run -> Chain -> IO BlockPropOne +blockProp run@Run{genesis} Chain{..} = do pure $ BlockProp - { bpDomainSlots = domSlot - , bpDomainBlocks = domBlock - , bpForgerStarts = forgerEventsCDF (Just . bfStarted . beForge) - , bpForgerBlkCtx = forgerEventsCDF (bfBlkCtx . beForge) - , bpForgerLgrState = forgerEventsCDF (bfLgrState . beForge) - , bpForgerLgrView = forgerEventsCDF (bfLgrView . beForge) - , bpForgerLeads = forgerEventsCDF (Just . bfLeading . beForge) - , bpForgerForges = forgerEventsCDF (Just . bfForged . beForge) - , bpForgerAnnouncements = forgerEventsCDF (Just . bfAnnounced . beForge) - , bpForgerSends = forgerEventsCDF (Just . bfSending . beForge) - , bpForgerAdoptions = forgerEventsCDF (Just . bfAdopted . beForge) - , bpPeerNotices = observerEventsCDF (Just . boNoticed) "noticed" - , bpPeerRequests = observerEventsCDF (Just . boRequested) "requested" - , bpPeerFetches = observerEventsCDF (Just . boFetched) "fetched" - , bpPeerAnnouncements = observerEventsCDF boAnnounced "announced" - , bpPeerSends = observerEventsCDF boSending "sending" - , bpPeerAdoptions = observerEventsCDF boAdopted "adopted" - , bpPropagation = - [ (p', forgerEventsCDF (Just . unI . projectCDF' "bePropagation" p . bePropagation)) + { bpDomainSlots = [cDomSlots] + , bpDomainBlocks = [cDomBlocks] + , cdfForgerStarts = forgerEventsCDF (SJust . bfStarted . beForge) + , cdfForgerBlkCtx = forgerEventsCDF (bfBlkCtx . beForge) + , cdfForgerLgrState = forgerEventsCDF (bfLgrState . beForge) + , cdfForgerLgrView = forgerEventsCDF (bfLgrView . beForge) + , cdfForgerLeads = forgerEventsCDF (SJust . bfLeading . beForge) + , cdfForgerForges = forgerEventsCDF (SJust . bfForged . beForge) + , cdfForgerAnnouncements = forgerEventsCDF (SJust . bfAnnounced . beForge) + , cdfForgerSends = forgerEventsCDF (SJust . bfSending . beForge) + , cdfForgerAdoptions = forgerEventsCDF (SJust . bfAdopted . beForge) + , cdfPeerNotices = observerEventsCDF (SJust . boNoticed) "noticed" + , cdfPeerRequests = observerEventsCDF (SJust . boRequested) "requested" + , cdfPeerFetches = observerEventsCDF (SJust . 
boFetched) "fetched" + , cdfPeerAnnouncements = observerEventsCDF boAnnounced "announced" + , cdfPeerSends = observerEventsCDF boSending "sending" + , cdfPeerAdoptions = observerEventsCDF boAdopted "adopted" + , bpPropagation = Map.fromList + [ ( T.pack $ printf "cdf%.2f" p' + , forgerEventsCDF (SJust . unI . projectCDF' "bePropagation" p . bePropagation)) | p@(Centile p') <- adoptionCentiles <> [Centile 1.0] ] - , bpSizes = forgerEventsCDF (Just . bfBlockSize . beForge) - , bpVersion = getVersion + , cdfBlockBattles = forgerEventsCDF (SJust . unCount . beForks) + , cdfBlockSizes = forgerEventsCDF (SJust . bfBlockSize . beForge) + , bpVersion = getLocliVersion + , cdfBlocksPerHost = cdf stdCentiles (blockStats <&> unCount + . bsTotal) + , cdfBlocksFilteredRatio = cdf stdCentiles (blockStats <&> + uncurry ((/) `on` + fromIntegral . unCount) + . (bsFiltered &&& bsChained)) + , cdfBlocksChainedRatio = cdf stdCentiles (blockStats <&> + uncurry ((/) `on` + fromIntegral . unCount) + . (bsChained &&& bsTotal)) } where - analysisChain = filter (null . beNegAcceptance) fullChain + blockStats = Map.elems cBlockStats + + analysisChain :: [BlockEvents] + analysisChain = filter (all snd . beAcceptance) cMainChain - forgerEventsCDF :: Divisible a => (BlockEvents -> Maybe a) -> CDF I a + forgerEventsCDF :: Divisible a => (BlockEvents -> SMaybe a) -> CDF I a forgerEventsCDF = flip (witherToDistrib (cdf stdCentiles)) analysisChain + observerEventsCDF :: (BlockObservation -> SMaybe NominalDiffTime) -> String -> CDF I NominalDiffTime observerEventsCDF = mapChainToPeerBlockObservationCDF stdCentiles analysisChain mapChainToBlockEventCDF :: Divisible a => [Centile] -> [BlockEvents] - -> (BlockEvents -> Maybe a) + -> (BlockEvents -> SMaybe a) -> CDF I a mapChainToBlockEventCDF percs cbes proj = cdf percs $ - mapMaybe proj cbes + mapSMaybe proj cbes mapChainToPeerBlockObservationCDF :: [Centile] -> [BlockEvents] - -> (BlockObservation -> Maybe NominalDiffTime) + -> (BlockObservation -> SMaybe NominalDiffTime) -> String -> CDF I NominalDiffTime mapChainToPeerBlockObservationCDF percs cbes proj desc = @@ -515,15 +541,15 @@ blockProp run@Run{genesis} fullChain domSlot domBlock = do where blockObservations :: BlockEvents -> [NominalDiffTime] blockObservations be = - proj `mapMaybe` filter isValidBlockObservation (beObservations be) + proj `mapSMaybe` filter isValidBlockObservation (beObservations be) witherToDistrib :: ([b] -> CDF p b) - -> (a -> Maybe b) + -> (a -> SMaybe b) -> [a] -> CDF p b witherToDistrib distrify proj xs = - distrify $ mapMaybe proj xs + distrify $ mapSMaybe proj xs -- | Given a single machine's log object stream, recover its block map. 
blockEventMapsFromLogObjects :: Run -> (JsonLogfile, [LogObject]) -> MachView @@ -536,11 +562,11 @@ blockEventMapsFromLogObjects run (f@(unJsonLogfile -> fp), xs@(x:_)) = MachView { mvHost = loHost x , mvBlocks = mempty - , mvStarted = Nothing - , mvBlkCtx = Nothing - , mvLgrState = Nothing - , mvLgrView = Nothing - , mvLeading = Nothing + , mvStarted = SNothing + , mvBlkCtx = SNothing + , mvLgrState = SNothing + , mvLgrView = SNothing + , mvLeading = SNothing } blockPropMachEventsStep :: Run -> JsonLogfile -> MachView -> LogObject -> MachView @@ -554,32 +580,32 @@ blockPropMachEventsStep run@Run{genesis} (JsonLogfile fp) mv@MachView{..} lo = c loHost loBlock loBlockNo loSlotNo (slotStart genesis loSlotNo) -- t+0: slot start - (Just loAt) -- Noticed - Nothing -- Requested - Nothing -- Fetched - Nothing -- Announced - Nothing -- Sending - Nothing 0 -- Adopted & chain delta + (SJust loAt) -- Noticed + SNothing -- Requested + SNothing -- Fetched + SNothing -- Announced + SNothing -- Sending + SNothing 0 -- Adopted & chain delta [] []) & doInsert loBlock -- 1. Request (observer only) LogObject{loAt, loHost, loBody=LOBlockFetchClientRequested{loBlock,loLength}} -> let mbe0 = getBlock loBlock & fromMaybe (fail loHost loBlock $ BPEUnexpectedAsFirst Request) - in if isJust (mbeRequested mbe0) then mv else + in if isSJust (mbeRequested mbe0) then mv else bimapMbe' (const . Left $ fail' loHost loBlock $ BPEUnexpectedForForger Request) - (\x -> Right x { boeRequested=Just loAt, boeChainDelta=loLength `max` boeChainDelta x }) + (\x -> Right x { boeRequested=SJust loAt, boeChainDelta=loLength `max` boeChainDelta x }) mbe0 & doInsert loBlock -- 2. Acquire:Fetch (observer only) LogObject{loAt, loHost, loBody=LOBlockFetchClientCompletedFetch{loBlock}} -> let mbe0 = getBlock loBlock & fromMaybe (fail loHost loBlock $ BPEUnexpectedAsFirst Fetch) - in if isJust (mbeAcquired mbe0) then mv else + in if isSJust (mbeAcquired mbe0) then mv else bimapMbe' (const . Left $ fail' loHost loBlock (BPEUnexpectedForForger Fetch)) - (\x -> Right x { boeFetched=Just loAt }) + (\x -> Right x { boeFetched=SJust loAt }) mbe0 & doInsert loBlock -- 2. 
Acquire:Forge (forger only) @@ -599,16 +625,16 @@ blockPropMachEventsStep run@Run{genesis} (JsonLogfile fp) mv@MachView{..} lo = c , bfeSlotNo = loSlotNo , bfeSlotStart = slotStart genesis loSlotNo , bfeEpochNo = fst $ genesis `unsafeParseSlot` loSlotNo - , bfeBlockSize = Nothing + , bfeBlockSize = SNothing , bfeStarted = mvStarted , bfeBlkCtx = mvBlkCtx , bfeLgrState = mvLgrState , bfeLgrView = mvLgrView , bfeLeading = mvLeading - , bfeForged = Just loAt - , bfeAnnounced = Nothing - , bfeSending = Nothing - , bfeAdopted = Nothing + , bfeForged = SJust loAt + , bfeAnnounced = SNothing + , bfeSending = SNothing + , bfeAdopted = SNothing , bfeChainDelta = 0 , bfeErrs = [] }) @@ -619,14 +645,14 @@ blockPropMachEventsStep run@Run{genesis} (JsonLogfile fp) mv@MachView{..} lo = c let mbe0 = getBlock loBlock & fromMaybe (fail loHost loBlock $ BPEUnexpectedAsFirst Adopt) in - if isJust (mbeAdopted mbe0) && isJust (mbeBlockSize mbe0) + if isSJust (mbeAdopted mbe0) && isSJust (mbeBlockSize mbe0) then mv else mbe0 - & (if isJust (mbeAdopted mbe0) then id else + & (if isSJust (mbeAdopted mbe0) then id else bimapMbe - (\x -> x { bfeAdopted=Just loAt, bfeChainDelta=loLength }) - (\x -> x { boeAdopted=Just loAt, boeChainDelta=loLength `max` boeChainDelta x})) - & (if isJust (mbeBlockSize mbe0) || isNothing loSize then id else + (\x -> x { bfeAdopted=SJust loAt, bfeChainDelta=loLength }) + (\x -> x { boeAdopted=SJust loAt, boeChainDelta=loLength `max` boeChainDelta x})) + & (if isSJust (mbeBlockSize mbe0) || isSNothing loSize then id else bimapMbe (\x -> x { bfeBlockSize=loSize }) id) @@ -635,33 +661,33 @@ blockPropMachEventsStep run@Run{genesis} (JsonLogfile fp) mv@MachView{..} lo = c LogObject{loAt, loHost, loBody=LOChainSyncServerSendHeader{loBlock}} -> let mbe0 = getBlock loBlock & fromMaybe (fail loHost loBlock $ BPEUnexpectedAsFirst Announce) - in if isJust (mbeAnnounced mbe0) then mv else + in if isSJust (mbeAnnounced mbe0) then mv else bimapMbe - (\x -> x { bfeAnnounced=Just loAt }) - (\x -> x { boeAnnounced=Just loAt }) + (\x -> x { bfeAnnounced=SJust loAt }) + (\x -> x { boeAnnounced=SJust loAt }) mbe0 & doInsert loBlock -- 5. 
Sending started LogObject{loAt, loHost, loBody=LOBlockFetchServerSending{loBlock}} -> let mbe0 = getBlock loBlock & fromMaybe (fail loHost loBlock $ BPEUnexpectedAsFirst Send) - in if isJust (mbeSending mbe0) then mv else + in if isSJust (mbeSending mbe0) then mv else bimapMbe - (\x -> x { bfeSending=Just loAt }) - (\x -> x { boeSending=Just loAt }) + (\x -> x { bfeSending=SJust loAt }) + (\x -> x { boeSending=SJust loAt }) mbe0 & doInsert loBlock LogObject{loAt, loBody=LOTraceStartLeadershipCheck{}} -> - mv { mvStarted = Just loAt } + mv { mvStarted = SJust loAt } LogObject{loAt, loBody=LOBlockContext{}} -> - mv { mvBlkCtx = Just loAt } + mv { mvBlkCtx = SJust loAt } LogObject{loAt, loBody=LOLedgerState{}} -> - mv { mvLgrState = Just loAt } + mv { mvLgrState = SJust loAt } LogObject{loAt, loBody=LOLedgerView{}} -> - mv { mvLgrView = Just loAt } + mv { mvLgrView = SJust loAt } LogObject{loAt, loBody=LOTraceLeadershipDecided _ leading} -> if not leading then mv - else mv { mvLeading = Just loAt } + else mv { mvLeading = SJust loAt } _ -> mv where fail' :: Host -> Hash -> BPErrorKind -> BPError @@ -713,12 +739,12 @@ collectEventErrors :: MachBlockEvents NominalDiffTime -> [Phase] -> [BPError] collectEventErrors mbe phases = [ BPError (mbeHost mbe) (mbeBlock mbe) Nothing $ case (miss, proj) of - (,) True _ -> BPEMissingPhase phase - (,) _ (Just neg) -> BPENegativePhase phase neg + (,) True _ -> BPEMissingPhase phase + (,) _ (SJust neg) -> BPENegativePhase phase neg _ -> error "Impossible." | phase <- phases , let proj = mbeGetProjection phase mbe - , let miss = isNothing proj - , let neg = ((< 0) <$> proj) == Just True + , let miss = isSNothing proj + , let neg = ((< 0) <$> proj) == SJust True , miss || neg ] diff --git a/bench/locli/src/Cardano/Analysis/MachPerf.hs b/bench/locli/src/Cardano/Analysis/MachPerf.hs index fee8228acd9..4cccc818dab 100644 --- a/bench/locli/src/Cardano/Analysis/MachPerf.hs +++ b/bench/locli/src/Cardano/Analysis/MachPerf.hs @@ -1,10 +1,8 @@ -{-# LANGUAGE DeriveAnyClass #-} {-# LANGUAGE StrictData #-} {-# OPTIONS_GHC -Wno-incomplete-patterns -Wno-name-shadowing -Wno-orphans #-} {- HLINT ignore "Use head" -} module Cardano.Analysis.MachPerf (module Cardano.Analysis.MachPerf) where -import Prelude (head, last) import Cardano.Prelude hiding (head) import Cardano.Prelude qualified as CP @@ -15,271 +13,33 @@ import Data.Text.Short (toText) import Data.Vector (Vector) import Data.Vector qualified as Vec -import Data.Time.Clock (NominalDiffTime, UTCTime, diffUTCTime) import Data.Time.Clock qualified as Time import Data.CDF - +import Cardano.Util import Cardano.Analysis.API -import Cardano.Analysis.Chain -import Cardano.Analysis.ChainFilter -import Cardano.Analysis.Context -import Cardano.Analysis.Ground -import Cardano.Analysis.Run import Cardano.Unlog.LogObject hiding (Text) import Cardano.Unlog.Resources -summariseMultiClusterPerf :: [Centile] -> [ClusterPerf] -> Either CDFError MultiClusterPerf -summariseMultiClusterPerf _ [] = error "Asked to summarise empty list of MachPerfOne" -summariseMultiClusterPerf centiles mps@(headline:_) = do - sMissCDF <- cdf2OfCDFs comb $ mps <&> sMissCDF - sLeadsCDF <- cdf2OfCDFs comb $ mps <&> sLeadsCDF - sUtxoCDF <- cdf2OfCDFs comb $ mps <&> sUtxoCDF - sDensityCDF <- cdf2OfCDFs comb $ mps <&> sDensityCDF - sStartedCDF <- cdf2OfCDFs comb $ mps <&> sStartedCDF - sBlkCtxCDF <- cdf2OfCDFs comb $ mps <&> sBlkCtxCDF - sLgrStateCDF <- cdf2OfCDFs comb $ mps <&> sLgrStateCDF - sLgrViewCDF <- cdf2OfCDFs comb $ mps <&> sLgrViewCDF - sLeadingCDF <- 
cdf2OfCDFs comb $ mps <&> sLeadingCDF - sForgedCDF <- cdf2OfCDFs comb $ mps <&> sForgedCDF - sBlockGapCDF <- cdf2OfCDFs comb $ mps <&> sBlockGapCDF - sSpanLensCpuCDF <- cdf2OfCDFs comb $ mps <&> sSpanLensCpuCDF - sSpanLensCpuEpochCDF <- cdf2OfCDFs comb $ mps <&> sSpanLensCpuEpochCDF - sSpanLensCpuRwdCDF <- cdf2OfCDFs comb $ mps <&> sSpanLensCpuRwdCDF - sResourceCDFs <- sequence $ traverse identity (mps <&> sResourceCDFs) <&> - \case - [] -> Left CDFEmptyDataset - (xs :: [CDF (CDF I) Word64]) -> cdf2OfCDFs comb xs :: Either CDFError (CDF (CDF I) Word64) - - pure . MultiClusterPerf $ MachPerf - { sVersion = sVersion headline - , sDomainSlots = dataDomainsMergeOuter $ mps <&> sDomainSlots - , .. - } - where - comb :: forall a. Divisible a => Combine (CDF I) a - comb = stdCombine2 centiles - -summariseClusterPerf :: [Centile] -> [MachPerfOne] -> Either CDFError ClusterPerf -summariseClusterPerf _ [] = error "Asked to summarise empty list of MachPerfOne" -summariseClusterPerf centiles mps@(headline:_) = do - sMissCDF <- cdf2OfCDFs comb $ mps <&> sMissCDF - sLeadsCDF <- cdf2OfCDFs comb $ mps <&> sLeadsCDF - sUtxoCDF <- cdf2OfCDFs comb $ mps <&> sUtxoCDF - sDensityCDF <- cdf2OfCDFs comb $ mps <&> sDensityCDF - sStartedCDF <- cdf2OfCDFs comb $ mps <&> sStartedCDF - sBlkCtxCDF <- cdf2OfCDFs comb $ mps <&> sBlkCtxCDF - sLgrStateCDF <- cdf2OfCDFs comb $ mps <&> sLgrStateCDF - sLgrViewCDF <- cdf2OfCDFs comb $ mps <&> sLgrViewCDF - sLeadingCDF <- cdf2OfCDFs comb $ mps <&> sLeadingCDF - sForgedCDF <- cdf2OfCDFs comb $ mps <&> sForgedCDF - sBlockGapCDF <- cdf2OfCDFs comb $ mps <&> sBlockGapCDF - sSpanLensCpuCDF <- cdf2OfCDFs comb $ mps <&> sSpanLensCpuCDF - sSpanLensCpuEpochCDF <- cdf2OfCDFs comb $ mps <&> sSpanLensCpuEpochCDF - sSpanLensCpuRwdCDF <- cdf2OfCDFs comb $ mps <&> sSpanLensCpuRwdCDF - sResourceCDFs <- sequence $ traverse identity (mps <&> sResourceCDFs) <&> - \case - [] -> Left CDFEmptyDataset - (xs :: [CDF I Word64]) -> cdf2OfCDFs comb xs :: Either CDFError (CDF (CDF I) Word64) - - pure MachPerf - { sVersion = sVersion headline - , sDomainSlots = dataDomainsMergeOuter $ mps <&> sDomainSlots - , .. - } - where - comb :: forall a. Divisible a => Combine I a - comb = stdCombine1 centiles - --- | A side-effect of analysis -data RunScalars - = RunScalars - { rsElapsed :: Maybe NominalDiffTime - , rsSubmitted :: Maybe Word64 - , rsThreadwiseTps :: Maybe (Vector Double) - } - deriving stock Generic - deriving anyclass NFData - - -deltifySlotStats :: Genesis -> SlotStats UTCTime -> SlotStats NominalDiffTime -deltifySlotStats gsis s@SlotStats{..} = - s - { slStarted = slStarted <&> (`sinceSlot` slotStart gsis slSlot) - , slBlkCtx = diffUTCTime <$> slBlkCtx <*> slStarted - , slLgrState = diffUTCTime <$> slLgrState <*> slBlkCtx - , slLgrView = diffUTCTime <$> slLgrView <*> slLgrState - , slLeading = (diffUTCTime <$> slLeading <*> slLgrView) - <|> - (diffUTCTime <$> slLeading <*> slStarted) - , slForged = diffUTCTime <$> slForged <*> slLeading - } - +-- * 1. Collect SlotStats & RunScalars: +-- collectSlotStats :: Run -> [(JsonLogfile, [LogObject])] -> IO (Either Text [(JsonLogfile, (RunScalars, [SlotStats UTCTime]))]) collectSlotStats run = fmap sequence <$> mapConcurrentlyPure (timelineFromLogObjects run) -runSlotFilters :: - NFData a => - Run - -> [ChainFilter] - -> [(JsonLogfile, [SlotStats a])] - -> IO (DataDomain SlotNo, [(JsonLogfile, [SlotStats a])]) -runSlotFilters Run{genesis} flts slots = do - filtered <- mapConcurrentlyPure (fmap $ filterSlotStats flts) slots - let samplePre = slots !! 
0 & snd - samplePost = filtered !! 0 & snd - domain = mkDataDomain - ((CP.head samplePre <&> slSlot) & fromMaybe 0) - ((lastMay samplePre <&> slSlot) & fromMaybe 0) - ((CP.head samplePost <&> slSlot) & fromMaybe 0) - ((lastMay samplePost <&> slSlot) & fromMaybe 0) - (fromIntegral . unSlotNo) - progress "filtered-slotstats-slot-domain" $ J domain - pure $ (,) domain filtered - - where - filterSlotStats :: [ChainFilter] -> [SlotStats a] -> [SlotStats a] - filterSlotStats filters = - filter (\x -> all (testSlotStats genesis x) slotFilters) - where - slotFilters :: [SlotCond] - slotFilters = catSlotFilters filters - -data SlotStatsSummary - = SlotStatsSummary - { sssMissRatios :: [Double] - , sssSpanLensCpu :: [Int] - , sssSpanLensCpuEpoch :: [Int] - , sssSpanLensCpuRwd :: [Int] - } - -slotStatsSummary :: forall a. Run -> [SlotStats a] -> SlotStatsSummary -slotStatsSummary Run{genesis=Genesis{epochLength}} slots = - SlotStatsSummary{..} - where - sssMissRatios = missRatio . (maxStarts -) <$> startCounts - sssSpanLensCpu = spanLen <$> spansCpu - sssSpanLensCpuRwd = Vec.length <$> filter (spanContainsEpochSlot rewardCalcBeginSlot) spansCpu - sssSpanLensCpuEpoch = Vec.length <$> spansCpuEpoch - - startCounts = slCountStarts <$> slots - maxStarts = maximum startCounts - - rewardCalcBeginSlot = 3 + floor @Double (fromIntegral epochLength * 0.4) - - missRatio :: Word64 -> Double - missRatio = (/ fromIntegral maxStarts) . fromIntegral - - spansCpu :: [Vector (SlotStats a)] - spansCpu = spans - ((/= Just False) . fmap (>=85) . rCentiCpu . slResources) - (toList slots) - - spansCpuEpoch :: [Vector (SlotStats a)] - spansCpuEpoch = filter (spanContainsEpochSlot 3) spansCpu <&> - \v-> let tailEpoch = slEpoch (Vec.last v) - in if tailEpoch == slEpoch (Vec.head v) then v - else Vec.dropWhile ((tailEpoch == ) . slEpoch) v - - spanLen :: Vector (SlotStats a) -> Int - spanLen = fromIntegral . unSlotNo . uncurry (-) . (slSlot *** slSlot) . (Vec.last &&& Vec.head) - - spanContainsEpochSlot :: Word64 -> Vector (SlotStats a) -> Bool - spanContainsEpochSlot s = - uncurry (&&) - . ((s >) . unEpochSlot . slEpochSlot . Vec.head &&& - (s <) . unEpochSlot . slEpochSlot . Vec.last) - -slotStatsMachPerf :: Run -> (JsonLogfile, [SlotStats NominalDiffTime]) -> Either Text (JsonLogfile, MachPerfOne) -slotStatsMachPerf _ (JsonLogfile f, []) = - Left $ "slotStatsMachPerf: zero filtered slots from " <> pack f -slotStatsMachPerf run (f, slots) = - Right . (f,) $ MachPerf - { sVersion = getVersion - , sDomainSlots = mkDataDomainInj (slSlot $ head slots) (slSlot $ last slots) - (fromIntegral . 
unSlotNo) - -- - , sMissCDF = dist sssMissRatios - , sLeadsCDF = dist (slCountLeads <$> slots) - , sUtxoCDF = dist (slUtxoSize <$> slots) - , sDensityCDF = dist (slDensity <$> slots) - , sStartedCDF = dist (slStarted `mapSMaybe` slots) - , sBlkCtxCDF = dist (slBlkCtx `mapSMaybe` slots) - , sLgrStateCDF = dist (slLgrState `mapSMaybe` slots) - , sLgrViewCDF = dist (slLgrView `mapSMaybe` slots) - , sLeadingCDF = dist (slLeading `mapSMaybe` slots) - , sForgedCDF = dist (filter (/= 0) $ slForged `mapSMaybe` slots) - , sBlockGapCDF = dist (slBlockGap <$> slots) - , sSpanLensCpuCDF = dist sssSpanLensCpu - , sSpanLensCpuEpochCDF = dist sssSpanLensCpuEpoch - , sSpanLensCpuRwdCDF = dist sssSpanLensCpuRwd - , sResourceCDFs = computeResCDF stdCentiles resDistProjs slots - } - where - dist :: Divisible a => [a] -> CDF I a - dist = cdf stdCentiles - - SlotStatsSummary{..} = slotStatsSummary run slots - - resDistProjs = - Resources - { rCentiCpu = rCentiCpu . slResources - , rCentiGC = rCentiGC . slResources - , rCentiMut = rCentiMut . slResources - , rGcsMajor = rGcsMajor . slResources - , rGcsMinor = rGcsMinor . slResources - , rRSS = rRSS . slResources - , rHeap = rHeap . slResources - , rLive = rLive . slResources - , rAlloc = rAlloc . slResources - , rCentiBlkIO = rCentiBlkIO . slResources - , rThreads = rThreads . slResources - } - --- The "fold" state that accumulates as we process 'LogObject's into a stream --- of 'SlotStats'. -data TimelineAccum - = TimelineAccum - { aResAccums :: ResAccums - , aResTimestamp :: UTCTime - , aMempoolTxs :: Word64 - , aBlockNo :: BlockNo - , aLastBlockSlot :: SlotNo - , aSlotStats :: [SlotStats UTCTime] - , aRunScalars :: RunScalars - , aTxsCollectedAt:: Map.Map TId UTCTime - , aHost :: Host - } - -forTAHead :: TimelineAccum -> (SlotStats UTCTime -> SlotStats UTCTime) -> TimelineAccum -forTAHead xs@TimelineAccum{aSlotStats=s:ss} f = xs {aSlotStats=f s:ss} - -forTANth :: TimelineAccum -> Int -> (SlotStats UTCTime -> SlotStats UTCTime) -> TimelineAccum -forTANth xs@TimelineAccum{aSlotStats=ss, aHost} n f = - xs { aSlotStats = mapNth f n ss } - where - mapNth :: (a -> a) -> Int -> [a] -> [a] - mapNth f n xs = - case splitAt n xs of - (pre, x:post) -> pre <> (f x : post) - _ -> error $ mconcat - [ "mapNth: couldn't go ", show n, "-deep into the timeline, " - , "host=", unpack . toText $ unHost aHost - ] timelineFromLogObjects :: Run -> (JsonLogfile, [LogObject]) -> Either Text (JsonLogfile, (RunScalars, [SlotStats UTCTime])) timelineFromLogObjects _ (JsonLogfile f, []) = Left $ "timelineFromLogObjects: zero logobjects from " <> pack f -timelineFromLogObjects run@Run{genesis} (f, xs) = +timelineFromLogObjects run@Run{genesis} (f, xs') = Right . (f,) - $ foldl' (timelineStep run) - zeroTimelineAccum - xs + $ foldl' (timelineStep run f) zeroTimelineAccum xs & (aRunScalars &&& reverse . aSlotStats) where + xs = filter ((/= "DecodeError") . 
loKind) xs' + firstRelevantLogObjectTime :: UTCTime firstRelevantLogObjectTime = loAt (head xs) `max` systemStart genesis firstLogObjectHost :: Host @@ -292,14 +52,14 @@ timelineFromLogObjects run@Run{genesis} (f, xs) = , aResTimestamp = firstRelevantLogObjectTime , aMempoolTxs = 0 , aBlockNo = 0 - , aLastBlockSlot = 0 -- Genesis counts : -) + , aLastBlockSlot = 0 -- Genesis counts , aSlotStats = [zeroSlotStats] , aRunScalars = zeroRunScalars , aTxsCollectedAt= mempty , aHost = firstLogObjectHost } zeroRunScalars :: RunScalars - zeroRunScalars = RunScalars Nothing Nothing Nothing + zeroRunScalars = RunScalars Nothing Nothing Nothing zeroSlotStats :: SlotStats UTCTime zeroSlotStats = SlotStats @@ -327,15 +87,15 @@ timelineFromLogObjects run@Run{genesis} (f, xs) = , slTxsRejected = 0 , slUtxoSize = 0 , slDensity = 0 - , slResources = pure Nothing + , slResources = SNothing , slChainDBSnap = 0 , slRejectedTx = 0 , slBlockNo = 0 , slBlockGap = 0 } -timelineStep :: Run -> TimelineAccum -> LogObject -> TimelineAccum -timelineStep Run{genesis} a@TimelineAccum{aSlotStats=cur:_, ..} lo = +timelineStep :: Run -> JsonLogfile -> TimelineAccum -> LogObject -> TimelineAccum +timelineStep Run{genesis} f a@TimelineAccum{aSlotStats=cur:_, ..} lo = let continue :: SlotNo -> UTCTime -> TimelineAccum continue slot loAt = if slot < slSlot cur then a @@ -360,6 +120,7 @@ timelineStep Run{genesis} a@TimelineAccum{aSlotStats=cur:_, ..} lo = [ desc, " for a future slot=", show slot , " cur=", show (slSlot cur) , " host=", unpack . toText $ unHost host + , " file=", unJsonLogfile f ] else forExistingSlot slot a x in if loAt lo < systemStart genesis then a else @@ -369,7 +130,7 @@ timelineStep Run{genesis} a@TimelineAccum{aSlotStats=cur:_, ..} lo = LogObject{loAt, loBody=LOResources rs} -> continue slot loAt & mapExistingSlot slot - (\sl -> sl { slResources = Just <$> extractResAccums accs }) + (\sl -> sl { slResources = SJust $ extractResAccums accs }) & \a' -> a' { aResAccums = accs , aResTimestamp = loAt } @@ -398,13 +159,13 @@ timelineStep Run{genesis} a@TimelineAccum{aSlotStats=cur:_, ..} lo = LogObject{loBody=LOLedgerTookSnapshot} -> forTAHead a \s-> s { slChainDBSnap = slChainDBSnap cur + 1 } - LogObject{loBody=LOGeneratorSummary _noFails sent elapsed threadwiseTps} -> - a { aRunScalars = aRunScalars - { rsThreadwiseTps = Just threadwiseTps - , rsElapsed = Just elapsed - , rsSubmitted = Just sent - } - } + LogObject{loBody=LOGeneratorSummary _noFails rssub rselap rsthr} -> + a { aRunScalars = + RunScalars + { rsSubmitted = Just rssub + , rsElapsed = Just rselap + , rsThreadwiseTps = Just rsthr + } } LogObject{loBody=LOTxsCollected coll, loTid, loAt} -> (forTAHead a \s-> s { slTxsCollected = slTxsCollected cur + max 0 (fromIntegral coll) }) @@ -486,7 +247,37 @@ timelineStep Run{genesis} a@TimelineAccum{aSlotStats=cur:_, ..} lo = , slForged = SJust now } _ -> a -timelineStep _ a _ = a +timelineStep _ _ a _ = a +-- The "fold" state that accumulates as we process 'LogObject's into a stream +-- of 'SlotStats'. 
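+-- Invariants, as established by 'timelineFromLogObjects' above: 'aSlotStats'
+-- is kept newest-first ('forTAHead' below edits the head slot) and is only
+-- 'reverse'd once the fold completes; 'aResAccums' and 'aResTimestamp' carry
+-- the resource accumulators and the time of the last 'LOResources' sample.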
+data TimelineAccum + = TimelineAccum + { aResAccums :: ResAccums + , aResTimestamp :: UTCTime + , aMempoolTxs :: Word64 + , aBlockNo :: BlockNo + , aLastBlockSlot :: SlotNo + , aSlotStats :: [SlotStats UTCTime] + , aRunScalars :: RunScalars + , aTxsCollectedAt:: Map.Map TId UTCTime + , aHost :: Host + } + +forTAHead :: TimelineAccum -> (SlotStats UTCTime -> SlotStats UTCTime) -> TimelineAccum +forTAHead xs@TimelineAccum{aSlotStats=s:ss} f = xs {aSlotStats=f s:ss} + +forTANth :: TimelineAccum -> Int -> (SlotStats UTCTime -> SlotStats UTCTime) -> TimelineAccum +forTANth xs@TimelineAccum{aSlotStats=ss, aHost} n f = + xs { aSlotStats = mapNth f n ss } + where + mapNth :: (a -> a) -> Int -> [a] -> [a] + mapNth f n xs = + case splitAt n xs of + (pre, x:post) -> pre <> (f x : post) + _ -> error $ mconcat + [ "mapNth: couldn't go ", show n, "-deep into the timeline, " + , "host=", unpack . toText $ unHost aHost + ] lastBlockSlot :: BlockNo -> TimelineAccum -> SlotNo lastBlockSlot new TimelineAccum{aSlotStats=SlotStats{..}:_,..} = @@ -496,14 +287,21 @@ lastBlockSlot new TimelineAccum{aSlotStats=SlotStats{..}:_,..} = patchSlotGap :: Genesis -> SlotNo -> TimelineAccum -> TimelineAccum patchSlotGap genesis curSlot a@TimelineAccum{aSlotStats=last:_, ..} = - a & go (unSlotNo $ curSlot - gapStartSlot) gapStartSlot + a & if gapLen < 1000 + then go gapLen gapStartSlot + else error $ mconcat + [ "patchSlotGap: gap too large: ", show gapLen, ", " + , "curSlot=", show curSlot, ", " + , "gapStartSlot=", show gapStartSlot, ", " + ] where gapStartSlot = slSlot last + 1 + gapLen = unSlotNo $ curSlot - gapStartSlot go :: Word64 -> SlotNo -> TimelineAccum -> TimelineAccum go 0 _ acc = acc - go gapLen patchSlot acc = - go (gapLen - 1) (patchSlot + 1) (acc & addGapSlot patchSlot) + go remainingGap patchSlot acc = + go (remainingGap - 1) (patchSlot + 1) (acc & addGapSlot patchSlot) addGapSlot :: SlotNo -> TimelineAccum -> TimelineAccum addGapSlot slot acc = @@ -538,15 +336,11 @@ patchSlotGap genesis curSlot a@TimelineAccum{aSlotStats=last:_, ..} = , slRejectedTx = 0 , slBlockNo = aBlockNo , slBlockGap = unSlotNo $ slot - aLastBlockSlot - , slResources = maybeDiscard - <$> discardObsoleteValues - <*> extractResAccums aResAccums} + , slResources = SJust $ zeroObsoleteValues + <*> extractResAccums aResAccums} : aSlotStats acc } - where maybeDiscard :: (Word64 -> Maybe Word64) -> Word64 -> Maybe Word64 - maybeDiscard f = f - - slStart = slotStart genesis slot + where slStart = slotStart genesis slot addTimelineSlot :: Genesis -> SlotNo -> UTCTime -> TimelineAccum -> TimelineAccum addTimelineSlot genesis slot time a@TimelineAccum{..} = @@ -581,59 +375,214 @@ addTimelineSlot genesis slot time a@TimelineAccum{..} = , slRejectedTx = 0 , slBlockNo = aBlockNo , slBlockGap = unSlotNo $ slot - aLastBlockSlot - , slResources = maybeDiscard - <$> discardObsoleteValues - <*> extractResAccums aResAccums} + , slResources = SJust $ zeroObsoleteValues + <*> extractResAccums aResAccums} : aSlotStats } - where maybeDiscard :: (Word64 -> Maybe Word64) -> Word64 -> Maybe Word64 - maybeDiscard f = f + where slStart = slotStart genesis slot - slStart = slotStart genesis slot +-- * 2. 
Filter SlotStats: +-- +runSlotFilters :: + NFData a => + Run + -> [ChainFilter] + -> [(JsonLogfile, [SlotStats a])] + -> IO (DataDomain SlotNo, [(JsonLogfile, [SlotStats a])]) +runSlotFilters Run{genesis} flts slots = + mapConcurrentlyPure (fmap $ filterSlotStats flts) slots + <&> \filtered -> + (,) (domain filtered) filtered + where + domain :: [(JsonLogfile, [SlotStats a])] -> DataDomain SlotNo + domain filtered = mkDataDomain + ((CP.head samplePre <&> slSlot) & fromMaybe 0) + ((lastMay samplePre <&> slSlot) & fromMaybe 0) + ((CP.head samplePost <&> slSlot) & fromMaybe 0) + ((lastMay samplePost <&> slSlot) & fromMaybe 0) + (fromIntegral . unSlotNo) + where + samplePre = slots !! 0 & snd + samplePost = filtered !! 0 & snd + + filterSlotStats :: [ChainFilter] -> [SlotStats a] -> [SlotStats a] + filterSlotStats filters = + filter (\x -> all (testSlotStats genesis x) slotFilters) + where + slotFilters :: [SlotCond] + slotFilters = catSlotFilters filters -data DerivedSlot - = DerivedSlot - { dsSlot :: SlotNo - , dsBlockGap :: Word64 +-- * 3. Post-process: +-- +deltifySlotStats :: Genesis -> SlotStats UTCTime -> SlotStats NominalDiffTime +deltifySlotStats gsis s@SlotStats{..} = + s + { slStarted = slStarted <&> (`sinceSlot` slotStart gsis slSlot) + , slBlkCtx = diffUTCTime <$> slBlkCtx <*> slStarted + , slLgrState = diffUTCTime <$> slLgrState <*> slBlkCtx + , slLgrView = diffUTCTime <$> slLgrView <*> slLgrState + , slLeading = (diffUTCTime <$> slLeading <*> slLgrView) + <|> + (diffUTCTime <$> slLeading <*> slStarted) + , slForged = diffUTCTime <$> slForged <*> slLeading } -derivedSlotsHeader :: String -derivedSlotsHeader = - "Slot,BlockGap span" +-- Field 6 "productiv" "Produc" "tivity" (IText +-- (\SlotStats{..}-> +-- f 4 $ calcProd <$> (min 6 . -- workaround for ghc-8.10.2 +-- fromIntegral <$> rCentiMut slResources :: Maybe Double) +-- <*> (fromIntegral <$> rCentiCpu slResources))) "" "" -renderDerivedSlot :: DerivedSlot -> String -renderDerivedSlot DerivedSlot{..} = - mconcat - [ show (unSlotNo dsSlot), ",", show dsBlockGap - ] +-- Field 6 "allocMut" "Alloc/" "mutSec" (IText +-- (\SlotStats{..}-> +-- d 5 $ +-- (ceiling :: Double -> Int) +-- <$> ((/) <$> (fromIntegral . (100 *) <$> rAlloc slResources) +-- <*> (fromIntegral . max 1 . (1024 *) <$> rCentiMut slResources)))) "" "" -computeDerivedVectors :: [SlotStats a] -> ([DerivedSlot], [DerivedSlot]) -computeDerivedVectors ss = - (\(_,_,d0,d1) -> (d0, d1)) $ - foldr step (0, 0, [], []) ss +-- Field 10 0 "absSlotTime" "Absolute" "slot time" $ IText +-- (\SlotStats{..}-> +-- T.pack $ " " `splitOn` show slStart !! 1) + +data SlotStatsSummary + = SlotStatsSummary + { sssSpanLensCpu :: [Int] + , sssSpanLensCpuEpoch :: [Int] + , sssSpanLensCpuRwd :: [Int] + } + +slotStatsSummary :: forall a. 
Run -> [SlotStats a] -> SlotStatsSummary +slotStatsSummary Run{genesis=Genesis{epochLength}} slots = + SlotStatsSummary{..} where - step :: - SlotStats a - -> (Word64, Word64, [DerivedSlot], [DerivedSlot]) - -> (Word64, Word64, [DerivedSlot], [DerivedSlot]) - step SlotStats{..} (lastBlockGap, spanBLSC, accD0, accD1) = - if lastBlockGap < slBlockGap - then ( slBlockGap - , slBlockGap - , DerivedSlot - { dsSlot = slSlot - , dsBlockGap = slBlockGap - }:accD0 - , DerivedSlot - { dsSlot = slSlot - , dsBlockGap = slBlockGap - }:accD1 - ) - else ( slBlockGap - , spanBLSC - , DerivedSlot - { dsSlot = slSlot - , dsBlockGap = spanBLSC - }:accD0 - , accD1 - ) + sssSpanLensCpu = spanLen <$> spansCpu + sssSpanLensCpuRwd = Vec.length <$> filter (spanContainsEpochSlot rewardCalcBeginSlot) spansCpu + sssSpanLensCpuEpoch = Vec.length <$> spansCpuEpoch + + rewardCalcBeginSlot = 3 + floor @Double (fromIntegral epochLength * 0.4) + + spansCpu :: [Vector (SlotStats a)] + spansCpu = spans + ((/= SJust False) . fmap ((>=85) . rCentiCpu) . slResources) + (toList slots) + + spansCpuEpoch :: [Vector (SlotStats a)] + spansCpuEpoch = filter (spanContainsEpochSlot 3) spansCpu <&> + \v-> let tailEpoch = slEpoch (Vec.last v) + in if tailEpoch == slEpoch (Vec.head v) then v + else Vec.dropWhile ((tailEpoch == ) . slEpoch) v + + spanLen :: Vector (SlotStats a) -> Int + spanLen = fromIntegral . unSlotNo . uncurry (-) . (slSlot *** slSlot) . (Vec.last &&& Vec.head) + + spanContainsEpochSlot :: Word64 -> Vector (SlotStats a) -> Bool + spanContainsEpochSlot s = + uncurry (&&) + . ((s >) . unEpochSlot . slEpochSlot . Vec.head &&& + (s <) . unEpochSlot . slEpochSlot . Vec.last) + +-- * 4. Summarise SlotStats & SlotStatsSummary into MachPerf: +-- +slotStatsMachPerf :: Run -> (JsonLogfile, [SlotStats NominalDiffTime]) -> Either Text (JsonLogfile, MachPerfOne) +slotStatsMachPerf _ (JsonLogfile f, []) = + Left $ "slotStatsMachPerf: zero filtered slots from " <> pack f +slotStatsMachPerf run (f, slots) = + Right . (f,) $ MachPerf + { mpVersion = getLocliVersion + , mpDomainSlots = [domSlots] + , cdfHostSlots = dist [fromIntegral $ ddFilteredCount domSlots] + -- + , cdfStarts = dist (slCountStarts <$> slots) + , cdfLeads = dist (slCountLeads <$> slots) + , cdfUtxo = dist (slUtxoSize <$> slots) + , cdfDensity = dist (slDensity <$> slots) + , cdfStarted = dist (slStarted `mapSMaybe` slots) + , cdfBlkCtx = dist (slBlkCtx `mapSMaybe` slots) + , cdfLgrState = dist (slLgrState `mapSMaybe` slots) + , cdfLgrView = dist (slLgrView `mapSMaybe` slots) + , cdfLeading = dist (slLeading `mapSMaybe` slots) + , cdfForged = dist (filter (/= 0) $ slForged `mapSMaybe` slots) + , cdfBlockGap = dist (slBlockGap <$> slots) + , cdfSpanLensCpu = dist sssSpanLensCpu + , cdfSpanLensCpuEpoch = dist sssSpanLensCpuEpoch + , cdfSpanLensCpuRwd = dist sssSpanLensCpuRwd + , mpResourceCDFs = computeResCDF stdCentiles slResources slots + , .. + } + where + domSlots = mkDataDomainInj sFirst sLast (fromIntegral . unSlotNo) + + (,) sFirst sLast = (slSlot . head &&& slSlot . last) slots + + dist :: Divisible a => [a] -> CDF I a + dist = cdf stdCentiles + + SlotStatsSummary{..} = slotStatsSummary run slots + +-- * 5. 
Multi-machine & multi-run summaries: +-- +summariseClusterPerf :: [Centile] -> [MachPerfOne] -> Either CDFError ClusterPerf +summariseClusterPerf _ [] = error "Asked to summarise empty list of MachPerfOne" +summariseClusterPerf centiles mps@(headline:_) = do + cdfHostSlots <- cdf2OfCDFs comb $ mps <&> cdfHostSlots + cdfStarts <- cdf2OfCDFs comb $ mps <&> cdfStarts + cdfLeads <- cdf2OfCDFs comb $ mps <&> cdfLeads + cdfUtxo <- cdf2OfCDFs comb $ mps <&> cdfUtxo + cdfDensity <- cdf2OfCDFs comb $ mps <&> cdfDensity + cdfStarted <- cdf2OfCDFs comb $ mps <&> cdfStarted + cdfBlkCtx <- cdf2OfCDFs comb $ mps <&> cdfBlkCtx + cdfLgrState <- cdf2OfCDFs comb $ mps <&> cdfLgrState + cdfLgrView <- cdf2OfCDFs comb $ mps <&> cdfLgrView + cdfLeading <- cdf2OfCDFs comb $ mps <&> cdfLeading + cdfForged <- cdf2OfCDFs comb $ mps <&> cdfForged + cdfBlockGap <- cdf2OfCDFs comb $ mps <&> cdfBlockGap + cdfSpanLensCpu <- cdf2OfCDFs comb $ mps <&> cdfSpanLensCpu + cdfSpanLensCpuEpoch <- cdf2OfCDFs comb $ mps <&> cdfSpanLensCpuEpoch + cdfSpanLensCpuRwd <- cdf2OfCDFs comb $ mps <&> cdfSpanLensCpuRwd + mpResourceCDFs <- sequence $ traverse identity (mps <&> mpResourceCDFs) <&> + \case + [] -> Left CDFEmptyDataset + (xs :: [CDF I Word64]) -> cdf2OfCDFs comb xs :: Either CDFError (CDF (CDF I) Word64) + + pure MachPerf + { mpVersion = mpVersion headline + , mpDomainSlots = domSlots + , .. + } + where + domSlots = concat $ mps <&> mpDomainSlots + + comb :: forall a. Divisible a => Combine I a + comb = stdCombine1 centiles + +summariseMultiClusterPerf :: [Centile] -> [ClusterPerf] -> Either CDFError MultiClusterPerf +summariseMultiClusterPerf _ [] = error "Asked to summarise empty list of MachPerfOne" +summariseMultiClusterPerf centiles mps@(headline:_) = do + cdfHostSlots <- cdf2OfCDFs comb $ mps <&> cdfHostSlots + cdfStarts <- cdf2OfCDFs comb $ mps <&> cdfStarts + cdfLeads <- cdf2OfCDFs comb $ mps <&> cdfLeads + cdfUtxo <- cdf2OfCDFs comb $ mps <&> cdfUtxo + cdfDensity <- cdf2OfCDFs comb $ mps <&> cdfDensity + cdfStarted <- cdf2OfCDFs comb $ mps <&> cdfStarted + cdfBlkCtx <- cdf2OfCDFs comb $ mps <&> cdfBlkCtx + cdfLgrState <- cdf2OfCDFs comb $ mps <&> cdfLgrState + cdfLgrView <- cdf2OfCDFs comb $ mps <&> cdfLgrView + cdfLeading <- cdf2OfCDFs comb $ mps <&> cdfLeading + cdfForged <- cdf2OfCDFs comb $ mps <&> cdfForged + cdfBlockGap <- cdf2OfCDFs comb $ mps <&> cdfBlockGap + cdfSpanLensCpu <- cdf2OfCDFs comb $ mps <&> cdfSpanLensCpu + cdfSpanLensCpuEpoch <- cdf2OfCDFs comb $ mps <&> cdfSpanLensCpuEpoch + cdfSpanLensCpuRwd <- cdf2OfCDFs comb $ mps <&> cdfSpanLensCpuRwd + mpResourceCDFs <- sequence $ traverse identity (mps <&> mpResourceCDFs) <&> + \case + [] -> Left CDFEmptyDataset + (xs :: [CDF (CDF I) Word64]) -> cdf2OfCDFs comb xs :: Either CDFError (CDF (CDF I) Word64) + + pure . MultiClusterPerf $ MachPerf + { mpVersion = mpVersion headline + , mpDomainSlots = concat $ mps <&> mpDomainSlots + , .. + } + where + comb :: forall a. 
Divisible a => Combine (CDF I) a + comb = stdCombine2 centiles diff --git a/bench/locli/src/Cardano/Analysis/Run.hs b/bench/locli/src/Cardano/Analysis/Run.hs deleted file mode 100644 index 107672ee6a9..00000000000 --- a/bench/locli/src/Cardano/Analysis/Run.hs +++ /dev/null @@ -1,147 +0,0 @@ -{-# LANGUAGE DeriveAnyClass #-} -{-# LANGUAGE StrictData #-} -{-# OPTIONS_GHC -Wno-incomplete-patterns -Wno-name-shadowing -Wno-orphans #-} -module Cardano.Analysis.Run - ( module Cardano.Analysis.Run - , module Cardano.Analysis.Version - ) -where - -import Cardano.Prelude - -import Control.Monad (fail) -import Data.Aeson qualified as Aeson -import Data.Aeson (FromJSON(..), Object, ToJSON(..), withObject, (.:), (.:?)) -import Data.ByteString.Lazy.Char8 qualified as LBS -import Data.Text qualified as T -import Data.Time.Clock hiding (secondsToNominalDiffTime) -import Data.Time.Clock.POSIX - -import Cardano.Analysis.ChainFilter -import Cardano.Analysis.Context -import Cardano.Analysis.Ground -import Cardano.Analysis.Version -import Cardano.Util - --- | Explain the poor human a little bit of what was going on: -data Anchor - = Anchor - { aRuns :: [Text] - , aFilters :: [FilterName] - , aSlots :: Maybe (DataDomain SlotNo) - , aBlocks :: Maybe (DataDomain BlockNo) - , aVersion :: Cardano.Analysis.Version.Version - , aWhen :: UTCTime - } - -runAnchor :: Run -> UTCTime -> [FilterName] -> Maybe (DataDomain SlotNo) -> Maybe (DataDomain BlockNo) -> Anchor -runAnchor Run{..} = tagsAnchor [tag metadata] - -tagsAnchor :: [Text] -> UTCTime -> [FilterName] -> Maybe (DataDomain SlotNo) -> Maybe (DataDomain BlockNo) -> Anchor -tagsAnchor aRuns aWhen aFilters aSlots aBlocks = - Anchor { aVersion = getVersion, .. } - -renderAnchor :: Anchor -> Text -renderAnchor a = mconcat - [ "runs: ", renderAnchorRuns a, ", " - , renderAnchorNoRuns a - ] - -renderAnchorRuns :: Anchor -> Text -renderAnchorRuns Anchor{..} = mconcat - [ T.intercalate ", " aRuns ] - -renderAnchorFiltersAndDomains :: Anchor -> Text -renderAnchorFiltersAndDomains a@Anchor{..} = mconcat - [ "filters: ", case aFilters of - [] -> "unfiltered" - xs -> T.intercalate ", " (unFilterName <$> xs) - , renderAnchorDomains a] - -renderAnchorDomains :: Anchor -> Text -renderAnchorDomains Anchor{..} = mconcat $ - maybe [] ((:[]) . renderDomain "slot" (show . unSlotNo)) aSlots - <> - maybe [] ((:[]) . renderDomain "block" (show . unBlockNo)) aBlocks - where renderDomain :: Text -> (a -> Text) -> DataDomain a -> Text - renderDomain ty r DataDomain{..} = mconcat - [ ", ", ty - , " range: raw(", r ddRawFirst, "-", r ddRawLast , ")" - , " filtered(" - , maybe "none" r ddFilteredFirst, "-" - , maybe "none" r ddFilteredLast , ")" - ] - -renderAnchorNoRuns :: Anchor -> Text -renderAnchorNoRuns a@Anchor{..} = mconcat - [ renderAnchorFiltersAndDomains a - , ", ", renderProgramAndVersion aVersion - , ", analysed at ", renderAnchorDate a - ] - --- Rounds time to seconds. -renderAnchorDate :: Anchor -> Text -renderAnchorDate = show . posixSecondsToUTCTime . secondsToNominalDiffTime . fromIntegral @Int . round . utcTimeToPOSIXSeconds . 
aWhen - -data AnalysisCmdError - = AnalysisCmdError !Text - | MissingRunContext - | MissingLogfiles - | RunMetaParseError !(JsonInputFile RunPartial) !Text - | GenesisParseError !(JsonInputFile Genesis) !Text - | ChainFiltersParseError !JsonFilterFile !Text - deriving Show - -data ARunWith a - = Run - { genesisSpec :: GenesisSpec - , generatorProfile :: GeneratorProfile - , metadata :: Metadata - , genesis :: a - } - deriving (Generic, Show, ToJSON) - -type RunPartial = ARunWith () -type Run = ARunWith Genesis - -instance FromJSON RunPartial where - parseJSON = withObject "Run" $ \v -> do - meta :: Object <- v .: "meta" - profile_content <- meta .: "profile_content" - generator <- profile_content .: "generator" - -- - genesisSpec <- profile_content .: "genesis" - generatorProfile <- parseJSON $ Aeson.Object generator - -- - tag <- meta .: "tag" - profile <- meta .: "profile" - batch <- meta .: "batch" - manifest <- meta .: "manifest" - - eraGtor <- generator .:? "era" - eraTop <- profile_content .:? "era" - era <- case eraGtor <|> eraTop of - Just x -> pure x - Nothing -> fail "While parsing run metafile: missing era specification" - -- - let metadata = Metadata{..} - genesis = () - pure Run{..} - -readRun :: JsonInputFile Genesis -> JsonInputFile RunPartial -> ExceptT AnalysisCmdError IO Run -readRun shelleyGenesis runmeta = do - runPartial <- firstExceptT (RunMetaParseError runmeta . T.pack) - (newExceptT $ - Aeson.eitherDecode @RunPartial <$> LBS.readFile (unJsonInputFile runmeta)) - progress "meta" (Q $ unJsonInputFile runmeta) - run <- firstExceptT (GenesisParseError shelleyGenesis . T.pack) - (newExceptT $ - Aeson.eitherDecode @Genesis <$> LBS.readFile (unJsonInputFile shelleyGenesis)) - <&> completeRun runPartial - progress "genesis" (Q $ unJsonInputFile shelleyGenesis) - progress "run" (J run) - pure run - - where - completeRun :: RunPartial -> Genesis -> Run - completeRun Run{..} g = Run { genesis = g, .. 
} diff --git a/bench/locli/src/Cardano/Analysis/Summary.hs b/bench/locli/src/Cardano/Analysis/Summary.hs new file mode 100644 index 00000000000..eb8a492458a --- /dev/null +++ b/bench/locli/src/Cardano/Analysis/Summary.hs @@ -0,0 +1,83 @@ +{-# LANGUAGE UndecidableInstances #-} +{-# OPTIONS_GHC -Wno-name-shadowing -Wno-orphans #-} +module Cardano.Analysis.Summary (module Cardano.Analysis.Summary) where + +import Prelude (head, last) +import Cardano.Prelude + +import Data.Map.Strict qualified as Map + +import Cardano.Analysis.API +import Cardano.Unlog.LogObject +import Cardano.Util + + +computeSummary :: + UTCTime + -> Metadata + -> Genesis + -> GenesisSpec + -> GeneratorProfile + -> [(Count Cardano.Prelude.Text, [LogObject])] + -> ([FilterName], [ChainFilter]) + -> ClusterPerf + -> BlockPropOne + -> Chain + -> Summary I +computeSummary sumAnalysisTime + sumMeta + sumGenesis + sumGenesisSpec + sumGenerator + loCountsObjLists + sumFilters + MachPerf{..} + BlockProp{..} + Chain{..} + = + Summary + { sumLogStreams = countListAll objLists + , sumLogObjectsTotal = countListsAll objLists + , sumBlocksRejected = countListAll cRejecta + , sumDomainTime = + DataDomain (Interval minStartRaw maxStopRaw) (Just $ Interval minStartFlt maxStopFlt) + (maxStopRaw `utcTimeDeltaSec` minStartRaw) + (maxStopFlt `utcTimeDeltaSec` minStartFlt) + , sumStartSpread = + DataDomain (Interval minStartRaw maxStartRaw) (Just $ Interval minStartFlt maxStartFlt) + (maxStartRaw `utcTimeDeltaSec` minStartRaw) + (maxStartFlt `utcTimeDeltaSec` minStartFlt) + , sumStopSpread = + DataDomain (Interval minStopRaw maxStopRaw) (Just $ Interval minStopFlt maxStopFlt) + (maxStopRaw `utcTimeDeltaSec` minStopRaw) + (maxStopFlt `utcTimeDeltaSec` minStopFlt) + , sumDomainSlots = Prelude.head mpDomainSlots + , sumDomainBlocks = Prelude.head bpDomainBlocks + -- + , cdfLogObjects = cdf stdCentiles (length <$> objLists) + , cdfLogObjectsEmitted = cdf stdCentiles (loCountsObjLists <&> unCount . fst) + , .. + } + where + objLists = loCountsObjLists <&> snd + + (,) minStartRaw maxStartRaw = (minimum &&& maximum) losFirsts + (,) minStopRaw maxStopRaw = (minimum &&& maximum) losLasts + losFirsts = objLists <&> loAt . Prelude.head + losLasts = objLists <&> loAt . Prelude.last + + (,) minStartFlt maxStartFlt = (timeOf *** timeOf) startMinMaxS + (,) minStopFlt maxStopFlt = (timeOf *** timeOf) stopMinMaxS + startMinMaxS = (minimum &&& maximum) slotFirsts + stopMinMaxS = (minimum &&& maximum) slotLasts + slotFirsts = slotDomains <&> low + slotLasts = slotDomains <&> high + slotDomains = catMaybes (ddFiltered <$> mpDomainSlots) + timeOf = unSlotStart . slotStart sumGenesis + + sumChainRejectionStats = + cRejecta + <&> fmap fst . filter (not . snd) . 
beAcceptance + & concat + & foldr' (\k m -> Map.insertWith (+) k 1 m) Map.empty + & Map.toList diff --git a/bench/locli/src/Cardano/Command.hs b/bench/locli/src/Cardano/Command.hs index f9e1e762c1f..e2f4ec84b60 100644 --- a/bench/locli/src/Cardano/Command.hs +++ b/bench/locli/src/Cardano/Command.hs @@ -1,7 +1,7 @@ -{-# OPTIONS_GHC -fmax-pmcheck-models=15000 #-} +{-# OPTIONS_GHC -fmax-pmcheck-models=25000 #-} module Cardano.Command (module Cardano.Command) where -import Cardano.Prelude hiding (State, head) +import Cardano.Prelude hiding (State) import Data.Aeson qualified as Aeson import Data.ByteString qualified as BS @@ -18,15 +18,12 @@ import System.Posix.Files qualified as IO import Cardano.Analysis.API import Cardano.Analysis.BlockProp -import Cardano.Analysis.ChainFilter -import Cardano.Analysis.Context -import Cardano.Analysis.Ground import Cardano.Analysis.MachPerf -import Cardano.Analysis.Run +import Cardano.Analysis.Summary import Cardano.Render -import Cardano.Unlog.LogObject hiding (Text) import Cardano.Report -import Data.CDF +import Cardano.Unlog.LogObject hiding (Text) +import Cardano.Util hiding (head) data CommandError = CommandError ChainCommand Text @@ -42,6 +39,56 @@ newtype Command = ChainCommand [ChainCommand] deriving Show +data ChainCommand + = ListLogobjectKeys TextOutputFile + | ListLogobjectKeysLegacy TextOutputFile + + | MetaGenesis (JsonInputFile RunPartial) (JsonInputFile Genesis) + + | Unlog [JsonLogfile] (Maybe HostDeduction) Bool [LOAnyType] + | DumpLogObjects + + | BuildMachViews + | DumpMachViews + | ReadMachViews [JsonLogfile] + + | RebuildChain [JsonFilterFile] [ChainFilter] + | DumpChain (JsonOutputFile [BlockEvents]) (JsonOutputFile [BlockEvents]) + | ReadChain (JsonInputFile [BlockEvents]) + | TimelineChain TextOutputFile [TimelineComments BlockEvents] + + | CollectSlots [JsonLogfile] + | DumpSlotsRaw + | FilterSlots [JsonFilterFile] [ChainFilter] + | DumpSlots + | TimelineSlots + + | ComputePropagation + | RenderPropagation RenderFormat TextOutputFile PropSubset + | ReadPropagations [JsonInputFile BlockPropOne] + | ComputeMultiPropagation + | RenderMultiPropagation RenderFormat TextOutputFile PropSubset CDF2Aspect + + | ComputeMachPerf + | RenderMachPerf RenderFormat PerfSubset + + | ComputeClusterPerf + | RenderClusterPerf RenderFormat TextOutputFile PerfSubset + | ReadClusterPerfs [JsonInputFile MultiClusterPerf] + | ComputeMultiClusterPerf + | RenderMultiClusterPerf RenderFormat TextOutputFile PerfSubset CDF2Aspect + + | ComputeSummary + | RenderSummary RenderFormat TextOutputFile + | ReadSummaries [JsonInputFile SummaryOne] + + | Compare InputDir (Maybe TextInputFile) TextOutputFile + [( JsonInputFile SummaryOne + , JsonInputFile ClusterPerf + , JsonInputFile BlockPropOne)] + + deriving Show + parseChainCommand :: Parser ChainCommand parseChainCommand = subparser (mconcat [ commandGroup "Common data: logobject keys, run metafile & genesis" @@ -64,7 +111,11 @@ parseChainCommand = (optJsonLogfile "log" "JSON log stream") <*> optional (parseHostDeduction "host-from-log-filename" - "Derive hostname from log filename: logs-HOSTNAME.*")) + "Derive hostname from log filename: logs-HOSTNAME.*") + <*> Opt.flag False True (Opt.long "lodecodeerror-ok" + <> Opt.help "Allow non-EOF LODecodeError logobjects") + <*> many + (optLOAnyType "ok-loany" "[MULTI] Allow a particular LOAnyType")) , op "dump-logobjects" "Dump lifted log object streams, alongside input files" (DumpLogObjects & pure) ]) <|> @@ -91,11 +142,12 @@ parseChainCommand = <$> optJsonInputFile 
"chain" "Block event stream (JSON)") , op "dump-chain" "Dump chain" (DumpChain - <$> optJsonOutputFile "chain" "JSON chain output file") + <$> optJsonOutputFile "chain" "JSON chain output file" + <*> optJsonOutputFile "chain-rejecta" "JSON rejected chain output file") , op "timeline-chain" "Render chain timeline" (TimelineChain <$> optTextOutputFile "timeline" "Render a human-readable reconstructed chain view" - <*> many parseRTCommentsBP) + <*> many parseTimelineCommentsBP) ]) <|> subparser (mconcat [ commandGroup "Machine performance analysis: slot stats" @@ -147,18 +199,31 @@ parseChainCommand = , op "render-clusterperf" "Write cluster performance stats" (writerOpts RenderClusterPerf "Render" <*> parsePerfSubset) - - , op "read-clusterperfs" "Read multi-run cluster performance analysis as JSON" - (ReadMultiClusterPerf + , op "read-clusterperfs" "Read some cluster performance analyses as JSON" + (ReadClusterPerfs <$> some (optJsonInputFile "clusterperf" "JSON cluster performance input file")) + , op "compute-multi-clusterperf" "Consolidate cluster performance stats." (ComputeMultiClusterPerf & pure) , op "render-multi-clusterperf" "Write multi-run cluster performance results" (writerOpts RenderMultiClusterPerf "Render" <*> parsePerfSubset <*> parseCDF2Aspect) + ]) <|> + + subparser (mconcat [ commandGroup "Analysis summary" + , op "compute-summary" "Compute run analysis summary" + (ComputeSummary & pure) + , op "render-summary" "Render run analysis summary" + (writerOpts RenderSummary "Render") + , op "read-summaries" "Read analysis summaries" + (ReadSummaries + <$> some + (optJsonInputFile "summary" "JSON block propagation input file")) + ]) <|> + subparser (mconcat [ commandGroup "Run comparison" , op "compare" "Generate a report comparing multiple runs" (Compare <$> optInputDir "ede" "Directory with EDE templates." @@ -166,9 +231,10 @@ parseChainCommand = (optTextInputFile "template" "Template to use as base.") <*> optTextOutputFile "report" "Report .org file to create." <*> some - ((,) - <$> optJsonInputFile "run-metafile" "The meta.json file of a benchmark run" - <*> optJsonInputFile "shelley-genesis" "Genesis file of the run" + ((,,) + <$> optJsonInputFile "summary" "JSON analysis summary input file" + <*> optJsonInputFile "perf" "JSON cluster performance input file" + <*> optJsonInputFile "prop" "JSON block propagation input file" )) ]) where @@ -184,25 +250,21 @@ parseChainCommand = <> Opt.help desc ) -parseRTCommentsBP :: Parser (RTComments BlockEvents) -parseRTCommentsBP = + optLOAnyType :: String -> String -> Parser LOAnyType + optLOAnyType opt desc = + Opt.option Opt.auto + ( Opt.long opt + <> Opt.help desc + <> Opt.metavar "LOAnyType" ) + +parseTimelineCommentsBP :: Parser (TimelineComments BlockEvents) +parseTimelineCommentsBP = [ Opt.flag' BEErrors (Opt.long "chain-errors" <> Opt.help "Show per-block anomalies") , Opt.flag' BEFilterOuts (Opt.long "filter-reasons" <> Opt.help "Explain per-block filter-out reasons") ] & \case (x:xs) -> foldl (<|>) x xs [] -> error "Crazy world." 
-parseRenderFormat :: Parser RenderFormat -parseRenderFormat = - [ Opt.flag' AsJSON (Opt.long "json" <> Opt.help "Full JSON dump output file") - , Opt.flag' AsGnuplot (Opt.long "gnuplot" <> Opt.help "%s-pattern for separate Gnuplot output files, per CDF") - , Opt.flag' AsOrg (Opt.long "org" <> Opt.help "Org mode table output file") - , Opt.flag' AsReport (Opt.long "report" <> Opt.help "Org mode table output file, brief stats") - , Opt.flag' AsPretty (Opt.long "pretty" <> Opt.help "Text report output file") - ] & \case - (x:xs) -> foldl (<|>) x xs - [] -> error "Crazy world." - writerOpt :: (RenderFormat -> TextOutputFile -> a) -> String -> RenderFormat -> Parser a writerOpt ctor desc mode = ctor mode <$> optTextOutputFile opt (desc <> descSuf) where @@ -212,7 +274,7 @@ writerOpt ctor desc mode = ctor mode <$> optTextOutputFile opt (desc <> descSuf) AsJSON -> (,) "json" " results as complete JSON dump" AsGnuplot -> (,) "gnuplot" " as individual Gnuplot files" AsOrg -> (,) "org" " as Org-mode table" - AsReport -> (,) "report" " as Org-mode summary table" + AsReport -> (,) "org-report" " as Org-mode summary table" AsPretty -> (,) "pretty" " as text report" writerOpts :: (RenderFormat -> TextOutputFile -> a) -> String -> Parser a @@ -222,65 +284,18 @@ writerOpts ctor desc = enumFromTo minBound maxBound (x:xs) -> foldl (<|>) x xs [] -> error "Crazy world." -data ChainCommand - = ListLogobjectKeys TextOutputFile - | ListLogobjectKeysLegacy TextOutputFile - - | MetaGenesis (JsonInputFile RunPartial) (JsonInputFile Genesis) - - | Unlog [JsonLogfile] (Maybe HostDeduction) - | DumpLogObjects - - | BuildMachViews - | DumpMachViews - | ReadMachViews [JsonLogfile] - - | RebuildChain [JsonFilterFile] [ChainFilter] - | DumpChain (JsonOutputFile [BlockEvents]) - | ReadChain (JsonInputFile [BlockEvents]) - | TimelineChain TextOutputFile [RTComments BlockEvents] - - | CollectSlots [JsonLogfile] - | DumpSlotsRaw - | FilterSlots [JsonFilterFile] [ChainFilter] - | DumpSlots - | TimelineSlots - - | ComputePropagation - | RenderPropagation RenderFormat TextOutputFile PropSubset - | ReadPropagations [JsonInputFile BlockPropOne] - - | ComputeMultiPropagation - | RenderMultiPropagation RenderFormat TextOutputFile PropSubset CDF2Aspect - - | ComputeMachPerf - | RenderMachPerf RenderFormat PerfSubset - - | ComputeClusterPerf - | RenderClusterPerf RenderFormat TextOutputFile PerfSubset - - | ReadMultiClusterPerf [JsonInputFile MultiClusterPerf] - | ComputeMultiClusterPerf - | RenderMultiClusterPerf RenderFormat TextOutputFile PerfSubset CDF2Aspect - - | Compare InputDir (Maybe TextInputFile) TextOutputFile - [(JsonInputFile RunPartial, JsonInputFile Genesis)] - - deriving Show - data State = State { -- common sWhen :: UTCTime - , sFilters :: [FilterName] + , sFilters :: ([FilterName], [ChainFilter]) , sTags :: [Text] , sRun :: Maybe Run , sObjLists :: Maybe [(JsonLogfile, [LogObject])] , sDomSlots :: Maybe (DataDomain SlotNo) - , sDomBlocks :: Maybe (DataDomain BlockNo) -- propagation , sMachViews :: Maybe [(JsonLogfile, MachView)] - , sChain :: Maybe [BlockEvents] + , sChain :: Maybe Chain , sBlockProp :: Maybe [BlockPropOne] , sMultiBlockProp :: Maybe MultiBlockProp -- performance @@ -290,16 +305,46 @@ data State , sMachPerf :: Maybe [(JsonLogfile, MachPerfOne)] , sClusterPerf :: Maybe [ClusterPerf] , sMultiClusterPerf :: Maybe MultiClusterPerf + -- + , sSummaries :: Maybe [SummaryOne] } +callComputeSummary :: State -> Either Text SummaryOne +callComputeSummary = + \case + State{sRun = Nothing} -> err "a run" + 
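+    -- 'err' (see the 'where' clause below) prefixes the name of the missing
+    -- input with "Summary of a run requires ".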
State{sObjLists = Nothing} -> err "logobjects"
+    State{sObjLists = Just []} -> err "logobjects"
+    State{sClusterPerf = Nothing} -> err "cluster performance results"
+    State{sBlockProp = Nothing} -> err "block propagation results"
+    State{sChain = Nothing} -> err "chain"
+    State{ sObjLists = Just (fmap snd -> objLists)
+         , sClusterPerf = Just [clusterPerf]
+         , sBlockProp = Just [blockProp']
+         , sChain = Just chain
+         , sRun = Just Run{..}
+         , ..} -> Right $
+      computeSummary sWhen metadata genesis genesisSpec generatorProfile
+                     (zip (Count <$> [0..]) objLists) sFilters
+                     clusterPerf blockProp' chain
+    _ -> err "a unique cluster performance & block propagation result"
+ where
+   err = Left . ("Summary of a run requires " <>)
+
 sRunAnchor :: State -> Anchor
-sRunAnchor State{sRun = Just run, sFilters, sWhen, sDomSlots, sDomBlocks}
-  = runAnchor run sWhen sFilters sDomSlots sDomBlocks
+sRunAnchor State{sRun = Just run, sFilters, sWhen, sClusterPerf, sChain}
+  = runAnchor run sWhen sFilters
+    ((sClusterPerf <&> fmap (head . mpDomainSlots) . head & join.join) <|>
+     (sChain <&> cDomSlots))
+    (sChain <&> cDomBlocks)
 sRunAnchor _ = error "sRunAnchor with no run."

 sTagsAnchor :: State -> Anchor
-sTagsAnchor State{sFilters, sTags, sWhen, sDomSlots, sDomBlocks}
-  = tagsAnchor sTags sWhen sFilters sDomSlots sDomBlocks
+sTagsAnchor State{sFilters, sTags, sWhen, sClusterPerf, sChain}
+  = tagsAnchor sTags sWhen sFilters
+    ((sClusterPerf <&> fmap (head . mpDomainSlots) . head & join.join) <|>
+     (sChain <&> cDomSlots))
+    (sChain <&> cDomBlocks)

 runChainCommand :: State -> ChainCommand -> ExceptT CommandError IO State
@@ -316,18 +361,21 @@ runChainCommand s
 runChainCommand s
   c@(MetaGenesis runMeta shelleyGenesis) = do
+  progress "run" (Q $ printf "reading run metadata & Shelley genesis")
  run <- readRun shelleyGenesis runMeta
         & firstExceptT (fromAnalysisError c)
  pure s { sRun = Just run }

 runChainCommand s
-  c@(Unlog logs mHostDed) = do
-  los <- runLiftLogObjects logs mHostDed
+  c@(Unlog logs mHostDed okDErr okAny) = do
+  progress "logs" (Q $ printf "parsing %d log files" $ length logs)
+  los <- runLiftLogObjects logs mHostDed okDErr okAny
        & firstExceptT (CommandError c)
  pure s { sObjLists = Just los }

 runChainCommand s@State{sObjLists=Just objs}
   c@DumpLogObjects = do
+  progress "logobjs" (Q $ printf "dumping %d logobject streams" $ length objs)
  dumpAssociatedObjectStreams "logobjs" objs & firstExceptT (CommandError c)
  pure s
 runChainCommand _ c@DumpLogObjects = missingCommandData c
@@ -337,6 +385,7 @@
 runChainCommand s@State{sRun=Just run, sObjLists=Just objs}
   BuildMachViews = do
+  progress "machviews" (Q $ printf "building %d machviews" $ length objs)
  mvs <- buildMachViews run objs & liftIO
  pure s { sMachViews = Just mvs }
 runChainCommand _ c@BuildMachViews = missingCommandData c
@@ -344,53 +393,63 @@
 runChainCommand s@State{sMachViews=Just machViews}
   c@DumpMachViews = do
+  progress "machviews" (Q $ printf "dumping %d machviews" $ length machViews)
  dumpAssociatedObjects "mach-views" machViews & firstExceptT (CommandError c)
  pure s
 runChainCommand _ c@DumpMachViews = missingCommandData c ["machine views"]

 runChainCommand s c@(ReadMachViews fs) = do
+  progress "machviews" (Q $ printf "reading %d machviews" $ length fs)
  machViews <- readAssociatedObjects "mach-views" fs & firstExceptT (CommandError c)
  pure s { sMachViews = Just machViews }

 runChainCommand s@State{sRun=Just run, sMachViews=Just mvs}
   c@(RebuildChain fltfs fltExprs) 
= do + progress "machviews" (Q $ printf "filtering %d machviews" $ length mvs) (fltFiles, (<> [ FilterName "inline-expr" | not (null fltExprs)]) -> fltNames) <- readFilters fltfs & firstExceptT (CommandError c) let flts = fltFiles <> fltExprs - (domSlot, domBlock, chain) <- rebuildChain run flts fltNames mvs - & liftIO + forM_ flts $ + progress "filter" . Q . show + + let chain = rebuildChain run flts fltNames mvs + progress "tip" $ Q . show . beBlock . last $ cMainChain chain + pure s { sChain = Just chain - , sDomSlots = Just domSlot - , sDomBlocks = Just domBlock - , sFilters = fltNames + , sFilters = (fltNames, flts) } -- pure s { sChain = Just chain } runChainCommand _ c@RebuildChain{} = missingCommandData c ["run metadata & genesis", "reconstructed chain"] -runChainCommand s - c@(ReadChain f) = do - chain <- mapM (Aeson.eitherDecode @BlockEvents) - . filter ((> 5) . LBS.length) - . LBS.split '\n' - <$> LBS.readFile (unJsonInputFile f) - & newExceptT - & firstExceptT (CommandError c . pack) - pure s { sChain = Just chain } - -runChainCommand s@State{sChain=Just chain} - c@(DumpChain f) = do - dumpObjects "chain" chain f & firstExceptT (CommandError c) +runChainCommand _ ReadChain{} = do + pure $ error "ReadChain not implemented" + -- progress "chain" (Q $ printf "reading chain") + -- chain <- mapM (Aeson.eitherDecode @BlockEvents) + -- . filter ((> 5) . LBS.length) + -- . LBS.split '\n' + -- <$> LBS.readFile (unJsonInputFile f) + -- & newExceptT + -- & firstExceptT (CommandError c . pack) + -- pure s { sChain = Just chain } + +runChainCommand s@State{sChain=Just Chain{..}} + c@(DumpChain f fRej) = do + progress "chain" (Q $ printf "dumping chain") + dumpObjects "chain" cMainChain f & firstExceptT (CommandError c) + progress "chain-rejecta" (Q $ printf "dumping chain rejecta") + dumpObjects "chain-rejecta" cRejecta fRej & firstExceptT (CommandError c) pure s runChainCommand _ c@DumpChain{} = missingCommandData c ["chain"] -runChainCommand s@State{sRun=Just run, sChain=Just chain} +runChainCommand s@State{sRun=Just _run, sChain=Just Chain{..}} c@(TimelineChain f comments) = do - dumpText "chain" (renderTimeline run (const True) comments chain) f + progress "chain" (Q $ printf "dumping prettyprinted chain") + dumpText "chain" (renderTimeline (const True) comments cMainChain) f & firstExceptT (CommandError c) pure s runChainCommand _ c@TimelineChain{} = missingCommandData c @@ -401,16 +460,19 @@ runChainCommand s@State{sRun=Just run, sObjLists=Just objs} let nonIgnored = flip filter objs $ (`notElem` ignores) . fst forM_ ignores $ progress "perf-ignored-log" . R . 
unJsonLogfile
+  progress "slots" (Q $ printf "building %d slot timelines" $ length objs)
  (scalars, slotsRaw) <-
    fmap (mapAndUnzip redistribute) <$> collectSlotStats run nonIgnored
    & newExceptT
    & firstExceptT (CommandError c)
-  pure s { sScalars = Just scalars, sSlotsRaw = Just (fmap (fmap (deltifySlotStats (genesis run))) <$> slotsRaw) }
+  pure s { sScalars = Just scalars
+         , sSlotsRaw = Just (fmap (fmap (deltifySlotStats (genesis run))) <$> slotsRaw) }
 runChainCommand _ c@CollectSlots{} = missingCommandData c
  ["run metadata & genesis", "lifted logobjects"]

 runChainCommand s@State{sSlotsRaw=Just slotsRaw}
  c@DumpSlotsRaw = do
+  progress "slots" (Q $ printf "dumping %d unfiltered slot timelines" $ length slotsRaw)
  dumpAssociatedObjectStreams "raw-slots" slotsRaw & firstExceptT (CommandError c)
  pure s
 runChainCommand _ c@DumpSlotsRaw = missingCommandData c
@@ -418,48 +480,56 @@
 runChainCommand s@State{sRun=Just run, sSlotsRaw=Just slotsRaw}
  c@(FilterSlots fltfs fltExprs) = do
+  progress "slots" (Q $ printf "filtering %d slot timelines" $ length slotsRaw)
  (fltFiles,
   (<> [ FilterName "inline-expr" | not (null fltExprs)]) -> fltNames) <-
    readFilters fltfs
    & firstExceptT (CommandError c)
  let flts = fltFiles <> fltExprs
+  forM_ flts $
+    progress "filter" . Q . show
  (domSlots, fltrd) <- runSlotFilters run flts slotsRaw
                       & liftIO
                       & firstExceptT (CommandError c)
+  progress "filtered-slotstats-slot-domain" $ J domSlots
  when (maximum (length . snd <$> fltrd) == 0) $
    throwE $ CommandError c $ mconcat
    [ "All ", show $ maximum (length . snd <$> slotsRaw), " slots filtered out." ]
  pure s { sSlots = Just fltrd
         , sDomSlots = Just domSlots
-        , sFilters = fltNames
+        , sFilters = (fltNames, flts)
         }
 runChainCommand _ c@FilterSlots{} = missingCommandData c
  ["run metadata & genesis", "unfiltered slot stats"]

 runChainCommand s@State{sSlots=Just slots}
  c@DumpSlots = do
+  progress "slots" (Q $ printf "dumping %d slot timelines" $ length slots)
  dumpAssociatedObjectStreams "slots" slots & firstExceptT (CommandError c)
  pure s
 runChainCommand _ c@DumpSlots = missingCommandData c ["filtered slots"]

-runChainCommand s@State{sRun=Just run, sSlots=Just slots}
+runChainCommand s@State{sRun=Just _run, sSlots=Just slots}
  c@TimelineSlots = do
+  progress "mach" (Q $ printf "dumping %d slot timelines" $ length slots)
  dumpAssociatedTextStreams "mach"
-   (fmap (fmap $ renderTimeline run (const True) []) slots)
+   (fmap (fmap $ renderTimeline (const True) []) slots)
   & firstExceptT (CommandError c)
  pure s
 runChainCommand _ c@TimelineSlots{} = missingCommandData c
  ["run metadata & genesis", "filtered slots"]

-runChainCommand s@State{sRun=Just run, sChain=Just chain, sDomSlots=Just domS, sDomBlocks=Just domB}
+runChainCommand s@State{sRun=Just run, sChain=Just chain@Chain{..}}
  ComputePropagation = do
-  prop <- blockProp run chain domS domB & liftIO
+  progress "block-propagation" $ J (cDomBlocks, cDomSlots)
+  prop <- blockProp run chain & liftIO
  pure s { sBlockProp = Just [prop] }
 runChainCommand _ c@ComputePropagation = missingCommandData c
  ["run metadata & genesis", "chain", "data domains for slots & blocks"]

 runChainCommand s@State{sBlockProp=Just [prop]}
  c@(RenderPropagation mode f subset) = do
+  progress "block-propagation" $ Q "rendering block propagation CDFs"
  forM_ (renderAnalysisCDFs (sRunAnchor s) (propSubsetFn subset) OfOverallDataset Nothing mode prop) $
    \(name, body) ->
      dumpText (T.unpack name) body (modeFilename f name mode)
@@ -470,7 +540,8 @@
 runChainCommand _ 
c@RenderPropagation{} = missingCommandData c runChainCommand s@State{} c@(ReadPropagations fs) = do - xs <- mapConcurrently (fmap (Aeson.eitherDecode @BlockPropOne) . LBS.readFile . unJsonInputFile) fs + progress "block-propagations" (Q $ printf "reading %d block propagations" $ length fs) + xs <- mapConcurrently readJsonDataIO fs & fmap sequence & newExceptT & firstExceptT (CommandError c . show) @@ -479,6 +550,7 @@ runChainCommand s@State{} runChainCommand s@State{sBlockProp=Just props} c@ComputeMultiPropagation = do + progress "block-propagations" (Q $ printf "computing %d block propagations" $ length props) xs <- pure (summariseMultiBlockProp (nEquicentiles $ max 7 (length props)) props) & newExceptT & firstExceptT (CommandError c . show) @@ -488,6 +560,7 @@ runChainCommand _ c@ComputeMultiPropagation{} = missingCommandData c runChainCommand s@State{sMultiBlockProp=Just prop} c@(RenderMultiPropagation mode f subset aspect) = do + progress "block-propagations" (Q "rendering multi-run block propagation") forM_ (renderAnalysisCDFs (sTagsAnchor s) (propSubsetFn subset) aspect Nothing mode prop) $ \(name, body) -> dumpText (T.unpack name) body (modeFilename f name mode) @@ -498,6 +571,7 @@ runChainCommand _ c@RenderMultiPropagation{} = missingCommandData c runChainCommand s@State{sRun=Just run, sSlots=Just slots} c@ComputeMachPerf = do + progress "machperf" (Q $ printf "computing %d machine performances" $ length slots) perf <- mapConcurrentlyPure (slotStatsMachPerf run) slots & fmap sequence & newExceptT @@ -508,6 +582,7 @@ runChainCommand _ c@ComputeMachPerf{} = missingCommandData c runChainCommand s@State{sMachPerf=Just perf} c@(RenderMachPerf _mode _subset) = do + progress "machperf" (Q $ printf "dumping %d machine performance stats" $ length perf) dumpAssociatedObjects "perf-stats" perf & firstExceptT (CommandError c) pure s @@ -516,6 +591,7 @@ runChainCommand _ c@RenderMachPerf{} = missingCommandData c runChainCommand s@State{sMachPerf=Just machPerfs} c@ComputeClusterPerf = do + progress "clusterperf" (Q $ printf "summarising %d machine performances" $ length machPerfs) clusterPerf <- pure (summariseClusterPerf (nEquicentiles $ max 7 (length machPerfs)) (machPerfs <&> snd)) & newExceptT & firstExceptT (CommandError c . show) @@ -523,9 +599,10 @@ runChainCommand s@State{sMachPerf=Just machPerfs} runChainCommand _ c@ComputeClusterPerf{} = missingCommandData c ["machine performance stats"] -runChainCommand s@State{sClusterPerf=Just [prop]} +runChainCommand s@State{sClusterPerf=Just [perf]} c@(RenderClusterPerf mode f subset) = do - forM_ (renderAnalysisCDFs (sRunAnchor s) (perfSubsetFn subset) OfOverallDataset Nothing mode prop) $ + progress "clusterperf" (Q $ printf "rendering cluster performance") + forM_ (renderAnalysisCDFs (sRunAnchor s) (perfSubsetFn subset) OfOverallDataset Nothing mode perf) $ \(name, body) -> dumpText (T.unpack name) body (modeFilename f name mode) & firstExceptT (CommandError c) @@ -534,7 +611,8 @@ runChainCommand _ c@RenderClusterPerf{} = missingCommandData c ["multi-run block propagation"] runChainCommand s@State{} - c@(ReadMultiClusterPerf fs) = do + c@(ReadClusterPerfs fs) = do + progress "clusterperfs" (Q $ printf "reading %d cluster performances" $ length fs) xs <- mapConcurrently (fmap (Aeson.eitherDecode @ClusterPerf) . LBS.readFile . 
   xs <- mapConcurrently (fmap (Aeson.eitherDecode @ClusterPerf) . LBS.readFile . unJsonInputFile) fs
     & fmap sequence
     & newExceptT
@@ -544,6 +622,7 @@ runChainCommand s@State{}
 runChainCommand s@State{sClusterPerf=Just perfs}
   c@ComputeMultiClusterPerf = do
+  progress "clusterperfs" (Q $ printf "summarising %d cluster performances" $ length perfs)
   xs <- pure (summariseMultiClusterPerf (nEquicentiles $ max 7 (length perfs)) perfs)
     & newExceptT
     & firstExceptT (CommandError c . show)
@@ -553,6 +632,7 @@ runChainCommand _ c@ComputeMultiClusterPerf{} = missingCommandData c
 runChainCommand s@State{sMultiClusterPerf=Just (MultiClusterPerf perf)}
   c@(RenderMultiClusterPerf mode f subset aspect) = do
+  progress "clusterperfs" (Q $ printf "rendering multi-run cluster performance")
   forM_ (renderAnalysisCDFs (sTagsAnchor s) (perfSubsetFn subset) aspect Nothing mode perf) $
     \(name, body) ->
       dumpText (T.unpack name) body (modeFilename f name mode)
@@ -561,9 +641,39 @@ runChainCommand s@State{sMultiClusterPerf=Just (MultiClusterPerf perf)}
 runChainCommand _ c@RenderMultiClusterPerf{} = missingCommandData c
   ["multi-run cluster performance stats"]
+runChainCommand s c@ComputeSummary = do
+  progress "summary" (Q "summarising a run")
+  summary <- pure (callComputeSummary s)
+    & newExceptT
+    & firstExceptT (CommandError c . show)
+  pure s { sSummaries = Just [summary] }
+
+runChainCommand s@State{sSummaries = Just (summary:_)} c@(RenderSummary fmt f) = do
+  progress "summary" (Q $ printf "rendering summary")
+  dumpText "summary" body (modeFilename f "" fmt)
+    & firstExceptT (CommandError c)
+  pure s
+ where body = renderSummary fmt (sRunAnchor s) (iFields sumFieldsReport) summary
+runChainCommand _ c@RenderSummary{} = missingCommandData c
+  ["run summary"]
+
+runChainCommand s@State{}
+  c@(ReadSummaries fs) = do
+  progress "summaries" (Q $ printf "reading %d run summaries" $ length fs)
+  xs <- mapConcurrently (fmap (Aeson.eitherDecode @SummaryOne) . LBS.readFile . unJsonInputFile) fs
+    & fmap sequence
+    & newExceptT
+    & firstExceptT (CommandError c . show)
+  pure s { sSummaries = Just xs }
+
 runChainCommand s c@(Compare ede mTmpl outf@(TextOutputFile outfp) runs) = do
-  xs :: [Run] <- forM runs $
-    \(mf,gf)-> readRun gf mf & firstExceptT (fromAnalysisError c)
+  progress "report" (Q $ printf "rendering report for %d runs" $ length runs)
+  xs :: [(SummaryOne, ClusterPerf, BlockPropOne)] <- forM runs $
+    \(sumf,cpf,bpf)->
+      (,,)
+      <$> readJsonData sumf (CommandError c)
+      <*> readJsonData cpf (CommandError c)
+      <*> readJsonData bpf (CommandError c)
   (tmpl, orgReport) <- case xs of
     baseline:deltas@(_:_) -> liftIO $
       Cardano.Report.generate ede mTmpl baseline deltas
@@ -572,7 +682,7 @@ runChainCommand s c@(Compare ede mTmpl outf@(TextOutputFile outfp) runs) = do
   dumpText "report" [orgReport] outf
     & firstExceptT (CommandError c)
-  let tmplPath = Cardano.Analysis.API.replaceExtension outfp "ede"
+  let tmplPath = Cardano.Util.replaceExtension outfp "ede"
   liftIO . 
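-- Compare now consumes three pre-computed artefacts per run rather than
-- re-reading raw runs. A minimal standalone analogue of the triple decode,
-- using plain aeson file decoding (readRunTriple is illustrative, not part of
-- this patch):
readRunTriple :: (Aeson.FromJSON s, Aeson.FromJSON c, Aeson.FromJSON b)
              => FilePath -> FilePath -> FilePath -> IO (Either String (s, c, b))
readRunTriple sf cf bf =
  (\s c b -> (,,) <$> s <*> c <*> b)
    <$> Aeson.eitherDecodeFileStrict' sf
    <*> Aeson.eitherDecodeFileStrict' cf
    <*> Aeson.eitherDecodeFileStrict' bf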
unlessM (IO.fileExist tmplPath) $
     BS.writeFile tmplPath tmpl
@@ -585,7 +695,7 @@ missingCommandData c xs =
 _reportBanner :: [FilterName] -> FilePath -> Text
 _reportBanner fnames inputfp = mconcat
   [ "input: ", logfileRunIdentifier (pack inputfp), " "
-  , "locli: ", gitRev getVersion, " "
+  , "locli: ", gitRev getLocliVersion, " "
   , "filters: ", T.intercalate " " (unFilterName <$> fnames)
   ]
  where
@@ -610,7 +720,7 @@ runCommand (ChainCommand cs) = do
  where
   initialState :: UTCTime -> State
   initialState now =
-    State now [] []
+    State now ([], []) []
       Nothing Nothing Nothing Nothing
       Nothing Nothing Nothing Nothing
       Nothing Nothing Nothing Nothing
diff --git a/bench/locli/src/Cardano/JSON.hs b/bench/locli/src/Cardano/JSON.hs
new file mode 100644
index 00000000000..b19d6469a6d
--- /dev/null
+++ b/bench/locli/src/Cardano/JSON.hs
@@ -0,0 +1,36 @@
+module Cardano.JSON
+  ( module Cardano.JSON
+  , Value(..), Object)
+where
+
+import Prelude (error)
+import Cardano.Prelude hiding (head)
+
+import Data.Aeson (Value (..))
+import Data.Aeson.Types (Key, Object)
+import Data.Aeson.KeyMap qualified as KM
+import Data.Aeson.KeyMap (fromMapText)
+import Data.Map.Strict qualified as M
+
+
+alterSubObject :: (Object -> Maybe Object) -> Key -> Object -> Maybe Object
+alterSubObject f k =
+  KM.alterF (\case
+               Just (Object o) -> Just . Object <$> f o
+               Just x -> error $ mconcat [ "alterSubObject: non-object at key "
+                                         , show k, ": ", show x ]
+               Nothing -> Nothing)
+            k
+
+mapSubObject :: (Object -> Object) -> Key -> Object -> Object
+mapSubObject f k =
+  runIdentity .
+  KM.alterF (\case
+               Just (Object o) -> Identity . Just . Object $ f o
+               Just x -> error $ mconcat [ "mapSubObject: non-object at key "
+                                         , show k, ": ", show x ]
+               Nothing -> error $ mconcat [ "mapSubObject: missing key ", show k ])
+            k
+
+overlayJSON :: [(Text, Value)] -> Object -> Object
+overlayJSON = (<>) . fromMapText . 
M.fromList diff --git a/bench/locli/src/Cardano/Org.hs b/bench/locli/src/Cardano/Org.hs index b975da6705e..0a5d85c3885 100644 --- a/bench/locli/src/Cardano/Org.hs +++ b/bench/locli/src/Cardano/Org.hs @@ -5,6 +5,8 @@ module Cardano.Org (module Cardano.Org) where import Cardano.Prelude import Data.Text qualified as T +import Cardano.Util + data Org = Props @@ -21,6 +23,7 @@ data Org , tSummaryHeaders :: [Text] , tSummaryValues :: [[Text]] , tFormula :: [Text] + , tConstants :: [(Text, Text)] } deriving (Show) @@ -33,13 +36,18 @@ render Props{..} = <> (oBody <&> render & mconcat) +render Table{tConstants = _:_, tExtended = False} = + error "Asked to render a non-extended Org table with an extended table feature: named constants" + render Table{..} = - tableHLine - : tableRow jusAllHeaders - : tableHLine - : fmap tableRow (transpose jusAllColumns) + renderTableHLine + : renderTableRow jusAllHeaders + : renderTableHLine + : fmap renderTableRow (transpose jusAllColumns) & flip (<>) jusAllSummaryLines + & flip (<>) + jusAllConstantLines & flip (<>) (bool [ "#+TBLFM:" <> (tFormula & T.intercalate "::") ] [] (null tFormula)) where @@ -57,21 +65,39 @@ render Table{..} = jusAllSummaryLines :: [Text] jusAllSummaryLines = if null tSummaryHeaders then [] else - tableHLine : - fmap tableRow (zipWith (:) - (tSummaryHeaders <&> T.justifyRight rowHdrWidth ' ') - (transpose (justifySourceColumns tSummaryValues)) - <&> consIfSpecial " ") + renderTableHLine : + fmap renderTableRow (zipWith (:) + (tSummaryHeaders <&> T.justifyRight rowHdrWidth ' ') + (transpose (justifySourceColumns tSummaryValues)) + <&> consIfSpecial (bool " " "#" tExtended)) + + justifySourceColumns :: [[Text]] -> [[Text]] + justifySourceColumns = zipWith (\w-> fmap (T.justifyRight w ' ')) colWidths + + jusAllConstantLines :: [Text] + jusAllConstantLines = + if null tConstants then [] else + renderTableHLine : + fmap renderTableRow (zipWith (:) + (cycle ["_", "#"]) + constRows) + where + constRows = (chunksOf nTotalColumns tConstants -- we can fit so many definitions per row + & mapLast (\row -> row <> replicate (nTotalColumns - length row) ("", "")) -- last row needs completion + & fmap (`zip` allColWidths)) -- and we supply column widths for justification + <&> transpose . fmap (\((name, value), w) -> -- each row -> row pair of justified [Name, Definition] + [ T.justifyRight w ' ' name + , T.justifyRight w ' ' value]) + & concat -- merge into a single list of rows - rowHdrWidth :: Int + rowHdrWidth, nTotalColumns :: Int rowHdrWidth = maximum $ length <$> (maybeToList tApexHeader <> tRowHeaders <> tSummaryHeaders) + nTotalColumns = length allColWidths - justifySourceColumns :: [[Text]] -> [[Text]] - justifySourceColumns = zipWith (\w-> fmap (T.justifyRight w ' ')) colWidths - - colWidths :: [Int] + colWidths, allColWidths :: [Int] + allColWidths = rowHdrWidth : colWidths colWidths = maximum . fmap length <$> (tColumns & zipWith (:) tColHeaders @@ -79,15 +105,15 @@ render Table{..} = else zipWith (<>) tSummaryValues) specialCol :: [Text] - specialCol = replicate (length tRowHeaders) "#" + specialCol = length tRowHeaders `replicate` "#" consIfSpecial :: a -> [a] -> [a] consIfSpecial x = bool identity (x:) tExtended - tableRow :: [Text] -> Text - tableRow xs = "| " <> T.intercalate " | " xs <> " |" - tableHLine :: Text - tableHLine = ("|-" <>) . (<> "-|") . T.intercalate "-+-" . 
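-- The renamed renderTableRow/renderTableHLine (below) emit standard Org table
-- syntax. Inlined here for illustration, for rowHdrWidth = 1, colWidths = [2]
-- and no special column:
orgRow, orgHLine :: Text
orgRow   = "| " <> T.intercalate " | " ["a", "bb"] <> " |"   -- "| a | bb |"
orgHLine = ("|-" <>) . (<> "-|") . T.intercalate "-+-" $
           flip T.replicate "-" <$> [1, 2]                   -- "|---+----|"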
(flip T.replicate "-" <$>) $
-                rowHdrWidth
-                : colWidths
-                & consIfSpecial 1
+  renderTableRow :: [Text] -> Text
+  renderTableRow xs = "| " <> T.intercalate " | " xs <> " |"
+  renderTableHLine :: Text
+  renderTableHLine = ("|-" <>) . (<> "-|") . T.intercalate "-+-" . (flip T.replicate "-" <$>) $
+                     rowHdrWidth
+                     : colWidths
+                     & consIfSpecial 1
diff --git a/bench/locli/src/Cardano/Render.hs b/bench/locli/src/Cardano/Render.hs
index be88575c1ee..a6e6095cce8 100644
--- a/bench/locli/src/Cardano/Render.hs
+++ b/bench/locli/src/Cardano/Render.hs
@@ -2,106 +2,172 @@
 {-# LANGUAGE ScopedTypeVariables #-}
 module Cardano.Render (module Cardano.Render) where
-import Prelude (head, id, show)
-import Cardano.Prelude hiding (head, show)
+import Prelude (id, show)
+import Cardano.Prelude hiding (head, show)
-import Data.Aeson (ToJSON)
-import Data.Aeson.Text (encodeToLazyText)
-import Data.List (dropWhileEnd)
-import Data.Text qualified as T
-import Data.Text.Lazy qualified as LT
-import Data.Time.Clock (NominalDiffTime)
+import Data.Aeson.Text (encodeToLazyText)
+import Data.List (dropWhileEnd)
+import Data.Text qualified as T
+import Data.Text.Lazy qualified as LT
+import Options.Applicative qualified as Opt
 import Data.CDF
-import Cardano.Analysis.Ground
-import Cardano.Analysis.Run
 import Cardano.Org
 import Cardano.Util
+import Cardano.Analysis.API
+
+
+-- | Explain to the poor human a little bit of what was going on:
+data Anchor
+  = Anchor
+  { aRuns :: [Text]
+  , aFilters :: ([FilterName], [ChainFilter])
+  , aSlots :: Maybe (DataDomain SlotNo)
+  , aBlocks :: Maybe (DataDomain BlockNo)
+  , aVersion :: LocliVersion
+  , aWhen :: UTCTime
+  }
+runAnchor :: Run -> UTCTime -> ([FilterName], [ChainFilter]) -> Maybe (DataDomain SlotNo) -> Maybe (DataDomain BlockNo) -> Anchor
+runAnchor Run{..} = tagsAnchor [tag metadata]
+
+tagsAnchor :: [Text] -> UTCTime -> ([FilterName], [ChainFilter]) -> Maybe (DataDomain SlotNo) -> Maybe (DataDomain BlockNo) -> Anchor
+tagsAnchor aRuns aWhen aFilters aSlots aBlocks =
+  Anchor { aVersion = getLocliVersion, .. }
+
+renderAnchor :: Anchor -> Text
+renderAnchor a = mconcat
+  [ "runs: ", renderAnchorRuns a, ", "
+  , renderAnchorNoRuns a
+  ]
+
+renderAnchorRuns :: Anchor -> Text
+renderAnchorRuns Anchor{..} = mconcat
+  [ T.intercalate ", " aRuns ]
+
+renderAnchorFiltersAndDomains :: Anchor -> Text
+renderAnchorFiltersAndDomains a@Anchor{..} = mconcat
+  [ "filters: ", case fst aFilters of
+      [] -> "unfiltered"
+      xs -> T.intercalate ", " (unFilterName <$> xs)
+  , renderAnchorDomains a]
+
+renderAnchorDomains :: Anchor -> Text
+renderAnchorDomains Anchor{..} = mconcat $
+  maybe [] ((:[]) . renderDomain "slot" (showText . unSlotNo)) aSlots
+  <>
+  maybe [] ((:[]) . renderDomain "block" (showText . unBlockNo)) aBlocks
+ where renderDomain :: Text -> (a -> Text) -> DataDomain a -> Text
+       renderDomain ty r DataDomain{..} = mconcat
+         [ ", ", ty
+         , " range: raw(", renderIntv r ddRaw, ", "
+         , showText ddRawCount, " total)"
+         , " filtered(", maybe "none"
+                         (renderIntv r) ddFiltered, ", "
+         , showText ddFilteredCount, " total), "
+         , "filtered ", T.take 4 . 
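-- The trailing figure emitted by renderDomain is the filtered/raw fraction,
-- truncated to four characters. The expression extracted for illustration
-- (using the showText helper added to Cardano.Util in this patch):
filteredRatio :: Int -> Int -> Text
filteredRatio fltd raw =
  T.take 4 . showText $ (fromIntegral fltd / fromIntegral raw :: Double)
-- filteredRatio 980 1000 == "0.98"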
showText $ ((/) @Double `on` fromIntegral) + ddFilteredCount ddRawCount + ] -class RenderCDFs a p where - rdFields :: [Field DSelect p a] - -class RenderTimeline a where - data RTComments a :: Type - rtFields :: Run -> [Field ISelect I a] - rtCommentary :: a -> RTComments a -> [Text] - rtCommentary _ _ = [] - --- | Encapsulate all information necessary to render a column (projection) of --- a certain projectible (a kind of analysis results): --- - first parameter encapsulates the projection descriptor --- - second parameter sets the arity (I vs. CDF I) --- - third parameter is the projectible indexed by arity -data Field (s :: (Type -> Type) -> k -> Type) (p :: Type -> Type) (a :: k) - = Field - { fWidth :: Int - , fLeftPad :: Int - , fId :: Text - , fHead1 :: Text - , fHead2 :: Text - , fSelect :: s p a - , fDesc :: Text - } +renderAnchorNoRuns :: Anchor -> Text +renderAnchorNoRuns a@Anchor{..} = mconcat + [ renderAnchorFiltersAndDomains a + , ", ", renderProgramAndVersion aVersion + , ", analysed at ", renderAnchorDate a + ] -mapField :: a p -> (forall v. Divisible v => CDF p v -> b) -> Field DSelect p a -> b -mapField x cdfProj Field{..} = - case fSelect of - DInt (cdfProj . ($x) ->r) -> r - DWord64 (cdfProj . ($x) ->r) -> r - DFloat (cdfProj . ($x) ->r) -> r - DDeltaT (cdfProj . ($x) ->r) -> r +-- Rounds time to seconds. +renderAnchorDate :: Anchor -> Text +renderAnchorDate = showText . posixSecondsToUTCTime . secondsToNominalDiffTime . fromIntegral @Int . round . utcTimeToPOSIXSeconds . aWhen + +justifyHead, justifyData, justifyCentile, justifyProp :: Int -> Text -> Text +justifyHead w = T.center w ' ' +justifyData w = T.justifyLeft w ' ' +justifyCentile w = T.justifyLeft w ' ' +justifyProp w = T.center w ' ' renderCentiles :: Int -> [Centile] -> [Text] renderCentiles wi = fmap (T.take wi . T.pack . printf "%f" . unCentile) +renderScalar :: a -> Field ISelect I a -> Text +renderScalar v Field{..} = + let wi = width fWidth + packWi = T.pack.take wi + showDt = packWi.dropWhileEnd (== 's').show + showInt = T.pack.printf "%d" + showW64 = T.pack.printf "%d" + in case fSelect of + IInt (($v)->x) -> showInt x + IWord64M (($v)->x) -> smaybe "---" showW64 x + IWord64 (($v)->x) -> showW64 x + IFloat (($v)->x) -> packWi $ printf "%F" x + IDeltaTM (($v)->x) -> smaybe "---" showDt x + IDeltaT (($v)->x) -> showDt x + IDate (($v)->x) -> packWi $ take 10 $ show x + ITime (($v)->x) -> packWi $ take 8 $ drop 11 $ show x + IText (($v)->x) -> T.take wi . T.dropWhileEnd (== 's') $ x + renderFieldCentiles :: a p -> (forall v. Divisible v => CDF p v -> [[v]]) -> Field DSelect p a -> [[Text]] renderFieldCentiles x cdfProj Field{..} = case fSelect of DInt (cdfProj . ($x) ->ds) -> ds <&> fmap (p.printf "%d") DWord64 (cdfProj . ($x) ->ds) -> ds <&> fmap (p.printf "%d") DFloat (cdfProj . ($x) ->ds) -> ds <&> fmap (p.printf "%F") - DDeltaT (cdfProj . ($x) ->ds) -> ds <&> fmap (T.justifyRight fWidth ' '.T.dropWhileEnd (== 's').p.show) + DDeltaT (cdfProj . ($x) ->ds) -> ds <&> fmap (justifyData (width fWidth).T.dropWhileEnd (== 's').p.show) where p = T.pack renderFieldCentilesWidth :: a p -> (forall v. Divisible v => CDF p v -> [[v]]) -> Field DSelect p a -> [[Text]] renderFieldCentilesWidth x cdfProj Field{..} = case fSelect of - DInt (cdfProj . ($x) ->ds) -> ds <&> fmap (T.pack.printf "%*d" fWidth) - DWord64 (cdfProj . ($x) ->ds) -> ds <&> fmap (T.pack.printf "%*d" fWidth) - DFloat (cdfProj . ($x) ->ds) -> ds <&> fmap (renderFloatStr fWidth.printf "%*F" fWidth) + DInt (cdfProj . 
($x) ->ds) -> ds <&> fmap (T.center (width fWidth) ' '.T.pack.printf "%d") + DWord64 (cdfProj . ($x) ->ds) -> ds <&> fmap (T.center (width fWidth) ' '.T.pack.printf "%d") + DFloat (cdfProj . ($x) ->ds) -> ds <&> fmap (renderFloatStr fWidth.printf "%*F" (width fWidth + 1)) DDeltaT (cdfProj . ($x) ->ds) -> ds <&> fmap (renderFloatStr fWidth.dropWhileEnd (== 's').show) -renderFloatStr :: Int -> String -> Text -renderFloatStr w = T.justifyRight w ' '. T.take w . T.pack . stripLeadingZero +renderFloatStr :: Width -> String -> Text +renderFloatStr w = justifyData w'. T.take w' . T.pack . stripLeadingZero where + w' = width w stripLeadingZero = \case '0':xs@('.':_) -> xs xs -> xs -data DSelect p a - = DInt (a p -> CDF p Int) - | DWord64 (a p -> CDF p Word64) - | DFloat (a p -> CDF p Double) - | DDeltaT (a p -> CDF p NominalDiffTime) - -data ISelect p a - = IInt (a -> Int) - | IWord64 (a -> Word64) - | IFloat (a -> Double) - | IDeltaT (a -> NominalDiffTime) - | IText (a -> Text) - -mapSomeFieldCDF :: forall p c a. (forall b. Divisible b => CDF p b -> c) -> a p -> DSelect p a -> c -mapSomeFieldCDF f a = \case - DInt s -> f (s a) - DWord64 s -> f (s a) - DFloat s -> f (s a) - DDeltaT s -> f (s a) - -renderTimeline :: forall (a :: Type). RenderTimeline a => Run -> (Field ISelect I a -> Bool) -> [RTComments a] -> [a] -> [Text] -renderTimeline run flt comments xs = +renderSummary :: forall f a. (a ~ Summary f, TimelineFields a, ToJSON a) + => RenderFormat -> Anchor -> (Field ISelect I a -> Bool) -> a -> [Text] +renderSummary AsJSON _ _ x = (:[]) . LT.toStrict $ encodeToLazyText x +renderSummary AsGnuplot _ _ _ = error "renderSummary: output not supported: gnuplot" +renderSummary AsPretty _ _ _ = error "renderSummary: output not supported: pretty" +renderSummary _ a fieldSelr summ = + render $ + Props + { oProps = [ ("TITLE", renderAnchorRuns a ) + , ("SUBTITLE", renderAnchorFiltersAndDomains a) + , ("DATE", renderAnchorDate a) + , ("VERSION", renderProgramAndVersion (aVersion a)) + ] + , oConstants = [] + , oBody = (:[]) $ + Table + { tColHeaders = ["Value"] + , tExtended = True + , tApexHeader = Just "Parameter" + , tColumns = --transpose $ + [fields' <&> renderScalar summ] + -- , tColumns = [kvs <&> snd] + , tRowHeaders = fields' <&> fShortDesc + , tSummaryHeaders = [] + , tSummaryValues = [] + , tFormula = [] + , tConstants = [] + } + } + where + fields' :: [Field ISelect I a] + fields' = filter fieldSelr timelineFields + +renderTimeline :: forall (a :: Type). TimelineFields a => (Field ISelect I a -> Bool) -> [TimelineComments a] -> [a] -> [Text] +renderTimeline flt comments xs = concatMap (uncurry fLine) $ zip xs [(0 :: Int)..] where fLine :: a -> Int -> [Text] @@ -114,48 +180,42 @@ renderTimeline run flt comments xs = : concat (fmap (rtCommentary l) comments)) entry :: a -> Text - entry v = renderLineDist $ - \Field{..} -> - case fSelect of - IInt (($v)->x) -> T.pack $ printf "%*d" fWidth x - IWord64 (($v)->x) -> T.pack $ printf "%*d" fWidth x - IFloat (($v)->x) -> T.pack $ take fWidth $ printf "%*F" (fWidth - 2) x - IDeltaT (($v)->x) -> T.pack $ take fWidth $ printf "%-*s" fWidth $ dropWhileEnd (== 's') $ show x - IText (($v)->x) -> T.take fWidth . T.dropWhileEnd (== 's') $ x - - fields :: [Field ISelect I a] - fields = filter flt $ rtFields run + entry = renderLineDist . renderScalar - head1, head2 :: Maybe Text - head1 = if all ((== 0) . T.length . fHead1) fields then Nothing - else Just (renderLineHead1 (uncurry T.take . ((+1) . fWidth &&& fHead1))) - head2 = if all ((== 0) . T.length . 
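-- The new justifiers centralise the padding policy: headers are centered,
-- data and centile cells left-justified. For example:
headerCell, dataCell :: Text
headerCell = justifyHead 7 "CPU"    -- "  CPU  "
dataCell   = justifyData 7 "0.5s"   -- "0.5s   "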
fHead2) fields then Nothing - else Just (renderLineHead2 (uncurry T.take . ((+1) . fWidth &&& fHead2))) + fields' :: [Field ISelect I a] + fields' = filter flt timelineFields - renderLineHead1 = mconcat . renderLine' (const 0) ((+ 1) . fWidth) - renderLineHead2 = mconcat . renderLine' fLeftPad ((+ 1) . fWidth) - renderLineDist = T.intercalate " " . renderLine' fLeftPad fWidth - - renderLine' :: - (Field ISelect I a -> Int) -> (Field ISelect I a -> Int) -> (Field ISelect I a -> Text) -> [Text] - renderLine' lpfn wfn rfn = renderField lpfn wfn rfn <$> fields - renderField lpfn wfn rfn f = T.replicate (lpfn f) " " <> T.center (wfn f) ' ' (rfn f) - -mapRenderCDF :: forall p a. RenderCDFs a p + head1, head2 :: Maybe Text + head1 = if all ((== 0) . T.length . fHead1) fields' then Nothing + else Just (renderLineHead (uncurry T.take . ((+1).width.fWidth&&&fHead1))) + head2 = if all ((== 0) . T.length . fHead2) fields' then Nothing + else Just (renderLineHead (uncurry T.take . ((+1).width.fWidth&&&fHead2))) + + -- Different strategies: fields are forcefully separated, + -- whereas heads can use the extra space + renderLineHead = mconcat . renderLine' justifyHead (toEnum.(+ 1).width.fWidth) + renderLineDist :: (Field ISelect I a -> Text) -> Text + renderLineDist = T.intercalate " " . renderLine' justifyData fWidth + + renderLine' :: (Int -> Text -> Text) -> (Field ISelect I a -> Width) -> (Field ISelect I a -> Text) -> [Text] + renderLine' jfn wfn rfn = fields' + <&> \f -> jfn (width $ wfn f) (rfn f) + +mapRenderCDF :: forall p a. CDFFields a p => (Field DSelect p a -> Bool) -> Maybe [Centile] -> (forall c. Divisible c => p c -> [c]) -> a p -> [[Text]] mapRenderCDF fieldSelr centiSelr fSampleProps x = - fields -- list of fields + fields' -- list of fields <&> renderFieldCentiles x cdfSamplesProps -- for each field, list of per-sample lists of properties - & ([renderCentiles 6 centiles] :) + & (transpose [renderCentiles 6 centiles] :) & transpose -- for each sample, list of per-field lists of properties & fmap (fmap $ T.intercalate " ") where -- Pick relevant fields: - fields :: [Field DSelect p a] - fields = filter fieldSelr rdFields + fields' :: [Field DSelect p a] + fields' = filter fieldSelr cdfFields -- Pick relevant centiles: subsetCenti :: CDF p b -> CDF p b @@ -165,7 +225,7 @@ mapRenderCDF fieldSelr centiSelr fSampleProps x = centiles = mapSomeFieldCDF (centilesCDF . subsetCenti) x - (fSelect $ head rdFields) + (fSelect $ head cdfFields) -- Get relevant values: for each selected sample, a list of properties (avg, min, max, avg-/+stddev) cdfSamplesProps :: Divisible c => CDF p c -> [[c]] @@ -177,7 +237,7 @@ data RenderFormat | AsOrg | AsReport | AsPretty - deriving (Show, Bounded, Enum) + deriving (Eq, Show, Bounded, Enum) -- | When rendering a CDF-of-CDFs _and_ subsetting the data, how to subset: data CDF2Aspect @@ -185,6 +245,14 @@ data CDF2Aspect | OfInterCDF -- ^ Inter-sample (i.e. inter-CDF) stats. deriving (Show, Bounded, Enum) +parseCDF2Aspect :: Opt.Parser CDF2Aspect +parseCDF2Aspect = + [ Opt.flag' OfOverallDataset (Opt.long "overall" <> Opt.help "Overall dataset statistical summary") + , Opt.flag' OfInterCDF (Opt.long "inter-cdf" <> Opt.help "Inter-sample (i.e. inter-CDF) stats") + ] & \case + (x:xs) -> foldl (<|>) x xs + [] -> error "Crazy world, begone. 
1" + modeFilename :: TextOutputFile -> Text -> RenderFormat -> TextOutputFile modeFilename orig@(TextOutputFile f) name = \case AsJSON -> orig @@ -193,13 +261,13 @@ modeFilename orig@(TextOutputFile f) name = \case AsReport -> orig AsPretty -> orig -renderAnalysisCDFs :: forall a p. (RenderCDFs a p, KnownCDF p, ToJSON (a p)) => Anchor -> (Field DSelect p a -> Bool) -> CDF2Aspect -> Maybe [Centile] -> RenderFormat -> a p -> [(Text, [Text])] +renderAnalysisCDFs :: forall a p. (CDFFields a p, KnownCDF p, ToJSON (a p)) => Anchor -> (Field DSelect p a -> Bool) -> CDF2Aspect -> Maybe [Centile] -> RenderFormat -> a p -> [(Text, [Text])] renderAnalysisCDFs _anchor _fieldSelr _c2a _centileSelr AsJSON x = (:[]) . ("",) . (:[]) . LT.toStrict $ encodeToLazyText x renderAnalysisCDFs anchor fieldSelr _c2a _centileSelr AsGnuplot x = - filter fieldSelr rdFields <&> + filter fieldSelr cdfFields <&> \Field{fId=cdfField} -> (,) cdfField $ "# " <> renderAnchor anchor : @@ -217,32 +285,33 @@ renderAnalysisCDFs a@Anchor{..} fieldSelr _c2a centileSelr AsOrg x = , oConstants = [] , oBody = (:[]) $ Table - { tColHeaders = fields <&> fId + { tColHeaders = fields' <&> fId , tExtended = True , tApexHeader = Just "centile" - , tColumns = fields <&> fmap (T.intercalate ":") . renderFieldCentilesWidth x cdfSamplesProps + , tColumns = fields' <&> fmap (T.intercalate ":") . renderFieldCentilesWidth x cdfSamplesProps , tRowHeaders = percSpecs <&> T.take 6 . T.pack . printf "%.4f" . unCentile , tSummaryHeaders = ["avg", "samples"] - , tSummaryValues = [ fields <&> - \f@Field{..} -> mapField x (T.take (fWidth + 1) . T.pack . printf "%f" . cdfAverageVal) f - , fields <&> + , tSummaryValues = [ fields' <&> + \f@Field{..} -> mapField x (T.take (width fWidth + 1) . T.pack . printf "%f" . cdfAverageVal) f + , fields' <&> \f@Field{} -> mapField x (T.pack . printf "%d" . cdfSize) f ] & transpose , tFormula = [] + , tConstants = [] } } where cdfSamplesProps :: Divisible c => CDF p c -> [[c]] cdfSamplesProps = fmap (pure . unliftCDFVal cdfIx . snd) . cdfSamples . restrictCDF - fields :: [Field DSelect p a] - fields = filter fieldSelr rdFields + fields' :: [Field DSelect p a] + fields' = filterFields fieldSelr restrictCDF :: forall c. CDF p c -> CDF p c restrictCDF = maybe id subsetCDF centileSelr percSpecs :: [Centile] - percSpecs = maybe (mapSomeFieldCDF centilesCDF x (fSelect . head $ rdFields @a @p)) + percSpecs = maybe (mapSomeFieldCDF centilesCDF x (fSelect . head $ cdfFields @a @p)) id centileSelr @@ -261,50 +330,57 @@ renderAnalysisCDFs a@Anchor{..} fieldSelr aspect _centileSelr AsReport x = , tExtended = True , tApexHeader = Just "metric" , tColumns = transpose $ - fields <&> + fields' <&> fmap (T.take 6 . T.pack . printf "%f") - . mapField x (snd hdrsProjs) - , tRowHeaders = fields <&> fDesc + . mapFieldWithKey x (snd hdrsProjs) + , tRowHeaders = fields' <&> fShortDesc , tSummaryHeaders = [] , tSummaryValues = [] , tFormula = [] + , tConstants = [("nSamples", + fields' <&> mapField x (T.pack . show . cdfSize) & head)] } } where - fields :: [Field DSelect p a] - fields = filter fieldSelr rdFields + fields' :: [Field DSelect p a] + fields' = filter fieldSelr cdfFields - hdrsProjs :: forall v. (Divisible v) => ([Text], CDF p v -> [Double]) + hdrsProjs :: forall v. (Divisible v) => ([Text], Field DSelect p a -> CDF p v -> [Double]) hdrsProjs = aspectColHeadersAndProjections aspect aspectColHeadersAndProjections :: forall v. 
(Divisible v) - => CDF2Aspect -> ([Text], CDF p v -> [Double]) + => CDF2Aspect -> ([Text], Field DSelect p a -> CDF p v -> [Double]) aspectColHeadersAndProjections = \case OfOverallDataset -> (,) - ["average", "CoV", "min", "max", "stddev", "size"] - \c@CDF{..} -> + ["average", "CoV", "min", "max", "stddev", "range", "precision", "size"] + \Field{..} c@CDF{cdfRange=Interval cdfMin cdfMax, ..} -> let avg = cdfAverageVal c & toDouble in [ avg , cdfStddev / avg - , fromRational . toRational $ fst cdfRange - , fromRational . toRational $ snd cdfRange + , fromRational . toRational $ cdfMin + , fromRational . toRational $ cdfMax , cdfStddev + , fromRational . toRational $ cdfMax - cdfMin + , fromIntegral $ fromEnum fPrecision , fromIntegral cdfSize ] OfInterCDF -> (,) - ["average", "CoV", "min", "max", "stddev", "size"] - (cdfArity + ["average", "CoV", "min", "max", "stddev", "range", "precision", "size"] + (\Field{..} -> + cdfArity (error "Cannot do inter-CDF statistics on plain CDFs") - (\CDF{cdfAverage} -> - let avg = cdfAverageVal cdfAverage & toDouble in + (\CDF{cdfAverage=cdfAvg@CDF{cdfRange=Interval minAvg maxAvg,..}} -> + let avg = cdfAverageVal cdfAvg & toDouble in [ avg - , cdfStddev cdfAverage / avg - , toDouble . fst $ cdfRange cdfAverage - , toDouble . snd $ cdfRange cdfAverage - , cdfStddev cdfAverage - , fromIntegral $ cdfSize cdfAverage + , cdfStddev / avg + , toDouble minAvg + , toDouble maxAvg + , cdfStddev + , toDouble $ maxAvg - minAvg + , fromIntegral $ fromEnum fPrecision + , fromIntegral cdfSize ])) renderAnalysisCDFs a fieldSelr _c2a centiSelr AsPretty x = @@ -315,30 +391,31 @@ renderAnalysisCDFs a fieldSelr _c2a centiSelr AsPretty x = <> sizeAvg where head1, head2 :: Maybe Text - head1 = if all ((== 0) . T.length . fHead1) fields then Nothing - else Just (renderLineHead1 (uncurry T.take . ((+1) . fWidth &&& fHead1))) - head2 = if all ((== 0) . T.length . fHead2) fields then Nothing - else Just (renderLineHead2 (uncurry T.take . ((+1) . fWidth &&& fHead2))) - renderLineHead1 = mconcat . (" ":) . renderLine' (const 0) ((+ 1) . fWidth) - renderLineHead2 = mconcat . (" %tile ":) . renderLine' fLeftPad ((+ 1) . fWidth) + head1 = if all ((== 0) . T.length . fHead1) fields' then Nothing + else Just (renderLineHead1 (uncurry T.take . ((+1) . width . fWidth &&& fHead1))) + head2 = if all ((== 0) . T.length . fHead2) fields' then Nothing + else Just (renderLineHead2 (uncurry T.take . ((+1) . width . fWidth &&& fHead2))) + renderLineHead1 = mconcat . (" ":) . renderLine' justifyHead (toEnum . (+ 1) . width . fWidth) + renderLineHead2 = mconcat . (" %tile":) . renderLine' justifyHead (toEnum . (+ 1) . width . fWidth) pLines :: [Text] - pLines = fields + pLines = fields' <&> - fmap (T.intercalate " ") . + -- fmap (T.intercalate " ") . + fmap T.concat . renderFieldCentilesWidth x cdfSamplesProps - & ((T.justifyLeft 6 ' ' <$> renderCentiles 6 centiles) :) + & ((justifyCentile 6 <$> renderCentiles 6 centiles) :) & transpose & fmap (T.intercalate " ") - fields :: [Field DSelect p a] - fields = filter fieldSelr rdFields + fields' :: [Field DSelect p a] + fields' = filter fieldSelr cdfFields centiles :: [Centile] centiles = mapSomeFieldCDF centilesCDF x - (fSelect $ head rdFields) + (fSelect $ head cdfFields) cdfSamplesProps :: Divisible c => CDF p c -> [[c]] cdfSamplesProps = fmap (pure . unliftCDFVal cdfIx . 
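-- Both report aspects now expose "range" and "precision" next to the familiar
-- statistics. Condensed to its first six columns (precision and size
-- omitted), the overall-dataset row is just:
overallRow :: Double -> Double -> Double -> Double -> [Double]
overallRow avg stddev lo hi = [avg, stddev / avg, lo, hi, stddev, hi - lo]
-- overallRow 2.0 0.5 1.0 4.0 == [2.0, 0.25, 1.0, 4.0, 0.5, 3.0]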
snd) @@ -347,34 +424,33 @@ renderAnalysisCDFs a fieldSelr _c2a centiSelr AsPretty x = sizeAvg :: [Text] sizeAvg = fmap (T.intercalate " ") - [ (T.center 6 ' ' "avg" :) $ - (\f -> flip (renderField fLeftPad fWidth) f $ const $ + [ (justifyCentile 6 "avg" :) $ + (\f -> flip (renderField justifyData fWidth) f $ const $ mapSomeFieldCDF (fit (fWidth f) .T.pack . printf "%F" . cdfAverageVal) x (fSelect f)) - <$> fields - , (T.center 6 ' ' "size" :) $ - (\f -> flip (renderField fLeftPad fWidth) f $ const $ + <$> fields' + , (justifyProp 6 "size" :) $ + (\f -> flip (renderField justifyHead fWidth) f $ const $ mapSomeFieldCDF (fit (fWidth f) . T.pack . show . cdfSize) x (fSelect f)) - <$> fields + <$> fields' ] - fit :: Int -> Text -> Text - fit w t = if T.length t > w && - -- Drop all non-floats, and floats with significant digit overflowing: - maybe True (> w) (T.findIndex (== '.') t) - then "..." - else T.take w t - - renderLine' :: - (Field DSelect p a -> Int) -> (Field DSelect p a -> Int) -> (Field DSelect p a -> Text) -> [Text] - renderLine' lefPad width rend = renderField lefPad width rend <$> fields - renderField :: forall f. (f -> Int) -> (f -> Int) -> (f -> Text) -> f -> Text - renderField _lefPad width rend f = - --T.replicate (lefPad f) " " <> - T.center (width f) ' ' (rend f) + fit :: Width -> Text -> Text + fit (width -> w) t = + if T.length t > w && + -- Drop all non-floats, and floats with significant digit overflowing: + maybe True (> w) (T.findIndex (== '.') t) + then "..." + else T.take w t + + renderLine' :: (Int -> Text -> Text) -> (Field DSelect p a -> Width) -> (Field DSelect p a -> Text) -> [Text] + renderLine' jfn wfn rfn = renderField jfn wfn rfn <$> fields' + + renderField :: forall f. (Int -> Text -> Text) -> (f -> Width) -> (f -> Text) -> f -> Text + renderField jfn wfn rend f = jfn (width $ wfn f) (rend f) -- populationIndices :: [Int] -- populationIndices = [1..maxPopulationSize] -- maxPopulationSize :: Int - -- maxPopulationSize = last . sort $ mapSomeFieldCDF cdfSize x . fSelect <$> rdFields @a @p + -- maxPopulationSize = last . sort $ mapSomeFieldCDF cdfSize x . 
fSelect <$> cdfFields @a @p diff --git a/bench/locli/src/Cardano/Report.hs b/bench/locli/src/Cardano/Report.hs index e716d3d7ff9..288026f7220 100644 --- a/bench/locli/src/Cardano/Report.hs +++ b/bench/locli/src/Cardano/Report.hs @@ -1,5 +1,7 @@ +{-# LANGUAGE TypeInType #-} {-# LANGUAGE GeneralisedNewtypeDeriving #-} {-# LANGUAGE OverloadedStrings #-} +{-# LANGUAGE ScopedTypeVariables #-} module Cardano.Report ( module Cardano.Report ) @@ -7,77 +9,128 @@ where import Cardano.Prelude -import Data.Aeson (FromJSON (..), ToJSON (..), object) import Data.ByteString qualified as BS import Data.HashMap.Lazy qualified as HM -import Data.List (last) +import Data.Map.Strict qualified as Map import Data.Text qualified as T import Data.Text.Lazy qualified as LT import Data.Time.Clock +import System.FilePath as FS import System.Posix.User -import Text.EDE +import Text.EDE hiding (Id) +import Data.CDF +import Cardano.Util import Cardano.Analysis.API -import Cardano.Analysis.Context -import Cardano.Analysis.Ground -import Cardano.Analysis.Run hiding (Version) -import Cardano.Analysis.Run qualified as Run newtype Author = Author { unAuthor :: Text } deriving newtype (FromJSON, ToJSON) newtype Revision = Revision { unRevision :: Int } deriving newtype (FromJSON, ToJSON) newtype ShortId = ShortId { unShortId :: Text } deriving newtype (FromJSON, ToJSON) -data Report - = Report - { rAuthor :: !Author - , rDate :: !UTCTime - , rRevision :: !Revision - , rLocliVersion :: !Run.Version - , rTarget :: !Version +data ReportMeta + = ReportMeta + { rmAuthor :: !Author + , rmDate :: !UTCTime + , rmRevision :: !Revision + , rmLocliVersion :: !LocliVersion + , rmTarget :: !Version } -instance ToJSON Report where - toJSON Report{..} = - object - [ "author" .= rAuthor - , "date" .= rDate - , "revision" .= rRevision - , "locli" .= rLocliVersion - , "target" .= rTarget - ] +instance ToJSON ReportMeta where + toJSON ReportMeta{..} = object + [ "author" .= rmAuthor + , "date" .= rmDate + , "revision" .= rmRevision + , "locli" .= rmLocliVersion + , "target" .= rmTarget + ] -getReport :: Version -> Maybe Revision -> IO Report -getReport rTarget mrev = do - rAuthor <- (getUserEntryForName =<< getLoginName) <&> Author . T.pack . userGecos - rDate <- getCurrentTime - let rRevision = fromMaybe (Revision 1) mrev - rLocliVersion = Run.getVersion - pure Report{..} +getReport :: Version -> Maybe Revision -> IO ReportMeta +getReport rmTarget mrev = do + rmAuthor <- (getUserEntryForName =<< getLoginName) <&> Author . T.pack . 
userGecos + rmDate <- getCurrentTime + let rmRevision = fromMaybe (Revision 1) mrev + rmLocliVersion = getLocliVersion + pure ReportMeta{..} data Workload = WValue - | WPlutus + | WPlutusLoopCountdown + | WPlutusLoopSECP instance ToJSON Workload where toJSON = \case - WValue -> "Value" - WPlutus -> "Plutus" - -data RunSpec - = RunSpec - { rsMeta :: !Metadata - , rsShortId :: !ShortId - , rsWorkload :: !Workload - , rsManifest :: !Manifest + WValue -> "value-only" + WPlutusLoopCountdown -> "Plutus countdown loop" + WPlutusLoopSECP -> "Plutus SECP loop" + +data Section where + STable :: + { sData :: !(a p) + , sFields :: !FSelect + , sNameCol :: !Text + , sValueCol :: !Text + , sDataRef :: !Text + , sOrgFile :: !Text + , sTitle :: !Text + } -> Section + +summaryReportSection :: SummaryOne -> Section +summaryReportSection summ = + STable summ (ISel @SummaryOne $ iFields sumFieldsReport) "Parameter" "Value" "summary" "summary.org" + "Overall run parameters" + +analysesReportSections :: MachPerf (CDF I) -> BlockProp I -> [Section] +analysesReportSections mp bp = + [ STable mp (DSel @MachPerf $ dFields mtFieldsReport) "metric" "average" "perf" "clusterperf.report.org" + "Resource Usage" + + , STable bp (DSel @BlockProp $ dFields bpFieldsControl) "metric" "average" "control" "blockprop.control.org" + "Anomaly control" + + , STable bp (DSel @BlockProp $ dFields bpFieldsForger) "metric" "average" "forge" "blockprop.forger.org" + "Forging" + + , STable bp (DSel @BlockProp $ dFields bpFieldsPeers) "metric" "average" "peers" "blockprop.peers.org" + "Individual peer propagation" + + , STable bp (DSel @BlockProp $ dFields bpFieldsEndToEnd) "metric" "average" "end2end" "blockprop.endtoend.org" + "End-to-end propagation" + ] + +-- +-- Representation of a run, structured for template generator's needs. 
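-- Workload detection in liftTmplRun (below) keys off the Plutus loop script's
-- basename. The normalisation step in isolation (the path is hypothetical):
scriptName :: FilePath -> String
scriptName = FS.dropExtension . FS.takeFileName
-- scriptName "bench/plutus/loop.plutus"                   == "loop"
-- scriptName "bench/plutus/schnorr-secp256k1-loop.plutus" == "schnorr-secp256k1-loop"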
+-- + +liftTmplRun :: Summary a -> TmplRun +liftTmplRun Summary{sumGenerator=GeneratorProfile{..} + ,sumMeta=meta@Metadata{..}} = + TmplRun + { trMeta = meta + , trManifest = manifest & unsafeShortenManifest 5 + , trWorkload = + case ( plutusMode & fromMaybe False + , plutusLoopScript & fromMaybe "" & FS.takeFileName & FS.dropExtension ) of + (False, _) -> WValue + (True, "loop") -> WPlutusLoopCountdown + (True, "schnorr-secp256k1-loop") -> WPlutusLoopSECP + (_, scr) -> + error $ "Unknown Plutus script: " <> scr + } + +data TmplRun + = TmplRun + { trMeta :: !Metadata + , trWorkload :: !Workload + , trManifest :: !Manifest } -instance ToJSON RunSpec where - toJSON RunSpec{rsManifest=Manifest{..},..} = +instance ToJSON TmplRun where + toJSON TmplRun{trManifest=Manifest{..},..} = object - [ "meta" .= rsMeta - , "id" .= rsShortId - , "workload" .= rsWorkload + [ "meta" .= trMeta + , "workload" .= trWorkload , "branch" .= mNodeBranch , "ver" .= mNodeApproxVer , "rev" .= @@ -92,29 +145,73 @@ instance ToJSON RunSpec where ] ] -liftRunSpec :: Run -> RunSpec -liftRunSpec Run{..} = - RunSpec - { rsMeta = metadata - , rsShortId = ShortId "rc4" - , rsWorkload = WValue - , rsManifest = manifest metadata & unsafeShortenManifest 5 - } +liftTmplSection :: Section -> TmplSection +liftTmplSection = + \case + STable{..} -> + TmplTable + { tsTitle = sTitle + , tsNameCol = sNameCol + , tsValueCol = sValueCol + , tsDataRef = sDataRef + , tsOrgFile = sOrgFile + , tsRowPrecs = fs <&> fromEnum + , tsVars = [ ("nSamples", "Sample count") + ] + } + where fs = case sFields of + ISel sel -> filter sel timelineFields <&> fPrecision + DSel sel -> filter sel cdfFields <&> fPrecision -generate :: InputDir -> Maybe TextInputFile -> Run -> [Run] -> IO (ByteString, Text) -generate (InputDir ede) mReport (liftRunSpec -> base) (fmap liftRunSpec -> runs) = do - ctx <- getReport (last runs & rsManifest & mNodeApproxVer) Nothing +data TmplSection + = TmplTable + { tsTitle :: !Text + , tsNameCol :: !Text + , tsValueCol :: !Text + , tsDataRef :: !Text + , tsOrgFile :: !Text + , tsRowPrecs :: ![Int] + , tsVars :: ![(Text, Text)] -- map from Org constant name to description + } + +instance ToJSON TmplSection where + toJSON TmplTable{..} = object + [ "title" .= tsTitle + , "nameCol" .= tsNameCol + , "valueCol" .= tsValueCol + , "dataRef" .= tsDataRef + , "orgFile" .= tsOrgFile + -- Yes, strange as it is, this is the encoding to ease iteration in ED-E. + , "rowPrecs" .= tsRowPrecs + , "vars" .= Map.fromList (zip tsVars ([0..] <&> flip T.replicate ">" . (length tsVars -)) + <&> \((k, name), angles) -> + (k, Map.fromList @Text + [("name", name), + ("angles", angles)])) + ] + +generate :: InputDir -> Maybe TextInputFile + -> (SummaryOne, ClusterPerf, BlockPropOne) -> [(SummaryOne, ClusterPerf, BlockPropOne)] + -> IO (ByteString, Text) +generate (InputDir ede) mReport (summ, cp, bp) rest = do + ctx <- getReport (last restTmpls & trManifest & mNodeApproxVer) Nothing tmplRaw <- BS.readFile (maybe defaultReportPath unTextInputFile mReport) tmpl <- parseWith defaultSyntax (includeFile ede) "report" tmplRaw result (error . show) (pure . (tmplRaw,) . LT.toStrict) $ tmpl >>= \x -> - renderWith fenv x (env ctx base runs) + renderWith fenv x (env ctx baseTmpl restTmpls) where + baseTmpl = liftTmplRun summ + restTmpls = fmap (liftTmplRun. 
fst3) rest + defaultReportPath = ede <> "/report.ede" fenv = HM.fromList [] env rc b rs = fromPairs - [ "report" .= rc - , "base" .= b - , "runs" .= rs + [ "report" .= rc + , "base" .= b + , "runs" .= rs + , "summary" .= liftTmplSection (summaryReportSection summ) + , "analyses" .= (liftTmplSection <$> analysesReportSections cp bp) + , "dictionary" .= metricDictionary ] diff --git a/bench/locli/src/Cardano/Unlog/LogObject.hs b/bench/locli/src/Cardano/Unlog/LogObject.hs index c215240bc43..46f116fe805 100644 --- a/bench/locli/src/Cardano/Unlog/LogObject.hs +++ b/bench/locli/src/Cardano/Unlog/LogObject.hs @@ -14,7 +14,6 @@ import Prelude (head, id, show, unzip3) import Cardano.Prelude hiding (Text, head, show) import Control.Monad (fail) -import Data.Aeson (FromJSON(..), ToJSON(..), Value(..), Object, (.:), (.:?)) import Data.Aeson qualified as AE import Data.Aeson.KeyMap qualified as KeyMap import Data.Aeson.Types (Parser) @@ -23,15 +22,13 @@ import Data.ByteString.Lazy qualified as LBS import Data.Text qualified as LText import Data.Text.Short qualified as Text import Data.Text.Short (ShortText, fromText, toText) -import Data.Time.Clock (NominalDiffTime, UTCTime) -import Data.Tuple.Extra (fst3, snd3, thd3) import Data.Map qualified as Map import Data.Vector (Vector) import Data.Vector qualified as V import Cardano.Logging.Resources.Types -import Cardano.Analysis.Ground +import Cardano.Analysis.API.Ground import Cardano.Util import Data.Accum (zeroUTCTime) @@ -39,11 +36,11 @@ import Data.Accum (zeroUTCTime) type Text = ShortText -runLiftLogObjects :: [JsonLogfile] -> Maybe HostDeduction +runLiftLogObjects :: [JsonLogfile] -> Maybe HostDeduction -> Bool -> [LOAnyType] -> ExceptT LText.Text IO [(JsonLogfile, [LogObject])] -runLiftLogObjects fs (fmap hostDeduction -> mHostDed) = liftIO $ +runLiftLogObjects fs (fmap hostDeduction -> mHostDed) okDErr anyOks = liftIO $ do forConcurrently fs - (\f -> (f,) . fmap (setLOhost f mHostDed) <$> readLogObjectStream (unJsonLogfile f)) + (\f -> (f,) . fmap (setLOhost f mHostDed) <$> readLogObjectStream (unJsonLogfile f) okDErr anyOks) where setLOhost :: JsonLogfile -> Maybe (JsonLogfile -> Host) -> LogObject -> LogObject setLOhost _ Nothing lo = lo @@ -52,14 +49,38 @@ runLiftLogObjects fs (fmap hostDeduction -> mHostDed) = liftIO $ -- joinT :: (IO a, IO b) -> IO (a, b) -- joinT (a, b) = (,) <$> a <*> b -readLogObjectStream :: FilePath -> IO [LogObject] -readLogObjectStream f = +readLogObjectStream :: FilePath -> Bool -> [LOAnyType] -> IO [LogObject] +readLogObjectStream f okDErr anyOks = LBS.readFile f <&> - fmap (either (LogObject zeroUTCTime "Cardano.Analysis.DecodeError" "DecodeError" "" (TId "0") . LODecodeError) - id - . AE.eitherDecode) + (if okDErr then id else + filter ((\case + LODecodeError input err -> error + (printf "Decode error while parsing %s:\n%s\non input:\n>>> %s" f (Text.toString err) (Text.toString input)) + _ -> True) + . loBody)) . + filter ((\case + LOAny laty obj -> + if elem laty anyOks then True else + error + (printf "Unexpected LOAny while parsing %s -- %s: %s" f (show laty) (show obj)) + _ -> True) + . loBody) . + filter (not . isDecodeError "Error in $: not enough input" . loBody) . + fmap (\bs -> + AE.eitherDecode bs & + either + (LogObject zeroUTCTime "Cardano.Analysis.DecodeError" "DecodeError" "" (TId "0") + . LODecodeError (Text.fromByteString (LBS.toStrict bs) + & fromMaybe "#") + . Text.fromText + . LText.pack) + id) . 
LBS.split (fromIntegral $ fromEnum '\n') + where + isDecodeError x = \case + LODecodeError _ x' -> x == x' + _ -> False data LogObject = LogObject @@ -89,7 +110,7 @@ type Threeple t = (t, t, t) interpreters :: Threeple (Map Text (Object -> Parser LOBody)) interpreters = map3ple Map.fromList . unzip3 . fmap ent $ -- Every second: - [ (,,,) "Resources" "Resources" "" $ + [ (,,,) "Resources" "Resources" "Resources" $ \v -> LOResources <$> parsePartialResourceStates (Object v) -- Leadership: @@ -147,18 +168,17 @@ interpreters = map3ple Map.fromList . unzip3 . fmap ent $ <$> v .: "block" -- Forwarding: - , (,,,) "ChainSyncServerEvent.TraceChainSyncServerRead.AddBlock" "ChainSyncServerHeader.ChainSyncServerEvent.ServerRead.AddBlock" "" $ - \v -> LOChainSyncServerSendHeader - <$> v .: "block" + , (,,,) "ChainSyncServerEvent.TraceChainSyncServerRead.AddBlock" "unknown0" "unknown1" $ + \v -> LOChainSyncServerSendHeader . fromMaybe (error $ "Incompatible LOChainSyncServerSendHeader: " <> show v) + <$> v .:? "block" - , (,,,) "ChainSyncServerEvent.TraceChainSyncServerReadBlocked.AddBlock" "ChainSyncServerHeader.ChainSyncServerEvent.ServerReadBlocked.AddBlock" "ChainSync.ServerHeader.Update" $ + , (,,,) "ChainSyncServerEvent.TraceChainSyncServerReadBlocked.AddBlock" "ChainSyncServerEvent.TraceChainSyncServerUpdate" "ChainSync.ServerHeader.Update" $ \v -> case ( KeyMap.lookup "risingEdge" v , KeyMap.lookup "blockingRead" v , KeyMap.lookup "rollBackTo" v) of - -- Skip the falling edge & non-blocking reads: - (Just (Bool False), _, _) -> pure $ LOAny v - (_, Just (Bool False), _) -> pure $ LOAny v - (_, _, Just _) -> pure $ LOAny v + (Just (Bool False), _, _) -> pure $ LOAny LAFallingEdge v + (_, Just (Bool False), _) -> pure $ LOAny LANonBlocking v + (_, _, Just _) -> pure $ LOAny LARollback v -- Should be either rising edge+rollforward, or legacy: _ -> do blockLegacy <- v .:? "block" @@ -178,7 +198,7 @@ interpreters = map3ple Map.fromList . unzip3 . fmap ent $ , (,,,) "TraceAddBlockEvent.AddedToCurrentChain" "ChainDB.AddBlockEvent.AddedToCurrentChain" "ChainDB.AddBlockEvent.AddedToCurrentChain" $ \v -> LOBlockAddedToCurrentChain <$> ((v .: "newtip") <&> hashFromPoint) - <*> pure Nothing + <*> pure SNothing <*> (v .:? "chainLengthDelta" -- Compat for node versions 1.27 and older: <&> fromMaybe 1) @@ -186,7 +206,7 @@ interpreters = map3ple Map.fromList . unzip3 . fmap ent $ , (,,,) "TraceAdoptedBlock" "Forge.AdoptedBlock" "Forge.Loop.AdoptedBlock" $ \v -> LOBlockAddedToCurrentChain <$> v .: "blockHash" - <*> ((v .: "blockSize") <&> Just) + <*> ((v .: "blockSize") <&> SJust) <*> pure 1 -- Ledger snapshots: @@ -240,9 +260,12 @@ interpreters = map3ple Map.fromList . unzip3 . 
fmap ent $ map3ple :: (a -> b) -> (a,a,a) -> (b,b,b) map3ple f (x,y,z) = (f x, f y, f z) -logObjectStreamInterpreterKeysLegacy, logObjectStreamInterpreterKeysOldOrg, logObjectStreamInterpreterKeys :: [Text] -logObjectStreamInterpreterKeysLegacy = Map.keys (interpreters & fst3) -logObjectStreamInterpreterKeysOldOrg = Map.keys (interpreters & snd3) +logObjectStreamInterpreterKeysLegacy, logObjectStreamInterpreterKeys :: [Text] +logObjectStreamInterpreterKeysLegacy = + logObjectStreamInterpreterKeysLegacy1 <> logObjectStreamInterpreterKeysLegacy2 + where + logObjectStreamInterpreterKeysLegacy1 = Map.keys (interpreters & fst3) + logObjectStreamInterpreterKeysLegacy2 = Map.keys (interpreters & snd3) logObjectStreamInterpreterKeys = Map.keys (interpreters & thd3) data LOBody @@ -294,7 +317,7 @@ data LOBody -- Adoption: | LOBlockAddedToCurrentChain { loBlock :: !Hash - , loSize :: !(Maybe Int) + , loSize :: !(SMaybe Int) , loLength :: !Int } -- Ledger snapshots: @@ -306,13 +329,25 @@ data LOBody | LOMempoolTxs !Word64 | LOMempoolRejectedTx -- Generator: - | LOGeneratorSummary !Bool !Word64 !NominalDiffTime (Vector Double) + | LOGeneratorSummary !Bool !Word64 !NominalDiffTime ![Double] -- Everything else: - | LOAny !Object - | LODecodeError !String - deriving (Generic, Show) + | LOAny !LOAnyType !Object + | LODecodeError + { loRawText :: !ShortText + , loError :: !ShortText + } + deriving (Eq, Generic, Show) deriving anyclass NFData +data LOAnyType + = LAFallingEdge + | LANonBlocking + | LARollback + | LANoInterpreter + deriving (Eq, Generic, NFData, Read, Show, ToJSON) + +deriving instance Eq ResourceStats + instance ToJSON LOBody instance FromJSON LogObject where @@ -332,14 +367,14 @@ instance FromJSON LogObject where <*> pure kind <*> v .: "host" <*> v .: "thread" - <*> case Map.lookup ns (thd3 interpreters) <|> - Map.lookup ns (snd3 interpreters) <|> - Map.lookup (ns + <*> case Map.lookup ns (thd3 interpreters) + <|> Map.lookup ns (snd3 interpreters) + <|> Map.lookup (kind & Text.stripPrefix "Cardano.Node." - & fromMaybe "") (snd3 interpreters) <|> - Map.lookup kind (fst3 interpreters) of + & fromMaybe kind) (snd3 interpreters) + <|> Map.lookup kind (fst3 interpreters) of Just interp -> interp unwrapped - Nothing -> pure $ LOAny unwrapped + Nothing -> pure $ LOAny LANoInterpreter unwrapped where unwrap :: Text -> Text -> Object -> Parser (Object, Text) unwrap wrappedKeyPred unwrapKey v = do @@ -359,16 +394,20 @@ extendObject k _ _ = error . Text.unpack $ "Summary key '" <> k <> "' does not s parsePartialResourceStates :: Value -> Parser (Resources Word64) parsePartialResourceStates = AE.withObject "NodeSetup" $ - \o -> - Resources - <$> o .: "CentiCpu" - <*> o .: "CentiGC" - <*> o .: "CentiMut" - <*> o .: "GcsMajor" - <*> o .: "GcsMinor" - <*> o .: "Alloc" - <*> o .: "Live" - <*> (o .:? "Heap" <&> fromMaybe 0) - <*> o .: "RSS" - <*> o .: "CentiBlkIO" - <*> o .: "Threads" + \o -> do + rCentiCpu <- o .: "CentiCpu" + rCentiGC <- o .: "CentiGC" + rCentiMut <- o .: "CentiMut" + rGcsMajor <- o .: "GcsMajor" + rGcsMinor <- o .: "GcsMinor" + rAlloc <- o .: "Alloc" + rLive <- o .:? "Heap" <&> fromMaybe 0 + rHeap <- o .: "Live" + rRSS <- o .: "RSS" + rCentiBlkIO <- o .: "CentiBlkIO" + rNetRd <- o .:? "NetRd" <&> fromMaybe 0 + rNetWr <- o .:? "NetWr" <&> fromMaybe 0 + rFsRd <- o .:? "FsRd" <&> fromMaybe 0 + rFsWr <- o .:? 
"FsWr" <&> fromMaybe 0 + rThreads <- o .: "Threads" + pure Resources{..} diff --git a/bench/locli/src/Cardano/Unlog/Resources.hs b/bench/locli/src/Cardano/Unlog/Resources.hs index fdefe8fc3b7..428526684cc 100644 --- a/bench/locli/src/Cardano/Unlog/Resources.hs +++ b/bench/locli/src/Cardano/Unlog/Resources.hs @@ -8,8 +8,7 @@ module Cardano.Unlog.Resources , updateResAccums , extractResAccums , computeResCDF - , ResContinuity - , discardObsoleteValues + , zeroObsoleteValues -- * Re-exports , Resources(..) ) where @@ -18,8 +17,7 @@ import Cardano.Prelude import Data.Accum import Data.CDF -import Data.Time.Clock (UTCTime) - +import Cardano.Util import Cardano.Logging.Resources.Types deriving instance Foldable Resources @@ -41,6 +39,10 @@ mkResAccums = , rLive = mkAccumNew `divAccum` 1048576 , rAlloc = mkAccumDelta `divAccum` 1048576 , rCentiBlkIO = mkAccumTicksShare + , rNetRd = mkAccumDelta `divAccum` 1024 + , rNetWr = mkAccumDelta `divAccum` 1024 + , rFsRd = mkAccumDelta `divAccum` 1024 + , rFsWr = mkAccumDelta `divAccum` 1024 , rThreads = mkAccumNew } @@ -55,30 +57,29 @@ extractResAccums = (aCurrent <$>) computeResCDF :: forall a . [Centile] - -> Resources (a -> Maybe Word64) + -> (a -> SMaybe (Resources Word64)) -> [a] -> Resources (CDF I Word64) -computeResCDF centiles projs xs = - compDist <$> projs - where - compDist :: (a -> Maybe Word64) -> CDF I Word64 - compDist proj = cdf centiles - (catMaybes . toList $ proj <$> xs) - -type ResContinuity a = Resources (a -> Maybe a) +computeResCDF centiles proj xs = + cdf centiles + <$> traverse identity (proj `mapSMaybe` xs) -discardObsoleteValues :: ResContinuity a -discardObsoleteValues = +zeroObsoleteValues :: Num a => Resources (a -> a) +zeroObsoleteValues = Resources - { rCentiCpu = Just - , rCentiGC = Just - , rCentiMut = Just - , rGcsMajor = const Nothing - , rGcsMinor = const Nothing - , rRSS = Just - , rHeap = Just - , rLive = Just - , rAlloc = const Nothing - , rCentiBlkIO = Just - , rThreads = Just + { rCentiCpu = identity + , rCentiGC = identity + , rCentiMut = identity + , rGcsMajor = const 0 + , rGcsMinor = const 0 + , rRSS = identity + , rHeap = identity + , rLive = identity + , rAlloc = const 0 + , rCentiBlkIO = identity + , rNetRd = const 0 + , rNetWr = const 0 + , rFsRd = const 0 + , rFsWr = const 0 + , rThreads = identity } diff --git a/bench/locli/src/Cardano/Util.hs b/bench/locli/src/Cardano/Util.hs index e1d9c194fb2..7afe8deaec2 100644 --- a/bench/locli/src/Cardano/Util.hs +++ b/bench/locli/src/Cardano/Util.hs @@ -1,7 +1,18 @@ +{-# LANGUAGE DeriveAnyClass #-} +{-# LANGUAGE DeriveFunctor #-} {-# OPTIONS_GHC -Wno-orphans #-} +{-# OPTIONS_GHC -Wno-incomplete-patterns #-} +{- HLINT ignore "Use list literal pattern" -} module Cardano.Util ( module Prelude - , module Cardano.Util + , module Util + , module Data.Aeson + , module Data.IntervalMap.FingerTree + , module Data.SOP.Strict + , module Data.List.Split + , module Data.Time.Clock + , module Data.Time.Clock.POSIX + , module Data.Tuple.Extra , module Cardano.Ledger.BaseTypes , module Control.Arrow , module Control.Applicative @@ -9,22 +20,30 @@ module Cardano.Util , module Control.Monad.Trans.Except.Extra , module Ouroboros.Consensus.Util.Time , module Text.Printf + , module Cardano.Util ) where -import Prelude (String, error) +import Prelude (String, error, head, last) import Cardano.Prelude +import Util hiding (fst3, snd3, third3, uncurry3, firstM, secondM) +import Data.Aeson (FromJSON (..), ToJSON (..), Object, Value (..), (.:), (.:?), withObject, object) +import Data.Aeson 
qualified as AE
+import Data.Tuple.Extra hiding ((&&&), (***))
 import Control.Arrow ((&&&), (***))
 import Control.Applicative ((<|>))
 import Control.Concurrent.Async (forConcurrently, forConcurrently_, mapConcurrently, mapConcurrently_)
 import Control.DeepSeq qualified as DS
 import Control.Monad.Trans.Except.Extra (firstExceptT, newExceptT)
-import Data.Aeson (FromJSON, ToJSON, encode, eitherDecode)
 import Data.ByteString.Lazy.Char8 qualified as LBS
+import Data.IntervalMap.FingerTree (Interval (..), low, high, point)
 import Data.List (span)
+import Data.List.Split (chunksOf)
 import Data.Text qualified as T
-import Data.Text.Short (fromText)
+import Data.SOP.Strict
+import Data.Time.Clock (NominalDiffTime, UTCTime (..), diffUTCTime)
+import Data.Time.Clock.POSIX
 import Data.Vector (Vector)
 import Data.Vector qualified as Vec
 import GHC.Base (build)
@@ -34,10 +53,32 @@ import System.FilePath qualified as F
 import Ouroboros.Consensus.Util.Time
-import Cardano.Analysis.Ground
 import Cardano.Ledger.BaseTypes (StrictMaybe (..), fromSMaybe)
+-- * Data.IntervalMap.FingerTree.Interval
+--
+deriving instance FromJSON a => (FromJSON (Interval a))
+deriving instance Functor Interval
+deriving instance ToJSON a => (ToJSON (Interval a))
+deriving instance NFData a => (NFData (Interval a))
+
+unionIntv, intersectIntv :: Ord a => [Interval a] -> Interval a
+unionIntv xs = Interval (low lo) (high hi)
+  where lo = minimumBy (compare `on` low) xs
+        hi = maximumBy (compare `on` high) xs
+intersectIntv xs = Interval (low lo) (high hi)
+  where lo = maximumBy (compare `on` low) xs
+        hi = minimumBy (compare `on` high) xs
+
+renderIntv :: (a -> Text) -> Interval a -> Text
+renderIntv f (Interval lo hi) = f lo <> "-" <> f hi
+
+intvDurationSec :: Interval UTCTime -> NominalDiffTime
+intvDurationSec = uncurry diffUTCTime . (high &&& low)
+
+-- * SMaybe
+--
 type SMaybe a = StrictMaybe a
 instance Alternative StrictMaybe where
@@ -50,6 +91,31 @@ smaybe :: b -> (a -> b) -> StrictMaybe a -> b
 smaybe x _ SNothing = x
 smaybe _ f (SJust x) = f x
+isSJust :: SMaybe a -> Bool
+isSJust = \case
+  SNothing -> False
+  SJust{} -> True
+
+isSNothing :: SMaybe a -> Bool
+isSNothing = \case
+  SNothing -> True
+  SJust{} -> False
+
+{-# INLINE strictMaybe #-}
+strictMaybe :: Maybe a -> SMaybe a
+strictMaybe = \case
+  Nothing -> SNothing
+  Just a -> SJust a
+
+{-# INLINE lazySMaybe #-}
+lazySMaybe :: SMaybe a -> Maybe a
+lazySMaybe = \case
+  SNothing -> Nothing
+  SJust a -> Just a
+
+catSMaybes :: [SMaybe a] -> [a]
+catSMaybes xs = [x | SJust x <- xs]
+
 mapSMaybe :: (a -> StrictMaybe b) -> [a] -> [b]
 mapSMaybe _ [] = []
 mapSMaybe f (x:xs) =
@@ -75,18 +141,17 @@ mapConcurrentlyPure f =
   mapConcurrently (evaluate . DS.force . f)
-mapAndUnzip :: (a -> (b, c)) -> [a] -> ([b], [c])
-mapAndUnzip _ [] = ([], [])
-mapAndUnzip f (x:xs)
-  = let (r1, r2) = f x
-        (rs1, rs2) = mapAndUnzip f xs
-    in
-    (r1:rs1, r2:rs2)
 mapHead :: (a -> a) -> [a] -> [a]
 mapHead f (x:xs) = f x:xs
 mapHead _ [] = error "mapHead: partial"
+mapLast :: (a -> a) -> [a] -> [a]
+mapLast _ [] = error "mapLast: partial"
+mapLast f xs' = reverse $ go [] xs'
+  where go acc = \case
+          x:[] -> f x:acc
+          x:xs -> go ( x:acc) xs
 redistribute :: (a, (b, c)) -> ((a, b), (a, c))
 redistribute (a, (b, c)) = ((a, b), (a, c))
@@ -104,78 +169,17 @@ data F
   | forall a. ToJSON a => J a
 progress :: MonadIO m => String -> F -> m ()
-progress key = putStrLn . T.pack . 
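-- The Interval re-export and helpers above replace the old (lo, hi) pairs
-- used for CDF ranges and data domains. For example:
unionEx, intersectEx :: Interval Int
unionEx     = unionIntv     [Interval 1 5, Interval 3 9]   -- Interval 1 9
intersectEx = intersectIntv [Interval 1 5, Interval 3 9]   -- Interval 3 5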
\case - R x -> printf "{ \"%s\": %s }" key x - Q x -> printf "{ \"%s\": \"%s\" }" key x - L xs -> printf "{ \"%s\": \"%s\" }" key (Cardano.Prelude.intercalate "\", \"" xs) - J x -> printf "{ \"%s\": %s }" key (LBS.unpack $ encode x) - --- /path/to/logs-HOSTNAME.some.ext -> HOSTNAME -hostFromLogfilename :: JsonLogfile -> Host -hostFromLogfilename (JsonLogfile f) = - Host $ fromText . stripPrefixHard "logs-" . T.pack . F.dropExtensions . F.takeFileName $ f - where - stripPrefixHard :: Text -> Text -> Text - stripPrefixHard p s = fromMaybe s $ T.stripPrefix p s - -hostDeduction :: HostDeduction -> (JsonLogfile -> Host) -hostDeduction = \case - HostFromLogfilename -> hostFromLogfilename +progress key = putStr . T.pack . \case + R x -> printf "{ \"%s\": %s }\n" key x + Q x -> printf "{ \"%s\": \"%s\" }\n" key x + L xs -> printf "{ \"%s\": \"%s\" }\n" key (Cardano.Prelude.intercalate "\", \"" xs) + J x -> printf "{ \"%s\": %s }\n" key (LBS.unpack $ AE.encode x) -- Dumping to files -- replaceExtension :: FilePath -> String -> FilePath replaceExtension f new = F.dropExtension f <> "." <> new -dumpObject :: ToJSON a => String -> a -> JsonOutputFile a -> ExceptT Text IO () -dumpObject ident x (JsonOutputFile f) = liftIO $ do - progress ident (Q f) - withFile f WriteMode $ \hnd -> LBS.hPutStrLn hnd $ encode x - -dumpObjects :: ToJSON a => String -> [a] -> JsonOutputFile [a] -> ExceptT Text IO () -dumpObjects ident xs (JsonOutputFile f) = liftIO $ do - progress ident (Q f) - withFile f WriteMode $ \hnd -> do - forM_ xs $ LBS.hPutStrLn hnd . encode - -dumpAssociatedObjects :: ToJSON a => String -> [(JsonLogfile, a)] -> ExceptT Text IO () -dumpAssociatedObjects ident xs = liftIO $ - flip mapConcurrently_ xs $ - \(JsonLogfile f, x) -> do - progress ident (Q f) - withFile (replaceExtension f $ ident <> ".json") WriteMode $ \hnd -> - LBS.hPutStrLn hnd $ encode x - -readAssociatedObjects :: forall a. - FromJSON a => String -> [JsonLogfile] -> ExceptT Text IO [(JsonLogfile, a)] -readAssociatedObjects ident fs = firstExceptT T.pack . newExceptT . fmap sequence . fmap (fmap sequence) $ - flip mapConcurrently fs $ - \jf@(JsonLogfile f) -> do - x <- eitherDecode @a <$> LBS.readFile (replaceExtension f $ ident <> ".json") - progress ident (Q f) - pure (jf, x) - -dumpAssociatedObjectStreams :: ToJSON a => String -> [(JsonLogfile, [a])] -> ExceptT Text IO () -dumpAssociatedObjectStreams ident xss = liftIO $ - flip mapConcurrently_ xss $ - \(JsonLogfile f, xs) -> do - progress ident (Q f) - withFile (replaceExtension f $ ident <> ".json") WriteMode $ \hnd -> do - forM_ xs $ LBS.hPutStrLn hnd . encode - -dumpText :: String -> [Text] -> TextOutputFile -> ExceptT Text IO () -dumpText ident xs (TextOutputFile f) = liftIO $ do - progress ident (Q f) - withFile f WriteMode $ \hnd -> do - forM_ xs $ hPutStrLn hnd - -dumpAssociatedTextStreams :: String -> [(JsonLogfile, [Text])] -> ExceptT Text IO () -dumpAssociatedTextStreams ident xss = liftIO $ - flip mapConcurrently_ xss $ - \(JsonLogfile f, xs) -> do - progress ident (Q f) - withFile (replaceExtension f $ ident <> ".txt") WriteMode $ \hnd -> do - forM_ xs $ hPutStrLn hnd spans :: forall a. (a -> Bool) -> [a] -> [Vector a] spans f = go [] @@ -187,3 +191,23 @@ spans f = go [] ([], rest) -> go acc rest (ac, rest) -> go (Vec.fromList ac:acc) rest + +{-# INLINE norm2Tuple #-} +norm2Tuple :: ((a, b), c) -> (a, (b, c)) +norm2Tuple ((a, b), c) = (a, (b, c)) + +{-# INLINE showText #-} +showText :: Show a => a -> Text +showText = T.pack . 
show + +roundUTCTimeSec, roundUTCTimeDay :: UTCTime -> UTCTime +roundUTCTimeSec = + posixSecondsToUTCTime . fromIntegral @Integer . truncate . utcTimeToPOSIXSeconds +roundUTCTimeDay (UTCTime day _) = UTCTime day 0 + +utcTimeDeltaSec :: UTCTime -> UTCTime -> Int +utcTimeDeltaSec x y = diffUTCTime x y & round + +foldEmpty :: r -> ([a] -> r) -> [a] -> r +foldEmpty r _ [] = r +foldEmpty _ f l = f l diff --git a/bench/locli/src/Data/CDF.hs b/bench/locli/src/Data/CDF.hs index 12d1f3186f9..748fedfd365 100644 --- a/bench/locli/src/Data/CDF.hs +++ b/bench/locli/src/Data/CDF.hs @@ -2,6 +2,7 @@ {-# LANGUAGE DeriveFunctor #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE GADTs #-} +{-# LANGUAGE GeneralisedNewtypeDeriving #-} {-# LANGUAGE FlexibleContexts #-} {-# LANGUAGE FlexibleInstances #-} {-# LANGUAGE ImportQualifiedPost #-} @@ -50,12 +51,10 @@ module Data.CDF , module Data.SOP.Strict ) where -import Prelude ((!!), head, show) +import Prelude ((!!), show) import Cardano.Prelude hiding (head, show) -import Data.Aeson (FromJSON(..), ToJSON(..)) import Data.SOP.Strict -import Data.Time.Clock (NominalDiffTime) import Data.Vector qualified as Vec import Statistics.Sample qualified as Stat @@ -65,8 +64,8 @@ import Cardano.Util -- | Centile specifier: a fractional in range of [0; 1]. newtype Centile = Centile { unCentile :: Double } - deriving (Eq, Generic, FromJSON, ToJSON, Show) - deriving anyclass NFData + deriving (Eq, Show) + deriving newtype (FromJSON, ToJSON, NFData) renderCentile :: Int -> Centile -> String renderCentile width = \case @@ -155,18 +154,21 @@ weightedAverage xs = data CDF p a = CDF { cdfSize :: Int - , cdfAverage :: p a + , cdfAverage :: p Double , cdfStddev :: Double - , cdfRange :: (a, a) + , cdfRange :: Interval a , cdfSamples :: [(Centile, p a)] } - deriving (Eq, Functor, Generic, Show) - deriving anyclass NFData + deriving (Functor, Generic) -instance (FromJSON (p a), FromJSON a) => FromJSON (CDF p a) -instance ( ToJSON (p a), ToJSON a) => ToJSON (CDF p a) +deriving instance (Eq a, Eq (p a), Eq (p Double)) => Eq (CDF p a) +deriving instance (Show a, Show (p a), Show (p Double)) => Show (CDF p a) +deriving instance (NFData a, NFData (p a), NFData (p Double)) => NFData (CDF p a) -cdfAverageVal :: (KnownCDF p, Divisible a) => CDF p a -> Double +instance (FromJSON (p a), FromJSON (p Double), FromJSON a) => FromJSON (CDF p a) +instance ( ToJSON (p a), ToJSON (p Double), ToJSON a) => ToJSON (CDF p a) + +cdfAverageVal :: (KnownCDF p) => CDF p a -> Double cdfAverageVal = cdfArity (toDouble . unI . cdfAverage) @@ -203,7 +205,7 @@ zeroCDF = { cdfSize = 0 , cdfAverage = liftCDFVal 0 cdfIx , cdfStddev = 0 - , cdfRange = (0, 0) + , cdfRange = Interval 0 0 , cdfSamples = mempty } @@ -214,7 +216,7 @@ cdf centiles (sort -> sorted) = { cdfSize = size , cdfAverage = I . fromDouble $ Stat.mean doubleVec , cdfStddev = Stat.stdDev doubleVec - , cdfRange = (mini, maxi) + , cdfRange = Interval mini maxi , cdfSamples = centiles <&> \spec -> @@ -247,13 +249,13 @@ type family CDFProj a where CDFProj (CDF (CDF I) a) = CDF I a -- indexCDF i d = snd $ cdfSamples (trace (printf "i=%d of %d" i (length $ cdfSamples d) :: String) d) !! i -liftCDFVal :: forall a p. a -> CDFIx p -> p a +liftCDFVal :: forall a p. Real a => a -> CDFIx p -> p a liftCDFVal x = \case CDFI -> I x CDF2 -> CDF { cdfSize = 1 - , cdfAverage = I x + , cdfAverage = I $ toDouble x , cdfStddev = 0 - , cdfRange = (x, x) + , cdfRange = point x , cdfSamples = [] , .. 
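+        -- A sketch of liftCDFVal's intent (hypothetical value; CDFI wraps
+        -- the scalar directly, while CDF2 builds a singleton CDF whose
+        -- range is the degenerate interval 'point x'):
+        --
+        -- >>> liftCDFVal (1 :: Int) CDFI
+        -- I 1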
} @@ -263,12 +265,12 @@ unliftCDFVal CDF2 CDF{cdfAverage=I cdfAverage} = (1 :: a) `divide` (1 / toDouble unliftCDFValExtra :: forall a p. Divisible a => CDFIx p -> p a -> [a] unliftCDFValExtra CDFI (I x) = [x] -unliftCDFValExtra i@CDF2 c@CDF{cdfRange=(mi, ma), ..} = [ mean - , mi - , ma - , mean - stddev - , mean + stddev - ] +unliftCDFValExtra i@CDF2 c@CDF{cdfRange=Interval mi ma, ..} = [ mean + , mi + , ma + , mean - stddev + , mean + stddev + ] where mean = unliftCDFVal i c stddev = (1 :: a) `divide` (1 / cdfStddev) @@ -300,7 +302,7 @@ data Combine p a = Combine { cWeightedAverages :: !([(Int, Double)] -> Double) , cStddevs :: !([Double] -> Double) - , cRanges :: !([(a, a)] -> (a, a)) + , cRanges :: !([Interval a] -> Interval a) , cWeightedSamples :: !([(Int, a)] -> a) , cCDF :: !([p a] -> Either CDFError (CDF I a)) } @@ -309,14 +311,11 @@ stdCombine1 :: forall a. (Divisible a) => [Centile] -> Combine I a stdCombine1 cs = Combine { cWeightedAverages = weightedAverage - , cRanges = outerRange + , cRanges = unionIntv , cStddevs = maximum -- it's an approximation , cWeightedSamples = weightedAverage , cCDF = Right . cdf cs . fmap unI } - where - outerRange xs = (,) (minimum $ fst <$> xs) - (maximum $ snd <$> xs) stdCombine2 :: Divisible a => [Centile] -> Combine (CDF I) a stdCombine2 cs = @@ -327,8 +326,7 @@ stdCombine2 cs = } -- | Collapse basic CDFs. -collapseCDFs :: forall a. Divisible a - => Combine I a -> [CDF I a] -> Either CDFError (CDF I a) +collapseCDFs :: forall a. Combine I a -> [CDF I a] -> Either CDFError (CDF I a) collapseCDFs _ [] = Left CDFEmptyDataset collapseCDFs Combine{..} xs = do unless (all (head lengths ==) lengths) $ @@ -363,7 +361,7 @@ collapseCDFs Combine{..} xs = do -- | Polymorphic, but practically speaking, intended for either: -- 1. given a ([I] -> CDF I) function, and a list of (CDF I), produce a CDF (CDF I), or -- 2. given a ([CDF I] -> CDF I) function, and a list of (CDF (CDF I)), produce a CDF (CDF I) -cdf2OfCDFs :: forall a p. (Divisible a, KnownCDF p) +cdf2OfCDFs :: forall a p. (KnownCDF p) => Combine p a -> [CDF p a] -> Either CDFError (CDF (CDF I) a) cdf2OfCDFs _ [] = Left CDFEmptyDataset cdf2OfCDFs Combine{..} xs = do @@ -384,8 +382,8 @@ cdf2OfCDFs Combine{..} xs = do } where nCDFs = length xs - averages :: [a] - averages = xs <&> unI . cdfAverage . cdfArity identity cdfAverage + averages :: [Double] + averages = xs <&> unI . cdfArity cdfAverage (cdfAverage . 
cdfAverage) sizes = xs <&> cdfSize samples = xs <&> cdfSamples diff --git a/bench/locli/src/Data/DataDomain.hs b/bench/locli/src/Data/DataDomain.hs index 8331f5770e3..de623be288c 100644 --- a/bench/locli/src/Data/DataDomain.hs +++ b/bench/locli/src/Data/DataDomain.hs @@ -1,48 +1,56 @@ {-# LANGUAGE DeriveAnyClass #-} -module Data.DataDomain (module Data.DataDomain) where +{-# OPTIONS_GHC -Wno-orphans #-} +module Data.DataDomain + ( module Data.DataDomain + ) +where import Cardano.Prelude -import Data.Aeson (FromJSON, ToJSON) +import Cardano.Util + + +-- * DataDomain +-- data DataDomain a = DataDomain - { ddRawFirst :: !a - , ddRawLast :: !a - , ddFilteredFirst :: !(Maybe a) - , ddFilteredLast :: !(Maybe a) - , ddRawCount :: Int - , ddFilteredCount :: Int + { ddRaw :: !(Interval a) + , ddFiltered :: !(Maybe (Interval a)) + , ddRawCount :: !Int + , ddFilteredCount :: !Int } deriving (Generic, Show, ToJSON, FromJSON) deriving anyclass NFData -- Perhaps: Plutus.V1.Ledger.Slot.SlotRange = Interval Slot +dataDomainFilterRatio :: DataDomain a -> Double +dataDomainFilterRatio DataDomain{..} = + fromIntegral ddFilteredCount / fromIntegral ddRawCount + mkDataDomainInj :: a -> a -> (a -> Int) -> DataDomain a -mkDataDomainInj f l measure = DataDomain f l (Just f) (Just l) delta delta - where delta = measure l - measure f +mkDataDomainInj f l measure = + DataDomain (Interval f l) (Just (Interval f l)) delta delta + where delta = measure l - measure f mkDataDomain :: a -> a -> a -> a -> (a -> Int) -> DataDomain a mkDataDomain f l f' l' measure = - DataDomain f l (Just f') (Just l') (measure l - measure f) (measure l' - measure f') + DataDomain (Interval f l) (Just (Interval f' l')) + (measure l - measure f) (measure l' - measure f') -dataDomainsMergeInner :: Ord a => [DataDomain a] -> DataDomain a -dataDomainsMergeInner xs = +unionDataDomains :: Ord a => [DataDomain a] -> DataDomain a +unionDataDomains xs = DataDomain - { ddRawFirst = maximum $ xs <&> ddRawFirst - , ddRawLast = minimum $ xs <&> ddRawLast - , ddFilteredFirst = bool (Just . maximum $ xs & mapMaybe ddFilteredFirst) Nothing (null xs) - , ddFilteredLast = bool (Just . maximum $ xs & mapMaybe ddFilteredLast) Nothing (null xs) - , ddRawCount = sum $ xs <&> ddRawCount - , ddFilteredCount = sum $ xs <&> ddFilteredCount + { ddRaw = unionIntv $ xs <&> ddRaw + , ddFiltered = foldEmpty Nothing (Just . unionIntv) $ ddFiltered `mapMaybe` xs + , ddRawCount = sum $ xs <&> ddRawCount + , ddFilteredCount = sum $ xs <&> ddFilteredCount } -dataDomainsMergeOuter :: Ord a => [DataDomain a] -> DataDomain a -dataDomainsMergeOuter xs = +intersectDataDomains :: Ord a => [DataDomain a] -> DataDomain a +intersectDataDomains xs = DataDomain - { ddRawFirst = minimum $ xs <&> ddRawFirst - , ddRawLast = maximum $ xs <&> ddRawLast - , ddFilteredFirst = minimum $ xs <&> ddFilteredFirst - , ddFilteredLast = maximum $ xs <&> ddFilteredLast - , ddRawCount = sum $ xs <&> ddRawCount - , ddFilteredCount = sum $ xs <&> ddFilteredCount + { ddRaw = intersectIntv $ xs <&> ddRaw + , ddFiltered = foldEmpty Nothing (Just . 
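+  -- A sketch of the intended accounting (hypothetical domain): a raw span
+  -- 0..10 narrowed by filtering to 2..8 keeps 6 of its 10 units, so:
+  --
+  -- >>> dataDomainFilterRatio (mkDataDomain 0 10 2 8 fromEnum)
+  -- 0.6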
intersectIntv) $ ddFiltered `mapMaybe` xs + , ddRawCount = sum $ xs <&> ddRawCount + , ddFilteredCount = sum $ xs <&> ddFilteredCount } diff --git a/bench/locli/test/Test/Analysis/CDF.hs b/bench/locli/test/Test/Analysis/CDF.hs index 4615e7009ca..f265a1fa0b8 100644 --- a/bench/locli/test/Test/Analysis/CDF.hs +++ b/bench/locli/test/Test/Analysis/CDF.hs @@ -2,7 +2,6 @@ {-# OPTIONS_GHC -Wno-missing-signatures #-} module Test.Analysis.CDF where -import Prelude (head) import Cardano.Prelude hiding (handle, head) import Hedgehog @@ -68,7 +67,7 @@ prop_CDF_I_2x2 = property $ cdfI_2x2 === { cdfSize = 2 , cdfAverage = I 0.5 , cdfStddev = 0.7071067811865476 - , cdfRange = (0.0,1.0) + , cdfRange = Interval 0.0 1.0 , cdfSamples = [(Centile 0.25,I 0.0) ,(Centile 0.75,I 1.0)]} @@ -81,20 +80,20 @@ prop_CDF_CDF_I_3x3 = property $ cdf2_3x3 === { cdfSize = 3 , cdfAverage = I 1.0 , cdfStddev = 0.0 - , cdfRange = (1.0,1.0) + , cdfRange = Interval 1.0 1.0 , cdfSamples = [(Centile 0.16666666666666666, I 1.0) ,(Centile 0.5, I 1.0) ,(Centile 0.8333333333333333, I 1.0)]} , cdfStddev = 1.0 - , cdfRange = (0.0,2.0) + , cdfRange = Interval 0.0 2.0 , cdfSamples = [(Centile 0.16666666666666666 ,CDF { cdfSize = 3 , cdfAverage = I 0.0 , cdfStddev = 0.0 - , cdfRange = (0.0,0.0) + , cdfRange = Interval 0.0 0.0 , cdfSamples = [(Centile 0.16666666666666666, I 0.0) ,(Centile 0.5, I 0.0) @@ -104,7 +103,7 @@ prop_CDF_CDF_I_3x3 = property $ cdf2_3x3 === { cdfSize = 3 , cdfAverage = I 1.0 , cdfStddev = 0.0 - , cdfRange = (1.0,1.0) + , cdfRange = Interval 1.0 1.0 , cdfSamples = [(Centile 0.16666666666666666, I 1.0) ,(Centile 0.5, I 1.0) @@ -114,7 +113,7 @@ prop_CDF_CDF_I_3x3 = property $ cdf2_3x3 === { cdfSize = 3 , cdfAverage = I 2.0 , cdfStddev = 0.0 - , cdfRange = (2.0,2.0) + , cdfRange = Interval 2.0 2.0 , cdfSamples = [(Centile 0.16666666666666666, I 2.0) ,(Centile 0.5, I 2.0) @@ -128,20 +127,20 @@ prop_CDF_CDF_I_3x3_shifted = property $ cdf2_3x3sh === { cdfSize = 3 , cdfAverage = I 1.0 , cdfStddev = 1.0 - , cdfRange = (0.0,2.0) + , cdfRange = Interval 0.0 2.0 , cdfSamples = [(Centile 0.16666666666666666, I 0.0) ,(Centile 0.5, I 1.0) ,(Centile 0.8333333333333333, I 2.0)]} , cdfStddev = 1.0 - , cdfRange = (-1.0,3.0) + , cdfRange = Interval (-1.0) 3.0 , cdfSamples = [(Centile 0.16666666666666666 ,CDF { cdfSize = 3 , cdfAverage = I 0.0 , cdfStddev = 1.0 - , cdfRange = (-1.0,1.0) + , cdfRange = Interval (-1.0) 1.0 , cdfSamples = [(Centile 0.16666666666666666, I (-1.0)) ,(Centile 0.5, I 0.0) @@ -151,7 +150,7 @@ prop_CDF_CDF_I_3x3_shifted = property $ cdf2_3x3sh === { cdfSize = 3 , cdfAverage = I 1.0 , cdfStddev = 1.0 - , cdfRange = (0.0,2.0) + , cdfRange = Interval 0.0 2.0 , cdfSamples = [(Centile 0.16666666666666666, I 0.0) ,(Centile 0.5, I 1.0) @@ -161,7 +160,7 @@ prop_CDF_CDF_I_3x3_shifted = property $ cdf2_3x3sh === { cdfSize = 3 , cdfAverage = I 2.0 , cdfStddev = 1.0 - , cdfRange = (1.0,3.0) + , cdfRange = Interval 1.0 3.0 , cdfSamples = [(Centile 0.16666666666666666, I 1.0) ,(Centile 0.5, I 2.0) @@ -175,20 +174,20 @@ prop_CDF_CDF_I_3x3x3_collapsed_shifted = property $ cdf2_3x3x3sh === { cdfSize = 3 , cdfAverage = I 1.0 , cdfStddev = 2.0 - , cdfRange = (-1.0,3.0) + , cdfRange = Interval (-1.0) 3.0 , cdfSamples = [(Centile 0.16666666666666666, I (-1.0)) ,(Centile 0.5, I 1.0) ,(Centile 0.8333333333333333, I 3.0)]} , cdfStddev = 1.0 - , cdfRange = (-3.0,5.0) + , cdfRange = Interval (-3.0) 5.0 , cdfSamples = [(Centile 0.16666666666666666 ,CDF { cdfSize = 9 , cdfAverage = I 0.0 , cdfStddev = 1.0 - , cdfRange = (-3.0,3.0) + , 
cdfRange = Interval (-3.0) 3.0 , cdfSamples = [(Centile 0.16666666666666666, I (-1.0)) ,(Centile 0.5, I 0.0) @@ -198,7 +197,7 @@ prop_CDF_CDF_I_3x3x3_collapsed_shifted = property $ cdf2_3x3x3sh === { cdfSize = 9 , cdfAverage = I 1.0 , cdfStddev = 1.0 - , cdfRange = (-2.0,4.0) + , cdfRange = Interval (-2.0) 4.0 , cdfSamples = [(Centile 0.16666666666666666, I 0.0) ,(Centile 0.5, I 1.0) @@ -208,8 +207,8 @@ prop_CDF_CDF_I_3x3x3_collapsed_shifted = property $ cdf2_3x3x3sh === { cdfSize = 9 , cdfAverage = I 2.0 , cdfStddev = 1.0 - , cdfRange = (-1.0,5.0) - , cdfSamples = + , cdfRange = Interval (-1.0) 5.0 + , cdfSamples = [(Centile 0.16666666666666666, I 1.0) ,(Centile 0.5, I 2.0) ,(Centile 0.8333333333333333, I 3.0)]})]} diff --git a/bench/locli/test/Test/Unlog/Org.hs b/bench/locli/test/Test/Unlog/Org.hs index e77da017821..562b6724b55 100644 --- a/bench/locli/test/Test/Unlog/Org.hs +++ b/bench/locli/test/Test/Unlog/Org.hs @@ -29,6 +29,7 @@ prop_Org_render_simple_table = property $ render , tSummaryHeaders = [] , tSummaryValues = [] , tFormula = [] + , tConstants = [] } === [ "|-------+--------+------+------------|" @@ -55,6 +56,7 @@ prop_Org_render_summarised_simple_table = property $ render , ["1", "2"] ] , tFormula = [] + , tConstants = [] } === [ "|------------+---------+---------+------------|" @@ -85,6 +87,7 @@ prop_Org_render_extended_table = property $ render , tSummaryHeaders = [] , tSummaryValues = [] , tFormula = [] + , tConstants = [] } ] } @@ -119,6 +122,12 @@ prop_Org_render_extended_summarised_table = property $ render , ["1", "2"] ] , tFormula = [] + , tConstants = [ ("count", "0") + , ("all", "1") + , ("the", "2") + , ("things", "3") + , ("properly", "4") + ] } ] } @@ -132,8 +141,13 @@ prop_Org_render_extended_summarised_table = property $ render , "| # | two | a | 11.0 | a |" , "| # | three | ...... | | 111.0 |" , "|---+------------+---------+---------+------------|" - , "| | aaaveragee | 0000000 | | 1 |" - , "| | q | | 0000000 | 2 |" + , "| # | aaaveragee | 0000000 | | 1 |" + , "| # | q | | 0000000 | 2 |" + , "|---+------------+---------+---------+------------|" + , "| _ | count | all | the | things |" + , "| # | 0 | 1 | 2 | 3 |" + , "| _ | properly | | | |" + , "| # | 4 | | | |" ] tests :: IO Bool diff --git a/bench/workbench.gnuplot b/bench/workbench.gnuplot index bc78e741ecd..fca02819725 100644 --- a/bench/workbench.gnuplot +++ b/bench/workbench.gnuplot @@ -13,51 +13,51 @@ rundir = "../" cdfI_2(cdf, title, run1ti, run1, run2ti, run2) = \ "set title '".title."';". \ - "plot '".rundir."/".run1."/analysis/".cdf.".cdf' using 1:2 t '".run1ti."', '". \ - rundir."/".run2."/analysis/".cdf.".cdf' using 1:2 t '".run2ti."' ". \ + "plot '".rundir."/".run1."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run1ti."', '". \ + rundir."/".run2."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run2ti."' ". \ "" cdfI_3(cdf, title, run1ti, run1, run2ti, run2, run3ti, run3) = \ "set title '".title."';". \ - "plot '".rundir."/".run1."/analysis/".cdf.".cdf' using 1:2 t '".run1ti."', '". \ - rundir."/".run2."/analysis/".cdf.".cdf' using 1:2 t '".run2ti."', '". \ - rundir."/".run3."/analysis/".cdf.".cdf' using 1:2 t '".run3ti."' ". \ + "plot '".rundir."/".run1."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run1ti."', '". \ + rundir."/".run2."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run2ti."', '". \ + rundir."/".run3."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run3ti."' ". \ "" cdfI_4(cdf, title, run1ti, run1, run2ti, run2, run3ti, run3, run4ti, run4) = \ "set title '".title."';". 
\ - "plot '".rundir."/".run1."/analysis/".cdf.".cdf' using 1:2 t '".run1ti."', '". \ - rundir."/".run2."/analysis/".cdf.".cdf' using 1:2 t '".run2ti."', '". \ - rundir."/".run3."/analysis/".cdf.".cdf' using 1:2 t '".run3ti."', '". \ - rundir."/".run4."/analysis/".cdf.".cdf' using 1:2 t '".run4ti."' ". \ + "plot '".rundir."/".run1."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run1ti."', '". \ + rundir."/".run2."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run2ti."', '". \ + rundir."/".run3."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run3ti."', '". \ + rundir."/".run4."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run4ti."' ". \ "" cdfI_5(cdf, title, run1ti, run1, run2ti, run2, run3ti, run3, run4ti, run4, run5ti, run5) = \ "set title '".title."';". \ - "plot '".rundir."/".run1."/analysis/".cdf.".cdf' using 1:2 t '".run1ti."', '". \ - rundir."/".run2."/analysis/".cdf.".cdf' using 1:2 t '".run2ti."', '". \ - rundir."/".run3."/analysis/".cdf.".cdf' using 1:2 t '".run3ti."', '". \ - rundir."/".run4."/analysis/".cdf.".cdf' using 1:2 t '".run4ti."', '". \ - rundir."/".run5."/analysis/".cdf.".cdf' using 1:2 t '".run5ti."' ". \ + "plot '".rundir."/".run1."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run1ti."', '". \ + rundir."/".run2."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run2ti."', '". \ + rundir."/".run3."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run3ti."', '". \ + rundir."/".run4."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run4ti."', '". \ + rundir."/".run5."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run5ti."' ". \ "" cdfI_6(cdf, title, run1ti, run1, run2ti, run2, run3ti, run3, run4ti, run4, run5ti, run5, run6ti, run6) = \ "set title '".title."';". \ - "plot '".rundir."/".run1."/analysis/".cdf.".cdf' using 1:2 t '".run1ti."', '". \ - rundir."/".run2."/analysis/".cdf.".cdf' using 1:2 t '".run2ti."', '". \ - rundir."/".run3."/analysis/".cdf.".cdf' using 1:2 t '".run3ti."', '". \ - rundir."/".run4."/analysis/".cdf.".cdf' using 1:2 t '".run4ti."', '". \ - rundir."/".run5."/analysis/".cdf.".cdf' using 1:2 t '".run5ti."', '". \ - rundir."/".run6."/analysis/".cdf.".cdf' using 1:2 t '".run6ti."' ". \ + "plot '".rundir."/".run1."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run1ti."', '". \ + rundir."/".run2."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run2ti."', '". \ + rundir."/".run3."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run3ti."', '". \ + rundir."/".run4."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run4ti."', '". \ + rundir."/".run5."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run5ti."', '". \ + rundir."/".run6."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run6ti."' ". \ "" cdfI_7(cdf, title, run1ti, run1, run2ti, run2, run3ti, run3, run4ti, run4, run5ti, run5, run6ti, run6, run7ti, run7) = \ "set title '".title."';". \ - "plot '".rundir."/".run1."/analysis/".cdf.".cdf' using 1:2 t '".run1ti."', '". \ - rundir."/".run2."/analysis/".cdf.".cdf' using 1:2 t '".run2ti."', '". \ - rundir."/".run3."/analysis/".cdf.".cdf' using 1:2 t '".run3ti."', '". \ - rundir."/".run4."/analysis/".cdf.".cdf' using 1:2 t '".run4ti."', '". \ - rundir."/".run5."/analysis/".cdf.".cdf' using 1:2 t '".run5ti."', '". \ - rundir."/".run6."/analysis/".cdf.".cdf' using 1:2 t '".run6ti."', '". \ - rundir."/".run7."/analysis/".cdf.".cdf' using 1:2 t '".run7ti."' ". \ + "plot '".rundir."/".run1."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run1ti."', '". \ + rundir."/".run2."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run2ti."', '". \ + rundir."/".run3."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run3ti."', '". 
\ + rundir."/".run4."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run4ti."', '". \ + rundir."/".run5."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run5ti."', '". \ + rundir."/".run6."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run6ti."', '". \ + rundir."/".run7."/analysis/cdf/".cdf.".cdf' using 1:2 t '".run7ti."' ". \ "" diff --git a/cardano-tracer/src/Cardano/Tracer/Handlers/RTView/UI/JS/.gitignore b/cardano-tracer/src/Cardano/Tracer/Handlers/RTView/UI/JS/.gitignore new file mode 100644 index 00000000000..f5593add106 --- /dev/null +++ b/cardano-tracer/src/Cardano/Tracer/Handlers/RTView/UI/JS/.gitignore @@ -0,0 +1 @@ +ChartJS.hs \ No newline at end of file diff --git a/nix/nixos/tx-generator-service.nix b/nix/nixos/tx-generator-service.nix index 1d4d697e472..7f36c0ad489 100644 --- a/nix/nixos/tx-generator-service.nix +++ b/nix/nixos/tx-generator-service.nix @@ -119,8 +119,10 @@ in pkgs.commonLib.defServiceModule }; configExeArgsFn = cfg: [ - "json_highlevel" - "${pkgs.writeText "tx-gen-config.json" (cfg.decideRunScript cfg)}" + "json_highlevel" + (if cfg.runScriptFile != null + then cfg.runScriptFile + else "${pkgs.writeText "tx-gen-config.json" (cfg.decideRunScript cfg)}") ] ++ optionals (cfg.tracerSocketPath != null) [ "--cardano-tracer" cfg.tracerSocketPath ]; diff --git a/nix/workbench/analyse.sh b/nix/workbench/analyse.sh index 4fcf07d49b5..f574c5dd576 100644 --- a/nix/workbench/analyse.sh +++ b/nix/workbench/analyse.sh @@ -1,25 +1,39 @@ usage_analyse() { usage "analyse" "Analyse cluster runs" </dev/null 2>&1 -then aws='true'; fi - -## Work around the odd parallelism bug killing performance on AWS: -if test -n "$aws" -then locli_rts_args=(+RTS -N1 -A128M -RTS) - echo "{ \"aws\": true }" -else locli_rts_args=() - echo "{ \"aws\": false }" -fi - local op=${1:-standard}; if test $# != 0; then shift; fi case "$op" in # 'read-mach-views' "${logs[@]/#/--log }" + compare | cmp ) - local baseline=$1; shift - progress "analysis" "$(white comparing) $(colorise $*) $(plain against baseline) $(white $baseline)" - analyse ${sargs[*]} multi-call "$baseline $*" 'compare' + local runs=($(expand_runspecs $*)) + local baseline=${runs[0]} + progress "analysis" "$(white comparing) $(colorise ${runs[*]:1}) $(plain against baseline) $(white $baseline)" + analyse "${sargs[@]}" multi-call 'compare' "${runs[*]}" 'compare' ;; recompare | recmp ) - local baseline=$1; shift - progress "analysis" "$(white regenerating comparison) of $(colorise $*) $(plain against baseline) $(white $baseline)" - analyse ${sargs[*]} multi-call "$baseline $*" 'update' + local runs=($(expand_runspecs $*)) + local baseline=${runs[0]} + progress "analysis" "$(white regenerating comparison) of $(colorise ${runs[*]:1}) $(plain against baseline) $(white $baseline)" + analyse "${sargs[@]}" multi-call 'compare' "${runs[*]}" 'update' ;; - multi-run | multi ) - progress "analysis" "$(white multi-summary) on runs: $(colorise $*)" - - analyse ${sargs[*]} full "$*" - analyse ${sargs[*]} multi-run-summary "$*" - ;; - - multi-run-pattern | multi-pattern | multipat | mp ) - analyse ${sargs[*]} multi-run $(run list-pattern $1) - ;; - - multi-run-summary | multi-summary | summary | sum ) + variance | var ) local script=( read-clusterperfs compute-multi-clusterperf @@ -119,12 +129,34 @@ case "$op" in compute-multi-propagation multi-propagation-json multi-propagation-org - multi-propagation-{forger,peers,endtoend} + multi-propagation-{control,forger,peers,endtoend} multi-propagation-gnuplot multi-propagation-full ) - progress "analysis" "$(white 
multi-summary), calling script: $(colorise ${script[*]})" - analyse ${sargs[*]} multi-call "$*" ${script[*]} + verbose "analysis" "$(white variance), calling script: $(colorise ${script[*]})" + analyse "${sargs[@]}" multi-call 'variance' "$*" ${script[*]} + ;; + + rerender | render ) + local script=( + context + + read-propagations + propagation-json + propagation-org + propagation-{control,forger,peers,endtoend} + propagation-gnuplot + propagation-full + + read-clusterperfs + clusterperf-json + clusterperf-gnuplot + clusterperf-org + clusterperf-report + clusterperf-full + ) + verbose "analysis" "$(white full), calling script: $(colorise ${script[*]})" + analyse "${sargs[@]}" map "call ${script[*]}" "$@" ;; full | standard | std ) @@ -133,7 +165,8 @@ case "$op" in context build-mach-views $(test -n "$dump_machviews" && echo 'dump-mach-views') - rebuild-chain $(test -n "$dump_chain" && echo 'dump-chain') + rebuild-chain + dump-chain chain-timeline collect-slots $(test -n "$dump_slots_raw" && echo 'dump-slots-raw') @@ -143,7 +176,7 @@ case "$op" in compute-propagation propagation-json propagation-org - propagation-{forger,peers,endtoend} + propagation-{control,forger,peers,endtoend} propagation-gnuplot propagation-full @@ -156,9 +189,51 @@ case "$op" in clusterperf-org clusterperf-report clusterperf-full + + compute-summary + summary-json + summary-report ) - progress "analysis" "$(white full), calling script: $(colorise ${script[*]})" - analyse ${sargs[*]} map "call ${script[*]}" "$@" + verbose "analysis" "$(white full), calling script: $(colorise ${script[*]})" + analyse "${sargs[@]}" map "call ${script[*]}" "$@" + ;; + + block-propagation | blockprop | bp ) + local script=( + logs $(test -n "$dump_logobjects" && echo 'dump-logobjects') + context + + build-mach-views $(test -n "$dump_machviews" && echo 'dump-mach-views') + rebuild-chain + dump-chain + chain-timeline + + compute-propagation + propagation-json + propagation-org + propagation-{control,forger,peers,endtoend} + propagation-gnuplot + propagation-full + ) + verbose "analysis" "$(white full), calling script: $(colorise ${script[*]})" + analyse "${sargs[@]}" map "call ${script[*]}" "$@" + ;; + + re-block-propagation | reblockprop | rebp ) + fail "re-block-propagation is broken: read-chain not implemented" + local script=( + read-chain + chain-timeline + + compute-propagation + propagation-json + propagation-org + propagation-{control,forger,peers,endtoend} + propagation-gnuplot + propagation-full + ) + verbose "analysis" "$(white full), calling script: $(colorise ${script[*]})" + analyse "${sargs[@]}" map "call ${script[*]}" "$@" ;; performance | perf ) @@ -180,11 +255,11 @@ case "$op" in clusterperf-report clusterperf-full ) - progress "analysis" "$(white performance), calling script: $(colorise ${script[*]})" - analyse ${sargs[*]} map "call ${script[*]}" "$@" + verbose "analysis" "$(white performance), calling script: $(colorise ${script[*]})" + analyse "${sargs[@]}" map "call ${script[*]}" "$@" ;; - performance-single-host | perf-single ) + performance-host | perf-host ) local usage="USAGE: wb analyse $op HOST" local host=${1:?usage}; shift @@ -199,8 +274,8 @@ case "$op" in compute-machperf render-machperf ) - progress "analysis" "$(with_color white performance), calling script: $(colorise ${script[*]})" - analyse ${sargs[*]} map "call --host $host ${script[*]}" "$@" + verbose "analysis" "$(with_color white performance), calling script: $(colorise ${script[*]})" + analyse "${sargs[@]}" map "call --host $host ${script[*]}" "$@" 
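+       ## A hypothetical invocation of the op above (host and run name are
+       ## examples, not defaults):
+       ##   wb analyse perf-host node-0 current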
;; map ) @@ -208,7 +283,7 @@ case "$op" in ## Meaning: map OP over RUNS, optionally giving flags/options to OP local preop=${1:?usage}; shift - local runs=($*); if test $# = 0; then runs=(current); fi + local runs=($(expand_runspecs $*)) local op_split=($preop) local op=${op_split[0]} @@ -216,17 +291,24 @@ case "$op" in ## This is magical and stupid, but oh well, it's the cost of abstraction: ## 1. We are passing all '--long-option VAL' pairs to the mapped preop ## 2. We are passing all '-opt-flag' flags to the mapped preop - for ((i=1; i<=${#op_split[*]}; i++)) - do local arg=${op_split[$i]} argnext=${op_split[$((i+1))]} + local nops=${#op_split[*]} + for ((i=1; i<=$nops; i++)) + do local arg=${op_split[$i]} + if test $i -lt $((nops - 1)) + then argnext=${op_split[$((i+1))]} + else argnext= + fi case "$arg" in - --* ) op_args+=($arg $argnext); i=$((i+1));; + --* ) test $i -lt $nops || \ + fail "No value passed for option: $arg" + op_args+=($arg "$argnext"); i=$((i+1));; -* ) op_args+=($arg);; * ) break;; esac; done - local args=${op_split[*]:$i} - progress "analyse" "mapping op $(with_color yellow $op ${op_args[*]}) $(with_color cyan $args) over runs: $(with_color white ${runs[*]})" + local args=${op_split[@]:$i} + progress "analyse" "mapping op $(with_color yellow $op "${op_args[@]}") $(with_color cyan $args) over runs: $(with_color white ${runs[*]})" for r in ${runs[*]} - do analyse ${sargs[*]} prepare $r - analyse ${sargs[*]} $op ${op_args[*]} $r ${args[*]} + do analyse "${sargs[@]}" prepare $r + analyse "${sargs[@]}" $op "${op_args[@]}" $r "${args[@]}" done ;; @@ -239,15 +321,18 @@ case "$op" in --host ) host=$2; shift;; * ) break;; esac; shift; done - local name=${1:?$usage}; shift - local dir=$(run get "$name") + local run=${1:?$usage}; shift + local dir=$(run get "$run") local adir=$dir/analysis - test -n "$dir" -a -d "$adir" || fail "malformed run: $name" + test -n "$dir" -a -d "$adir" || fail "malformed run: $run" local logfiles=( $(if test -z "$host" then ls "$adir"/logs-*.flt.json else ls "$adir"/logs-$host.flt.json; fi)) + test ${#logfiles[*]} -gt 0 || + fail "no files match $adir"'/logs-*.flt.json' + local minus_logfiles=( $(for host in ${perf_omit_hosts[*]} do ls "$adir"/logs-$host.flt.json; done)) @@ -258,81 +343,104 @@ case "$op" in ' "$dir"/profile.json --raw-output) analysis_set_filters "$filter_names" filter_exprs+=($(jq '(.analysis.filter_exprs // []) - | map(tojson) - | join(",") - ' "$dir"/profile.json --raw-output)) + | map(tojson) + | join(",") + ' "$dir"/profile.json --raw-output)) fi - progress "analysis" "filters exprs: $(yellow ${filter_exprs[*]})" - filters+=(${filter_exprs[*]/#/--filter-expr }) + local filter_exprs_q=("${filter_exprs[@]@Q}") + filters+=(${filter_exprs_q[@]/#/--filter-expr }) + progress "analysis" "filters exprs: $(yellow "${filter_exprs[@]}")" local v0 v1 v2 v3 v4 v5 v6 v7 v8 v9 va vb vc vd ve vf vg vh vi vj vk vl vm vn vo - v0=("$@") - v1=(${v0[*]/#logs/ 'unlog' --host-from-log-filename ${logfiles[*]/#/--log }}) - v2=(${v1[*]/#context/ 'meta-genesis' --run-metafile "$dir"/meta.json - --shelley-genesis "$dir"/genesis-shelley.json }) - v5=(${v2[*]/#rebuild-chain/ 'rebuild-chain' ${filters[*]}}) - v6=(${v5[*]/#dump-chain/ 'dump-chain' --chain "$adir"/chain.json}) - v7=(${v6[*]/#chain-timeline/ 'timeline-chain' --timeline "$adir"/chain.txt ${filter_reasons:+--filter-reasons} ${chain_errors:+--chain-errors}}) - v8=(${v7[*]/#collect-slots/ 'collect-slots' ${minus_logfiles[*]/#/--ignore-log }}) - v9=(${v8[*]/#filter-slots/ 'filter-slots' 
${filters[*]}}) - va=(${v9[*]/#propagation-json/ 'render-propagation' --json "$adir"/blockprop.json --full}) - vb=(${va[*]/#propagation-org/ 'render-propagation' --org "$adir"/blockprop.org --full}) - vc=(${vb[*]/#propagation-forger/ 'render-propagation' --report "$adir"/blockprop.forger.org --forger}) - vd=(${vc[*]/#propagation-peers/ 'render-propagation' --report "$adir"/blockprop.peers.org --peers }) - ve=(${vd[*]/#propagation-endtoend/ 'render-propagation' --report "$adir"/blockprop.endtoend.org --end-to-end}) - vf=(${ve[*]/#propagation-gnuplot/ 'render-propagation' --gnuplot "$adir"/%s.cdf --full}) - vg=(${vf[*]/#propagation-full/ 'render-propagation' --pretty "$adir"/blockprop-full.txt --full}) - vh=(${vg[*]/#clusterperf-json/ 'render-clusterperf' --json "$adir"/clusterperf.json --full }) - vi=(${vh[*]/#clusterperf-org/ 'render-clusterperf' --org "$adir"/clusterperf.org --full }) - vj=(${vi[*]/#clusterperf-report/ 'render-clusterperf' --report "$adir"/clusterperf.report.org --summary }) - vk=(${vj[*]/#clusterperf-gnuplot/ 'render-clusterperf' --gnuplot "$adir"/%s.cdf --full }) - vl=(${vk[*]/#clusterperf-full/ 'render-clusterperf' --pretty "$adir"/clusterperf-full.txt --full }) - local ops_final=(${vl[*]}) - - progress "analysis | locli" "$(with_color reset ${locli_rts_args[@]}) $(colorise "${ops_final[@]}")" - time locli "${locli_rts_args[@]}" "${ops_final[@]}" + v0=( $* ) + v1=("${v0[@]/#logs/ 'unlog' --host-from-log-filename ${analysis_allowed_loanys[*]/#/--ok-loany } ${logfiles[*]/#/--log } }") + v2=("${v1[@]/#context/ 'meta-genesis' --run-metafile \"$dir\"/meta.json + --shelley-genesis \"$dir\"/genesis-shelley.json }") + v3=("${v2[@]/#read-chain/ 'read-chain' --chain \"$adir\"/chain.json}") + v4=("${v3[@]/#rebuild-chain/ 'rebuild-chain' ${filters[@]}}") + v5=("${v4[@]/#dump-chain/ 'dump-chain' --chain \"$adir\"/chain.json --chain-rejecta \"$adir\"/chain-rejecta.json}") + v6=("${v5[@]/#chain-timeline/ 'timeline-chain' --timeline \"$adir\"/chain.txt ${filter_reasons:+--filter-reasons} ${chain_errors:+--chain-errors}}") + v7=("${v6[@]/#collect-slots/ 'collect-slots' ${minus_logfiles[*]/#/--ignore-log }}") + v8=("${v7[@]/#filter-slots/ 'filter-slots' ${filters[@]}}") + v9=("${v8[@]/#propagation-json/ 'render-propagation' --json \"$adir\"/blockprop.json --full}") + va=("${v9[@]/#propagation-org/ 'render-propagation' --org \"$adir\"/blockprop.org --full}") + vb=("${va[@]/#propagation-control/ 'render-propagation' --org-report \"$adir\"/blockprop.control.org --control}") + vc=("${vb[@]/#propagation-forger/ 'render-propagation' --org-report \"$adir\"/blockprop.forger.org --forger}") + vd=("${vc[@]/#propagation-peers/ 'render-propagation' --org-report \"$adir\"/blockprop.peers.org --peers }") + ve=("${vd[@]/#propagation-endtoend/ 'render-propagation' --org-report \"$adir\"/blockprop.endtoend.org --end-to-end}") + vf=("${ve[@]/#propagation-gnuplot/ 'render-propagation' --gnuplot \"$adir\"/cdf/%s.cdf --full}") + vg=("${vf[@]/#propagation-full/ 'render-propagation' --pretty \"$adir\"/blockprop-full.txt --full}") + vh=("${vg[@]/#clusterperf-json/ 'render-clusterperf' --json \"$adir\"/clusterperf.json --full }") + vi=("${vh[@]/#clusterperf-org/ 'render-clusterperf' --org \"$adir\"/clusterperf.org --full }") + vj=("${vi[@]/#clusterperf-report/ 'render-clusterperf' --org-report \"$adir\"/clusterperf.report.org --report }") + vk=("${vj[@]/#clusterperf-gnuplot/ 'render-clusterperf' --gnuplot \"$adir\"/cdf/%s.cdf --full }") + vl=("${vk[@]/#clusterperf-full/ 'render-clusterperf' --pretty 
\"$adir\"/clusterperf-full.txt --full }") + vm=("${vl[@]/#read-clusterperfs/ 'read-clusterperfs' --clusterperf \"$adir\"/clusterperf.json }") + vn=("${vm[@]/#read-propagations/ 'read-propagations' --prop \"$adir\"/blockprop.json }") + vo=("${vn[@]/#summary-json/ 'render-summary' --json \"$adir\"/summary.json}") + vp=("${vo[@]/#summary-report/ 'render-summary' --org-report \"$adir\"/summary.org}") + local ops_final=() + for v in "${vp[@]}" + do eval ops_final+=($v); done + + call_locli "$rtsmode" "${ops_final[@]}" + + local analysis_jsons=($(ls $adir/*.json | + fgrep -v -e '.flt.json' \ + -e '.logobjs.json' \ + -e 'chain-rejecta.json' \ + -e 'chain.json' + )) + progress "analyse" "prettifying JSON data: ${analysis_jsons[*]}" + time json_compact_prettify "${analysis_jsons[@]}" + progress "output" "run: $(white $run) subdir: $(yellow analysis)" ;; multi-call ) - local usage="USAGE: wb analyse $op \"RUN-NAMES..\" OPS.." + local usage="USAGE: wb analyse $op SUFFIX \"RUN-NAMES..\" OPS.." - local runs=${1:?$usage}; shift - local dirs=( $(for run in $runs; do run get "$run"; echo; done)) + local suffix=${1:?$usage}; shift + local runs=($(expand_runspecs ${1:?$usage})); shift + + local dirs=( $(for run in ${runs[*]}; do run get "$run"; echo; done)) local adirs=( $(for dir in ${dirs[*]}; do echo $dir/analysis; done)) local props=( $(for adir in ${adirs[*]}; do echo --prop ${adir}/blockprop.json; done)) local cperfs=($(for adir in ${adirs[*]}; do echo --clusterperf ${adir}/clusterperf.json; done)) local compares=($(for adir in ${adirs[*]} - do echo --run-metafile ${adir}/../meta.json \ - --shelley-genesis ${adir}/../genesis-shelley.json + do echo --summary ${adir}/summary.json \ + --perf ${adir}/clusterperf.json \ + --prop ${adir}/blockprop.json done)) - local run=$(for dir in ${dirs[*]}; do basename $dir; done | sort -r | head -n1 | cut -d. -f1-2)-multirun + local run=$(for dir in ${dirs[*]}; do basename $dir; done | sort -r | head -n1 | cut -d. 
-f1-2)_$suffix
     local adir=$(run get-rundir)/$run
-    mkdir -p "$adir"
-    progress "analysis | multi-call" "run $(yellow $run), runs: $(white $runs)"
+    mkdir -p "$adir/cdf"
+    progress "analysis | multi-call" "output $(yellow $run), inputs: $(white ${runs[*]})"

     local v0 v1 v2 v3 v4 v5 v6 v7 v8 v9 va vb vc vd ve vf vg vh vi vj vk vl vm vn vo
     v0=("$@")
     v1=(${v0[*]/#read-clusterperfs/ 'read-clusterperfs' ${cperfs[*]} })
     v2=(${v1[*]/#read-propagations/ 'read-propagations' ${props[*]} })
-    v3=(${v2[*]/#multi-clusterperf-json/ 'render-multi-clusterperf' --json $adir/'multi-clusterperf.json' --full $multi_aspect })
-    v4=(${v3[*]/#multi-clusterperf-org/ 'render-multi-clusterperf' --org $adir/'multi-clusterperf.org' --full $multi_aspect })
-    v5=(${v4[*]/#multi-clusterperf-report/ 'render-multi-clusterperf' --report $adir/'multi-clusterperf.report.org' --summary $multi_aspect })
-    v6=(${v5[*]/#multi-clusterperf-gnuplot/ 'render-multi-clusterperf' --gnuplot $adir/'%s.cdf' --full $multi_aspect })
-    v7=(${v6[*]/#multi-clusterperf-full/ 'render-multi-clusterperf' --pretty $adir/'multi-clusterperf-full.txt' --full $multi_aspect })
-    v8=(${v7[*]/#multi-propagation-json/ 'render-multi-propagation' --json $adir/'multi-blockprop.json' --full $multi_aspect })
-    v9=(${v8[*]/#multi-propagation-org/ 'render-multi-propagation' --org $adir/'multi-blockprop.org' --full $multi_aspect })
-    va=(${v9[*]/#multi-propagation-forger/ 'render-multi-propagation' --report $adir/'multi-blockprop-forger.org' --forger $multi_aspect })
-    vb=(${va[*]/#multi-propagation-peers/ 'render-multi-propagation' --report $adir/'multi-blockprop-peers.org' --peers $multi_aspect })
-    vc=(${vb[*]/#multi-propagation-endtoend/ 'render-multi-propagation' --report $adir/'multi-blockprop-endtoend.org' --end-to-end $multi_aspect })
-    vd=(${vc[*]/#multi-propagation-gnuplot/ 'render-multi-propagation' --gnuplot $adir/'%s.cdf' --full $multi_aspect })
-    ve=(${vd[*]/#multi-propagation-full/ 'render-multi-propagation' --pretty $adir/'multi-blockprop-full.txt' --full $multi_aspect })
+    v3=(${v2[*]/#multi-clusterperf-json/ 'render-multi-clusterperf' --json $adir/'clusterperf.json' --full $multi_aspect })
+    v4=(${v3[*]/#multi-clusterperf-org/ 'render-multi-clusterperf' --org $adir/'clusterperf.org' --full $multi_aspect })
+    v5=(${v4[*]/#multi-clusterperf-report/ 'render-multi-clusterperf' --report $adir/'clusterperf.report.org' --report $multi_aspect })
+    v6=(${v5[*]/#multi-clusterperf-gnuplot/ 'render-multi-clusterperf' --gnuplot $adir/cdf/'%s.cdf' --full $multi_aspect })
+    v7=(${v6[*]/#multi-clusterperf-full/ 'render-multi-clusterperf' --pretty $adir/'clusterperf-full.txt' --full $multi_aspect })
+    v8=(${v7[*]/#multi-propagation-json/ 'render-multi-propagation' --json $adir/'blockprop.json' --full $multi_aspect })
+    v9=(${v8[*]/#multi-propagation-org/ 'render-multi-propagation' --org $adir/'blockprop.org' --full $multi_aspect })
+    va=(${v9[*]/#multi-propagation-control/ 'render-multi-propagation' --report $adir/'blockprop.control.org' --control $multi_aspect })
+    va=(${va[*]/#multi-propagation-forger/ 'render-multi-propagation' --report $adir/'blockprop.forger.org' --forger $multi_aspect })
+    vb=(${va[*]/#multi-propagation-peers/ 'render-multi-propagation' --report $adir/'blockprop.peers.org' --peers $multi_aspect })
+    vc=(${vb[*]/#multi-propagation-endtoend/ 'render-multi-propagation' --report $adir/'blockprop.endtoend.org' --end-to-end $multi_aspect })
+    vd=(${vc[*]/#multi-propagation-gnuplot/ 'render-multi-propagation' --gnuplot $adir/cdf/'%s.cdf' --full
$multi_aspect }) + ve=(${vd[*]/#multi-propagation-full/ 'render-multi-propagation' --pretty $adir/'blockprop-full.txt' --full $multi_aspect }) vf=(${ve[*]/#compare/ 'compare' --ede nix/workbench/ede --report $adir/report-$run.org ${compares[*]} }) vg=(${vf[*]/#update/ 'compare' --ede nix/workbench/ede --report $adir/report-$run.org ${compares[*]} --template $adir/report-$run.ede }) local ops_final=(${vg[*]}) - progress "analysis | locli" "$(with_color reset ${locli_rts_args[@]}) $(colorise "${ops_final[@]}")" - time locli "${locli_rts_args[@]}" "${ops_final[@]}" + call_locli "$rtsmode" "${ops_final[@]}" + + progress "output" "run: $(white $run)" ;; prepare | prep ) @@ -346,24 +454,33 @@ case "$op" in progress "analyse" "preparing run for analysis: $(with_color white $name)" local adir=$dir/analysis - mkdir -p "$adir" + mkdir -p "$adir/cdf" ## 0. ask locli what it cares about local keyfile="$adir"/substring-keys - case $(jq '.node.tracing_backend // "iohk-monitoring"' --raw-output $dir/profile.json) in + local key_old=$(sha256sum "$keyfile" | cut -d' ' -f1) + local tracing_backend=$(jq '.node.tracing_backend // "iohk-monitoring"' --raw-output $dir/profile.json) + case "$tracing_backend" in trace-dispatcher ) locli 'list-logobject-keys' --keys "$keyfile";; iohk-monitoring ) locli 'list-logobject-keys-legacy' --keys-legacy "$keyfile";; + * ) fail "Unknown tracing backend: $tracing_backend" esac + local key_new=$(sha256sum "$keyfile" | cut -d' ' -f1) ## 1. unless already done, filter logs according to locli's requirements local logdirs=($(ls -d "$dir"/node-*/ 2>/dev/null)) local logfiles=($(ls "$adir"/logs-node-*.flt.json 2>/dev/null)) - local prefilter=$(test -z "${logfiles[*]}" -o -n "$refresh" && echo 'true' || echo 'false') - echo "{ \"prefilter\": $prefilter }" - if test x$prefilter != xtrue + local prefilter=$(if test -z "${logfiles[*]}" + then echo 'prefiltered-logs-not-yet-created' + elif test "$key_new" != "$key_old" + then echo 'prefiltering-keyset-changed' + else echo 'false' + fi) + echo "{ \"prefilter\": \"$prefilter\" }" + if test "$prefilter" = "false" then return; fi - progress "analyse" "filtering logs: $(with_color black ${logdirs[@]})" + verbose "analyse" "filtering logs: $(with_color black ${logdirs[@]})" local grep_params=( --binary-files=text --file="$keyfile" @@ -376,12 +493,75 @@ case "$op" in if test -z "$logfiles" then msg "no logs in $d, skipping.."; fi local output="$adir"/logs-$(basename "$d").flt.json - grep ${grep_params[*]} $logfiles > "$output" & + grep ${grep_params[*]} $logfiles | grep '^{' > "$output" & done wait;; - * ) usage_analyse;; esac + trace-frequencies | trace-freq | freq ) + local new_only= sargs=() + while test $# -gt 0 + do case "$1" in + --new-only ) sargs+=(--new-only);; + * ) break;; esac; shift; done + local usage="USAGE: wb analyse $op LOGFILE" + + local logfile=${1:?usage}; shift + + trace_frequencies "${sargs[@]}" "" "$logfile" > "${logfile}.freq" + + local src=$(wc -l <"$logfile") + local res=$(cut -d' ' -f1 "${logfile}.freq" | + xargs echo | + sed 's/ /, /g; s/^/\[/; s/$/\]/' | + jq add) + if test $src != $res + then local col=red; else local col=green; fi + progress "trace-freq" "total in source: $(white $src)" + progress_ne "trace-freq" "total counted: $(with_color $col $res)";; + + chain-rejecta-reasons | chain-rejecta | rejecta ) + local usage="USAGE: wb analyse $op [RUN-NAME=current]" + + local name=${1:-current}; if test $# != 0; then shift; fi + local dir=$(run get "$name") + test -n "$dir" || fail "malformed run: $name" + local 
rejecta=$dir/analysis/chain-rejecta.json + + jq '.beBlockNo as $no + | .beAcceptance + | { block: $no + , nacks: map( select(.[1] == false) + | .[0].contents.tag) + } + ' $rejecta --compact-output + wc -l $rejecta + ;; + + * ) progress "analyse" "unexpected 'analyse' subop: $(red $op)" + usage_analyse;; esac +} + +call_locli() { + local rtsmode="$1"; shift + local args=("$@") + + if test -z "$rtsmode" + then if curl --connect-timeout 0.5 http://169.254.169.254/latest/meta-data >/dev/null 2>&1 + then rtsmode='aws' + else rtsmode='hipar'; fi; fi + + echo "{ \"rtsmode\": \"$rtsmode\" }" + case "$rtsmode" in + aws ) ## Work around the odd parallelism bug killing performance on AWS: + locli_args+=(+RTS -N1 -A128M -RTS);; + lomem ) locli_args+=(+RTS -N3 -A8M -RTS);; + hipar ) locli_args+=();; + * ) fail "unknown rtsmode: $rtsmode";; + esac + + verbose "analysis | locli" "$(with_color reset ${locli_args[@]}) $(colorise "${ops_final[@]}")" + time locli "${locli_args[@]}" "${args[@]}" } num_jobs="\j" @@ -407,45 +587,64 @@ analysis_set_filters() { filters+=(${filter_files[*]/#/--filter }) } -analysis_classify_traces() { +classify_traces() { + jq --raw-output '(try .ns[0] // .ns) + ":" + (.data.kind //.data.val.kind)' 2>/dev/null | sort -u +} + +trace_frequencies() { + local new_only= + while test $# -gt 0 + do case "$1" in + --new-only ) new_only='true';; + * ) break;; esac; shift; done + + local types="$1"; shift + local files=("$@") + + if test -z "$types" + then types="$(cat "${files[@]}" | classify_traces)" + fi + + for ty in $types + do local ns=$(cut -d: -f1 <<<$ty) + local kind=$(cut -d: -f2 <<<$ty) + if test -n "$new_only" + then echo $(grep -hFe '"ns":"'$ns'"' "${files[@]}" | wc -l) $ty + else echo $(grep -hFe '"ns":"'$ns'"' "${files[@]}" | grep -Fe '"kind":"'$kind'"' | wc -l) $ty + fi + done | + sort -nr +} + +analysis_run_classify_traces() { local name=${1:-current}; if test $# != 0; then shift; fi local node=${1:-node-0}; if test $# != 0; then shift; fi local dir=$(run get "$name") progress "analysis" "enumerating namespace from logs of $(with_color yellow $node)" - grep -h '^{' $dir/$node/stdout* | jq --raw-output '(try .ns[0] // .ns) + ":" + (.data.kind // "")' 2>/dev/null | sort -u + grep -h '^{' $dir/$node/stdout* | classify_traces # grep -h '^{' $dir/$node/stdout* | jq --raw-output '.ns' 2>/dev/null | tr -d ']["' | sort -u } analysis_trace_frequencies() { - local same_types= while test $# -gt 0 do case "$1" in - --same-types | --same | -s ) same_types='true';; * ) break;; esac; shift; done local name=${1:-current}; if test $# != 0; then shift; fi local dir=$(run get "$name") local types=() - if test -n "$same_types" - then types=($(analysis_classify_traces $name 'node-0')) - progress_ne "analysis" "message frequencies: "; fi - for nodedir in $dir/node-*/ do local node=$(basename $nodedir) - if test -z "$same_types" - then types=($(analysis_classify_traces $name $node)) - progress "analysis" "message frequencies: $(with_color yellow $node)"; fi - - for type in ${types[*]} - do local ns=$(cut -d: -f1 <<<$type) - local kind=$(cut -d: -f2 <<<$type) - echo $(grep -h "\"$ns\".*\"$kind\"\|\"$kind\".*\"$ns\"" $nodedir/stdout* | wc -l) $type - done | - sort -nr > $nodedir/log-namespace-occurence-stats.txt - test -n "$same_types" && echo -n ' '$node >&2 + progress "analysis" "message frequencies: $(with_color yellow $node)" + + types=($(analysis_run_classify_traces $name $node)) + trace_frequencies \ + "${types[*]}" \ + $nodedir/stdout* \ + > $nodedir/log-namespace-occurence-stats.txt done 
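+    ## trace_frequencies can also be used standalone; a hypothetical call,
+    ## letting it derive the message-type list from the logs themselves:
+    ##   trace_frequencies '' run/current/node-0/stdout* > freq.txt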
echo >&2 } diff --git a/nix/workbench/chain-filters/adopt-10.json b/nix/workbench/chain-filters/adopt-10.json new file mode 100644 index 00000000000..d1e04c1d6a2 --- /dev/null +++ b/nix/workbench/chain-filters/adopt-10.json @@ -0,0 +1,7 @@ +[ { "tag":"CBlock" + , "contents": + { "tag": "BMinimumAdoptions" + , "contents": 10 + } + } +] diff --git a/nix/workbench/chain-filters/model.json b/nix/workbench/chain-filters/model.json index c26fcf3f8f3..1980f25f9f6 100644 --- a/nix/workbench/chain-filters/model.json +++ b/nix/workbench/chain-filters/model.json @@ -10,4 +10,10 @@ , "contents": 50 } } +, { "tag": "CBlock" + , "contents": + { "tag": "BUnitaryChainDelta" + , "contents": true + } + } ] diff --git a/nix/workbench/chain-filters/nonnegative.json b/nix/workbench/chain-filters/nonnegative.json new file mode 100644 index 00000000000..711ff25f1b0 --- /dev/null +++ b/nix/workbench/chain-filters/nonnegative.json @@ -0,0 +1,7 @@ +[ { "tag": "CBlock" + , "contents": + { "tag": "BNonNegatives" + , "contents": true + } + } +] diff --git a/nix/workbench/ede/chart.ede b/nix/workbench/ede/chart.ede index 68e981c35fc..8d202508d1d 100644 --- a/nix/workbench/ede/chart.ede +++ b/nix/workbench/ede/chart.ede @@ -14,9 +14,9 @@ set yrange [*:*] set ylabel "{{ args.ylabel }}" {% endif %} eval cdfI_{{ (runs | length) + 1 }}("{{ args.metric }}", "{{ args.title }}", \ - "{{ base.ver }}/{{ base.meta.era | toTitle }}", "{{ base.meta.tag }}", \ + "{{ base.meta.tag }}/{{ base.meta.batch }}/{{ base.meta.era | toTitle }}", "{{ base.meta.tag }}", \ {% for run in runs %} - "{{ run.value.ver }}/{{ run.value.meta.era | toTitle }}", "{{ run.value.meta.tag }}"{% if !run.last %},{% endif %} \ + "{{ run.value.meta.tag }}/{{ run.value.meta.batch }}/{{ run.value.meta.era | toTitle }}", "{{ run.value.meta.tag }}"{% if !run.last %},{% endif %} \ {% endfor %} ) #+end_src diff --git a/nix/workbench/ede/report.ede b/nix/workbench/ede/report.ede index 3b55bf0c43a..6f6f7891853 100644 --- a/nix/workbench/ede/report.ede +++ b/nix/workbench/ede/report.ede @@ -1,4 +1,4 @@ -#+CONSTANTS: perf=clusterperf.report.org forge=blockprop.forger.org peers=blockprop.peers.org end2end=blockprop.endtoend.org +#+CONSTANTS: {{ summary.dataRef }}={{ summary.orgFile }}{% for sec in analyses %} {{ sec.value.dataRef }}={{ sec.value.orgFile }} {% endfor %} #+CONSTANTS: base=../{{ base.meta.tag }}/analysis {% for run in runs %} #+CONSTANTS: run{{ run.index }}=../{{ run.value.meta.tag }}/analysis @@ -18,29 +18,20 @@ *** Manifest -We compare ... relative to ={{ base.ver }}= /{{ base.meta.era | toTitle }}, under {{ base.workload }} workload. +We compare {% for run in runs %}{%if !run.first%}{%if !run.last%}, {%else%} and {%endif%}{%endif%}{{ run.value.ver }}/{{ run.value.meta.era | toTitle }}{% endfor %} relative to ={{ base.ver }}=/{{ base.meta.era | toTitle }}, under {{ base.workload }} workload. 
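+{# The summary table included just below is driven by the 'summary' object,
+   which is assumed to carry the same dataRef/orgFile/valueCol/nameCol
+   fields that the per-analysis sections further down consume via
+   tablevars-delta-pct.ede. #}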
-{% include "manifest.ede" %} +{% include "table.ede" with table = summary %} ***** Revision history - rev 1, {{ report.date }}: initial release *** Analysis -***** Resource Usage +{% for sec in analyses %} +***** {{ sec.value.title }} -{% include "table.ede" with table = { "rows":"1234567", "src":"perf" } %} +{% include "tablevars-delta-pct.ede" with table = sec.value %} -***** Forging - -{% include "table.ede" with table = { "rows":"1234", "src":"forge" } %} - -***** Individual peer propagation - -{% include "table.ede" with table = { "rows":"1234", "src":"peers" } %} - -***** End-to-end propagation - -{% include "table.ede" with table = { "rows":"12345678", "src":"end2end" } %} +{% endfor %} *** Observations ***** Resources @@ -61,15 +52,37 @@ We compare ... relative to ={{ base.ver }}= /{{ base.meta.era | toTitle }}, unde @duncan, @Kevin Hammond, @Nigel Hemsley, @neil, @jared.corduan, @Damian, @nfrisby, @amesgen, @marcin, @Vitor Silva, @Javier Franco, @carlos.lopezdelara, @disasm, @michael.karg -* Appendix: charts - -{% include "chart.ede" with args = { "title": "Kernel-reported CPU usage", "ylabel": "CPU usage, %", "metric": "cpuProcess", "yrange": "1:200", "logscale": false } %} -{% include "chart.ede" with args = { "title": "RTS memory allocation rate", "ylabel": "Memory allocation rate, MB/s", "metric": "rtsAllocation", "yrange": "0:5000", "logscale": false } %} -{% include "chart.ede" with args = { "title": "Kernel reported memory usage", "ylabel": "RSS, MB", "metric": "memRSS", "logscale": false } %} -{% include "chart.ede" with args = { "title": "RTS GC live bytes", "ylabel": "GC live bytes, MB", "metric": "rtsLiveBytes", "logscale": false } %} -{% include "chart.ede" with args = { "title": "Single peer fetched-to-adopted time", "metric": "pAdopted" } %} -{% include "chart.ede" with args = { "title": "First peer notice time", "metric": "pNoticed" } %} -{% include "chart.ede" with args = { "title": "Block adoption, 50% of cluster", "metric": "prop0.50" } %} -{% include "chart.ede" with args = { "title": "Block adoption, 80% of cluster", "metric": "prop0.80" } %} -{% include "chart.ede" with args = { "title": "Block adoption, 90% of cluster", "metric": "prop0.90" } %} -{% include "chart.ede" with args = { "title": "Block adoption, 96% of cluster", "metric": "prop0.96" } %} +* Appendix A: charts + +*** Cluster performance charts + +{# +{% for m in dictionary.dClusterPerf %} +{%include "chart.ede" with args= { "title": m.value.deDescription, "metric": m.value.deField, "logscale": m.value.deLogScale, "yrange": m.value.deRange, "ylabel": m.value.deShortDesc + " " + m.value.deUnit } %} + - {{ m.value.deShortDesc }} ({{ m.value.deField }}) :: {{ m.value.deDescription }} +#} +{%include "chart.ede" with args= { "title": "Kernel-reported CPU usage", "metric": "CentiCpu", "logscale": false, "yrange": "1:200", "ylabel": "CPU usage, %" }%} +{# +{%include "chart.ede" with args= { "title": "Kernel-reported CPU usage", "metric": "CentiCpu", "logscale": false, "yrange": "1:200", "ylabel": "CPU usage, %" }%} +{%include "chart.ede" with args= { "title": "RTS memory allocation rate", "metric": "Alloc", "logscale": false, "yrange": "0:5000", "ylabel": "Memory allocation rate, MB/s" }%} +{%include "chart.ede" with args= { "title": "Kernel reported memory usage", "metric": "RSS", "logscale": false, "ylabel": "RSS, MB" }%} +{%include "chart.ede" with args= { "title": "RTS GC live bytes", "metric": "Live", "logscale": false, "ylabel": "GC live bytes, MB" }%} +{%include "chart.ede" with args= { "title": 
"Single peer fetched-to-adopted time", "metric": "cdfPeerAdoptions", "logscale": true }%} +{%include "chart.ede" with args= { "title": "First peer notice time", "metric": "cdfPeerNotices", "logscale": true }%} +{%include "chart.ede" with args= { "title": "Block adoption, 50% of cluster", "metric": "cdf0.50", "logscale": true }%} +{%include "chart.ede" with args= { "title": "Block adoption, 80% of cluster", "metric": "cdf0.80", "logscale": true }%} +{%include "chart.ede" with args= { "title": "Block adoption, 90% of cluster", "metric": "cdf0.90", "logscale": true }%} +{%include "chart.ede" with args= { "title": "Block adoption, 96% of cluster", "metric": "cdf0.96", "logscale": true }%} +#} + +* Appendix B: data dictionary + +*** Block propagation metrics +{% for m in dictionary.dBlockProp %} + - {{ m.value.deShortDesc }} ({{ m.value.deField }}) :: {{ m.value.deDescription }} +{% endfor %} + +*** Cluster performance metrics +{% for m in dictionary.dClusterPerf %} + - {{ m.value.deShortDesc }} ({{ m.value.deField }}) :: {{ m.value.deDescription }} +{% endfor %} diff --git a/nix/workbench/ede/table.ede b/nix/workbench/ede/table.ede index c98082ab147..bd4b59c911e 100644 --- a/nix/workbench/ede/table.ede +++ b/nix/workbench/ede/table.ede @@ -1,10 +1,10 @@ -| | {{ base.ver }}{% for run in runs %} | {{ run.value.ver }} | Δ | Δ%{% endfor %} | -|---------------------------+-----{% for run in runs %}-+------+------+-----{% endfor %}-| -{% for i in table.rows %} -| | {% for run in runs %} | | | {% endfor %} | +| | {{ base.ver }}{% for run in runs %} | {{ run.value.ver }}{% endfor %} | +|---------------------------+-----{% for run in runs %}-+-----{% endfor %}-| +{% for i in table.rowPrecs %} +| | {% for run in runs %} | {% endfor %} | {% endfor %} -|---------------------------+-----{% for run in runs %}-+------+------+-----{% endfor %}-| -#+TBLFM: $2=remote(file:$base/${{ table.src }},@@#$average);p3::$1=remote(file:$base/${{ table.src }},@@#$metric) +|---------------------------+-----{% for run in runs %}-+-----{% endfor %}-| +#+TBLFM: $2='(identity remote(file:$base/${{ table.dataRef }},@@#${{ table.valueCol }}))::$1='(identity remote(file:$base/${{ table.dataRef }},@@#${{ table.nameCol }})) {% for run in runs %} -#+TBLFM: ${{ run.index * 3 }}=remote(file:$run{{ run.index }}/${{ table.src }},@@#$average);p3::${{ run.index * 3 + 1 }}=${{ run.index * 3 }}-$2;p3::${{ run.index * 3 + 2 }}=round(100*${{ run.index * 3 + 1 }}/$2) +#+TBLFM: ${{ 2 + run.index }}='(identity remote(file:$run{{ run.index }}/${{ table.dataRef }},@@#${{ table.valueCol }})) {% endfor %} diff --git a/nix/workbench/ede/tablevars-delta-pct.ede b/nix/workbench/ede/tablevars-delta-pct.ede new file mode 100644 index 00000000000..6febfedb270 --- /dev/null +++ b/nix/workbench/ede/tablevars-delta-pct.ede @@ -0,0 +1,13 @@ +| | {{ base.ver }}{% for run in runs %} | {{ run.value.ver }} | Δ | Δ%{% endfor %} | +|---------------------------+-----{% for run in runs %}-+------+------+-----{% endfor %}-| +{% for i in table.rowPrecs %} +| | {% for run in runs %} | | | {% endfor %} | +{% endfor %} +|---------------------------+-----{% for run in runs %}-+------+------+-----{% endfor %}-| +{% for var in table.vars %} +| | {% for run in runs %} | | | {% endfor %} | +{% endfor %} +#+TBLFM: $2='(identity remote(file:$base/${{ table.dataRef }},@@#${{ table.valueCol }})); N f-3::$1='(identity remote(file:$base/${{ table.dataRef }},@@#${{ table.nameCol }})){% for var in table.vars %}::@{{ var.value.angles }}$1=string("{{ var.value.name }}")::@{{ 
var.value.angles }}$2='(identity remote(file:$base/${{ table.dataRef }},${{var.key}})){% endfor %}
+{% for run in runs %}
+#+TBLFM: ${{ run.index * 3 }}='(identity remote(file:$run{{ run.index }}/${{ table.dataRef }},@@#${{ table.valueCol }})); N f-3::${{ run.index * 3 + 1 }}=${{ run.index * 3 }}-$2; N f-3::${{ run.index * 3 + 2 }}=if($2 == 0, string("nan"), round(100*${{ run.index * 3 + 1 }}/$2)){% for var in table.vars %}::@{{ var.value.angles }}${{ run.index * 3 }}='(identity remote(file:$base/${{ table.dataRef }},${{var.key}}))::@{{ var.value.angles }}${{ run.index * 3 + 1 }}=string("")::@{{ var.value.angles }}${{ run.index * 3 + 2 }}=string(""){% endfor %}
+{% endfor %}
diff --git a/nix/workbench/ede/tablevars-delta.ede b/nix/workbench/ede/tablevars-delta.ede
new file mode 100644
index 00000000000..79a5d5f58c6
--- /dev/null
+++ b/nix/workbench/ede/tablevars-delta.ede
@@ -0,0 +1,13 @@
+| | {{ base.ver }}{% for run in runs %} | {{ run.value.ver }} | Δ{% endfor %} |
+|---------------------------+-----{% for run in runs %}-+------+-----{% endfor %}-|
+{% for i in table.rowPrecs %}
+| | {% for run in runs %} | | {% endfor %} |
+{% endfor %}
+|---------------------------+-----{% for run in runs %}-+------+-----{% endfor %}-|
+{% for var in table.vars %}
+| | {% for run in runs %} | | {% endfor %} |
+{% endfor %}
+#+TBLFM: $2='(identity remote(file:$base/${{ table.dataRef }},@@#${{ table.valueCol }})); N f-3::$1='(identity remote(file:$base/${{ table.dataRef }},@@#${{ table.nameCol }})){% for var in table.vars %}::@{{ var.value.angles }}$1=string("{{ var.value.name }}")::@{{ var.value.angles }}$2='(identity remote(file:$base/${{ table.dataRef }},${{var.key}})){% endfor %}
+{% for run in runs %}
+#+TBLFM: ${{ run.index * 2 }}='(identity remote(file:$run{{ run.index }}/${{ table.dataRef }},@@#${{ table.valueCol }})); N f-3::${{ run.index * 2 + 1 }}=${{ run.index * 2 }}-$2; N f-3{% for var in table.vars %}::@{{ var.value.angles }}${{ run.index * 2 }}='(identity remote(file:$base/${{ table.dataRef }},${{var.key}}))::@{{ var.value.angles }}${{ run.index * 2 + 1 }}=string(""){% endfor %}
+{% endfor %}
diff --git a/nix/workbench/lib-cabal.sh b/nix/workbench/lib-cabal.sh
index 6bbc887e692..5d30dedd614 100644
--- a/nix/workbench/lib-cabal.sh
+++ b/nix/workbench/lib-cabal.sh
@@ -30,7 +30,7 @@ function workbench-prebuild-executables()
     unset NIX_ENFORCE_PURITY
     for exe in cardano-node cardano-cli cardano-topology cardano-tracer tx-generator locli
     do echo "workbench: $(blue prebuilding) $(red $exe)"
-       cabal $(test -z "$verbose" && echo '-v0') build ${WB_FLAGS_CABAL} -- exe:$exe 2>&1 >/dev/null || return 1
+       cabal $(test -z "${verbose:-}" && echo '-v0') build ${WB_FLAGS_CABAL} -- exe:$exe 2>&1 >/dev/null || return 1
    done
    echo
}
diff --git a/nix/workbench/lib.sh b/nix/workbench/lib.sh
index 54ea5ff9c44..8b02a889162 100644
--- a/nix/workbench/lib.sh
+++ b/nix/workbench/lib.sh
@@ -19,6 +19,25 @@ jq_check_json() {
     jq '.' "$1" >/dev/null
 }

+json_compact_prettify()
+{
+    for f in "$@"
+    do if test -n "$(find $f -size +1M)";   ## Skip large files.
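+          ## A rough sketch of the jq rewrite below (hypothetical input):
+          ## a pretty-printed object spread over indented lines is first
+          ## re-joined, then broken again after each ',"', so a prettified
+          ##   { "a": 1, "b": [1, 2] }
+          ## comes back as:
+          ##   {"a": 1,
+          ##    "b": [1, 2]
+          ##   }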
+ then continue; fi + + jq_fmutate "$f" --raw-input --raw-output --slurp \ + 'gsub("\n +";"")|gsub("\n ]";"]")|gsub(",\"";",\n \"")' & + done + + wait +} + +jscompact() +{ + json_compact_prettify "$@" +} + helptopcmd() { local topcmd=$1 cmd=$2; shift 2 white $topcmd @@ -52,6 +71,7 @@ EOF } usage() { + set +x __usage "$@" exit 1 } @@ -76,7 +96,7 @@ color() { with_color() { local color=$1; shift color $color - echo -ne "$*" + echo -ne "$@" color reset } @@ -84,11 +104,19 @@ colorise_colors=( red green blue yellow white cyan ) colorise() { + ## Disable tracing locally: + if test -n "$(echo $- | tr -cd x)" + then set +x + local exit='set -x' + else local exit= + fi + local i for ((i=0; $#!=0; i++)) do echo -n "$(with_color ${colorise_colors[$((i % 6))]} $1) " shift done + eval $exit } newline() { @@ -131,19 +159,26 @@ red() { with_color red $* } +verbose() { + if test -n "${verbose:-}" + then local subsys=$1; shift + msg "$(with_color blue $subsys): $*" + fi +} + progress() { local subsys=$1; shift - msg "$(with_color green $subsys): $(with_color blue $*)" + msg "$(with_color green $subsys): $(with_color blue "$@")" } progress_ne() { local subsys=$1; shift - msg_ne "$(with_color green $subsys): $(with_color blue $*)" + msg_ne "$(with_color green $subsys): $(with_color blue "$@")" } warn() { local subsys=$1; shift - msg "$(with_color green $subsys): $(with_color yellow $*)" + msg "$(with_color green $subsys): $(with_color yellow "$@")" } fail() { diff --git a/nix/workbench/profiles/prof1-variants.jq b/nix/workbench/profiles/prof1-variants.jq index 5eef42e9ff4..9d5c3840c2c 100644 --- a/nix/workbench/profiles/prof1-variants.jq +++ b/nix/workbench/profiles/prof1-variants.jq @@ -192,6 +192,14 @@ def all_profile_variants: } as $chainsync_cluster | ## + ### Definition vocabulary: filtering + ## + ({} + | .analysis.filters = [] + | .analysis.filter_exprs = [] + ) as $no_filtering + | + ## ### Definition vocabulary: timescale ## { genesis: @@ -219,22 +227,18 @@ def all_profile_variants: | ({} | .node.shutdown_on_block_synced = 1 - | .analysis.filters = [] ) as $for_1blk | ({} | .node.shutdown_on_block_synced = 3 - | .analysis.filters = [] ) as $for_3blk | ({} | .node.shutdown_on_block_synced = 15 - | .analysis.filters = [] ) as $for_15blk | ({} | .node.shutdown_on_block_synced = 30 - | .analysis.filters = [] ) as $for_30blk | ## @@ -277,13 +281,12 @@ def all_profile_variants: ## ### Definition vocabulary: scenario ## - ($mainnet_timescale * $chainsync_cluster * + ($mainnet_timescale * $chainsync_cluster * $no_filtering * $without_tracer * { desc: "Mainnet chain syncing benchmark" , scenario: "chainsync" , preset: "mainnet" , analysis: { type: "performance" - , filters: [] } }) as $scenario_chainsync | @@ -300,15 +303,15 @@ def all_profile_variants: ## ### Definition vocabulary: base variant ## - ($scenario_fixed_loaded * $doublet * $dataset_empty * $for_1blk * + ($scenario_fixed_loaded * $doublet * $dataset_empty * $for_1blk * $no_filtering * { desc: "Stop as soon as we've seen a single block" }) as $startstop_base | - ($scenario_fixed_loaded * $doublet * $dataset_empty * $for_3blk * + ($scenario_fixed_loaded * $doublet * $dataset_empty * $for_3blk * $no_filtering * { desc: "Miniature dataset, CI-friendly duration, test scale" }) as $citest_base | - ($scenario_fixed_loaded * $doublet * $dataset_miniature * $for_15blk * + ($scenario_fixed_loaded * $doublet * $dataset_miniature * $for_15blk * $no_filtering * { desc: "Miniature dataset, CI-friendly duration, bench scale" }) as $cibench_base | diff --git 
a/nix/workbench/profiles/prof2-derived.jq b/nix/workbench/profiles/prof2-derived.jq index 39f1422005a..73e4ea4c458 100644 --- a/nix/workbench/profiles/prof2-derived.jq +++ b/nix/workbench/profiles/prof2-derived.jq @@ -198,9 +198,11 @@ def add_derived_params: , cluster_startup_overhead_s: $dataset_induced_startup_delay_conservative , filter_exprs: ($ana.filter_exprs + - [ { tag: "CBlock", contents: { tag: "BMinimumAdoptions" + (if $n_pools == 0 then [] + else + [ { tag: "CBlock", contents: { tag: "BMinimumAdoptions" , contents: ($n_pools - 1) } } - ]) + ] end)) } }) } diff --git a/nix/workbench/profiles/tracing.nix b/nix/workbench/profiles/tracing.nix index 9c466b40604..730810ca9c9 100644 --- a/nix/workbench/profiles/tracing.nix +++ b/nix/workbench/profiles/tracing.nix @@ -19,10 +19,16 @@ "Forwarder" ]); }; - BlockFetch.severity = "Info"; - ChainSync.severity = "Info"; + + ## These are benchmarking-specific config deviations from the default. + ## + "BlockFetch".severity = "Info"; + "BlockFetch.Client.CompletedBlockFetch".maxFrequency = 0; + "ChainSync".severity = "Info"; + "ChainSync.Client.DownloadedHeader".maxFrequency = 0; "Forge.Loop.BlockContext".severity = "Info"; - "Forge.Loop.LedgerView".severity = "Info"; "Forge.Loop.LedgerState".severity = "Info"; + "Forge.Loop.LedgerView".severity = "Info"; + "Startup".severity = "Info"; }; } diff --git a/nix/workbench/run.sh b/nix/workbench/run.sh index 29c318e8858..2191258feb4 100644 --- a/nix/workbench/run.sh +++ b/nix/workbench/run.sh @@ -1,8 +1,15 @@ -global_rundir_def=$PWD/run +global_rundir_def=$(realpath ${WB_RUNDIR:-$PWD/run}) usage_run() { usage "run" "Managing cluster runs" </dev/null + then fail "set-add $name: constituent run missing: $(white $x)" + fi + ln -s ../../$x + done);; + + run-or-set | ros ) + local usage="USAGE: wb run $op [--query] [--on-remote | -or | -r] NAME" + local query= get_args=() on_remote= + while test $# -gt 0 + do case "$1" in + --try | --query ) get_args+=($1); query='true';; + --on-remote | -or | -r ) on_remote='true';; + --* ) msg "FATAL: run-or-set, unknown flag '$1'"; usage_run;; + * ) break;; esac; shift; done + + local name=${1:?$usage} + local env=$( jq <<<$remote .env -r) + local depl=$(jq <<<$remote .depl -r) + if test -n "$on_remote" + then if test -n "$(ssh $env -- sh -c "'$(run_ls_sets_cmd $depl/runs)'" | + grep $name || true)" + then rsync -Wa --delete-after \ + $env:$depl/runs/.sets/$name ../cardano-node/run/.sets + ssh $env -- \ + sh -c "'cd $depl/runs/.sets/$name && find . -type l | cut -d/ -f2'" + else ssh $env -- \ + sh -c "'if test -f $depl/runs/$name/meta.json; + then echo $name; + else echo \"$(red run-or-set on $env/$depl:) missing run or set $(white $name)\" + exit 1; + fi'" + fi + elif test -n "$(run list-sets | grep $name || true)" + then (cd "$global_rundir/.sets/$name" + find . 
-type l | cut -d/ -f2)
+        elif run get "${get_args[@]}" $name >/dev/null
+        then echo $name
+        elif test -n "$query"
+        then return 1
+        else fail "run-or-set: missing run or set $(white $name)"
+        fi;;

    list-pattern | lsp )
        test -d "$global_rundir" &&
@@ -87,7 +203,7 @@ case "$op" in
        else echo -n "$global_rundir/$1"
        fi;;

-    fix-legacy-run-structure | fix-legacy )
+    fix-legacy-run-structure | fix-legacy | flrs )
        local usage="USAGE: wb run $op RUN"
        local run=${1:?$usage}
        local dir=$(run compute-path "$run")
@@ -110,25 +226,44 @@ case "$op" in
                    mv "$logdir" "$dir"/$logs_less; done; fi
        else msg "fixing up a cardano-ops run in: $dir"; fi

-        progress "run | aws-get" "adding manifest"
+        progress "run | fix-legacy-run-structure" "adding manifest"
        jq_fmutate "$dir"/meta.json '
           .meta.manifest = $manifest
        ' --argjson manifest "$(legacy_run_manifest $dir)"

+        progress "run | fix-legacy-run-structure" "adding timing"
+        jq_fmutate "$dir"/meta.json '
+           .meta.timing = $timing
+        ' --argjson timing "$(legacy_run_timing $dir)"
+
        jq '
          .meta.profile_content
        | .analysis.filters += ["model"]
+       | .node.tracing_backend =
+           (if .node.withNewTracing
+            then "trace-dispatcher"
+            else "iohk-monitoring"
+            end)
        ' "$dir"/meta.json > "$dir"/profile.json;;

    check )
-        local usage="USAGE: wb run $op RUN"
+        local usage="USAGE: wb run $op [--query] RUN"
+        local query=
+        while test $# -gt 0
+        do case "$1" in
+               --try | --query ) query='true';;
+               --* ) msg "FATAL: check, unknown flag '$1'"; usage_run;;
+               * ) break;; esac; shift; done
+
        local run=${1:?$usage}
        local dir=$(run compute-path "$run")

-        if ! jq_check_json "$dir"/meta.json
+        if ! jq_check_json "$dir"/meta.json 2>/dev/null
        then if test $run = 'current'
             then local alt=$(run list | tail -n1)
                  progress 'run | check' "$(with_color white current) missing, resetting to $(with_color white $alt)"
                  run set-current $alt
+             elif test -n "$query"
+             then return 1
             else fatal "run $run (at $dir) missing a file: meta.json"; fi; fi

        test -f "$dir"/profile.json -a -f "$dir"/genesis-shelley.json ||
@@ -162,10 +297,19 @@ case "$op" in
        fi;;

    get-path | get )
-        local usage="USAGE: wb run $op RUN"
+        local usage="USAGE: wb run $op [--query] RUN"
+        local check_args=()
+        while test $# -gt 0
+        do case "$1" in
+               --try | --query ) check_args+=($1);;
+               --* ) msg "FATAL: get-path, unknown flag '$1'"; usage_run;;
+               * ) break;; esac; shift; done
+
        local run=${1:?$usage}
-        run check "$run"
-        run compute-path "$run";;
+        if run check "${check_args[@]}" "$run"
+        then run compute-path "$run"
+        else return 1
+        fi;;

    show-meta | show | meta | s )
        local usage="USAGE: wb run $op RUN"
@@ -413,20 +557,7 @@ case "$op" in
        echo $dir;;

-    list-aws | lsaws )
-        local usage="USAGE: wb run $op [DEPLOYMENT=bench-1] [ENV=bench]"
-        local depl=${1:-bench-1}
-        local env=${2:-bench}
-
-        ssh $env -- \
-            sh -c "'cd $depl/runs &&
-                    find . -mindepth 2 -maxdepth 2 -type f -name 'meta.json' -exec dirname \{\} \; |
-                    grep -v current\$\\|deploy-logs\$ |
-                    cut -c3- |
-                    sort ||
-                    true'" 2>/dev/null;;
-
-    allocate-from-aws | aws-get )
+    fetch-run | fetch | fr )
        local usage="USAGE: wb run $op RUN [MACHINE] [DEPLOYMENT=bench-1] [ENV=bench]"
        local run=${1:?$usage}
        local mach=${2:-all-hosts}
@@ -444,28 +575,34 @@ case "$op" in
           )
        run_aws_get "${args[@]}";;

-    analysis-from-aws | aws-get-analysis | fetch-analysis | fa )
+    fetch-analysis | fa )
        local usage="USAGE: wb run $op RUN.."
-        local runs=($*) run
+        local runs=() run
+        for rs in $*
+        do runs+=($(run "${sargs[@]}" run-or-set --query --on-remote $rs || echo $rs))
+        done
+        if test $# = 0; then runs=(current); fi
+
        local env='bench'
        local depl='bench-1'

        progress "aws" "trying to fetch analyses: $(white ${runs[*]})"
        for run in ${runs[*]}
        do if test "$(ssh $env -- sh -c "'ls -ld $depl/runs/$run | wc -l'")" = 0
-          then fail "aws-analysis: run does not exist on AWS: $(white $run)"
+          then fail "fetch-analysis: run does not exist on AWS: $(white $run)"
           elif test "$(ssh $env -- sh -c "'ls -ld $depl/runs/$run/analysis | wc -l'")" = 0
-          then fail "aws-analysis: run has not been analysed on AWS: $(white $run)"
+          then fail "fetch-analysis: run has not been analysed on AWS: $(white $run)"
           else local analysis_files=(
                   $(ssh $env -- \
-                     sh -c "'cd $depl/runs/$run && ls analysis/*.{json,cdf,org,txt} | grep -v flt.json | grep -v flt.logobjs.json | grep -v flt.perf-stats.json'" \
+                     sh -c "'cd $depl/runs/$run && ls analysis/{cdf/*.cdf,*.{json,org,txt}} | fgrep -v -e flt.json -e flt.logobjs.json -e flt.perf-stats.json'" \
                      2>/dev/null)
                )
                local args=(
+                   "${run_aws_get_args[@]}"
                    "$env" "$depl" "$run"
-                   'if test -f compressed/logs-$obj.tar.zst; then cat compressed/logs-$obj.tar.zst; else tar c $obj --zstd --ignore-failed-read; fi'
+                   'tar c ${files[*]} --zstd'
                    common-run-files
                    ${analysis_files[*]}
@@ -483,7 +620,7 @@ case "$op" in
        local env=${4:-bench}

        if test "$(ssh $env -- sh -c "'ls -ld $depl/runs/$run | wc -l'")" = 0
-       then fail "aws-analysis: run does not exist on AWS: $(white $run)"
+       then fail "analyse-aws: run does not exist on AWS: $(white $run)"
        else ssh $env -- sh -c "'export WB_RUNDIR=../$depl/runs && cd cardano-node && echo env: $(yellow $env), rundir: $(color blue)\$WB_RUNDIR$(color reset), workbench: $(color yellow)\$(git log -n1)$(color reset) && make analyse RUN=$run'"
        fi
        ;;
@@ -647,21 +784,34 @@ EOF

run_aws_get() {
    local usage='USAGE: run_aws_get ENV DEPLOYMENT RUN REMOTE-TAR-CMD OBJ..'
+    local clean=
+    while test $# -gt 0
+    do case "$1" in
+           --clean | -c ) clean='true';;
+           -- ) shift; break;;
+           --* ) msg "FATAL: unknown flag '$1'"; fail "$usage";;
+           * ) break;; esac; shift; done
+
    local env=${1:?$usage}; shift
    local depl=${1:?$usage}; shift
    local run=${1:?$usage}; shift
    local remote_tar_cmd=${1:?$usage}; shift
    local objects=($*)

-    progress "aws-get" "env $(yellow $env) depl $(yellow $depl) run $(white $run)"
-    progress "aws-get" "tar $(green $remote_tar_cmd) objects ${objects[*]}"
+    progress "run_aws_get" "env $(yellow $env) depl $(yellow $depl) run $(white $run)"
+    progress "run_aws_get" "tar $(green $remote_tar_cmd)"

    local meta=$(ssh $env -- sh -c "'jq . $depl/runs/$run/meta.json'")
    if ! jq . <<<$meta >/dev/null
-    then fail "allocate-from-aws: malformed $(yellow meta.json) in $(white $run) on $(white $depl)@$(white env)"; fi
+    then fail "run_aws_get: malformed $(yellow meta.json) in $(white $run) on $(white $depl)@$(white $env)"; fi

    ## Minor validation passed, create & populate run with remote data:
    local dir=$global_rundir/$run
+    if test -z "$run" -o -z "$global_rundir"
+    then fail "run_aws_get: run=$run global_rundir=$global_rundir"
+    elif test -n "$clean"
+    then rm -rf "$dir"
+    fi
    mkdir -p "$dir"
    jq .
<<<$meta > $dir/meta.json @@ -678,26 +828,27 @@ run_aws_get() { local xs=(${xs2[*]}) local count=${#xs[*]} - progress "run | aws-get $(white $run)" "objects to fetch: $(white $count) total: $(yellow ${xs[*]})" + progress "run | fetch $(white $run)" "objects to fetch: $(white $count) total" local max_batch=9 base=0 batch while test $base -lt $count do local batch=(${xs[*]:$base:$max_batch}) - progress_ne "run | aws-get $(white $run)" "fetching batch: " - local obj= - for obj in ${batch[*]} - do { ssh $env -- \ - sh -c "'obj=${obj}; cd $depl/runs/$run && ${remote_tar_cmd}'" 2>/dev/null | - (cd $dir; tar x --zstd) - echo -ne " $(yellow $obj)" >&2 - } & - done - wait - echo >&2 + { + local lbatch=(${batch[*]}) + ssh $env -- \ + sh -c "'files=(${lbatch[*]}); cd $depl/runs/$run && ${remote_tar_cmd}'" | + (cd $dir + tar x --zstd || + progress "fetch error" "'files=(${lbatch[*]}); cd $depl/runs/$run && ${remote_tar_cmd}'" + ) + progress "run | fetch $(white $run)" "batch done: $(yellow ${batch[*]})" + } & + sleep 1 base=$((base + max_batch)) done + wait - progress "run | aws-get" "adding manifest" + progress "run | fetch" "adding manifest" jq_fmutate "$dir"/meta.json '.meta.manifest = $manifest ' --argjson manifest "$(legacy_run_manifest $dir)" } @@ -738,3 +889,79 @@ legacy_run_manifest() { } ' --null-input "${args[@]}" } + +legacy_run_timing() { + local dir=$1 + local stamp=$(jq -r '.meta.timestamp' $dir/meta.json) + + local args=( + --argjson stamp $stamp + ) + jq ' + .meta.profile_content as $prof + | ($stamp + ($prof.generator.tx_count / $prof.generator.tps)) as $stamp_end + | + { future_offset: "0 seconds" + , start: $stamp + , shutdown_end: $stamp_end + , workload_end: $stamp_end + , earliest_end: $stamp_end + + , start_tag: .meta.tag[:16] + , start_human: ($stamp | todateiso8601) + , systemStart: ($stamp | todateiso8601) + , shutdownTime: ($stamp_end | todateiso8601) + , workloadEndTime: ($stamp_end | todateiso8601) + , earliestEndTime: ($stamp_end | todateiso8601) + }' $dir/meta.json "${args[@]}" +} + +expand_runspecs() { + local runs=() rs + + if test $# = 0 + then runs=(current) + else for rs in $* + do runs+=($(run run-or-set $rs)) + done + fi + echo ${runs[*]} +} + +run_ls_cmd() { + local rundir=$1 + + echo 'cd '$rundir' && \ + find . -mindepth 2 -maxdepth 2 -type f -name meta.json -exec dirname \{\} \; | + grep -v "current\$\|deploy-logs\$" | + cut -c3- | + sort || true' +} + +run_ls_tabulated_cmd() { + local rundir=$1 limit=$2 + + echo 'cd '$rundir' && \ + find . -mindepth 2 -maxdepth 2 -type f -name meta.json -exec dirname \{\} \; | + grep -v "current\$\|deploy-logs\$" | + cut -c3- | + sort | + tail -n'$limit' | + while read lst_tag; test -n "$lst_tag"; + do printf_args=( + $(jq ".meta | .manifest as \$manif | + \"\\(.profile) \\(\$manif.\"cardano-node\" | .[:7]) \\(.batch) \\(\$manif.\"cardano-node-version\") \\(\$manif.\"cardano-node-branch\")\" + " -r <$lst_tag/meta.json)) + printf "%16s %-75s %7s %-20s %-15s %10s\n" \ + $(echo $lst_tag |cut -c -16) ${printf_args[*]} + done || true' +} + +run_ls_sets_cmd() { + local rundir=$1 + + echo 'cd '$rundir'/.sets && \ + find -L . 
-mindepth 3 -maxdepth 3 -type f -name meta.json -exec dirname \{\} \; | + cut -d/ -f2 | + sort -u || true' +} diff --git a/nix/workbench/wb b/nix/workbench/wb index 6aafe09b5a2..30c42ee86e8 100755 --- a/nix/workbench/wb +++ b/nix/workbench/wb @@ -108,7 +108,6 @@ start() local prebuild_done= local genesis_cache_entry_dir= local topology_dir= - local verbose= local manifest="{}" local iterations=1 local no_retry_failed_runs=t diff --git a/trace-resources/src/Cardano/Logging/Resources/Darwin.hsc b/trace-resources/src/Cardano/Logging/Resources/Darwin.hsc index c3e27f3c116..bdfd7795936 100644 --- a/trace-resources/src/Cardano/Logging/Resources/Darwin.hsc +++ b/trace-resources/src/Cardano/Logging/Resources/Darwin.hsc @@ -100,6 +100,10 @@ readResourceStatsInternal = getProcessID >>= \pid -> do , rHeap = GhcStats.gcdetails_mem_in_use_bytes $ GhcStats.gc rts , rRSS = _resident_size mem , rCentiBlkIO = 0 + , rNetRd = 0 + , rNetWr = 0 + , rFsRd = 0 + , rFsWr = 0 , rThreads = 0 } where diff --git a/trace-resources/src/Cardano/Logging/Resources/Dummy.hs b/trace-resources/src/Cardano/Logging/Resources/Dummy.hs index 3d123bd044b..6a05e790d2c 100644 --- a/trace-resources/src/Cardano/Logging/Resources/Dummy.hs +++ b/trace-resources/src/Cardano/Logging/Resources/Dummy.hs @@ -25,6 +25,10 @@ readResourceStatsInternal = do , rRSS = 0 , rHeap = 0 , rCentiBlkIO = 0 + , rNetRd = 0 + , rNetWr = 0 + , rFsRd = 0 + , rFsWr = 0 , rThreads = 0 } where diff --git a/trace-resources/src/Cardano/Logging/Resources/Linux.hs b/trace-resources/src/Cardano/Logging/Resources/Linux.hs index f771ad7e003..6c9256256dd 100644 --- a/trace-resources/src/Cardano/Logging/Resources/Linux.hs +++ b/trace-resources/src/Cardano/Logging/Resources/Linux.hs @@ -1,4 +1,5 @@ {-# LANGUAGE CPP #-} +{-# LANGUAGE RecordWildCards #-} module Cardano.Logging.Resources.Linux ( @@ -14,20 +15,103 @@ import System.Posix.Files (fileMode, getFileStatus, ownerReadMode) import Text.Read (readMaybe) +-- * Disk IO stats: +-- /proc/[pid]/io (since kernel 2.6.20) +-- This file contains I/O statistics for the process, for example: +-- +-- # cat /proc/3828/io +-- rchar: 323934931 +-- wchar: 323929600 +-- syscr: 632687 +-- syscw: 632675 +-- read_bytes: 0 +-- write_bytes: 323932160 +-- cancelled_write_bytes: 0 +-- +-- The fields are as follows: +-- +-- rchar: characters read +-- The number of bytes which this task has caused to be read from storage. This is simply the sum +-- of bytes which this process passed to read(2) and similar system calls. It includes things such +-- as terminal I/O and is unaffected by whether or not actual physical disk I/O was required (the +-- read might have been satisfied from pagecache). +-- +-- wchar: characters written +-- The number of bytes which this task has caused, or shall cause to be written to disk. Similar +-- caveats apply here as with rchar. +-- +-- syscr: read syscalls +-- Attempt to count the number of read I/O operations-that is, system calls such as read(2) and +-- pread(2). +-- +-- syscw: write syscalls +-- Attempt to count the number of write I/O operations-that is, system calls such as write(2) and +-- pwrite(2). +-- +-- read_bytes: bytes read +-- Attempt to count the number of bytes which this process really did cause to be fetched from the +-- storage layer. This is accurate for block-backed filesystems. +-- +-- write_bytes: bytes written +-- Attempt to count the number of bytes which this process caused to be sent to the storage layer. +-- +-- cancelled_write_bytes: +-- The big inaccuracy here is truncate. 
If a process writes 1MB to a file and then deletes the
+-- file, it will in fact perform no writeout. But it will have been accounted as having caused 1MB
+-- of write. In other words: this field represents the number of bytes which this process caused
+-- to not happen, by truncating pagecache. A task can cause "negative" I/O too. If this task
+-- truncates some dirty pagecache, some I/O which another task has been accounted for (in its
+-- write\_bytes) will not be happening.
+--
+-- Note: In the current implementation, things are a bit racy on 32-bit systems: if process A reads
+-- process B's /proc/[pid]/io while process B is updating one of these 64-bit counters, process A could
+-- see an intermediate result.
+--
+-- Permission to access this file is governed by a ptrace access mode PTRACE\_MODE\_READ\_FSCREDS check; see
+-- ptrace(2).
+--
+readProcBlockInOut :: IO (Word64, Word64)
+readProcBlockInOut = do
+  fields <- readProcList "/proc/self/io"
+  case -- We're only interested in 'read_bytes' & 'write_bytes':
+       fmap fromInteger . take 3 . drop 9 $ fields of
+    [fsRd, _, fsWr] -> pure (fsRd, fsWr)
+    _               -> pure (0, 0)
+
+-- * Network stats:
+-- grep IpExt /proc/self/net/netstat
+-- IpExt: InNoRoutes InTruncatedPkts InMcastPkts OutMcastPkts InBcastPkts OutBcastPkts InOctets OutOctets InMcastOctets OutMcastOctets InBcastOctets OutBcastOctets InCsumErrors InNoECTPkts InECT1Pkts InECT0Pkts InCEPkts
+-- IpExt: 0 0 20053 8977 2437 23 3163525943 196480057 2426648 1491754 394285 5523 0 3513269 0 217426 0
+--
+readProcNetInOut :: IO (Word64, Word64)
+readProcNetInOut = do
+  fields <- words . lastline . lines <$> readFile "/proc/self/net/netstat"
+  case -- We're only interested in 'InOctets' & 'OutOctets':
+       fmap readMaybe . take 2 . drop 7 $ fields of
+    [Just netIn, Just netOut] -> pure (netIn, netOut)
+    _                         -> pure (0, 0)
+  where
+    lastline ls | length ls == 4 = last ls -- ensures we read the fourth line
+                | otherwise      = []
+
 -- | TODO we have to expand the |readMemStats| function
 --      to read full data from |proc|
 readResourceStatsInternal :: IO (Maybe ResourceStats)
 readResourceStatsInternal = do
   rts <- GhcStats.getRTSStats
-  mkProcStats rts . fmap fromIntegral <$> readProcList "/proc/self/stat"
+  net <- readProcNetInOut
+  fs  <- readProcBlockInOut
+  mkProcStats rts net fs . fmap fromIntegral <$> readProcList "/proc/self/stat"
  where
-   mkProcStats :: GhcStats.RTSStats -> [Word64] -> Maybe ResourceStats
+   mkProcStats :: GhcStats.RTSStats -> (Word64, Word64) -> (Word64, Word64) -> [Word64] -> Maybe ResourceStats
    mkProcStats rts
-               (_:_:_:_:_:_:_:_:_:_            -- 00-09
-               :_:_:_:user:sys:_:_:_:_:threads -- 10-19
-               :_:_:_:rss:_:_:_:_:_:_          -- 20-29
-               :_:_:_:_:_:_:_:_:_:_            -- 30-39
-               :_:blkio:_rest) =               -- 40-42
+               (rNetRd, rNetWr)
+               (rFsRd, rFsWr)
+               (_:_:_:_:_:_:_:_:_:_              -- 00-09
+               :_:_:_:user:sys:_:_:_:_:rThreads  -- 10-19
+               :_:_:_:rss:_:_:_:_:_:_            -- 20-29
+               :_:_:_:_:_:_:_:_:_:_              -- 30-39
+               :_:rCentiBlkIO:_rest) =           -- 40-42
      Just $
        Resources
        { rCentiCpu   = user + sys
        , rCentiGC    = nsToCenti $ GhcStats.gc_cpu_ns rts
@@ -38,10 +122,9 @@ readResourceStatsInternal = do
        , rLive       = GhcStats.gcdetails_live_bytes $ GhcStats.gc rts
        , rHeap       = GhcStats.gcdetails_mem_in_use_bytes $ GhcStats.gc rts
        , rRSS        = rss * 4096 -- TODO: this is really PAGE_SIZE.
-       , rCentiBlkIO = blkio
-       , rThreads    = threads
+       , ..
        }
-   mkProcStats _ _ = Nothing
+   mkProcStats _ _ _ _ = Nothing

    nsToCenti :: GhcStats.RtsTime -> Word64
    nsToCenti = floor . (/ (10000000 :: Double)) .
fromIntegral diff --git a/trace-resources/src/Cardano/Logging/Resources/Types.hs b/trace-resources/src/Cardano/Logging/Resources/Types.hs index a1de16d384d..7df0bae3522 100644 --- a/trace-resources/src/Cardano/Logging/Resources/Types.hs +++ b/trace-resources/src/Cardano/Logging/Resources/Types.hs @@ -2,6 +2,7 @@ {-# LANGUAGE DeriveFunctor #-} {-# LANGUAGE DeriveGeneric #-} {-# LANGUAGE FlexibleInstances #-} +{-# LANGUAGE RecordWildCards #-} module Cardano.Logging.Resources.Types ( Resources(..) @@ -33,12 +34,16 @@ data Resources a , rHeap :: !a , rRSS :: !a , rCentiBlkIO :: !a + , rNetRd :: !a + , rNetWr :: !a + , rFsRd :: !a + , rFsWr :: !a , rThreads :: !a } deriving (Functor, Generic, Show) instance Applicative Resources where - pure a = Resources a a a a a a a a a a a + pure a = Resources a a a a a a a a a a a a a a a f <*> x = Resources { rCentiCpu = rCentiCpu f (rCentiCpu x) @@ -51,6 +56,10 @@ instance Applicative Resources where , rHeap = rHeap f (rHeap x) , rRSS = rRSS f (rRSS x) , rCentiBlkIO = rCentiBlkIO f (rCentiBlkIO x) + , rNetRd = rNetRd f (rNetRd x) + , rNetWr = rNetWr f (rNetWr x) + , rFsRd = rFsRd f (rFsRd x) + , rFsWr = rFsWr f (rFsWr x) , rThreads = rThreads f (rThreads x) } @@ -76,30 +85,38 @@ docResourceStats :: Documented ResourceStats docResourceStats = Documented [ DocMsg [] - [("Resources.Stat.Cputicks", "Reports the CPU ticks, sice the process was started") - ,("Resources.Mem.Resident", "") - ,("Resources.RTS.GcLiveBytes", "") - ,("Resources.RTS.GcMajorNum", "") - ,("Resources.RTS.GcMinorNum", "") - ,("Resources.RTS.Gcticks", "") - ,("Resources.RTS.Mutticks", "") - ,("Resources.RTS.Threads","") + [("Resources.Stat.Cputicks", "Kernel-reported CPU ticks (1/100th of a second), since process start") + ,("Resources.Mem.Resident", "Kernel-reported RSS (resident set size)") + ,("Resources.RTS.GcLiveBytes", "RTS-reported live bytes") + ,("Resources.RTS.GcMajorNum", "Major GCs") + ,("Resources.RTS.GcMinorNum", "Minor GCs") + ,("Resources.RTS.Gcticks", "RTS-reported CPU ticks spent on GC") + ,("Resources.RTS.Mutticks", "RTS-reported CPU ticks spent on mutator") + ,("Resources.State.NetRd", "IP packet bytes read") + ,("Resources.State.NetWr", "IP packet bytes written") + ,("Resources.State.FsRd", "FS bytes read") + ,("Resources.State.FsWr", "FS bytes written") + ,("Resources.RTS.Threads","RTS green thread count") ] "" ] instance LogFormatting ResourceStats where - forHuman rs = "Resources:" - <> " Cpu Ticks " <> (pack . show) (rCentiCpu rs) - <> ", GC centiseconds " <> (pack . show) (rCentiGC rs) - <> ", Mutator centiseconds " <> (pack . show) (rCentiMut rs) - <> ", GCs major " <> (pack . show) (rGcsMajor rs) - <> ", GCs minor " <> (pack . show) (rGcsMinor rs) - <> ", Allocated bytes " <> (pack . show) (rAlloc rs) - <>" , GC live bytes " <> (pack . show) (rLive rs) - <> ", RTS heap " <> (pack . show) (rHeap rs) - <> ", RSS " <> (pack . show) (rRSS rs) - <> ", Threads " <> (pack . show) (rThreads rs) + forHuman Resources{..} = "Resources:" + <> " Cpu Ticks " <> (pack . show) rCentiCpu + <> ", GC centiseconds " <> (pack . show) rCentiGC + <> ", Mutator centiseconds " <> (pack . show) rCentiMut + <> ", GCs major " <> (pack . show) rGcsMajor + <> ", GCs minor " <> (pack . show) rGcsMinor + <> ", Allocated bytes " <> (pack . show) rAlloc + <>" , GC live bytes " <> (pack . show) rLive + <> ", RTS heap " <> (pack . show) rHeap + <> ", RSS " <> (pack . show) rRSS + <> ", Net bytes read " <> (pack . show) rNetRd + <> " written " <> (pack . 
show) rNetWr + <> ", FS bytes read " <> (pack . show) rFsRd + <> " written " <> (pack . show) rFsWr + <> ", Threads " <> (pack . show) rThreads <> "." forMachine _dtal rs = mconcat @@ -114,16 +131,27 @@ instance LogFormatting ResourceStats where , "Heap" .= Number (fromIntegral $ rHeap rs) , "RSS" .= Number (fromIntegral $ rRSS rs) , "CentiBlkIO" .= Number (fromIntegral $ rCentiBlkIO rs) + , "NetRd" .= Number (fromIntegral $ rNetRd rs) + , "NetWr" .= Number (fromIntegral $ rNetWr rs) + , "FsRd" .= Number (fromIntegral $ rFsRd rs) + , "FsWr" .= Number (fromIntegral $ rFsWr rs) , "Threads" .= Number (fromIntegral $ rThreads rs) ] asMetrics rs = - [ IntM "Resources.Stat.Cputicks" (fromIntegral $ rCentiCpu rs) - , IntM "Resources.Mem.Resident" (fromIntegral $ rRSS rs) - , IntM "Resources.RTS.GcLiveBytes" (fromIntegral $ rLive rs) - , IntM "Resources.RTS.GcMajorNum" (fromIntegral $ rGcsMajor rs) - , IntM "Resources.RTS.GcMinorNum" (fromIntegral $ rGcsMinor rs) - , IntM "Resources.RTS.Gcticks" (fromIntegral $ rCentiGC rs) - , IntM "Resources.RTS.Mutticks" (fromIntegral $ rCentiMut rs) + [ IntM "Resources.Stat.Cputicks" (fromIntegral $ rCentiCpu rs) + , IntM "Resources.RTS.Gcticks" (fromIntegral $ rCentiGC rs) + , IntM "Resources.RTS.Mutticks" (fromIntegral $ rCentiMut rs) + , IntM "Resources.RTS.GcMajorNum" (fromIntegral $ rGcsMajor rs) + , IntM "Resources.RTS.GcMinorNum" (fromIntegral $ rGcsMinor rs) + , IntM "Resources.RTS.Alloc" (fromIntegral $ rAlloc rs) + , IntM "Resources.RTS.GcLiveBytes" (fromIntegral $ rLive rs) + , IntM "Resources.RTS.Heap" (fromIntegral $ rHeap rs) + , IntM "Resources.Mem.Resident" (fromIntegral $ rRSS rs) + , IntM "Resources.Stat.BlkIOticks" (fromIntegral $ rCentiBlkIO rs) + , IntM "Resources.State.NetRd" (fromIntegral $ rNetRd rs) + , IntM "Resources.State.NetWr" (fromIntegral $ rNetWr rs) + , IntM "Resources.State.FsRd" (fromIntegral $ rFsRd rs) + , IntM "Resources.State.FsWr" (fromIntegral $ rFsWr rs) , IntM "Resources.RTS.Stat.Threads" (fromIntegral $ rThreads rs) ] diff --git a/trace-resources/src/Cardano/Logging/Resources/Windows.hsc b/trace-resources/src/Cardano/Logging/Resources/Windows.hsc index 2954482f768..df8ddac9e40 100644 --- a/trace-resources/src/Cardano/Logging/Resources/Windows.hsc +++ b/trace-resources/src/Cardano/Logging/Resources/Windows.hsc @@ -154,6 +154,10 @@ readResourceStatsInternal = getCurrentProcessId >>= \pid -> do , rHeap = GhcStats.gcdetails_mem_in_use_bytes $ GhcStats.gc rts , rRSS = fromIntegral (_workingSetSize mem) , rCentiBlkIO = 0 + , rNetRd = 0 + , rNetWr = 0 + , rFsRd = 0 + , rFsWr = 0 , rThreads = 0 } where
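
--
Supplementary sketches (appended notes; none of the following is part of the patch itself).

The read_bytes/write_bytes extraction in readProcBlockInOut above leans on how
readProcList tokenises /proc/self/io: the assumption (not shown in this patch)
is that every whitespace-separated token is parsed as an Integer, with
non-numeric tokens such as the "rchar:" labels coming out as 0. Under that
assumption read_bytes lands at token index 9 and write_bytes at index 11,
which is exactly what 'take 3 . drop 9' picks out. A self-contained sketch,
with the hypothetical procListOfSample standing in for readProcList:

    import Data.Maybe (fromMaybe)
    import Text.Read  (readMaybe)

    -- Stand-in for readProcList: parse every token, defaulting labels to 0.
    procListOfSample :: String -> [Integer]
    procListOfSample = map (fromMaybe 0 . readMaybe) . words

    main :: IO ()
    main = do
      let sample = unlines
            [ "rchar: 323934931", "wchar: 323929600"
            , "syscr: 632687", "syscw: 632675"
            , "read_bytes: 0", "write_bytes: 323932160"
            , "cancelled_write_bytes: 0" ]
      -- Token layout: [0, rchar, 0, wchar, 0, syscr, 0, syscw, 0, read_bytes,
      -- 0, write_bytes, 0, cancelled]; the wildcard skips the label slot.
      case take 3 . drop 9 $ procListOfSample sample of
        [fsRd, _, fsWr] -> print (fsRd, fsWr) -- expected: (0,323932160)
        _               -> print (0 :: Integer, 0 :: Integer)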
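
The netstat side is analogous: in the IpExt values line quoted in the comment,
token 0 is the "IpExt:" tag, so InOctets and OutOctets sit at token indices 7
and 8, hence 'take 2 . drop 7'. A runnable sketch against that sample line
(netInOutOf is a hypothetical name, not an API of the module):

    import Data.Word (Word64)
    import Text.Read (readMaybe)

    -- Mirror of readProcNetInOut's indexing over a single IpExt values line.
    netInOutOf :: String -> (Word64, Word64)
    netInOutOf l =
      case map readMaybe . take 2 . drop 7 $ words l of
        [Just netIn, Just netOut] -> (netIn, netOut)
        _                         -> (0, 0)

    main :: IO ()
    main = print $ netInOutOf
      "IpExt: 0 0 20053 8977 2437 23 3163525943 196480057 2426648 1491754 394285 5523 0 3513269 0 217426 0"
    -- should print (3163525943,196480057), i.e. (InOctets, OutOctets)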
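
Finally, the widened Applicative instance in Types.hs (now covering all 15
fields) is what keeps field-wise combination of samples cheap downstream. For
instance, a hypothetical delta helper, assuming only the Resources(..) export
shown above:

    import Cardano.Logging.Resources.Types (Resources (..))

    -- Hypothetical helper, not part of this patch: the Applicative instance
    -- zips two samples field by field, so a per-interval delta is one line.
    deltaResources :: Num a => Resources a -> Resources a -> Resources a
    deltaResources newer older = (-) <$> newer <*> older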