tweak debug levels, document some guidelines
For now, the beginnings of a project-wide policy for what output to show at each debug level. Later we'll want more flexibility, eg filtering by topic.
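For context, here is a minimal standalone sketch of the kind of level-gated helper these guidelines are about, in the spirit of Hledger.Utils.Debug's ptraceAt / dbg1..dbg9 (simplified and with a hard-coded level; not the real implementation, which reads the level from the --debug command line flag):

import Debug.Trace (trace)

-- The debug level in effect; in real hledger this comes from --debug N.
debugLevel :: Int
debugLevel = 1

-- Log a labelled, showable value to stderr if the current debug level is
-- at least the given level, then return the value unchanged.
ptraceAt :: Show a => Int -> String -> a -> a
ptraceAt level label a
  | debugLevel >= level = trace (label ++ ": " ++ show a) a
  | otherwise           = a

-- Convenience helpers, one per verbosity level (only a few shown here).
dbg1, dbg4, dbg7 :: Show a => String -> a -> a
dbg1 = ptraceAt 1   -- most common troubleshooting info
dbg4 = ptraceAt 4   -- report generation
dbg7 = ptraceAt 7   -- input file reading

main :: IO ()
main = do
  print $ dbg1 "balances" ([10, 20] :: [Int])   -- shown, since debugLevel >= 1
  print $ dbg4 "report rows" (3 :: Int)         -- hidden at debugLevel 1
  print $ dbg7 "reader" ("journal" :: String)   -- hidden at debugLevel 1

With debugLevel = 1 only the dbg1 call prints; raising it to 4 or 7 would reveal the others, which is the behaviour the level changes below rely on.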
parent 6e36ede9aa
commit 684cb45e1a
@@ -258,7 +258,7 @@ priceLookup pricesatdate d from mto =
 fromnode = node m from
 mto' = mto <|> mdefaultto
 where
-mdefaultto = dbg4 ("default valuation commodity for "++T.unpack from) $
+mdefaultto = dbg1 ("default valuation commodity for "++T.unpack from) $
 M.lookup from defaultdests
 in
 case mto' of
@@ -276,12 +276,12 @@ priceLookup pricesatdate d from mto =
 case sp fromnode tonode g :: Maybe [Node] of
 Nothing -> Nothing
 Just nodes ->
-dbg ("market price "++intercalate "->" (map T.unpack comms)) $
+dbg ("market price for "++intercalate " -> " (map T.unpack comms)) $
 Just $ product $ pathEdgeLabels g nodes -- convert to a single exchange rate
 where comms = catMaybes $ map (lab g) nodes

 -- log a message and a Maybe Quantity, hiding Just/Nothing and limiting decimal places
-dbg msg = dbg4With (((msg++": ")++) . maybe "" (show . roundTo 8))
+dbg msg = dbg1With (((msg++": ")++) . maybe "" (show . roundTo 8))

 tests_priceLookup =
 let
@@ -311,33 +311,32 @@ tests_priceLookup =
 -- the given day.
 pricesAtDate :: [PriceDirective] -> [MarketPrice] -> Day -> PriceGraph
 pricesAtDate pricedirectives impliedmarketprices d =
 -- trace ("pricesAtDate ("++show d++")") $
 dbg9 ("pricesAtDate "++show d) $
 PriceGraph{prGraph=g, prNodemap=m, prDefaultValuationCommodities=defaultdests}
 where
 -- prices in effect on date d, either declared or implied
-declaredandimpliedprices = dbg5 "declaredandimpliedprices" $
+currentdeclaredandimpliedprices = dbg2 "currentdeclaredandimpliedprices" $
 latestPriceForEachPairOn pricedirectives impliedmarketprices d

 -- infer any additional reverse prices not already declared or implied
-reverseprices =
-dbg5 "reverseprices" $
-map marketPriceReverse declaredandimpliedprices \\ declaredandimpliedprices
+reverseprices = dbg2 "reverseprices" $
+map marketPriceReverse currentdeclaredandimpliedprices \\ currentdeclaredandimpliedprices

 -- build the graph and associated node map
 (g, m) =
 mkMapGraph
-(dbg5 "g nodelabels" $ sort allcomms) -- this must include all nodes mentioned in edges
-(dbg5 "g edges" $ [(mpfrom, mpto, mprate) | MarketPrice{..} <- prices])
+(dbg9 "price graph labels" $ sort allcomms) -- this must include all nodes mentioned in edges
+(dbg9 "price graph edges" $ [(mpfrom, mpto, mprate) | MarketPrice{..} <- prices])
 :: (Gr CommoditySymbol Quantity, NodeMap CommoditySymbol)
 where
-prices = declaredandimpliedprices ++ reverseprices
+prices = currentdeclaredandimpliedprices ++ reverseprices
 allcomms = map mpfrom prices

 -- determine a default valuation commodity D for each source commodity S:
 -- the price commodity in the latest declared market price for S (on any date)
 defaultdests = M.fromList [(mpfrom,mpto) | MarketPrice{..} <- alldeclaredprices]
 where
-alldeclaredprices = dbg5 "alldeclaredprices" $ map priceDirectiveToMarketPrice pricedirectives
+alldeclaredprices = dbg2 "alldeclaredprices" $ map priceDirectiveToMarketPrice pricedirectives

 -- From a list of price directives in parse order, and a list of
 -- transaction-implied market prices in parse order, get the effective
@@ -346,7 +345,6 @@ pricesAtDate pricedirectives impliedmarketprices d =
 -- that day, with declared prices taking precedence.
 latestPriceForEachPairOn :: [PriceDirective] -> [MarketPrice] -> Day -> [MarketPrice]
 latestPriceForEachPairOn pricedirectives impliedmarketprices d =
-dbg5 "latestPriceForEachPairOn" $
 let
 -- consider only declarations/transactions before the valuation date
 declaredprices = map priceDirectiveToMarketPrice $ filter ((<=d).pddate) pricedirectives
@@ -111,7 +111,7 @@ readJournal :: InputOpts -> Maybe FilePath -> Text -> IO (Either String Journal)
 readJournal iopts mpath txt = do
 let r :: Reader IO =
 fromMaybe JournalReader.reader $ findReader (mformat_ iopts) mpath
-dbg1IO "trying reader" (rFormat r)
+dbg7IO "trying reader" (rFormat r)
 (runExceptT . (rReadFn r) iopts (fromMaybe "(string)" mpath)) txt

 -- | Read the default journal file specified by the environment, or raise an error.
@@ -382,7 +382,7 @@ rulesp = do
 }

 blankorcommentlinep :: CsvRulesParser ()
-blankorcommentlinep = lift (dbgparse 3 "trying blankorcommentlinep") >> choiceInState [blanklinep, commentlinep]
+blankorcommentlinep = lift (dbgparse 8 "trying blankorcommentlinep") >> choiceInState [blanklinep, commentlinep]

 blanklinep :: CsvRulesParser ()
 blanklinep = lift (skipMany spacenonewline) >> newline >> return () <?> "blank line"
@@ -395,7 +395,7 @@ commentcharp = oneOf (";#*" :: [Char])

 directivep :: CsvRulesParser (DirectiveName, String)
 directivep = (do
-lift $ dbgparse 3 "trying directive"
+lift $ dbgparse 8 "trying directive"
 d <- fmap T.unpack $ choiceInState $ map (lift . string . T.pack) directives
 v <- (((char ':' >> lift (many spacenonewline)) <|> lift (some spacenonewline)) >> directivevalp)
 <|> (optional (char ':') >> lift (skipMany spacenonewline) >> lift eolof >> return "")
@@ -418,7 +418,7 @@ directivevalp = anySingle `manyTill` lift eolof

 fieldnamelistp :: CsvRulesParser [CsvFieldName]
 fieldnamelistp = (do
-lift $ dbgparse 3 "trying fieldnamelist"
+lift $ dbgparse 8 "trying fieldnamelist"
 string "fields"
 optional $ char ':'
 lift (skipSome spacenonewline)
@@ -444,7 +444,7 @@ barefieldnamep = some $ noneOf (" \t\n,;#~" :: [Char])

 fieldassignmentp :: CsvRulesParser (HledgerFieldName, FieldTemplate)
 fieldassignmentp = do
-lift $ dbgparse 3 "trying fieldassignmentp"
+lift $ dbgparse 8 "trying fieldassignmentp"
 f <- journalfieldnamep
 v <- choiceInState [ assignmentseparatorp >> fieldvalp
 , lift eolof >> return ""
@@ -454,7 +454,7 @@ fieldassignmentp = do

 journalfieldnamep :: CsvRulesParser String
 journalfieldnamep = do
-lift (dbgparse 2 "trying journalfieldnamep")
+lift (dbgparse 8 "trying journalfieldnamep")
 T.unpack <$> choiceInState (map (lift . string . T.pack) journalfieldnames)

 maxpostings = 99
@@ -489,7 +489,7 @@ journalfieldnames =

 assignmentseparatorp :: CsvRulesParser ()
 assignmentseparatorp = do
-lift $ dbgparse 3 "trying assignmentseparatorp"
+lift $ dbgparse 8 "trying assignmentseparatorp"
 _ <- choiceInState [ lift (skipMany spacenonewline) >> char ':' >> lift (skipMany spacenonewline)
 , lift (skipSome spacenonewline)
 ]
@@ -497,13 +497,13 @@ assignmentseparatorp = do

 fieldvalp :: CsvRulesParser String
 fieldvalp = do
-lift $ dbgparse 2 "trying fieldvalp"
+lift $ dbgparse 8 "trying fieldvalp"
 anySingle `manyTill` lift eolof

 -- A conditional block: one or more matchers, one per line, followed by one or more indented rules.
 conditionalblockp :: CsvRulesParser ConditionalBlock
 conditionalblockp = do
-lift $ dbgparse 3 "trying conditionalblockp"
+lift $ dbgparse 8 "trying conditionalblockp"
 string "if" >> lift (skipMany spacenonewline) >> optional newline
 ms <- some matcherp
 as <- many (try $ lift (skipSome spacenonewline) >> fieldassignmentp)
@@ -520,7 +520,7 @@ matcherp = try fieldmatcherp <|> recordmatcherp
 -- A pattern on the whole line, not beginning with a csv field reference.
 recordmatcherp :: CsvRulesParser Matcher
 recordmatcherp = do
-lift $ dbgparse 2 "trying matcherp"
+lift $ dbgparse 8 "trying matcherp"
 -- pos <- currentPos
 -- _ <- optional (matchoperatorp >> lift (skipMany spacenonewline) >> optional newline)
 r <- regexp
@@ -535,7 +535,7 @@ recordmatcherp = do
 -- %description chez jacques
 fieldmatcherp :: CsvRulesParser Matcher
 fieldmatcherp = do
-lift $ dbgparse 2 "trying fieldmatcher"
+lift $ dbgparse 8 "trying fieldmatcher"
 -- An optional fieldname (default: "all")
 -- f <- fromMaybe "all" `fmap` (optional $ do
 -- f' <- fieldnamep
@@ -551,7 +551,7 @@ fieldmatcherp = do

 csvfieldreferencep :: CsvRulesParser CsvFieldReference
 csvfieldreferencep = do
-lift $ dbgparse 3 "trying csvfieldreferencep"
+lift $ dbgparse 8 "trying csvfieldreferencep"
 char '%'
 f <- fieldnamep
 return $ '%' : quoteIfNeeded f
@@ -559,7 +559,7 @@ csvfieldreferencep = do
 -- A single regular expression
 regexp :: CsvRulesParser RegexpPattern
 regexp = do
-lift $ dbgparse 3 "trying regexp"
+lift $ dbgparse 8 "trying regexp"
 -- notFollowedBy matchoperatorp
 c <- lift nonspace
 cs <- anySingle `manyTill` lift eolof
@@ -606,12 +606,12 @@ readJournalFromCsv mrulesfile csvfile csvdata =
 rulestext <-
 if rulesfileexists
 then do
-dbg1IO "using conversion rules file" rulesfile
+dbg7IO "using conversion rules file" rulesfile
 readFilePortably rulesfile >>= expandIncludes (takeDirectory rulesfile)
 else
 return $ defaultRulesText rulesfile
 rules <- either throwerr return $ parseAndValidateCsvRules rulesfile rulestext
-dbg2IO "rules" rules
+dbg7IO "rules" rules

 -- parse the skip directive's value, if any
 let skiplines = case getDirective "skip" rules of
@@ -623,12 +623,12 @@ readJournalFromCsv mrulesfile csvfile csvdata =
 -- parsec seems to fail if you pass it "-" here TODO: try again with megaparsec
 let parsecfilename = if csvfile == "-" then "(stdin)" else csvfile
 let separator = fromMaybe ',' (getDirective "separator" rules >>= parseSeparator)
-dbg2IO "separator" separator
+dbg7IO "separator" separator
 records <- (either throwerr id .
-dbg2 "validateCsv" . validateCsv rules skiplines .
-dbg2 "parseCsv")
+dbg8 "validateCsv" . validateCsv rules skiplines .
+dbg8 "parseCsv")
 `fmap` parseCsv separator parsecfilename csvdata
-dbg1IO "first 3 csv records" $ take 3 records
+dbg7IO "first 3 csv records" $ take 3 records

 -- identify header lines
 -- let (headerlines, datalines) = identifyHeaderLines records
@@ -653,10 +653,10 @@ readJournalFromCsv mrulesfile csvfile csvdata =
 -- than one date and the first date is more recent than the last):
 -- reverse them to get same-date transactions ordered chronologically.
 txns' =
-(if newestfirst || mseemsnewestfirst == Just True then reverse else id) txns
+(if newestfirst || mdataseemsnewestfirst == Just True then reverse else id) txns
 where
-newestfirst = dbg3 "newestfirst" $ isJust $ getDirective "newest-first" rules
-mseemsnewestfirst = dbg3 "mseemsnewestfirst" $
+newestfirst = dbg7 "newestfirst" $ isJust $ getDirective "newest-first" rules
+mdataseemsnewestfirst = dbg7 "mdataseemsnewestfirst" $
 case nub $ map tdate txns of
 ds | length ds > 1 -> Just $ head ds > last ds
 _ -> Nothing
@@ -1060,7 +1060,7 @@ getEffectiveAssignment :: CsvRules -> CsvRecord -> HledgerFieldName -> Maybe Fie
 getEffectiveAssignment rules record f = lastMay $ map snd $ assignments
 where
 -- all active assignments to field f, in order
-assignments = dbg2 "assignments" $ filter ((==f).fst) $ toplevelassignments ++ conditionalassignments
+assignments = dbg8 "assignments" $ filter ((==f).fst) $ toplevelassignments ++ conditionalassignments
 where
 -- all top level field assignments
 toplevelassignments = rassignments rules
@@ -1077,18 +1077,18 @@ getEffectiveAssignment rules record f = lastMay $ map snd $ assignments
 matcherMatches :: Matcher -> Bool
 matcherMatches (RecordMatcher pat) = regexMatchesCI pat' wholecsvline
 where
-pat' = dbg3 "regex" pat
+pat' = dbg8 "regex" pat
 -- A synthetic whole CSV record to match against. Note, this can be
 -- different from the original CSV data:
 -- - any whitespace surrounding field values is preserved
 -- - any quotes enclosing field values are removed
 -- - and the field separator is always comma
 -- which means that a field containing a comma will look like two fields.
-wholecsvline = dbg3 "wholecsvline" $ intercalate "," record
+wholecsvline = dbg8 "wholecsvline" $ intercalate "," record
 matcherMatches (FieldMatcher csvfieldref pat) = regexMatchesCI pat csvfieldvalue
 where
 -- the value of the referenced CSV field to match against.
-csvfieldvalue = dbg3 "csvfieldvalue" $ replaceCsvFieldReference rules record csvfieldref
+csvfieldvalue = dbg8 "csvfieldvalue" $ replaceCsvFieldReference rules record csvfieldref

 -- | Render a field assigment's template, possibly interpolating referenced
 -- CSV field values. Outer whitespace is removed from interpolated values.
@@ -296,7 +296,7 @@ includedirectivep = do
 -- on journal. Duplicating readJournal a bit here.
 let r = fromMaybe reader $ findReader Nothing (Just prefixedpath)
 parser = rParser r
-dbg1IO "trying reader" (rFormat r)
+dbg7IO "trying reader" (rFormat r)
 updatedChildj <- journalAddFile (filepath, childInput) <$>
 parseIncludeFile parser initChildj filepath childInput

@@ -425,7 +425,7 @@ commoditydirectiveonelinep = do
 pure $ (off, amount)
 lift (skipMany spacenonewline)
 _ <- lift followingcommentp
-let comm = Commodity{csymbol=acommodity, cformat=Just $ dbg2 "style from commodity directive" astyle}
+let comm = Commodity{csymbol=acommodity, cformat=Just $ dbg7 "style from commodity directive" astyle}
 if asdecimalpoint astyle == Nothing
 then customFailure $ parseErrorAt off pleaseincludedecimalpoint
 else modify' (\j -> j{jcommodities=M.insert acommodity comm $ jcommodities j})
@@ -469,7 +469,7 @@ formatdirectivep expectedsym = do
 then
 if asdecimalpoint astyle == Nothing
 then customFailure $ parseErrorAt off pleaseincludedecimalpoint
-else return $ dbg2 "style from format subdirective" astyle
+else return $ dbg7 "style from format subdirective" astyle
 else customFailure $ parseErrorAt off $
 printf "commodity directive symbol \"%s\" and format directive symbol \"%s\" should be the same" expectedsym acommodity

@@ -70,8 +70,9 @@ balanceReport ropts@ReportOpts{..} q j =
 (if invert_ then brNegate else id) $
 (mappedsorteditems, mappedtotal)
 where
--- dbg1 = const id -- exclude from debug output
-dbg1 s = let p = "balanceReport" in Hledger.Utils.dbg1 (p++" "++s) -- add prefix in debug output
+-- dbg = const id -- exclude from debug output
+dbg s = let p = "balanceReport" in Hledger.Utils.dbg4 (p++" "++s) -- add prefix in debug output
+dbg' s = let p = "balanceReport" in Hledger.Utils.dbg5 (p++" "++s) -- add prefix in debug output

 -- Get all the summed accounts & balances, according to the query, as an account tree.
 -- If doing cost valuation, amounts will be converted to cost first.
@@ -95,13 +96,13 @@ balanceReport ropts@ReportOpts{..} q j =
 -- Modify this tree for display - depth limit, boring parents, zeroes - and convert to a list.
 displayaccts :: [Account]
 | queryDepth q == 0 =
-dbg1 "displayaccts" $
+dbg' "displayaccts" $
 take 1 $ clipAccountsAndAggregate (queryDepth q) $ flattenAccounts valuedaccttree
-| flat_ ropts = dbg1 "displayaccts" $
+| flat_ ropts = dbg' "displayaccts" $
 filterzeros $
 filterempty $
 drop 1 $ clipAccountsAndAggregate (queryDepth q) $ flattenAccounts valuedaccttree
-| otherwise = dbg1 "displayaccts" $
+| otherwise = dbg' "displayaccts" $
 filter (not.aboring) $
 drop 1 $ flattenAccounts $
 markboring $
@@ -116,7 +117,7 @@ balanceReport ropts@ReportOpts{..} q j =
 markboring = if no_elide_ then id else markBoringParentAccounts

 -- Make a report row for each account.
-items = dbg1 "items" $ map (balanceReportItem ropts q) displayaccts
+items = dbg "items" $ map (balanceReportItem ropts q) displayaccts

 -- Sort report rows (except sorting by amount in tree mode, which was done above).
 sorteditems
@@ -139,17 +140,17 @@ balanceReport ropts@ReportOpts{..} q j =
 sortedrows = sortAccountItemsLike sortedanames anamesandrows

 -- Calculate the grand total.
-total | not (flat_ ropts) = dbg1 "total" $ sum [amt | (_,_,indent,amt) <- items, indent == 0]
-| otherwise = dbg1 "total" $
+total | not (flat_ ropts) = dbg "total" $ sum [amt | (_,_,indent,amt) <- items, indent == 0]
+| otherwise = dbg "total" $
 if flatShowsExclusiveBalance
 then sum $ map fourth4 items
 else sum $ map aebalance $ clipAccountsAndAggregate 1 displayaccts

 -- Calculate percentages if needed.
-mappedtotal | percent_ = dbg1 "mappedtotal" $ total `perdivide` total
+mappedtotal | percent_ = dbg "mappedtotal" $ total `perdivide` total
 | otherwise = total
 mappedsorteditems | percent_ =
-dbg1 "mappedsorteditems" $
+dbg "mappedsorteditems" $
 map (\(fname, sname, indent, amount) -> (fname, sname, indent, amount `perdivide` total)) sorteditems
 | otherwise = sorteditems

@@ -317,8 +317,7 @@ intervalFromRawOpts = lastDef NoInterval . collectopts intervalfromrawopt
 -- | get period expression from --forecast option
 forecastPeriodFromRawOpts :: Day -> RawOpts -> Maybe DateSpan
 forecastPeriodFromRawOpts d opts =
-case
-dbg2 "forecastopt" $ maybestringopt "forecast" opts
+case maybestringopt "forecast" opts
 of
 Nothing -> Nothing
 Just "" -> Just nulldatespan
@@ -478,13 +477,13 @@ queryOptsFromOpts d ReportOpts{..} = flagsqopts ++ argsqopts
 reportSpan :: Journal -> ReportOpts -> IO DateSpan
 reportSpan j ropts = do
 (mspecifiedstartdate, mspecifiedenddate) <-
-dbg2 "specifieddates" <$> specifiedStartEndDates ropts
+dbg3 "specifieddates" <$> specifiedStartEndDates ropts
 let
 DateSpan mjournalstartdate mjournalenddate =
-dbg2 "journalspan" $ journalDateSpan False j -- ignore secondary dates
+dbg3 "journalspan" $ journalDateSpan False j -- ignore secondary dates
 mstartdate = mspecifiedstartdate <|> mjournalstartdate
 menddate = mspecifiedenddate <|> mjournalenddate
-return $ dbg1 "reportspan" $ DateSpan mstartdate menddate
+return $ dbg3 "reportspan" $ DateSpan mstartdate menddate

 reportStartDate :: Journal -> ReportOpts -> IO (Maybe Day)
 reportStartDate j ropts = spanStart <$> reportSpan j ropts
@@ -1,5 +1,28 @@
 {-# LANGUAGE FlexibleContexts, TypeFamilies #-}
--- | Debugging helpers
+{- | Debugging helpers.
+
+You can enable increasingly verbose debug output by adding --debug [1-9]
+to a hledger command line. --debug with no argument means --debug 1.
+This is implemented by calling dbgN or similar helpers, defined below.
+These calls can be found throughout hledger code; they have been added
+organically where it seemed likely they would be needed again.
+The choice of debug level has not been very systematic.
+202006 Here's a start at some guidelines, not yet applied project-wide:
+
+Debug level:  What to show:
+------------  ---------------------------------------------------------
+0             normal command output only (no warnings, eg)
+1 (--debug)   basic/useful warnings & most common troubleshooting info, eg for valuation
+2             troubleshooting info, more detail
+3             report options selection
+4             report generation
+5             report generation, more detail
+6             command line parsing
+7             input file reading
+8             input file reading, more detail
+9             any other rarely needed / more in-depth info
+
+-}

 -- more:
 -- http://hackage.haskell.org/packages/archive/TraceUtils/0.1.0.2/doc/html/Debug-TraceUtils.html
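To show how the table above maps to call sites, here is a hypothetical, self-contained sketch of the three helper shapes used by this commit: a plain level-gated helper (dbg4), a variant taking a custom rendering function (dbg1With, as in priceLookup above), and an IO variant (dbg7IO, as in the readers). The names match the real Hledger.Utils.Debug helpers, but these simplified definitions are illustrative only:

import Debug.Trace (trace, traceIO)

-- Pretend --debug 7 was given on the command line.
debugLevel :: Int
debugLevel = 7

-- Pure helper for report generation code (level 4 per the table above).
dbg4 :: Show a => String -> a -> a
dbg4 label a
  | debugLevel >= 4 = trace (label ++ ": " ++ show a) a
  | otherwise       = a

-- Level 1 helper that renders the value with a caller-supplied function.
dbg1With :: (a -> String) -> a -> a
dbg1With showfn a
  | debugLevel >= 1 = trace (showfn a) a
  | otherwise       = a

-- IO helper for input file reading code (level 7 per the table above).
dbg7IO :: Show a => String -> a -> IO ()
dbg7IO label a
  | debugLevel >= 7 = traceIO (label ++ ": " ++ show a)
  | otherwise       = pure ()

main :: IO ()
main = do
  dbg7IO "trying reader" ("journal" :: String)        -- input file reading detail
  print $ dbg4 "items" ([1, 2, 3] :: [Int])           -- report generation detail
  print $ dbg1With (("total: " ++) . show) (100 :: Int)  -- most useful info

Per the guidelines, report-generation code calls dbg4/dbg5, input file reading calls dbg7/dbg8, and only the most broadly useful output stays at dbg1, which is essentially what the one-line level changes in this commit do.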
@@ -115,7 +115,7 @@ main = do
 (argsbeforecmd, argsaftercmd') = break (==rawcmd) args
 argsaftercmd = drop 1 argsaftercmd'
 dbgIO :: Show a => String -> a -> IO ()
-dbgIO = ptraceAtIO 2
+dbgIO = ptraceAtIO 6

 dbgIO "running" prognameandversion
 dbgIO "raw args" args