tweak debug levels, document some guidelines
This begins a project-wide policy for what output to show at each debug level. Later we'll want more flexibility, eg filtering by topic.
parent 6e36ede9aa
commit 684cb45e1a
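For orientation (a sketch, not part of this commit): the dbgN helpers seen throughout the diff below come from Hledger.Utils.Debug; each traces a labelled value only when the program runs with --debug N or higher, and returns the value unchanged. Moving a call from dbg1 to dbg7, as several hunks do, only changes how verbose a run must be before the message appears. The function names here are illustrative:

import Hledger.Utils.Debug (dbg1, dbg7)

-- Traced from --debug 1 upward: common troubleshooting info, eg valuation.
defaultValuationCommodity :: Maybe String -> Maybe String
defaultValuationCommodity = dbg1 "default valuation commodity"

-- Traced only from --debug 7 upward: input file reading detail.
chosenReaderFormat :: String -> String
chosenReaderFormat = dbg7 "trying reader"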
@@ -258,7 +258,7 @@ priceLookup pricesatdate d from mto =
     fromnode = node m from
     mto' = mto <|> mdefaultto
       where
-        mdefaultto = dbg4 ("default valuation commodity for "++T.unpack from) $
+        mdefaultto = dbg1 ("default valuation commodity for "++T.unpack from) $
                      M.lookup from defaultdests
   in
     case mto' of
@@ -276,12 +276,12 @@ priceLookup pricesatdate d from mto =
             case sp fromnode tonode g :: Maybe [Node] of
               Nothing    -> Nothing
               Just nodes ->
-                dbg ("market price "++intercalate "->" (map T.unpack comms)) $
+                dbg ("market price for "++intercalate " -> " (map T.unpack comms)) $
                 Just $ product $ pathEdgeLabels g nodes  -- convert to a single exchange rate
                 where comms = catMaybes $ map (lab g) nodes

           -- log a message and a Maybe Quantity, hiding Just/Nothing and limiting decimal places
-          dbg msg = dbg4With (((msg++": ")++) . maybe "" (show . roundTo 8))
+          dbg msg = dbg1With (((msg++": ")++) . maybe "" (show . roundTo 8))

 tests_priceLookup =
   let
@@ -311,33 +311,32 @@ tests_priceLookup =
 -- the given day.
 pricesAtDate :: [PriceDirective] -> [MarketPrice] -> Day -> PriceGraph
 pricesAtDate pricedirectives impliedmarketprices d =
-  -- trace ("pricesAtDate ("++show d++")") $
+  dbg9 ("pricesAtDate "++show d) $
   PriceGraph{prGraph=g, prNodemap=m, prDefaultValuationCommodities=defaultdests}
   where
     -- prices in effect on date d, either declared or implied
-    declaredandimpliedprices = dbg5 "declaredandimpliedprices" $
+    currentdeclaredandimpliedprices = dbg2 "currentdeclaredandimpliedprices" $
       latestPriceForEachPairOn pricedirectives impliedmarketprices d

     -- infer any additional reverse prices not already declared or implied
-    reverseprices =
-      dbg5 "reverseprices" $
-      map marketPriceReverse declaredandimpliedprices \\ declaredandimpliedprices
+    reverseprices = dbg2 "reverseprices" $
+      map marketPriceReverse currentdeclaredandimpliedprices \\ currentdeclaredandimpliedprices

     -- build the graph and associated node map
     (g, m) =
       mkMapGraph
-      (dbg5 "g nodelabels" $ sort allcomms) -- this must include all nodes mentioned in edges
-      (dbg5 "g edges"      $ [(mpfrom, mpto, mprate) | MarketPrice{..} <- prices])
+      (dbg9 "price graph labels" $ sort allcomms) -- this must include all nodes mentioned in edges
+      (dbg9 "price graph edges" $ [(mpfrom, mpto, mprate) | MarketPrice{..} <- prices])
       :: (Gr CommoditySymbol Quantity, NodeMap CommoditySymbol)
       where
-        prices   = declaredandimpliedprices ++ reverseprices
+        prices   = currentdeclaredandimpliedprices ++ reverseprices
         allcomms = map mpfrom prices

     -- determine a default valuation commodity D for each source commodity S:
     -- the price commodity in the latest declared market price for S (on any date)
     defaultdests = M.fromList [(mpfrom,mpto) | MarketPrice{..} <- alldeclaredprices]
       where
-        alldeclaredprices = dbg5 "alldeclaredprices" $ map priceDirectiveToMarketPrice pricedirectives
+        alldeclaredprices = dbg2 "alldeclaredprices" $ map priceDirectiveToMarketPrice pricedirectives

 -- From a list of price directives in parse order, and a list of
 -- transaction-implied market prices in parse order, get the effective
@@ -346,7 +345,6 @@ pricesAtDate pricedirectives impliedmarketprices d =
 -- that day, with declared prices taking precedence.
 latestPriceForEachPairOn :: [PriceDirective] -> [MarketPrice] -> Day -> [MarketPrice]
 latestPriceForEachPairOn pricedirectives impliedmarketprices d =
-  dbg5 "latestPriceForEachPairOn" $
   let
     -- consider only declarations/transactions before the valuation date
     declaredprices = map priceDirectiveToMarketPrice $ filter ((<=d).pddate) pricedirectives

==========

@@ -111,7 +111,7 @@ readJournal :: InputOpts -> Maybe FilePath -> Text -> IO (Either String Journal)
 readJournal iopts mpath txt = do
   let r :: Reader IO =
         fromMaybe JournalReader.reader $ findReader (mformat_ iopts) mpath
-  dbg1IO "trying reader" (rFormat r)
+  dbg7IO "trying reader" (rFormat r)
   (runExceptT . (rReadFn r) iopts (fromMaybe "(string)" mpath)) txt

 -- | Read the default journal file specified by the environment, or raise an error.

==========

@@ -382,7 +382,7 @@ rulesp = do
           }

 blankorcommentlinep :: CsvRulesParser ()
-blankorcommentlinep = lift (dbgparse 3 "trying blankorcommentlinep") >> choiceInState [blanklinep, commentlinep]
+blankorcommentlinep = lift (dbgparse 8 "trying blankorcommentlinep") >> choiceInState [blanklinep, commentlinep]

 blanklinep :: CsvRulesParser ()
 blanklinep = lift (skipMany spacenonewline) >> newline >> return () <?> "blank line"
@@ -395,7 +395,7 @@ commentcharp = oneOf (";#*" :: [Char])

 directivep :: CsvRulesParser (DirectiveName, String)
 directivep = (do
-  lift $ dbgparse 3 "trying directive"
+  lift $ dbgparse 8 "trying directive"
   d <- fmap T.unpack $ choiceInState $ map (lift . string . T.pack) directives
   v <- (((char ':' >> lift (many spacenonewline)) <|> lift (some spacenonewline)) >> directivevalp)
        <|> (optional (char ':') >> lift (skipMany spacenonewline) >> lift eolof >> return "")
@@ -418,7 +418,7 @@ directivevalp = anySingle `manyTill` lift eolof

 fieldnamelistp :: CsvRulesParser [CsvFieldName]
 fieldnamelistp = (do
-  lift $ dbgparse 3 "trying fieldnamelist"
+  lift $ dbgparse 8 "trying fieldnamelist"
   string "fields"
   optional $ char ':'
   lift (skipSome spacenonewline)
@@ -444,7 +444,7 @@ barefieldnamep = some $ noneOf (" \t\n,;#~" :: [Char])

 fieldassignmentp :: CsvRulesParser (HledgerFieldName, FieldTemplate)
 fieldassignmentp = do
-  lift $ dbgparse 3 "trying fieldassignmentp"
+  lift $ dbgparse 8 "trying fieldassignmentp"
   f <- journalfieldnamep
   v <- choiceInState [ assignmentseparatorp >> fieldvalp
                      , lift eolof >> return ""
@@ -454,7 +454,7 @@ fieldassignmentp = do

 journalfieldnamep :: CsvRulesParser String
 journalfieldnamep = do
-  lift (dbgparse 2 "trying journalfieldnamep")
+  lift (dbgparse 8 "trying journalfieldnamep")
   T.unpack <$> choiceInState (map (lift . string . T.pack) journalfieldnames)

 maxpostings = 99
@@ -489,7 +489,7 @@ journalfieldnames =

 assignmentseparatorp :: CsvRulesParser ()
 assignmentseparatorp = do
-  lift $ dbgparse 3 "trying assignmentseparatorp"
+  lift $ dbgparse 8 "trying assignmentseparatorp"
   _ <- choiceInState [ lift (skipMany spacenonewline) >> char ':' >> lift (skipMany spacenonewline)
                      , lift (skipSome spacenonewline)
                      ]
@@ -497,13 +497,13 @@ assignmentseparatorp = do

 fieldvalp :: CsvRulesParser String
 fieldvalp = do
-  lift $ dbgparse 2 "trying fieldvalp"
+  lift $ dbgparse 8 "trying fieldvalp"
   anySingle `manyTill` lift eolof

 -- A conditional block: one or more matchers, one per line, followed by one or more indented rules.
 conditionalblockp :: CsvRulesParser ConditionalBlock
 conditionalblockp = do
-  lift $ dbgparse 3 "trying conditionalblockp"
+  lift $ dbgparse 8 "trying conditionalblockp"
   string "if" >> lift (skipMany spacenonewline) >> optional newline
   ms <- some matcherp
   as <- many (try $ lift (skipSome spacenonewline) >> fieldassignmentp)
@@ -520,7 +520,7 @@ matcherp = try fieldmatcherp <|> recordmatcherp
 -- A pattern on the whole line, not beginning with a csv field reference.
 recordmatcherp :: CsvRulesParser Matcher
 recordmatcherp = do
-  lift $ dbgparse 2 "trying matcherp"
+  lift $ dbgparse 8 "trying matcherp"
   -- pos <- currentPos
   -- _  <- optional (matchoperatorp >> lift (skipMany spacenonewline) >> optional newline)
   r <- regexp
@@ -535,7 +535,7 @@ recordmatcherp = do
 -- %description chez jacques
 fieldmatcherp :: CsvRulesParser Matcher
 fieldmatcherp = do
-  lift $ dbgparse 2 "trying fieldmatcher"
+  lift $ dbgparse 8 "trying fieldmatcher"
   -- An optional fieldname (default: "all")
   -- f <- fromMaybe "all" `fmap` (optional $ do
   --        f' <- fieldnamep
@@ -551,7 +551,7 @@ fieldmatcherp = do

 csvfieldreferencep :: CsvRulesParser CsvFieldReference
 csvfieldreferencep = do
-  lift $ dbgparse 3 "trying csvfieldreferencep"
+  lift $ dbgparse 8 "trying csvfieldreferencep"
   char '%'
   f <- fieldnamep
   return $ '%' : quoteIfNeeded f
@@ -559,7 +559,7 @@ csvfieldreferencep = do
 -- A single regular expression
 regexp :: CsvRulesParser RegexpPattern
 regexp = do
-  lift $ dbgparse 3 "trying regexp"
+  lift $ dbgparse 8 "trying regexp"
   -- notFollowedBy matchoperatorp
   c <- lift nonspace
   cs <- anySingle `manyTill` lift eolof
@@ -606,12 +606,12 @@ readJournalFromCsv mrulesfile csvfile csvdata =
   rulestext <-
     if rulesfileexists
     then do
-      dbg1IO "using conversion rules file" rulesfile
+      dbg7IO "using conversion rules file" rulesfile
       readFilePortably rulesfile >>= expandIncludes (takeDirectory rulesfile)
     else
       return $ defaultRulesText rulesfile
   rules <- either throwerr return $ parseAndValidateCsvRules rulesfile rulestext
-  dbg2IO "rules" rules
+  dbg7IO "rules" rules

   -- parse the skip directive's value, if any
   let skiplines = case getDirective "skip" rules of
@@ -623,12 +623,12 @@ readJournalFromCsv mrulesfile csvfile csvdata =
   -- parsec seems to fail if you pass it "-" here TODO: try again with megaparsec
   let parsecfilename = if csvfile == "-" then "(stdin)" else csvfile
   let separator = fromMaybe ',' (getDirective "separator" rules >>= parseSeparator)
-  dbg2IO "separator" separator
+  dbg7IO "separator" separator
   records <- (either throwerr id .
-              dbg2 "validateCsv" . validateCsv rules skiplines .
-              dbg2 "parseCsv")
+              dbg8 "validateCsv" . validateCsv rules skiplines .
+              dbg8 "parseCsv")
              `fmap` parseCsv separator parsecfilename csvdata
-  dbg1IO "first 3 csv records" $ take 3 records
+  dbg7IO "first 3 csv records" $ take 3 records

   -- identify header lines
   -- let (headerlines, datalines) = identifyHeaderLines records
@@ -653,10 +653,10 @@ readJournalFromCsv mrulesfile csvfile csvdata =
     -- than one date and the first date is more recent than the last):
     -- reverse them to get same-date transactions ordered chronologically.
     txns' =
-      (if newestfirst || mseemsnewestfirst == Just True then reverse else id) txns
+      (if newestfirst || mdataseemsnewestfirst == Just True then reverse else id) txns
       where
-        newestfirst = dbg3 "newestfirst" $ isJust $ getDirective "newest-first" rules
-        mseemsnewestfirst = dbg3 "mseemsnewestfirst" $
+        newestfirst = dbg7 "newestfirst" $ isJust $ getDirective "newest-first" rules
+        mdataseemsnewestfirst = dbg7 "mdataseemsnewestfirst" $
           case nub $ map tdate txns of
             ds | length ds > 1 -> Just $ head ds > last ds
             _                  -> Nothing
@@ -1060,7 +1060,7 @@ getEffectiveAssignment :: CsvRules -> CsvRecord -> HledgerFieldName -> Maybe Fie
 getEffectiveAssignment rules record f = lastMay $ map snd $ assignments
   where
     -- all active assignments to field f, in order
-    assignments = dbg2 "assignments" $ filter ((==f).fst) $ toplevelassignments ++ conditionalassignments
+    assignments = dbg8 "assignments" $ filter ((==f).fst) $ toplevelassignments ++ conditionalassignments
       where
         -- all top level field assignments
         toplevelassignments    = rassignments rules
@@ -1077,18 +1077,18 @@ getEffectiveAssignment rules record f = lastMay $ map snd $ assignments
                 matcherMatches :: Matcher -> Bool
                 matcherMatches (RecordMatcher pat) = regexMatchesCI pat' wholecsvline
                   where
-                    pat' = dbg3 "regex" pat
+                    pat' = dbg8 "regex" pat
                     -- A synthetic whole CSV record to match against. Note, this can be
                     -- different from the original CSV data:
                     -- - any whitespace surrounding field values is preserved
                     -- - any quotes enclosing field values are removed
                     -- - and the field separator is always comma
                     -- which means that a field containing a comma will look like two fields.
-                    wholecsvline = dbg3 "wholecsvline" $ intercalate "," record
+                    wholecsvline = dbg8 "wholecsvline" $ intercalate "," record
                 matcherMatches (FieldMatcher csvfieldref pat) = regexMatchesCI pat csvfieldvalue
                   where
                     -- the value of the referenced CSV field to match against.
-                    csvfieldvalue = dbg3 "csvfieldvalue" $ replaceCsvFieldReference rules record csvfieldref
+                    csvfieldvalue = dbg8 "csvfieldvalue" $ replaceCsvFieldReference rules record csvfieldref

 -- | Render a field assigment's template, possibly interpolating referenced
 -- CSV field values. Outer whitespace is removed from interpolated values.

==========

@@ -296,7 +296,7 @@ includedirectivep = do
       -- on journal. Duplicating readJournal a bit here.
       let r = fromMaybe reader $ findReader Nothing (Just prefixedpath)
           parser = rParser r
-      dbg1IO "trying reader" (rFormat r)
+      dbg7IO "trying reader" (rFormat r)
       updatedChildj <- journalAddFile (filepath, childInput) <$>
                         parseIncludeFile parser initChildj filepath childInput

@@ -425,7 +425,7 @@ commoditydirectiveonelinep = do
     pure $ (off, amount)
   lift (skipMany spacenonewline)
   _ <- lift followingcommentp
-  let comm = Commodity{csymbol=acommodity, cformat=Just $ dbg2 "style from commodity directive" astyle}
+  let comm = Commodity{csymbol=acommodity, cformat=Just $ dbg7 "style from commodity directive" astyle}
   if asdecimalpoint astyle == Nothing
   then customFailure $ parseErrorAt off pleaseincludedecimalpoint
   else modify' (\j -> j{jcommodities=M.insert acommodity comm $ jcommodities j})
@@ -469,7 +469,7 @@ formatdirectivep expectedsym = do
     then
       if asdecimalpoint astyle == Nothing
       then customFailure $ parseErrorAt off pleaseincludedecimalpoint
-      else return $ dbg2 "style from format subdirective" astyle
+      else return $ dbg7 "style from format subdirective" astyle
     else customFailure $ parseErrorAt off $
          printf "commodity directive symbol \"%s\" and format directive symbol \"%s\" should be the same" expectedsym acommodity


==========

@@ -70,8 +70,9 @@ balanceReport ropts@ReportOpts{..} q j =
   (if invert_ then brNegate  else id) $
   (mappedsorteditems, mappedtotal)
     where
-      -- dbg1 = const id -- exclude from debug output
-      dbg1 s = let p = "balanceReport" in Hledger.Utils.dbg1 (p++" "++s)  -- add prefix in debug output
+      -- dbg = const id -- exclude from debug output
+      dbg s = let p = "balanceReport" in Hledger.Utils.dbg4 (p++" "++s)  -- add prefix in debug output
+      dbg' s = let p = "balanceReport" in Hledger.Utils.dbg5 (p++" "++s)  -- add prefix in debug output

       -- Get all the summed accounts & balances, according to the query, as an account tree.
       -- If doing cost valuation, amounts will be converted to cost first.
@@ -95,13 +96,13 @@ balanceReport ropts@ReportOpts{..} q j =
       -- Modify this tree for display - depth limit, boring parents, zeroes - and convert to a list.
       displayaccts :: [Account]
           | queryDepth q == 0 =
-                         dbg1 "displayaccts" $
+                         dbg' "displayaccts" $
                          take 1 $ clipAccountsAndAggregate (queryDepth q) $ flattenAccounts valuedaccttree
-          | flat_ ropts = dbg1 "displayaccts" $
+          | flat_ ropts = dbg' "displayaccts" $
                          filterzeros $
                          filterempty $
                          drop 1 $ clipAccountsAndAggregate (queryDepth q) $ flattenAccounts valuedaccttree
-          | otherwise  = dbg1 "displayaccts" $
+          | otherwise  = dbg' "displayaccts" $
                          filter (not.aboring) $
                          drop 1 $ flattenAccounts $
                          markboring $
@@ -116,7 +117,7 @@ balanceReport ropts@ReportOpts{..} q j =
             markboring  = if no_elide_ then id else markBoringParentAccounts

       -- Make a report row for each account.
-      items = dbg1 "items" $ map (balanceReportItem ropts q) displayaccts
+      items = dbg "items" $ map (balanceReportItem ropts q) displayaccts

       -- Sort report rows (except sorting by amount in tree mode, which was done above).
       sorteditems
@@ -139,17 +140,17 @@ balanceReport ropts@ReportOpts{..} q j =
               sortedrows = sortAccountItemsLike sortedanames anamesandrows

       -- Calculate the grand total.
-      total | not (flat_ ropts) = dbg1 "total" $ sum [amt | (_,_,indent,amt) <- items, indent == 0]
-            | otherwise         = dbg1 "total" $
+      total | not (flat_ ropts) = dbg "total" $ sum [amt | (_,_,indent,amt) <- items, indent == 0]
+            | otherwise         = dbg "total" $
                                   if flatShowsExclusiveBalance
                                   then sum $ map fourth4 items
                                   else sum $ map aebalance $ clipAccountsAndAggregate 1 displayaccts

       -- Calculate percentages if needed.
-      mappedtotal | percent_  = dbg1 "mappedtotal" $ total `perdivide` total
+      mappedtotal | percent_  = dbg "mappedtotal" $ total `perdivide` total
                   | otherwise = total
       mappedsorteditems | percent_ =
-                            dbg1 "mappedsorteditems" $
+                            dbg "mappedsorteditems" $
                             map (\(fname, sname, indent, amount) -> (fname, sname, indent, amount `perdivide` total)) sorteditems
                         | otherwise = sorteditems

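A small convention also visible in the balanceReport hunks above: rather than calling the global helpers directly, the function binds local dbg and dbg' wrappers that prepend the function name and fix the level (4 for report generation, 5 for its extra detail), keeping call sites short and consistently prefixed. A hedged sketch of the same pattern, with illustrative names and assuming dbg4/dbg5 are re-exported by Hledger.Utils as the diff suggests:

import Hledger.Utils (dbg4, dbg5)

-- A hypothetical report function using locally bound, prefixed debug wrappers.
someReport :: [Int] -> [Int]
someReport xs = dbg "rows" rows
  where
    rows   = map (*2) (dbg' "input" xs)
    dbg  s = dbg4 ("someReport " ++ s)   -- level 4: report generation
    dbg' s = dbg5 ("someReport " ++ s)   -- level 5: report generation, more detail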

==========

@@ -317,8 +317,7 @@ intervalFromRawOpts = lastDef NoInterval . collectopts intervalfromrawopt
 -- | get period expression from --forecast option
 forecastPeriodFromRawOpts :: Day -> RawOpts -> Maybe DateSpan
 forecastPeriodFromRawOpts d opts =
-  case
-    dbg2 "forecastopt" $ maybestringopt "forecast" opts
+  case maybestringopt "forecast" opts
   of
     Nothing -> Nothing
     Just "" -> Just nulldatespan
@@ -478,13 +477,13 @@ queryOptsFromOpts d ReportOpts{..} = flagsqopts ++ argsqopts
 reportSpan :: Journal -> ReportOpts -> IO DateSpan
 reportSpan j ropts = do
   (mspecifiedstartdate, mspecifiedenddate) <-
-    dbg2 "specifieddates" <$> specifiedStartEndDates ropts
+    dbg3 "specifieddates" <$> specifiedStartEndDates ropts
   let
     DateSpan mjournalstartdate mjournalenddate =
-      dbg2 "journalspan" $ journalDateSpan False j  -- ignore secondary dates
+      dbg3 "journalspan" $ journalDateSpan False j  -- ignore secondary dates
     mstartdate = mspecifiedstartdate <|> mjournalstartdate
     menddate   = mspecifiedenddate   <|> mjournalenddate
-  return $ dbg1 "reportspan" $ DateSpan mstartdate menddate
+  return $ dbg3 "reportspan" $ DateSpan mstartdate menddate

 reportStartDate :: Journal -> ReportOpts -> IO (Maybe Day)
 reportStartDate j ropts = spanStart <$> reportSpan j ropts

==========

@@ -1,5 +1,28 @@
 {-# LANGUAGE FlexibleContexts, TypeFamilies #-}
--- | Debugging helpers
+{- | Debugging helpers.
+
+You can enable increasingly verbose debug output by adding --debug [1-9]
+to a hledger command line. --debug with no argument means --debug 1.
+This is implemented by calling dbgN or similar helpers, defined below.
+These calls can be found throughout hledger code; they have been added
+organically where it seemed likely they would be needed again.
+The choice of debug level has not been very systematic.
+202006 Here's a start at some guidelines, not yet applied project-wide:
+
+Debug level:  What to show:
+------------  ---------------------------------------------------------
+0             normal command output only (no warnings, eg)
+1 (--debug)   basic/useful warnings & most common troubleshooting info, eg for valuation
+2             troubleshooting info, more detail
+3             report options selection
+4             report generation
+5             report generation, more detail
+6             command line parsing
+7             input file reading
+8             input file reading, more detail
+9             any other rarely needed / more in-depth info
+
+-}

 -- more:
 -- http://hackage.haskell.org/packages/archive/TraceUtils/0.1.0.2/doc/html/Debug-TraceUtils.html
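To make the new table concrete, a hedged sketch of choosing levels from these guidelines (illustrative names; it assumes the dbg3 and ptraceAtIO helpers used in the surrounding hunks are exported by Hledger.Utils.Debug):

import Hledger.Utils.Debug (dbg3, ptraceAtIO)

-- Level 3, report options selection: trace a pure value, returning it unchanged.
chosenReportSpan :: (Maybe String, Maybe String) -> (Maybe String, Maybe String)
chosenReportSpan = dbg3 "specifieddates"

-- Level 6, command line parsing: log a labelled value from IO at that level.
logRawArgs :: [String] -> IO ()
logRawArgs = ptraceAtIO 6 "raw args"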

==========

@@ -115,7 +115,7 @@ main = do
     (argsbeforecmd, argsaftercmd') = break (==rawcmd) args
     argsaftercmd         = drop 1 argsaftercmd'
     dbgIO :: Show a => String -> a -> IO ()
-    dbgIO = ptraceAtIO 2
+    dbgIO = ptraceAtIO 6

   dbgIO "running" prognameandversion
   dbgIO "raw args" args