read system cleanup, require conversion rules from a file to simplify API

Simon Michael 2012-03-24 18:08:11 +00:00
parent ca5d5020e1
commit d4451ce5e3
7 changed files with 232 additions and 305 deletions

View File

@ -183,8 +183,8 @@ data Reader = Reader {
rFormat :: Format
-- quickly check if this reader can probably handle the given file path and file content
,rDetector :: FilePath -> String -> Bool
-- parse the given string, using the given parsing rules if any, returning a journal or error aware of the given file path
,rParser :: Maybe ParseRules -> FilePath -> String -> ErrorT String IO Journal
-- parse the given string, using the given parse rules file if any, returning a journal or error aware of the given file path
,rParser :: Maybe FilePath -> FilePath -> String -> ErrorT String IO Journal
}
-- data format parse/conversion rules
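
To illustrate the new signature: callers now pass an optional rules file path and let the reader load and parse the rules itself. A minimal sketch (the helper name is illustrative, not part of this commit), mirroring how readJournal invokes a reader below:

-- Run a reader's parser with an optional conversion rules file path.
readWith :: Reader -> Maybe FilePath -> FilePath -> String -> IO (Either String Journal)
readWith r mrulesfile path s = runErrorT $ (rParser r) mrulesfile path s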

View File

@ -1,21 +1,21 @@
{-|
This is the entry point to hledger's reading system, which can read
Journals from various data formats. Use this module if you want to
parse journal data or read journal files; it should not be necessary
Journals from various data formats. Use this module if you want to parse
journal data or read journal files. Generally it should not be necessary
to import modules below this one.
-}
module Hledger.Read (
-- * Journal reading utilities
-- * Journal reading API
defaultJournalPath,
defaultJournal,
readJournal,
readJournalFile,
requireJournalFileExists,
ensureJournalFileExists,
-- * Temporary parser exports for Convert
-- * Parsers used elsewhere
ledgeraccountname,
someamount,
-- * Tests
@ -89,12 +89,12 @@ readerForFormat s | null rs = Nothing
where
rs = filter ((s==).rFormat) readers :: [Reader]
-- | Read a Journal from this string or give an error message, using
-- the specified data format or trying all known formats. CSV
-- conversion rules may be provided for better conversion of that
-- format, and/or a file path for better error messages.
readJournal :: Maybe Format -> Maybe ParseRules -> Maybe FilePath -> String -> IO (Either String Journal)
readJournal format rules path s =
-- | Read a Journal from this string or give an error message, using the
-- specified data format or trying all known formats. A CSV conversion
-- rules file may be specified for better conversion of that format,
-- and/or a file path for better error messages.
readJournal :: Maybe Format -> Maybe FilePath -> Maybe FilePath -> String -> IO (Either String Journal)
readJournal format rulesfile path s =
let readerstotry = case format of Nothing -> readers
Just f -> case readerForFormat f of Just r -> [r]
Nothing -> []
@ -103,7 +103,7 @@ readJournal format rules path s =
path' = fromMaybe "(string)" path
tryReader :: Reader -> IO (Either String Journal)
tryReader r = do -- printf "trying %s reader\n" (rFormat r)
(runErrorT . (rParser r) rules path') s
(runErrorT . (rParser r) rulesfile path') s
-- if no reader succeeds, we return the error of the first;
-- ideally it would be the error of the most likely intended
@ -136,15 +136,15 @@ readJournal format rules path s =
-- Nothing -> ""
-- Just p -> " in "++p
-- | Read a Journal from this file (or stdin if the filename is -) or
-- give an error message, using the specified data format or trying
-- all known formats. CSV conversion rules may be provided for better
-- | Read a Journal from this file (or stdin if the filename is -) or give
-- an error message, using the specified data format or trying all known
-- formats. A CSV conversion rules file may be specified for better
-- conversion of that format.
readJournalFile :: Maybe Format -> Maybe CsvReader.CsvRules -> FilePath -> IO (Either String Journal)
readJournalFile format rules "-" = getContents >>= readJournal format rules (Just "(stdin)")
readJournalFile format rules f = do
readJournalFile :: Maybe Format -> Maybe FilePath -> FilePath -> IO (Either String Journal)
readJournalFile format rulesfile "-" = getContents >>= readJournal format rulesfile (Just "(stdin)")
readJournalFile format rulesfile f = do
requireJournalFileExists f
withFile f ReadMode $ \h -> hGetContents h >>= readJournal format rules (Just f)
withFile f ReadMode $ \h -> hGetContents h >>= readJournal format rulesfile (Just f)
-- | If the specified journal file does not exist, give a helpful error and quit.
requireJournalFileExists :: FilePath -> IO ()
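
A usage sketch of the revised API (file names hypothetical, and assuming Format is the plain format name string used by rFormat):

-- Convert bank.csv using an explicit rules file.
readBank :: IO (Either String Journal)
readBank = readJournalFile (Just "csv") (Just "bank.rules") "bank.csv"

-- Read an ordinary journal file, auto-detecting the format, with no rules file.
readMain :: IO (Either String Journal)
readMain = readJournalFile Nothing Nothing "2012.journal"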

View File

@ -1,72 +1,80 @@
{-|
A reader for CSV files. Uses optional extra rules to help interpret the
data, like the convert command.
A reader for the CSV data format. Uses an extra rules file
(<http://hledger.org/MANUAL.html#rules-file-directives>) to help interpret
the data. Example:
@
\"2012\/3\/22\",\"something\",\"10.00\"
\"2012\/3\/23\",\"another\",\"5.50\"
@
and rules file:
@
date-field 0
description-field 1
amount-field 2
base-account assets:bank:checking
SAVINGS
assets:bank:savings
@
-}
module Hledger.Read.CsvReader (
CsvRules(..),
nullrules,
-- * Reader
reader,
-- * Tests
tests_Hledger_Read_CsvReader
)
where
import Control.Monad
import Control.Monad.Error
import Test.HUnit
-- import Text.ParserCombinators.Parsec hiding (parse)
-- import Test.HUnit
import Data.List
import Data.Maybe
import Data.Ord
import Data.Time.Format (parseTime)
import Safe
import System.Directory (doesFileExist)
import System.Exit (exitFailure)
import System.FilePath (takeBaseName, replaceExtension)
import System.FilePath
import System.IO (stderr)
import System.Locale (defaultTimeLocale)
import Test.HUnit
import Text.CSV (parseCSV, parseCSVFromFile, CSV)
import Text.ParserCombinators.Parsec
import Text.CSV (parseCSV, CSV)
import Text.ParserCombinators.Parsec hiding (parse)
import Text.ParserCombinators.Parsec.Error
import Text.ParserCombinators.Parsec.Pos
import Text.Printf (hPrintf)
import Hledger.Data
import Hledger.Read.Utils
import Prelude hiding (getContents)
import Hledger.Utils.UTF8 (getContents)
import Hledger.Utils
import Hledger.Data.FormatStrings as FormatStrings
import Hledger.Read.JournalReader (ledgeraccountname, someamount)
-- import Hledger.Read.JournalReader (ledgerDirective, ledgerHistoricalPrice,
-- ledgerDefaultYear, emptyLine, ledgerdatetime)
reader :: Reader
reader = Reader format detect parse_
reader = Reader format detect parse
format :: String
format = "csv"
-- | Does the given file path and data look like CSV ?
detect :: FilePath -> String -> Bool
detect f _ = fileSuffix f == format
detect f _ = takeExtension f == format
-- | Parse and post-process a "Journal" from CSV data, or give an error.
-- XXX currently ignores the string and reads from the file path
parse_ :: Maybe ParseRules -> FilePath -> String -> ErrorT String IO Journal
parse_ rules f s = do
r <- liftIO $ journalFromCsv rules f s
parse :: Maybe FilePath -> FilePath -> String -> ErrorT String IO Journal
parse rulesfile f s = do
r <- liftIO $ readJournalFromCsv rulesfile f s
case r of Left e -> throwError e
Right j -> return j
-- csvFile :: GenParser Char JournalContext (JournalUpdate,JournalContext)
-- csvFile = do items <- many timelogItem
-- eof
-- ctx <- getState
-- return (liftM (foldr (.) id) $ sequence items, ctx)
nullrules = CsvRules {
dateField=Nothing,
dateFormat=Nothing,
@ -88,32 +96,32 @@ nullrules = CsvRules {
type CsvRecord = [String]
-- | Read the CSV file named as an argument and print equivalent journal transactions,
-- using/creating a .rules file.
journalFromCsv :: Maybe CsvRules -> FilePath -> String -> IO (Either String Journal)
journalFromCsv csvrules csvfile content = do
-- | Read a Journal or an error message from the given CSV data (and
-- filename, used for error messages.) To do this we read a CSV
-- conversion rules file, or auto-create a default one if it does not
-- exist. The rules filename may be specified, otherwise it will be
-- derived from the CSV filename (unless the filename is - in which case
-- an error will be raised.)
readJournalFromCsv :: Maybe FilePath -> FilePath -> String -> IO (Either String Journal)
readJournalFromCsv rulesfile csvfile csvdata = do
let usingStdin = csvfile == "-"
-- rulesFileSpecified = isJust $ rules_file_ opts
-- when (usingStdin && (not rulesFileSpecified)) $ error' "please use --rules-file to specify a rules file when converting stdin"
csvparse <- parseCsv csvfile content
rulesfile' = case rulesfile of
Just f -> f
Nothing -> if usingStdin
then error' "please use --rules-file to specify a rules file when converting stdin"
else rulesFileFor csvfile
created <- ensureRulesFileExists rulesfile'
if created
then hPrintf stderr "creating default conversion rules file %s, edit this file for better results\n" rulesfile'
else hPrintf stderr "using conversion rules file %s\n" rulesfile'
rules <- liftM (either (error'.show) id) $ parseCsvRulesFile rulesfile'
csvparse <- parseCsv csvfile csvdata
let records = case csvparse of
Left e -> error' $ show e
Right rs -> filter (/= [""]) rs
rules <- case csvrules of
Nothing -> do
let rulesfile = rulesFileFor csvfile
exists <- doesFileExist rulesfile
if (not exists)
then do
hPrintf stderr "creating conversion rules file %s, edit this file for better results\n" rulesfile
writeFile rulesfile initialRulesFileContent
else
hPrintf stderr "using conversion rules file %s\n" rulesfile
liftM (either (error'.show) id) $ parseCsvRulesFile rulesfile
Just r -> return r
let invalid = validateRules rules
-- when (debug_ opts) $ hPrintf stderr "rules: %s\n" (show rules)
when (isJust invalid) $ error (fromJust invalid)
let requiredfields = max 2 (maxFieldIndex rules + 1)
badrecords = take 1 $ filter ((< requiredfields).length) records
if null badrecords
@ -127,11 +135,24 @@ journalFromCsv csvrules csvfile content = do
, show $ head badrecords
])
-- | Ensure there is a conversion rules file at the given path, creating a
-- default one if needed and returning True in this case.
ensureRulesFileExists :: FilePath -> IO Bool
ensureRulesFileExists f = do
exists <- doesFileExist f
if exists
then return False
else do
-- note Hledger.Utils.UTF8.* do no line ending conversion on windows,
-- we currently require unix line endings on all platforms.
writeFile f newRulesFileContent
return True
parseCsv :: FilePath -> String -> IO (Either ParseError CSV)
parseCsv path content =
parseCsv path csvdata =
case path of
"-" -> liftM (parseCSV "(stdin)") getContents
_ -> return $ parseCSV path content
_ -> return $ parseCSV path csvdata
-- | The highest (0-based) field index referenced in the field
-- definitions, or -1 if no fields are defined.
@ -155,8 +176,8 @@ maxFieldIndex r = maximumDef (-1) $ catMaybes [
rulesFileFor :: FilePath -> FilePath
rulesFileFor = flip replaceExtension ".rules"
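A quick illustration (file name hypothetical):
-- e.g. rulesFileFor "download.csv" == "download.rules"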
initialRulesFileContent :: String
initialRulesFileContent = let prognameandversion = "hledger" in
newRulesFileContent :: String
newRulesFileContent = let prognameandversion = "hledger" in
"# csv conversion rules file generated by " ++ prognameandversion ++ "\n" ++
"# Add rules to this file for more accurate conversion, see\n"++
"# http://hledger.org/MANUAL.html#convert\n" ++
@ -179,25 +200,19 @@ initialRulesFileContent = let prognameandversion = "hledger" in
"(TO|FROM) SAVINGS\n" ++
"assets:bank:savings\n"
validateRules :: CsvRules -> Maybe String
validateRules rules = let
hasAmount = isJust $ amountField rules
hasIn = isJust $ amountInField rules
hasOut = isJust $ amountOutField rules
in case (hasAmount, hasIn, hasOut) of
(True, True, _) -> Just "Don't specify amount-in-field when specifying amount-field"
(True, _, True) -> Just "Don't specify amount-out-field when specifying amount-field"
(_, False, True) -> Just "Please specify amount-in-field when specifying amount-out-field"
(_, True, False) -> Just "Please specify amount-out-field when specifying amount-in-field"
(False, False, False) -> Just "Please specify either amount-field, or amount-in-field and amount-out-field"
_ -> Nothing
-- rules file parser
parseCsvRulesFile :: FilePath -> IO (Either ParseError CsvRules)
parseCsvRulesFile f = do
s <- readFile f
return $ parseCsvRules f s
let rules = parseCsvRules f s
return $ case rules of
Left e -> Left e
Right r -> case validateRules r of
Left e -> Left $ toParseError e
Right r -> Right r
where
toParseError s = newErrorMessage (Message s) (initialPos "")
parseCsvRules :: FilePath -> String -> Either ParseError CsvRules
parseCsvRules rulesfile s = runParser csvrulesfile nullrules{baseAccount=takeBaseName rulesfile} rulesfile s
@ -339,8 +354,6 @@ accountrule = do
return (pats',acct)
<?> "account rule"
blanklines = many1 blankline
blankline = many spacenonewline >> newline >> return () <?> "blank line"
commentchar = oneOf ";#"
@ -356,6 +369,19 @@ matchreplacepattern = do
newline
return (matchpat,replpat)
validateRules :: CsvRules -> Either String CsvRules
validateRules rules =
let hasAmount = isJust $ amountField rules
hasIn = isJust $ amountInField rules
hasOut = isJust $ amountOutField rules
in case (hasAmount, hasIn, hasOut) of
(True, True, _) -> Left "Don't specify amount-in-field when specifying amount-field"
(True, _, True) -> Left "Don't specify amount-out-field when specifying amount-field"
(_, False, True) -> Left "Please specify amount-in-field when specifying amount-out-field"
(_, True, False) -> Left "Please specify amount-out-field when specifying amount-in-field"
(False, False, False) -> Left "Please specify either amount-field, or amount-in-field and amount-out-field"
_ -> Right rules
-- csv record conversion
formatD :: CsvRecord -> Bool -> Maybe Int -> Maybe Int -> HledgerFormatField -> String
formatD record leftJustified min max f = case f of
@ -483,8 +509,6 @@ identify rules defacct desc | null matchingrules = (defacct,desc)
newdesc = case r of Just repl -> regexReplaceCI p repl desc
Nothing -> desc
caseinsensitive = ("(?i)"++)
getAmount :: CsvRules -> CsvRecord -> String
getAmount rules fields = case amountField rules of
Just f -> maybe "" (atDef "" fields) $ Just f

View File

@ -1,119 +1,36 @@
{-# LANGUAGE RecordWildCards #-}
{-|
A reader for hledger's (and c++ ledger's) journal file format.
From the ledger 2.5 manual:
A reader for hledger's journal file format
(<http://hledger.org/MANUAL.html#the-journal-file>). hledger's journal
format is a compatible subset of c++ ledger's
(<http://ledger-cli.org/3.0/doc/ledger3.html#Journal-Format>), so this
reader should handle many ledger files as well. Example:
@
The ledger file format is quite simple, but also very flexible. It supports
many options, though typically the user can ignore most of them. They are
summarized below. The initial character of each line determines what the
line means, and how it should be interpreted. Allowable initial characters
are:
NUMBER A line beginning with a number denotes an entry. It may be followed by any
number of lines, each beginning with whitespace, to denote the entry's account
transactions. The format of the first line is:
DATE[=EDATE] [*|!] [(CODE)] DESC
If * appears after the date (with optional effective date), it indicates the entry
is cleared, which can mean whatever the user wants it to mean. If ! appears
after the date, it indicates the entry is pending; i.e., tentatively cleared from
the user's point of view, but not yet actually cleared. If a CODE appears in
parentheses, it may be used to indicate a check number, or the type of the
transaction. Following these is the payee, or a description of the transaction.
The format of each following transaction is:
ACCOUNT AMOUNT [; NOTE]
The ACCOUNT may be surrounded by parentheses if it is a virtual
transaction, or square brackets if it is a virtual transaction that must
balance. The AMOUNT can be followed by a per-unit transaction cost,
by specifying \@ AMOUNT, or a complete transaction cost with \@\@ AMOUNT.
Lastly, the NOTE may specify an actual and/or effective date for the
transaction by using the syntax [ACTUAL_DATE] or [=EFFECTIVE_DATE] or
[ACTUAL_DATE=EFFECTIVE_DATE].
= An automated entry. A value expression must appear after the equal sign.
After this initial line there should be a set of one or more transactions, just as
if it were a normal entry. If the amounts of the transactions have no commodity,
they will be applied as modifiers to whichever real transaction is matched by
the value expression.
~ A period entry. A period expression must appear after the tilde.
After this initial line there should be a set of one or more transactions, just as
if it were a normal entry.
! A line beginning with an exclamation mark denotes a command directive. It
must be immediately followed by the command word. The supported commands
are:
!include
Include the stated ledger file.
!account
The account name given is taken to be the parent of all transactions
that follow, until !end is seen.
!end Ends an account block.
; A line beginning with a semicolon indicates a comment, and is ignored.
Y If a line begins with a capital Y, it denotes the year used for all subsequent
entries that give a date without a year. The year should appear immediately
after the Y, for example: Y2004. This is useful at the beginning of a file, to
specify the year for that file. If all entries specify a year, however, this command
has no effect.
P Specifies a historical price for a commodity. These are usually found in a pricing
history file (see the -Q option). The syntax is:
P DATE SYMBOL PRICE
N SYMBOL Indicates that pricing information is to be ignored for a given symbol, nor will
quotes ever be downloaded for that symbol. Useful with a home currency, such
as the dollar ($). It is recommended that these pricing options be set in the price
database file, which defaults to ~/.pricedb. The syntax for this command is:
N SYMBOL
D AMOUNT Specifies the default commodity to use, by specifying an amount in the expected
format. The entry command will use this commodity as the default when none
other can be determined. This command may be used multiple times, to set
the default flags for different commodities; whichever is seen last is used as the
default commodity. For example, to set US dollars as the default commodity,
while also setting the thousands flag and decimal flag for that commodity, use:
D $1,000.00
C AMOUNT1 = AMOUNT2
Specifies a commodity conversion, where the first amount is given to be
equivalent to the second amount. The first amount should use the decimal precision
desired during reporting:
C 1.00 Kb = 1024 bytes
i, o, b, h
These four relate to timeclock support, which permits ledger to read timelog
files. See the timeclock's documentation for more info on the syntax of its
timelog files.
2012\/3\/24 gift
expenses:gifts $10
assets:cash
@
-}
module Hledger.Read.JournalReader (
emptyLine,
journalAddFile,
journalFile,
ledgeraccountname,
ledgerdatetime,
ledgerDefaultYear,
ledgerDirective,
ledgerHistoricalPrice,
reader,
someamount,
tests_Hledger_Read_JournalReader
-- * Reader
reader,
-- * Parsers used elsewhere
emptyLine,
journalFile,
ledgeraccountname,
ledgerdatetime,
ledgerDefaultYear,
ledgerDirective,
ledgerHistoricalPrice,
someamount,
parseJournalWith,
getParentAccount,
-- * Tests
tests_Hledger_Read_JournalReader
)
where
import Control.Monad
@ -131,9 +48,10 @@ import Safe (headDef)
import Test.HUnit
import Text.ParserCombinators.Parsec hiding (parse)
import Text.Printf
import System.FilePath
import System.Time (getClockTime)
import Hledger.Data
import Hledger.Read.Utils
import Hledger.Utils
import Prelude hiding (readFile)
import Hledger.Utils.UTF8 (readFile)
@ -149,13 +67,70 @@ format = "journal"
-- | Does the given file path and data provide hledger's journal file format ?
detect :: FilePath -> String -> Bool
detect f _ = fileSuffix f == format
detect f _ = takeExtension f == format
-- | Parse and post-process a "Journal" from hledger's journal file
-- format, or give an error.
parse :: Maybe ParseRules -> FilePath -> String -> ErrorT String IO Journal
parse :: Maybe FilePath -> FilePath -> String -> ErrorT String IO Journal
parse _ = parseJournalWith journalFile
-- parsing utils
-- | Flatten a list of JournalUpdate's into a single equivalent one.
combineJournalUpdates :: [JournalUpdate] -> JournalUpdate
combineJournalUpdates us = liftM (foldr (.) id) $ sequence us
-- | Given a JournalUpdate-generating parsec parser, file path and data string,
-- parse and post-process a Journal so that it's ready to use, or give an error.
parseJournalWith :: (GenParser Char JournalContext (JournalUpdate,JournalContext)) -> FilePath -> String -> ErrorT String IO Journal
parseJournalWith p f s = do
tc <- liftIO getClockTime
tl <- liftIO getCurrentLocalTime
y <- liftIO getCurrentYear
case runParser p nullctx{ctxYear=Just y} f s of
Right (updates,ctx) -> do
j <- updates `ap` return nulljournal
case journalFinalise tc tl f s ctx j of
Right j' -> return j'
Left estr -> throwError estr
Left e -> throwError $ show e
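
For illustration, a format module builds its rParser by passing its own top-level parser to parseJournalWith, as the journal reader above and the timelog reader below do; a hypothetical sketch:

-- `myFile` stands in for some top-level parser of type
-- GenParser Char JournalContext (JournalUpdate,JournalContext);
-- journal-style readers ignore the rules file argument.
myParse :: Maybe FilePath -> FilePath -> String -> ErrorT String IO Journal
myParse _rulesfile = parseJournalWith myFile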
setYear :: Integer -> GenParser tok JournalContext ()
setYear y = updateState (\ctx -> ctx{ctxYear=Just y})
getYear :: GenParser tok JournalContext (Maybe Integer)
getYear = liftM ctxYear getState
setCommodity :: Commodity -> GenParser tok JournalContext ()
setCommodity c = updateState (\ctx -> ctx{ctxCommodity=Just c})
getCommodity :: GenParser tok JournalContext (Maybe Commodity)
getCommodity = liftM ctxCommodity getState
pushParentAccount :: String -> GenParser tok JournalContext ()
pushParentAccount parent = updateState addParentAccount
where addParentAccount ctx0 = ctx0 { ctxAccount = parent : ctxAccount ctx0 }
popParentAccount :: GenParser tok JournalContext ()
popParentAccount = do ctx0 <- getState
case ctxAccount ctx0 of
[] -> unexpected "End of account block with no beginning"
(_:rest) -> setState $ ctx0 { ctxAccount = rest }
getParentAccount :: GenParser tok JournalContext String
getParentAccount = liftM (concatAccountNames . reverse . ctxAccount) getState
addAccountAlias :: (AccountName,AccountName) -> GenParser tok JournalContext ()
addAccountAlias a = updateState (\(ctx@Ctx{..}) -> ctx{ctxAliases=a:ctxAliases})
getAccountAliases :: GenParser tok JournalContext [(AccountName,AccountName)]
getAccountAliases = liftM ctxAliases getState
clearAccountAliases :: GenParser tok JournalContext ()
clearAccountAliases = updateState (\(ctx@Ctx{..}) -> ctx{ctxAliases=[]})
--
-- | Top-level journal parser. Returns a single composite, I/O performing,
-- error-raising "JournalUpdate" (and final "JournalContext") which can be
-- applied to an empty journal to get the final result.
@ -164,7 +139,7 @@ journalFile = do
journalupdates <- many journalItem
eof
finalctx <- getState
return $ (juSequence journalupdates, finalctx)
return $ (combineJournalUpdates journalupdates, finalctx)
where
-- As all journal line types can be distinguished by the first
-- character, excepting transactions versus empty (blank or
@ -228,7 +203,7 @@ ledgerInclude = do
txt <- readFileOrError outerPos filepath
let inIncluded = show outerPos ++ " in included file " ++ show filename ++ ":\n"
case runParser journalFile outerState filepath txt of
Right (ju,_) -> juSequence [return $ journalAddFile (filepath,txt), ju] `catchError` (throwError . (inIncluded ++))
Right (ju,_) -> combineJournalUpdates [return $ journalAddFile (filepath,txt), ju] `catchError` (throwError . (inIncluded ++))
Left err -> throwError $ inIncluded ++ show err
where readFileOrError pos fp =
ErrorT $ liftM Right (readFile fp) `catch`

View File

@ -1,6 +1,12 @@
{-|
A reader for the timelog file format generated by timeclock.el.
A reader for the timelog file format generated by timeclock.el
(<http://www.emacswiki.org/emacs/TimeClock>). Example:
@
i 2007\/03\/10 12:26:00 hledger
o 2007\/03\/10 17:26:02
@
From timeclock.el 2.6:
@ -32,29 +38,26 @@ i, o or O. The meanings of the codes are:
now finished. Useful for creating summary reports.
@
Example:
@
i 2007/03/10 12:26:00 hledger
o 2007/03/10 17:26:02
@
-}
module Hledger.Read.TimelogReader (
reader,
tests_Hledger_Read_TimelogReader
-- * Reader
reader,
-- * Tests
tests_Hledger_Read_TimelogReader
)
where
import Control.Monad
import Control.Monad.Error
import Test.HUnit
import Text.ParserCombinators.Parsec hiding (parse)
import System.FilePath
import Hledger.Data
import Hledger.Read.Utils
import Hledger.Read.JournalReader (ledgerDirective, ledgerHistoricalPrice,
ledgerDefaultYear, emptyLine, ledgerdatetime)
import Hledger.Read.JournalReader (
ledgerDirective, ledgerHistoricalPrice, ledgerDefaultYear, emptyLine, ledgerdatetime,
parseJournalWith, getParentAccount
)
import Hledger.Utils
@ -66,12 +69,12 @@ format = "timelog"
-- | Does the given file path and data provide timeclock.el's timelog format ?
detect :: FilePath -> String -> Bool
detect f _ = fileSuffix f == format
detect f _ = takeExtension f == format
-- | Parse and post-process a "Journal" from timeclock.el's timelog
-- format, saving the provided file path and the current time, or give an
-- error.
parse :: Maybe ParseRules -> FilePath -> String -> ErrorT String IO Journal
parse :: Maybe FilePath -> FilePath -> String -> ErrorT String IO Journal
parse _ = parseJournalWith timelogFile
timelogFile :: GenParser Char JournalContext (JournalUpdate,JournalContext)

View File

@ -1,88 +0,0 @@
{-# LANGUAGE RecordWildCards #-}
{-|
Utilities used throughout hledger's read system.
-}
module Hledger.Read.Utils
where
import Control.Monad.Error
import Data.List
import System.Directory (getHomeDirectory)
import System.FilePath(takeDirectory,combine)
import System.Time (getClockTime)
import Text.ParserCombinators.Parsec
import Hledger.Data.Types
import Hledger.Utils
import Hledger.Data.Posting
import Hledger.Data.Dates (getCurrentYear)
import Hledger.Data.Journal
juSequence :: [JournalUpdate] -> JournalUpdate
juSequence us = liftM (foldr (.) id) $ sequence us
-- | Given a JournalUpdate-generating parsec parser, file path and data string,
-- parse and post-process a Journal so that it's ready to use, or give an error.
parseJournalWith :: (GenParser Char JournalContext (JournalUpdate,JournalContext)) -> FilePath -> String -> ErrorT String IO Journal
parseJournalWith p f s = do
tc <- liftIO getClockTime
tl <- liftIO getCurrentLocalTime
y <- liftIO getCurrentYear
case runParser p nullctx{ctxYear=Just y} f s of
Right (updates,ctx) -> do
j <- updates `ap` return nulljournal
case journalFinalise tc tl f s ctx j of
Right j' -> return j'
Left estr -> throwError estr
Left e -> throwError $ show e
setYear :: Integer -> GenParser tok JournalContext ()
setYear y = updateState (\ctx -> ctx{ctxYear=Just y})
getYear :: GenParser tok JournalContext (Maybe Integer)
getYear = liftM ctxYear getState
setCommodity :: Commodity -> GenParser tok JournalContext ()
setCommodity c = updateState (\ctx -> ctx{ctxCommodity=Just c})
getCommodity :: GenParser tok JournalContext (Maybe Commodity)
getCommodity = liftM ctxCommodity getState
pushParentAccount :: String -> GenParser tok JournalContext ()
pushParentAccount parent = updateState addParentAccount
where addParentAccount ctx0 = ctx0 { ctxAccount = parent : ctxAccount ctx0 }
popParentAccount :: GenParser tok JournalContext ()
popParentAccount = do ctx0 <- getState
case ctxAccount ctx0 of
[] -> unexpected "End of account block with no beginning"
(_:rest) -> setState $ ctx0 { ctxAccount = rest }
getParentAccount :: GenParser tok JournalContext String
getParentAccount = liftM (concatAccountNames . reverse . ctxAccount) getState
addAccountAlias :: (AccountName,AccountName) -> GenParser tok JournalContext ()
addAccountAlias a = updateState (\(ctx@Ctx{..}) -> ctx{ctxAliases=a:ctxAliases})
getAccountAliases :: GenParser tok JournalContext [(AccountName,AccountName)]
getAccountAliases = liftM ctxAliases getState
clearAccountAliases :: GenParser tok JournalContext ()
clearAccountAliases = updateState (\(ctx@Ctx{..}) -> ctx{ctxAliases=[]})
-- | Convert a possibly relative, possibly tilde-containing file path to an absolute one.
-- using the current directory from a parsec source position. ~username is not supported.
expandPath :: (MonadIO m) => SourcePos -> FilePath -> m FilePath
expandPath pos fp = liftM mkAbsolute (expandHome fp)
where
mkAbsolute = combine (takeDirectory (sourceName pos))
expandHome inname | "~/" `isPrefixOf` inname = do homedir <- liftIO getHomeDirectory
return $ homedir ++ drop 1 inname
| otherwise = return inname
fileSuffix :: FilePath -> String
fileSuffix = reverse . takeWhile (/='.') . reverse . dropWhile (/='.')

View File

@ -25,6 +25,7 @@ module Hledger.Utils (---- provide these frequently used modules - or not, for c
)
where
import Codec.Binary.UTF8.String as UTF8 (decodeString, encodeString, isUTF8Encoded)
import Control.Monad.Error
import Data.Char
import Data.List
import Data.Maybe
@ -32,6 +33,8 @@ import Data.Time.Clock
import Data.Time.LocalTime
import Data.Tree
import Debug.Trace
import System.Directory (getHomeDirectory)
import System.FilePath(takeDirectory,combine)
import System.Info (os)
import Test.HUnit
import Text.ParserCombinators.Parsec
@ -425,3 +428,13 @@ isRight = not . isLeft
-- | Apply a function the specified number of times. Possibly uses O(n) stack ?
applyN :: Int -> (a -> a) -> a -> a
applyN n f = (!! n) . iterate f
-- | Convert a possibly relative, possibly tilde-containing file path to an absolute one.
-- using the current directory from a parsec source position. ~username is not supported.
expandPath :: (MonadIO m) => SourcePos -> FilePath -> m FilePath
expandPath pos fp = liftM mkAbsolute (expandHome fp)
where
mkAbsolute = combine (takeDirectory (sourceName pos))
expandHome inname | "~/" `isPrefixOf` inname = do homedir <- liftIO getHomeDirectory
return $ homedir ++ drop 1 inname
| otherwise = return inname
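
A sketch of how expandPath resolves paths, assuming the parsec position's sourceName is /home/u/main.journal (paths hypothetical):

-- "~/food.journal"  -> home directory ++ "/food.journal"
-- "2012.journal"    -> "/home/u/2012.journal"  (relative to the including file's directory)
-- "/tmp/x.journal"  -> "/tmp/x.journal"        (combine leaves absolute paths unchanged)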