notes: clint's patches
**** absorb its directives into journal format ?
*** support apostrophe digit group separator
*** detect .hs plugins
*** Clint's ofx support
*** more powerful storage layer
**** Clint's filestore_proof_of_concept.dpatch

New patches:

[filestore-proof-of-concept
Clint Adams <clint@softwarefreedom.org>**20110901172739
Ignore-this: 1991477c2b70d276665c52478dc54d3d

This is a somewhat broken replacement of the traditional file
storage with a forced darcs repo. It assumes that the
darcs repo already exists since Data.FileStore refuses to
initialize a repository in an extant directory. It does not
handle any error conditions well.
] hunk ./hledger-lib/Hledger/Read.hs 104
  when (not exists) $ do
    hPrintf stderr "No journal file \"%s\", creating it.\n" f
    hPrintf stderr "Edit this file or use \"hledger add\" or \"hledger web\" to add transactions.\n"
-    emptyJournal >>= writeFile f
+    emptyJournal >>= writeFileWithBackup f

-- | Give the content for a new auto-created journal file.
emptyJournal :: IO String
hunk ./hledger-lib/Hledger/Utils.hs 40
import Text.ParserCombinators.Parsec
import Text.Printf
import Text.RegexPR
+import System.FilePath (takeFileName, takeDirectory)
+import qualified Data.FileStore.Types as DFT
+import qualified Data.FileStore.Generic as DFG
+import Data.FileStore.Darcs (darcsFileStore)
-- import qualified Data.Map as Map
--
-- import Prelude hiding (readFile,writeFile,getContents,putStr,putStrLn)
hunk ./hledger-lib/Hledger/Utils.hs 432
-- | Apply a function the specified number of times. Possibly uses O(n) stack ?
applyN :: Int -> (a -> a) -> a -> a
applyN n f = (!! n) . iterate f
+
+-- Store file in VCS; Data.FileStore takes care of only committing
+-- when necessary.
+
+filestoreSave :: FilePath -> String -> IO ()
+filestoreSave f t = DFT.save assumedRepo assumedFilename assumedAuthor logMessage t
+  where
+    assumedRepo = darcsFileStore (takeDirectory f)
+    assumedFilename = takeFileName f
+    assumedAuthor = (DFT.Author "Hledger Role" "hledger@fake")
+    logMessage = "Some kind of change committed by some part of the hledger suite"
+
+writeFileWithBackup :: FilePath -> String -> IO ()
+writeFileWithBackup = filestoreSave
+
+-- modify existing file in filestore
+filestoreModify :: FilePath -> DFT.RevisionId -> String -> IO (Either DFT.MergeInfo ())
+filestoreModify f lr t = DFG.modify assumedRepo assumedFilename lr assumedAuthor logMessage t
+  where
+    assumedRepo = darcsFileStore (takeDirectory f)
+    assumedFilename = takeFileName f
+    assumedAuthor = (DFT.Author "Hledger Role" "hledger@fake")
+    logMessage = "Some kind of change committed by some part of the hledger suite"
+
+filestoreAppend :: FilePath -> String -> IO ()
+filestoreAppend f t = do
+  lastrev <- DFT.latest assumedRepo assumedFilename
+  oldcontents <- DFT.retrieve assumedRepo assumedFilename (Just lastrev)
+  result <- filestoreModify f lastrev (oldcontents ++ "\n\n" ++ t)
+  either (\x -> putStrLn "Help, the append didn't work and I am failing miserably.") (\x -> return ()) result
+  where
+    assumedRepo = darcsFileStore (takeDirectory f)
+    assumedFilename = takeFileName f
hunk ./hledger-lib/hledger-lib.cabal 60
                 ,containers
                 ,directory
                 ,filepath
+                ,filestore
                 ,mtl
                 ,old-locale
                 ,old-time
hunk ./hledger/Hledger/Cli/Add.hs 31
import qualified Data.Set as Set

import Hledger
-import Prelude hiding (putStr, putStrLn, appendFile)
-import Hledger.Utils.UTF8 (putStr, putStrLn, appendFile)
+import Prelude hiding (putStr, putStrLn)
+import Hledger.Utils.UTF8 (putStr, putStrLn)
import Hledger.Cli.Options
import Hledger.Cli.Register (postingsReportAsText)
import Hledger.Cli.Utils
hunk ./hledger/Hledger/Cli/Add.hs 194
journalAddTransaction :: Journal -> CliOpts -> Transaction -> IO Journal
journalAddTransaction j@Journal{jtxns=ts} opts t = do
  let f = journalFilePath j
-  appendToJournalFile f $ showTransaction t
+  filestoreAppend f $ showTransaction t
  when (debug_ opts) $ do
    putStrLn $ printf "\nAdded transaction to %s:" f
    putStrLn =<< registerFromString (show t)
hunk ./hledger/Hledger/Cli/Add.hs 200
  return j{jtxns=ts++[t]}

--- | Append data to a journal file; or if the file is "-", dump it to stdout.
-appendToJournalFile :: FilePath -> String -> IO ()
-appendToJournalFile f s =
-  if f == "-"
-  then putStr $ sep ++ s
-  else appendFile f $ sep++s
-    where
-      -- appendFile means we don't need file locking to be
-      -- multi-user-safe, but also that we can't figure out the minimal
-      -- number of newlines needed as separator
-      sep = "\n\n"
-      -- sep | null $ strip t = ""
-      --     | otherwise = replicate (2 - min 2 (length lastnls)) '\n'
-      --     where lastnls = takeWhile (=='\n') $ reverse t
-
-- | Convert a string of journal data into a register report.
registerFromString :: String -> IO String
registerFromString s = do
hunk ./hledger/Hledger/Cli/Utils.hs 18
     journalSpecifiedFileIsNewer,
     fileModificationTime,
     openBrowserOn,
-    writeFileWithBackup,
-    writeFileWithBackupIfChanged,
     readFileStrictly,
     Test(TestList),
    )
hunk ./hledger/Hledger/Cli/Utils.hs 25
import Control.Exception
import Data.List
import Data.Maybe
-import Safe (readMay)
import System.Console.CmdArgs
hunk ./hledger/Hledger/Cli/Utils.hs 26
-import System.Directory (getModificationTime, getDirectoryContents, copyFile)
+import System.Directory (getModificationTime)
import System.Exit
hunk ./hledger/Hledger/Cli/Utils.hs 28
-import System.FilePath ((</>), splitFileName, takeDirectory)
import System.Info (os)
import System.Process (readProcessWithExitCode)
import System.Time (ClockTime, getClockTime, diffClockTimes, TimeDiff(TimeDiff))
hunk ./hledger/Hledger/Cli/Utils.hs 123
-- what not.
-- ::ShellExecute(NULL, "open", "www.somepage.com", NULL, NULL, SW_SHOWNORMAL);

--- | Back up this file with a (incrementing) numbered suffix then
--- overwrite it with this new text, or give an error, but only if the text
--- is different from the current file contents, and return a flag
--- indicating whether we did anything.
-writeFileWithBackupIfChanged :: FilePath -> String -> IO Bool
-writeFileWithBackupIfChanged f t = do
-  s <- readFile f
-  if t == s then return False
-            else backUpFile f >> writeFile f t >> return True
-
--- | Back up this file with a (incrementing) numbered suffix, then
--- overwrite it with this new text, or give an error.
-writeFileWithBackup :: FilePath -> String -> IO ()
-writeFileWithBackup f t = backUpFile f >> writeFile f t
-
readFileStrictly :: FilePath -> IO String
readFileStrictly f = readFile f >>= \s -> Control.Exception.evaluate (length s) >> return s
hunk ./hledger/Hledger/Cli/Utils.hs 125
-
--- | Back up this file with a (incrementing) numbered suffix, or give an error.
-backUpFile :: FilePath -> IO ()
-backUpFile fp = do
-  fs <- safeGetDirectoryContents $ takeDirectory $ fp
-  let (d,f) = splitFileName fp
-      versions = catMaybes $ map (f `backupNumber`) fs
-      next = maximum (0:versions) + 1
-      f' = printf "%s.%d" f next
-  copyFile fp (d </> f')
-
-safeGetDirectoryContents :: FilePath -> IO [FilePath]
-safeGetDirectoryContents "" = getDirectoryContents "."
-safeGetDirectoryContents fp = getDirectoryContents fp
-
--- | Does the second file represent a backup of the first, and if so which version is it ?
-backupNumber :: FilePath -> FilePath -> Maybe Int
-backupNumber f g = case regexMatch ("^" ++ f ++ "\\.([0-9]+)$") g of
-    Just (_, ((_,suffix):_)) -> readMay suffix
-    _ -> Nothing
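
The patch above assumes the darcs repository already exists. A minimal
sketch of how that precondition could be met before the first
writeFileWithBackup/filestoreAppend call (not part of Clint's patch;
ensureDarcsRepo is a hypothetical helper, and it assumes a darcs
executable on the PATH):

import Control.Monad (unless)
import System.Directory (doesDirectoryExist)
import System.FilePath (takeDirectory, (</>))
import System.Process (readProcessWithExitCode)

-- Create a darcs repo around the journal file if one isn't there yet.
-- Data.FileStore refuses to initialize a repository in an extant
-- directory, but "darcs initialize" itself doesn't mind existing files.
ensureDarcsRepo :: FilePath -> IO ()
ensureDarcsRepo journalPath = do
  let dir = takeDirectory journalPath
  hasRepo <- doesDirectoryExist (dir </> "_darcs")
  unless hasRepo $ do
    _ <- readProcessWithExitCode "darcs" ["initialize", "--repodir=" ++ dir] ""
    return ()

Run once before any filestore operation, something like this would
remove the "repo must already exist" assumption noted in the patch
description.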
*** web api
*** client-side ui
*** support -V ?
*** web: better web ui/gui
*** nice standard financial reports
*** more automated bank data conversion
*** parse more file formats - gnucash, qif, ofx, csv, etc.
**** ofx reader
***** clint's code
Date: Sun, 18 Sep 2011 12:26:16 -0400
From: Clint Adams <clint@softwarefreedom.org>
To: hledger@googlegroups.com
Subject: OFX conversion
Message-ID: <20110918162616.GA18874@softwarefreedom.org>

This is definitely suboptimal but it seems to work on the OFX 1.0.2
output from AmEx.

{-# LANGUAGE Arrows, NoMonomorphismRestriction #-}
import Text.XML.HXT.Core
import Text.Printf (printf)

import Data.List (groupBy)
import Data.List.Split (splitOn)

import Data.Maybe (fromMaybe)

import Data.Time.Calendar (Day (ModifiedJulianDay))
import Data.Time.Format (formatTime)
import Data.Time.LocalTime (LocalTime (LocalTime), TimeOfDay (TimeOfDay))
import Data.Time.Parse (strptime)

import System.Locale (defaultTimeLocale)
import System.Process (readProcessWithExitCode)

import Hledger.Cli.Format (FormatString (FormatField), Field (FieldNo))
import Hledger.Cli.Convert

normAmount :: String -> String
normAmount amt | amt == "" = ""
               | otherwise = printf "%.2f" (read amt :: Double)

compressWhitespace :: String -> String
compressWhitespace x = map head $ groupSpaces x
  where groupSpaces "" = [""]
        groupSpaces x = groupBy (\x y -> x==' ' && y==' ') x

data Transaction = Transaction
    { trnType, dtUser, dtPosted, trnAmt, fitId, refNum, name, memo :: String }
    deriving (Show, Eq)

-- this doesn't get the timezone right
ofxDateParse :: String -> String
ofxDateParse x = formatTime defaultTimeLocale "%Y-%m-%d" (fst (fromMaybe (LocalTime (ModifiedJulianDay 100) (TimeOfDay 0 0 0), "") (strptime "%Y%m%d%H%M%S.%OS" x)))

parseFakeXML string = readString [ withValidate no
                                 , withRemoveWS yes
                                 ] string

atTag tag = deep (isElem >>> hasName tag)
text = getChildren >>> getText
textAtTag tag = atTag tag >>> text

getTransactions = atTag "STMTTRN" >>>
  proc l -> do
    trnType <- textAtTag "TRNTYPE" -< l
    dtUser <- textAtTag "DTUSER" -< l
    dtPosted <- textAtTag "DTPOSTED" -< l
    trnAmt <- textAtTag "TRNAMT" -< l
    fitId <- textAtTag "FITID" -< l
    refNum <- textAtTag "REFNUM" -< l
    name <- textAtTag "NAME" -< l
    memo <- textAtTag "MEMO" -< l
    returnA -< Transaction
      { trnType = trnType,
        dtUser = ofxDateParse dtUser,
        dtPosted = ofxDateParse dtPosted,
        trnAmt = trnAmt,
        fitId = fitId,
        refNum = refNum,
        name = name,
        memo = memo }

ofxrules = CsvRules {
  dateField=Just 0,
  dateFormat=Nothing,
  statusField=Nothing,
  codeField=Nothing,
  descriptionField=[FormatField False Nothing Nothing (FieldNo 2)],
  amountField=Just 1,
  inField=Nothing,
  outField=Nothing,
  currencyField=Nothing,
  baseCurrency=Nothing,
  accountField=Nothing,
  account2Field=Nothing,
  effectiveDateField=Nothing,
  baseAccount="Liabilities:American Express",
  accountRules=[]
}

txnToCsvRecord :: Transaction -> CsvRecord
txnToCsvRecord x = [dtUser x, normAmount (trnAmt x), compressWhitespace (name x) ++ "(" ++ refNum x ++ ")", fitId x, memo x]

printTxnWithComment :: CsvRecord -> IO ()
printTxnWithComment x = putStrLn ("; " ++ x !! 3 ++ " - " ++ x !! 4) >> printTxn False ofxrules x

main = do
  filecontents <- readFile "/tmp/ofx.ofx"
  let splitfilecontents = splitOn "\n\n" filecontents
  let ofxheader = head splitfilecontents
  let ofxsgml = splitfilecontents !! 1
  (_, fakexml, _) <- readProcessWithExitCode "/usr/bin/sgml2xml" [] ofxsgml

  transes <- runX (parseFakeXML fakexml >>> getTransactions)

  let records = map txnToCsvRecord transes
  mapM_ (printTxnWithComment) records
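
For reference, the STMTTRN records this arrow pulls apart look roughly
like the following in OFX 1.x SGML (illustrative values only, not real
AmEx data; OFX 1.x typically omits closing tags on leaf elements, which
is why the file is run through sgml2xml first):

<STMTTRN>
<TRNTYPE>DEBIT
<DTUSER>20110914120000.000
<DTPOSTED>20110915120000.000
<TRNAMT>-42.50
<FITID>320112580012345678
<REFNUM>320112580012345678
<NAME>SOME MERCHANT NAME      CITY
<MEMO>0123456789
</STMTTRN>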
**** qif reader
***** clint's code
Date: Tue, 25 Oct 2011 11:46:24 -0400
From: Clint Adams <clint@softwarefreedom.org>
To: hledger@googlegroups.com
Cc: thomas@marketpsychdata.com, jjenning@fastmail.fm
Subject: Re: QIF parsing
Message-ID: <20111025154624.GA3097@softwarefreedom.org>

I had to update my QIF converter for modern hledger; included below.

Thomas, I didn't see your reply because I'm not subscribed to this
Google Group. I believe that QuickBooks uses OFX, not QIF, so you'd be
more interested in

http://groups.google.com/group/hledger/browse_thread/thread/e03ccc655347ba72

or

http://www.dingoskidneys.com/~jaredj/

------8<-------

import Text.Parsec
import Text.Parsec.String

import Control.Monad.State as State

import System (getArgs)
import Data.List (groupBy)
import Data.Maybe (fromMaybe)
import qualified Data.Map as Map
import Text.Printf (printf)

import Hledger.Cli.Format (FormatString (FormatField), Field (FieldNo))
import Hledger.Cli.Convert

qifFile :: GenParser Char st (String,[[TransactionDetail]])
qifFile = do
  skipMany newline
  dtype <- typeHeader
  newline
  trans <- endBy1 transaction recordSep
  return $ (dtype,trans)

typeHeader :: GenParser Char st String
typeHeader = do
  string "!Type:"
  dataType

dataType :: GenParser Char st String
dataType = do string "Cash"
                <|> string "Bank"
                <|> string "CCard"
                <|> string "Invst"
                <|> string "Oth A"
                <|> string "Oth L"
                <|> string "Invoice"

transaction :: GenParser Char st [TransactionDetail]
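
For context, the QIF input this parser targets is record-oriented plain
text like the following (illustrative values; "!Type:" gives the account
type, the D/T/P/M lines carry date, amount, payee and memo, and "^" ends
each record):

!Type:CCard
D10/25/2011
T-42.50
PSOME MERCHANT
MCARD PURCHASE
^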
*** download via ofx protocol
*** parsing: more date syntax ? last nov, next friday, optional this, week of
*** parsing: more period syntax ? every N days, biweekly