toldg now uses Python built-in templating, not jinja2
getofx uses ofx library for extracting transactions

This commit is contained in:
2020-08-10 19:07:13 -04:00
parent 82e906885a
commit b9adfc0960
2 changed files with 120 additions and 95 deletions

177
getofx.py
View File

@@ -1,94 +1,137 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import datetime import os
import ofxtools
import json
import logging
import sys import sys
import csv import csv
import xml.etree.ElementTree as ET import json
import logging
import datetime
import ofxtools
from typing import List
from dataclasses import dataclass
from ofxtools import OFXClient from ofxtools import OFXClient
from ofxtools.Client import StmtRq, CcStmtEndRq, CcStmtRq from ofxtools.Client import StmtRq, CcStmtEndRq, CcStmtRq
from functools import namedtuple from ofxtools.Parser import OFXTree
def get_transactions(data): @dataclass
Transaction = namedtuple("Transaction", class ClientConfig:
["details", "date", "description", url: str
"amount", "type", "balance", "slip"]) userid: str
root = ET.fromstring(data) org: str
ts = [] clientuid: str
for statement in root.iter("STMTTRN"): fid: str
description, date, amount = "", "", "" bankid: str
for child in statement: version: int
if child.tag == "TRNAMT":
amount = child.text
elif child.tag == "DTPOSTED": @dataclass
d = datetime.datetime.strptime(child.text[:8], "%Y%m%d") class AccountConfig:
date = d.strftime("%m/%d/%Y") name: str
elif child.tag == "NAME": accttype: str
if description: acctid: str
description = child.text + " " + description csv_file: str
else: fields: List[str]
description = child.text
elif child.tag == "MEMO":
if description: @dataclass
description = description + " " + child.text class Config:
else: """
description = child.text Basic class for abstracting the configuration.
t = Transaction("-", date, description, amount, "-", "-", "-") """
ts.append(t) secret: str
client: ClientConfig
accounts: List[AccountConfig]
@dataclass
class Transaction:
date: str
description: str
amount: str
def get_transactions(client: OFXClient, secret: str, account: AccountConfig):
dtstart = datetime.datetime(2020, 1, 1, tzinfo=ofxtools.utils.UTC)
dtend = datetime.datetime(2020, 12, 31, tzinfo=ofxtools.utils.UTC)
if account.accttype.upper() in ("CHECKING", "SAVINGS"):
rq = StmtRq(acctid=account.acctid, accttype=account.accttype.upper(),
dtstart=dtstart, dtend=dtend)
else:
rq = CcStmtRq(acctid=account.acctid, dtstart=dtstart, dtend=dtend)
response = client.request_statements(secret, rq)
parser = OFXTree()
parser.parse(response)
ofx = parser.convert()
ts = [Transaction(t.dtposted.strftime("%m/%d/%Y"),
t.name + " " + t.memo if t.memo else t.name,
str(t.trnamt))
for t in ofx.statements[0].banktranlist]
return ts return ts
def process_account(client, secret, year, name, accttype, acctid, csv_file): def write_csv(account: AccountConfig, transactions: List[Transaction]):
dtstart = datetime.datetime(int(year), 1, 1, tzinfo=ofxtools.utils.UTC)
dtend = datetime.datetime(int(year), 12, 31, tzinfo=ofxtools.utils.UTC)
if accttype.upper() in ("CHECKING", "SAVINGS"): def transaction_to_csv_row(t: Transaction) -> List[str]:
rq = StmtRq(acctid=acctid, accttype=accttype.upper(), """ This allows the user to specify how to order the fields in the CSV
dtstart=dtstart, dtend=dtend) file. I have implemented this feature because the columns in my
checking account and in my credit card accounts are different. If the
field is one of 'date', 'description', or 'amount' we get that
attribute from the transaction. Otherwise, we use the field itself
(usually an empty string in my case). """
return [getattr(t, f) if hasattr(t, f) else f
for f in account.fields]
status = "no change"
csv_file = account.csv_file
if not os.path.isfile(csv_file):
status = "new"
with open(account.csv_file, "w") as f:
csv_writer = csv.writer(f)
csv_writer.writerow(["date", "description", "amount"])
for t in transactions:
r = transaction_to_csv_row(t)
csv_writer.writerow(r)
else: else:
rq = CcStmtRq(acctid=acctid, dtstart=dtstart, dtend=dtend) # TODO: diff rows and append only the new ones.
pass
response = client.request_statements(secret, rq) logging.warning(f"{account.name:30} -> {account.csv_file:30} | {status}")
data = response.read().decode()
# with open(csv_file.replace(".csv", ".xml"), "w") as f:
# f.write(data)
transactions = get_transactions(data)
with open(csv_file, "w") as f:
csv_writer = csv.writer(f)
csv_writer.writerow(["details", "date", "description",
"amount", "type", "balance", "slip"])
for t in transactions:
csv_writer.writerow(t)
#if t.date.startswith(year):
def get_client(url, userid, org, fid, clientuid, bankid, version, **kwargs):
return OFXClient(url, userid=userid, org=org, fid=fid, def get_client(c: ClientConfig) -> OFXClient:
clientuid=clientuid, bankid=bankid, version=version, return OFXClient(c.url, userid=c.userid, org=c.org, fid=c.fid,
prettyprint=True) clientuid=c.clientuid, bankid=c.bankid,
version=c.version, prettyprint=True)
def main(config): def parse_config(config_file: str) -> Config:
client = get_client(**config["client"]) with open(config_file, 'r') as f:
year = config["year"] # We could use the dacite package if the configuration
secret = config["secret"] # gets more complex and for automatic type checking, but
for account in config["accounts"]: # probably not worth it at this point.
name = account["name"] config = Config(**json.load(f))
logging.info(f"Processing {name}.") config.client = ClientConfig(**config.client)
process_account(client, secret, year, **account) config.accounts = [AccountConfig(**a) for a in config.accounts]
return config
def main(config: Config):
client = get_client(config.client)
for account in config.accounts:
transactions = get_transactions(client, config.secret, account)
write_csv(account, transactions)
if __name__ == "__main__": if __name__ == "__main__":
logging.basicConfig(level=logging.WARNING, format='%(message)s')
try: try:
config_file = sys.argv[1] config_file = sys.argv[1]
except IndexError: except IndexError:
config_file = "gather.json" config_file = "getofx.json"
with open(config_file, 'r') as f: config = parse_config(config_file)
config = json.load(f)
main(config) main(config)

View File

@@ -8,9 +8,7 @@ import time
import re import re
import datetime import datetime
import logging import logging
import jinja2
import shutil import shutil
import tempfile
from dataclasses import dataclass, field from dataclasses import dataclass, field
from typing import List, Tuple from typing import List, Tuple
@@ -80,10 +78,9 @@ class LdgTransaction:
LEDGER_TRANSACTION_TEMPLATE = """ LEDGER_TRANSACTION_TEMPLATE = """
{{t.date}} {{t.description}} ; {{t.row}} {t.date} {t.description} ; {t.row}
{{t.account2}} {{t.currency}} {{t.debit}} {t.account2} {t.currency} {t.debit}
{{t.account1}} {{t.currency}} {{t.credit}} {t.account1} {t.currency} {t.credit}
""" """
@@ -207,37 +204,21 @@ def get_transactions(csv_file, config: CsvConfig, mappings: List[CsvMapping]):
def render_to_file(transactions, csv_file, ledger_file, template_file=""): def render_to_file(transactions, csv_file, ledger_file, template_file=""):
if template_file: content = "".join([LEDGER_TRANSACTION_TEMPLATE.format(t=t)
dirname = os.path.dirname(template_file) for t in transactions])
template_file = os.path.basename(template_file)
template_loader = jinja2.FileSystemLoader(searchpath=dirname)
template_env = jinja2.Environment(loader=template_loader)
template = template_env.get_template(template_file)
else:
template_env = jinja2.Environment(loader=jinja2.BaseLoader)
template = template_env.from_string(LEDGER_TRANSACTION_TEMPLATE)
# Write transactions into virtual file. We could just create a string
# object, but that doesn't work as nicely with the Jinja API plus I think
# this approach is faster.
tf = tempfile.SpooledTemporaryFile(mode='w+')
for t in transactions:
tf.write(template.render(t=t))
tf.seek(0)
new_ledger_content = tf.read()
status = "no change" status = "no change"
if not os.path.isfile(ledger_file): if not os.path.isfile(ledger_file):
with open(ledger_file, 'w') as f: with open(ledger_file, 'w') as f:
f.write(new_ledger_content) f.write(new_content)
status = "new" status = "new"
else: else:
with open(ledger_file, 'r') as f: with open(ledger_file, 'r') as f:
old_ledger_content = f.read() old_content = f.read()
f.close() f.close()
if new_ledger_content != old_ledger_content: if old_content != content:
with open(ledger_file, 'w') as f: with open(ledger_file, 'w') as f:
f.write(new_ledger_content) f.write(content)
status = "update" status = "update"
logging.info(f"{csv_file:30} -> {ledger_file:30} | {status}") logging.info(f"{csv_file:30} -> {ledger_file:30} | {status}")
@@ -314,3 +295,4 @@ if __name__ == "__main__":
with open(config_file, 'r') as f: with open(config_file, 'r') as f:
config = Config(**json.load(f)) config = Config(**json.load(f))
main(config) main(config)