Hi,
That's what I'm currently using. It stores the original (pre-conversion) values as
meta information: under the `original` key when only one side is present, and
under `originalIn` / `originalOut` when both conversion amounts exist.
from dateutil.parser import parse
from io import StringIO
from beancount.ingest import importer
from beancount.core import data
from beancount.core import amount
from beancount.core.number import D
from beancount.ingest.importers.mixins import identifier
import csv
class Importer(identifier.IdentifyMixin,
               importer.ImporterProtocol):
    """An importer for Revolut CSV files.

    Pre-conversion amounts are preserved as transaction metadata: under
    the 'original' key when only one side of the exchange is present, or
    under 'originalIn'/'originalOut' when both are.
    """

    def __init__(self, regexps, account, currency):
        """Create a Revolut importer.

        Args:
          regexps: A regular expression (or list of them) matched against
            the file name to identify importable files.
          account: The Beancount account postings are booked to.
          currency: The currency of that account (e.g. 'CHF').
        """
        identifier.IdentifyMixin.__init__(self, matchers=[
            ('filename', regexps)
        ])
        self.account = account
        self.currency = currency

    def name(self):
        # Append the account so multiple instances of this importer
        # (one per currency) get distinct names.
        return super().name() + self.account

    def file_account(self, file):
        """Return the account against which the file should be filed."""
        return self.account

    def extract(self, file, existing_entries):
        """Extract Transaction directives from a Revolut CSV file.

        Args:
          file: A cache._FileMemo wrapper of the CSV file to parse.
          existing_entries: Existing directives (unused here).
        Returns:
          A list of data.Transaction objects, one per CSV row.
        """
        entries = []
        with StringIO(file.contents()) as csvfile:
            reader = csv.DictReader(csvfile, ['Date',
                'Reference', 'PaidOut', 'PaidIn', 'ExchangeOut', 'ExchangeIn',
                'Balance', 'Category', 'Notes'], delimiter=';',
                skipinitialspace=True)
            next(reader)  # Skip the header row.
            # Data rows start on line 2 of the file (line 1 is the header).
            for lineno, row in enumerate(reader, 2):
                metakv = {
                    'category': row['Category'].strip(),
                }
                # Preserve the pre-conversion amounts so the original
                # currency values are not lost after import.
                exchange_in = row['ExchangeIn'].strip()
                exchange_out = row['ExchangeOut'].strip()
                if exchange_in and exchange_out:
                    metakv['originalIn'] = exchange_in
                    metakv['originalOut'] = exchange_out
                elif exchange_in:
                    metakv['original'] = exchange_in
                elif exchange_out:
                    metakv['original'] = exchange_out
                # BUGFIX: record the actual source line instead of a
                # hard-coded 0, so errors can be traced to the CSV row.
                meta = data.new_metadata(file.name, lineno, metakv)
                entry = data.Transaction(
                    meta,
                    parse(row['Date'].strip()).date(),
                    '*',
                    '',
                    (row['Reference'].strip() + ' ' +
                     row['Notes'].strip()).strip(),
                    data.EMPTY_SET,
                    data.EMPTY_SET,
                    [
                        data.Posting(
                            self.account,
                            # Net movement: deposits positive, spends negative.
                            amount.Amount(D(row['PaidIn'].strip()) -
                                          D(row['PaidOut'].strip()),
                                          self.currency),
                            None, None, None, None),
                    ])
                entries.append(entry)
        return entries
--
You received this message because you are subscribed to the Google Groups "Beancount" group.
To unsubscribe from this group and stop receiving emails from it, send an email to beancount+...@googlegroups.com.
To view this discussion on the web visit https://groups.google.com/d/msgid/beancount/90613531-9c08-45a5-8dfc-7be50029c224%40googlegroups.com.
To unsubscribe from this group and stop receiving emails from it, send an email to bean...@googlegroups.com.
Hi,
Sure. I'm using smart_importer, so you can leave that part out if you don't use it:
import sys
from os import path
sys.path.insert(0, path.join(path.dirname(__file__)))
from importers.revolut import importer as revolutimp
from beancount.ingest import extract
from smart_importer import apply_hooks, PredictPostings
from smart_importer.detector import DuplicateDetector
# One Revolut importer per currency sub-account. The smart_importer hooks
# predict the counter-posting's account from existing entries and flag
# likely duplicates.
# NOTE: raw strings (r'...') are required here — '\.' in a plain string
# literal is an invalid escape sequence (SyntaxWarning on Python 3.12+).
_CURRENCIES = ['CHF', 'GBP', 'USD', 'EUR']

CONFIG = [
    apply_hooks(
        revolutimp.Importer(
            rf'/Revolut-{currency}.*\.csv',
            f'Assets:Patrick:Liquidity:Revolut:{currency}',
            currency),
        [PredictPostings(), DuplicateDetector()])
    for currency in _CURRENCIES
]

# Suppress the default header emitted before extracted entries.
extract.HEADER = ''
To unsubscribe from this group and stop receiving emails from it, send an email to beancount+...@googlegroups.com.
To view this discussion on the web visit https://groups.google.com/d/msgid/beancount/31a28fff-21f2-4005-a1a9-bfb7efd41696%40googlegroups.com.
To view this discussion on the web visit https://groups.google.com/d/msgid/beancount/31a28fff-21f2-4005-a1a9-bfb7efd41696%40googlegroups.com.
Sure, should be possible to do that. I'm using smart importer (https://github.com/beancount/smart_importer) for that.
It's assigning the right accounts based on existing data.
To unsubscribe from this group and stop receiving emails from it, send an email to beancount+...@googlegroups.com.
To view this discussion on the web visit https://groups.google.com/d/msgid/beancount/58ad6d89-1eb3-4ca3-8a8a-b3d96217e080%40googlegroups.com.