Changeset - a5b3dc146351
Brett Smith - 4 years ago 2020-07-01 13:50:20
brettcsmith@brettcsmith.org
accrual: AccrualPostings.make_consistent() groups accruals by date.

This accommodates cases of contracts without separate invoices,
where a series of payments is scheduled over time.

The dance we used to do of group-by-invoice, then make_consistent, was
already suspect. It was originally motivated by the consistency checks that
are now gone. Use this opportunity to clean up and just make make_consistent
a classmethod.
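
For illustration, a minimal usage sketch of the new calling convention
(hypothetical, not part of this changeset; it assumes `postings` is an
iterable of data.Posting objects already filtered to accrual accounts):

    # Old flow: group postings by invoice link, then call make_consistent()
    # on each group. New flow: hand all postings to the classmethod.
    groups = dict(AccrualPostings.make_consistent(postings))
    for key, group in groups.items():
        # Accrual groups are keyed by (date, entity, invoice, account)
        # tuples; leftover payments by (invoice, account).
        print(key, group.balance_at_cost())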
4 files changed with 213 insertions and 62 deletions:
conservancy_beancount/reports/accrual.py
 
#!/usr/bin/env python3
 
"""accrual-report - Status reports for accruals
 

	
 
accrual-report checks accruals (postings under Assets:Receivable and
 
Liabilities:Payable) for errors and metadata consistency, and reports any
 
problems on stderr. Then it writes a report about the status of those
 
accruals.
 

	
 
If you run it with no arguments, it will generate an aging report in ODS format.
 

	
 
Otherwise, the typical way to run it is to pass an RT ticket number or
 
invoice link as an argument, to report about accruals that match those
 
criteria::
 

	
 
    # Report all accruals associated with RT#1230:
 
    accrual-report 1230
 
    # Report all accruals with the invoice link rt:45/670.
 
    accrual-report 45/670
 
    # Report all accruals with the invoice link Invoice980.pdf.
 
    accrual-report Invoice980.pdf
 

	
 
By default, to stay fast, accrual-report only looks at unaudited books. You
 
can search further back in history by passing the ``--since`` argument. The
 
argument can be a fiscal year, or a negative number of fiscal years back
 
to search::
 

	
 
    # Search for accruals since 2016
 
    accrual-report --since 2016 [search terms …]
 
    # Search for accruals from the beginning of three fiscal years ago
 
    accrual-report --since -3 [search terms …]
 

	
 
If you want to further limit what accruals are reported, you can match on
 
other metadata by passing additional arguments in ``name=value`` format.
 
You can pass any number of search terms. For example::
 

	
 
    # Report accruals associated with RT#1230 and Jane Doe
 
    accrual-report 1230 entity=Doe-Jane
 

	
 
accrual-report will automatically decide what kind of report to generate
 
from the search terms you provide and the results they return. If you
 
searched on an RT ticket or invoice that returned a single outstanding
 
payable, it writes an outgoing approval report. If you searched on an RT ticket
 
or invoice that returned other results, it writes a balance
 
report. Otherwise, it writes an aging report. You can specify what report
 
type you want with the ``--report-type`` option::
 

	
 
    # Write an outgoing approval report for all outstanding payables for
 
    # Jane Doe, even if there's more than one
 
    accrual-report --report-type outgoing entity=Doe-Jane
 
    # Write an aging report for a single RT invoice (this can be helpful when
 
    # one invoice covers multiple parties)
 
    accrual-report --report-type aging 12/345
 
"""
 
# Copyright © 2020  Brett Smith
 
#
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU Affero General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU Affero General Public License for more details.
 
#
 
# You should have received a copy of the GNU Affero General Public License
 
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
 

	
 
import argparse
 
import collections
 
import datetime
 
import enum
 
import logging
 
import re
 
import sys
 

	
 
from pathlib import Path
 

	
 
from typing import (
 
    cast,
 
    Any,
 
    BinaryIO,
 
    Callable,
 
    Deque,
 
    Dict,
 
    Hashable,
 
    Iterable,
 
    Iterator,
 
    List,
 
    Mapping,
 
    NamedTuple,
 
    Optional,
 
    Sequence,
 
    Set,
 
    TextIO,
 
    Tuple,
 
    TypeVar,
 
    Union,
 
)
 
from ..beancount_types import (
 
    Entries,
 
    Error,
 
    Errors,
 
    MetaKey,
 
    MetaValue,
 
    Transaction,
 
)
 

	
 
import odf.style  # type:ignore[import]
 
import odf.table  # type:ignore[import]
 
import rt
 

	
 
from beancount.parser import printer as bc_printer
 

	
 
from . import core
 
from .. import books
 
from .. import cliutil
 
from .. import config as configmod
 
from .. import data
 
from .. import filters
 
from .. import rtutil
 

	
 
PROGNAME = 'accrual-report'
 

	
 
PostGroups = Mapping[Optional[str], 'AccrualPostings']
 
PostGroups = Mapping[Optional[Hashable], 'AccrualPostings']
 
T = TypeVar('T')
 

	
 
logger = logging.getLogger('conservancy_beancount.reports.accrual')
 

	
 
class Sentinel:
 
    pass
 

	
 

	
 
class Account(NamedTuple):
 
    name: str
 
    aging_thresholds: Sequence[int]
 

	
 

	
 
class AccrualAccount(enum.Enum):
 
    # Note the aging report uses the same order accounts are defined here.
 
    # See AgingODS.start_spreadsheet().
 
    RECEIVABLE = Account('Assets:Receivable', [365, 120, 90, 60])
 
    PAYABLE = Account('Liabilities:Payable', [365, 90, 60, 30])
 

	
 
    @classmethod
 
    def account_names(cls) -> Iterator[str]:
 
        return (acct.value.name for acct in cls)
 

	
 
    @classmethod
 
    def by_account(cls, name: data.Account) -> 'AccrualAccount':
 
        for account in cls:
 
            if name.is_under(account.value.name):
 
                return account
 
        raise ValueError(f"unrecognized account {name!r}")
 

	
 
    @classmethod
 
    def classify(cls, related: core.RelatedPostings) -> 'AccrualAccount':
 
        for account in cls:
 
            account_name = account.value.name
 
            if all(post.account.is_under(account_name) for post in related):
 
                return account
 
        raise ValueError("unrecognized account set in related postings")
 

	
 
    @property
 
    def normalize_amount(self) -> Callable[[T], T]:
 
        return core.normalize_amount_func(self.value.name)
 

	
 

	
 
class AccrualPostings(core.RelatedPostings):
 
    __slots__ = (
 
        'accrual_type',
 
        'date',
 
        'end_balance',
 
        'account',
 
        'entity',
 
        'invoice',
 
    )
 
    INCONSISTENT = Sentinel()
 

	
 
    def __init__(self,
 
                 source: Iterable[data.Posting]=(),
 
                 *,
 
                 _can_own: bool=False,
 
    ) -> None:
 
        super().__init__(source, _can_own=_can_own)
 
        self.account = self._single_item(post.account for post in self)
 
        if isinstance(self.account, Sentinel):
 
            self.accrual_type: Optional[AccrualAccount] = None
 
            norm_func: Callable[[T], T] = lambda x: x
 
            entity_pred: Callable[[data.Posting], bool] = bool
 
            accrual_pred: Callable[[data.Posting], bool] = bool
 
        else:
 
            self.accrual_type = AccrualAccount.by_account(self.account)
 
            norm_func = self.accrual_type.normalize_amount
 
            entity_pred = lambda post: norm_func(post.units).number > 0
 
        self.entity = self._single_item(self.entities(entity_pred))
 
            accrual_pred = lambda post: norm_func(post.units).number > 0
 
            if not any(accrual_pred(post) for post in self):
 
                accrual_pred = bool
 
        self.date = self._single_item(self.dates(accrual_pred))
 
        self.entity = self._single_item(self.entities(accrual_pred))
 
        self.invoice = self._single_item(self.first_meta_links('invoice', None))
 
        self.end_balance = norm_func(self.balance_at_cost())
 

	
 
    def _single_item(self, seq: Iterable[T]) -> Union[T, Sentinel]:
 
        items = iter(seq)
 
        try:
 
            item1 = next(items)
 
        except StopIteration:
 
            all_same = False
 
        else:
 
            all_same = all(item == item1 for item in items)
 
        return item1 if all_same else self.INCONSISTENT
 

	
 
    def dates(self, pred: Callable[[data.Posting], bool]=bool) -> Iterator[datetime.date]:
 
        return filters.iter_unique(post.meta.date for post in self if pred(post))
 

	
 
    def entities(self, pred: Callable[[data.Posting], bool]=bool) -> Iterator[MetaValue]:
 
        return filters.iter_unique(
 
            post.meta['entity']
 
            for post in self
 
            if pred(post) and 'entity' in post.meta
 
        )
 

	
 
    def make_consistent(self) -> Iterator[Tuple[str, 'AccrualPostings']]:
 
        account_ok = isinstance(self.account, str)
 
        entity_ok = isinstance(self.entity, str)
 
        # `'/' in self.invoice` is just our heuristic to ensure that the
 
        # invoice metadata is "unique enough," and not just a placeholder
 
        # value like "FIXME". It can be refined if needed.
 
        invoice_ok = isinstance(self.invoice, str) and '/' in self.invoice
 
        if account_ok and entity_ok and invoice_ok:
 
            # type ignore for <https://github.com/python/mypy/issues/6670>
 
            yield (self.invoice, self)  # type:ignore[misc]
 
            return
 
        groups = collections.defaultdict(list)
 
        for post in self:
 
            post_invoice = self.invoice if invoice_ok else (
 
                post.meta.get('invoice') or 'BlankInvoice'
 
            )
 
            post_entity = self.entity if entity_ok else (
 
                post.meta.get('entity') or 'BlankEntity'
 
            )
 
            groups[f'{post.account} {post_invoice} {post_entity}'].append(post)
 
        type_self = type(self)
 
        for group_key, posts in groups.items():
 
            yield group_key, type_self(posts, _can_own=True)
 
    @classmethod
 
    def make_consistent(cls,
 
                        postings: Iterable[data.Posting],
 
    ) -> Iterator[Tuple[Hashable, 'AccrualPostings']]:
 
        accruals: Dict[Tuple[str, ...], List[data.Posting]] = collections.defaultdict(list)
 
        payments: Dict[Tuple[str, ...], Deque[data.Posting]] = collections.defaultdict(Deque)
 
        key: Tuple[str, ...]
 
        for post in postings:
 
            norm_func = core.normalize_amount_func(post.account)
 
            invoice = str(post.meta.get('invoice', 'BlankInvoice'))
 
            if norm_func(post.units.number) >= 0:
 
                entity = str(post.meta.get('entity', 'BlankEntity'))
 
                key = (post.meta.date.isoformat(), entity, invoice, post.account)
 
                accruals[key].append(post)
 
            else:
 
                key = (invoice, post.account)
 
                payments[key].append(post)
 

	
 
        for key, acc_posts in accruals.items():
 
            pay_posts = payments[key[2:]]
 
            if not pay_posts:
 
                continue
 
            norm_func = core.normalize_amount_func(key[-1])
 
            balance = norm_func(core.MutableBalance(post.at_cost() for post in acc_posts))
 
            while pay_posts and not balance.le_zero():
 
                pay_post = pay_posts.popleft()
 
                acc_posts.append(pay_post)
 
                balance += norm_func(pay_post.at_cost())
 
            if balance.le_zero() and not balance.is_zero():
 
                # pay_post causes the accrual to be overpaid. Split it into two
 
                # synthesized postings: one that causes the accrual to be
 
                # exactly zero, and one with the remainder back in payments.
 
                post_cost = pay_post.at_cost()
 
                # Applying norm_func() again undoes the normalization done in
 
                # the while loop when this amount was added to the balance.
 
                overpayment = norm_func(balance[post_cost.currency])
 
                amt_to_zero = post_cost._replace(number=post_cost.number - overpayment.number)
 
                acc_posts[-1] = pay_post._replace(units=amt_to_zero, cost=None, price=None)
 
                pay_posts.appendleft(pay_post._replace(units=overpayment, cost=None, price=None))
 
            acc_posts.sort(key=lambda post: post.meta.date)
 

	
 
        for key, acc_posts in accruals.items():
 
            yield key, cls(acc_posts, _can_own=True)
 
        for key, pay_posts in payments.items():
 
            if pay_posts:
 
                yield key, cls(pay_posts, _can_own=True)
 

	
 
    def is_paid(self, default: Optional[bool]=None) -> Optional[bool]:
 
        if self.accrual_type is None:
 
            return default
 
        else:
 
            return self.end_balance.le_zero()
 

	
 
    def is_zero(self, default: Optional[bool]=None) -> Optional[bool]:
 
        if self.accrual_type is None:
 
            return default
 
        else:
 
            return self.end_balance.is_zero()
 

	
 
    def since_last_nonzero(self) -> 'AccrualPostings':
 
        for index, (post, balance) in enumerate(self.iter_with_balance()):
 
            if balance.is_zero():
 
                start_index = index
 
        try:
 
            empty = start_index == index
 
        except NameError:
 
            empty = True
 
        return self if empty else self[start_index + 1:]
 

	
 
    @property
 
    def rt_id(self) -> Union[str, None, Sentinel]:
 
        return self._single_item(self.first_meta_links('rt-id', None))
 

	
 

	
 
class BaseReport:
 
    def __init__(self, out_file: TextIO) -> None:
 
        self.out_file = out_file
 
        self.logger = logger.getChild(type(self).__name__)
 

	
 
    def _report(self, posts: AccrualPostings, index: int) -> Iterable[str]:
 
        raise NotImplementedError("BaseReport._report")
 

	
 
    def run(self, groups: PostGroups) -> None:
 
        for index, invoice in enumerate(groups):
 
            for line in self._report(groups[invoice], index):
 
                print(line, file=self.out_file)
 

	
 

	
 
class AgingODS(core.BaseODS[AccrualPostings, Optional[data.Account]]):
 
    DOC_COLUMNS = [
 
        'rt-id',
 
        'invoice',
 
        'approval',
 
        'contract',
 
        'purchase-order',
 
    ]
 
    COLUMNS = [
 
        'Date',
 
        data.Metadata.human_name('entity'),
 
        'Invoice Amount',
 
        'Booked Amount',
 
        data.Metadata.human_name('project'),
 
        *(data.Metadata.human_name(key) for key in DOC_COLUMNS),
 
    ]
 
    COL_COUNT = len(COLUMNS)
 

	
 
    def __init__(self,
 
                 rt_wrapper: rtutil.RT,
 
                 date: datetime.date,
 
                 logger: logging.Logger,
 
    ) -> None:
 
        super().__init__(rt_wrapper)
 
        self.date = date
 
        self.logger = logger
 

	
 
    def section_key(self, row: AccrualPostings) -> Optional[data.Account]:
 
        if isinstance(row.account, str):
 
            return row.account
 
        else:
 
            return None
 

	
 
    def start_spreadsheet(self) -> None:
 
        for accrual_type in AccrualAccount:
 
            self.use_sheet(accrual_type.name.title())
 
            for index in range(self.COL_COUNT):
 
                if index == 0:
 
                    style: Union[str, odf.style.Style] = ''
 
                elif index < 6:
 
                    style = self.column_style(1.2)
 
                else:
 
                    style = self.column_style(1.5)
 
                self.sheet.addElement(odf.table.TableColumn(stylename=style))
 
            self.add_row(*(
 
                self.string_cell(name, stylename=self.style_bold)
 
                for name in self.COLUMNS
 
            ))
 
            self.lock_first_row()
 

	
 
    def start_section(self, key: Optional[data.Account]) -> None:
 
        if key is None:
 
            return
 
        self.age_thresholds = list(AccrualAccount.by_account(key).value.aging_thresholds)
 
        self.age_balances = [core.MutableBalance() for _ in self.age_thresholds]
 
        accrual_date = self.date - datetime.timedelta(days=self.age_thresholds[-1])
 
        acct_parts = key.slice_parts()
 
        self.use_sheet(acct_parts[1])
 
        self.add_row()
 
        self.add_row(self.string_cell(
 
            f"{' '.join(acct_parts[2:])} {acct_parts[1]} Aging Report"
 
            f" Accrued by {accrual_date.isoformat()} Unpaid by {self.date.isoformat()}",
 
            stylename=self.merge_styles(self.style_bold, self.style_centertext),
 
            numbercolumnsspanned=self.COL_COUNT,
 
        ))
 
        self.add_row()
 

	
 
    def end_section(self, key: Optional[data.Account]) -> None:
 
        if key is None:
 
            return
 
        total_balance = core.MutableBalance()
 
        text_style = self.merge_styles(self.style_bold, self.style_endtext)
 
        text_span = 4
 
        last_age_text: Optional[str] = None
 
        self.add_row()
 
        for threshold, balance in zip(self.age_thresholds, self.age_balances):
 
            years, days = divmod(threshold, 365)
 
            years_text = f"{years} {'Year' if years == 1 else 'Years'}"
 
            days_text = f"{days} Days"
 
            if years and days:
 
                age_text = f"{years_text} {days_text}"
 
            elif years:
 
                age_text = years_text
 
            else:
 
                age_text = days_text
 
            if last_age_text is None:
 
                age_range = f"Over {age_text}"
 
            else:
 
                age_range = f"{age_text}–{last_age_text}"
 
            self.add_row(
 
                self.string_cell(
 
                    f"Total Aged {age_range}: ",
 
                    stylename=text_style,
 
                    numbercolumnsspanned=text_span,
 
                ),
 
                *(odf.table.TableCell() for _ in range(1, text_span)),
 
                self.balance_cell(balance),
 
            )
 
            last_age_text = age_text
 
            total_balance += balance
 
        self.add_row(
 
            self.string_cell(
 
                "Total Unpaid: ",
 
                stylename=text_style,
 
                numbercolumnsspanned=text_span,
 
            ),
 
            *(odf.table.TableCell() for _ in range(1, text_span)),
 
            self.balance_cell(total_balance),
 
        )
 

	
 
    def write_row(self, row: AccrualPostings) -> None:
 
        age = (self.date - row[0].meta.date).days
 
        if row.end_balance.ge_zero():
 
            for index, threshold in enumerate(self.age_thresholds):
 
                if age >= threshold:
 
                    self.age_balances[index] += row.end_balance
 
                    break
 
            else:
 
                return
 
        raw_balance = row.balance()
 
        if row.accrual_type is not None:
 
            raw_balance = row.accrual_type.normalize_amount(raw_balance)
 
        if raw_balance == row.end_balance:
 
            amount_cell = odf.table.TableCell()
 
        else:
 
            amount_cell = self.balance_cell(raw_balance)
 
        projects = {post.meta.get('project') or None for post in row}
 
        projects.discard(None)
 
        self.add_row(
 
            self.date_cell(row[0].meta.date),
 
            self.multiline_cell(row.entities()),
 
            amount_cell,
 
            self.balance_cell(row.end_balance),
 
            self.multiline_cell(sorted(projects)),
 
            *(self.meta_links_cell(row.all_meta_links(key))
 
              for key in self.DOC_COLUMNS),
 
        )
 

	
 

	
 
class AgingReport(BaseReport):
 
    def __init__(self,
 
                 rt_wrapper: rtutil.RT,
 
                 out_file: BinaryIO,
 
                 date: Optional[datetime.date]=None,
 
    ) -> None:
 
        if date is None:
 
            date = datetime.date.today()
 
        self.out_bin = out_file
 
        self.logger = logger.getChild(type(self).__name__)
 
        self.ods = AgingODS(rt_wrapper, date, self.logger)
 

	
 
    def run(self, groups: PostGroups) -> None:
 
        rows: List[AccrualPostings] = []
 
        for group in groups.values():
 
            if group.is_zero():
 
                # Cheap optimization: don't slice and dice groups we're not
 
                # going to report anyway.
 
                continue
 
            elif group.accrual_type is None:
 
                group = group.since_last_nonzero()
 
            else:
 
                # Filter out new accruals after the report date.
 
                # e.g., cover the case where the same invoice has multiple
 
                # postings over time, and we don't want to report too-recent
 
                # ones.
 
                cutoff_date = self.ods.date - datetime.timedelta(
 
                    days=group.accrual_type.value.aging_thresholds[-1],
 
                )
 
                group = AccrualPostings(
 
                    post for post in group.since_last_nonzero()
 
                    if post.meta.date <= cutoff_date
 
                    or group.accrual_type.normalize_amount(post.units.number) < 0
 
                )
 
            if group and not group.is_zero():
 
                rows.append(group)
 
        rows.sort(key=lambda related: (
 
            related.account,
 
            related[0].meta.date,
 
            ('\0'.join(related.entities())
 
             if related.entity is related.INCONSISTENT
 
             else related.entity),
 
        ))
 
        self.ods.write(rows)
 
        self.ods.save_file(self.out_bin)
 

	
 

	
 
class BalanceReport(BaseReport):
 
    def _report(self, posts: AccrualPostings, index: int) -> Iterable[str]:
 
        posts = posts.since_last_nonzero()
 
        date_s = posts[0].meta.date.strftime('%Y-%m-%d')
 
        balance_s = posts.balance_at_cost().format(zero="Zero balance")
 
        if index:
 
            yield ""
 
        yield f"{posts.invoice}:"
 
        yield f"  {balance_s} outstanding since {date_s}"
 

	
 

	
 
class OutgoingReport(BaseReport):
 
    PAYMENT_METHODS = {
 
        'ach': 'ACH',
 
        'check': 'Check',
 
        'creditcard': 'Credit Card',
 
        'debitcard': 'Debit Card',
 
        'fxwire': 'International Wire',
 
        'paypal': 'PayPal',
 
        'uswire': 'Domestic Wire',
 
        'vendorportal': 'Vendor Portal',
 
    }
 
    PAYMENT_METHOD_RE = re.compile(rf'^([a-z]{{3}})\s+({"|".join(PAYMENT_METHODS)})$')
 

	
 
    def __init__(self, rt_wrapper: rtutil.RT, out_file: TextIO) -> None:
 
        super().__init__(out_file)
 
        self.rt_wrapper = rt_wrapper
 
        self.rt_client = rt_wrapper.rt
 

	
 
    def _primary_rt_id(self, posts: AccrualPostings) -> rtutil.TicketAttachmentIds:
 
        rt_id = posts.rt_id
 
        if rt_id is None:
 
            raise ValueError("no rt-id links found")
 
        elif isinstance(rt_id, Sentinel):
 
            raise ValueError("multiple rt-id links found")
 
        parsed = rtutil.RT.parse(rt_id)
 
        if parsed is None:
 
            raise ValueError("rt-id is not a valid RT reference")
 
        else:
 
            return parsed
 

	
 
    def _report(self, posts: AccrualPostings, index: int) -> Iterable[str]:
 
        posts = posts.since_last_nonzero()
 
        try:
 
            ticket_id, _ = self._primary_rt_id(posts)
 
            ticket = self.rt_client.get_ticket(ticket_id)
 
            # Note we only use this when ticket is None.
 
            errmsg = f"ticket {ticket_id} not found"
 
        except (ValueError, rt.RtError) as error:
 
            ticket = None
 
            errmsg = error.args[0]
 
        if ticket is None:
 
            self.logger.error(
 
                "can't generate outgoings report for %s because no RT ticket available: %s",
 
                posts.invoice, errmsg,
 
            )
 
            return
 

	
 
        try:
 
            rt_requestor = self.rt_client.get_user(ticket['Requestors'][0])
 
        except (IndexError, rt.RtError):
 
            rt_requestor = None
 
        if rt_requestor is None:
 
            requestor = ''
 
            requestor_name = ''
 
        else:
 
            requestor_name = (
 
                rt_requestor.get('RealName')
 
                or ticket.get('CF.{payment-to}')
 
                or ''
 
            )
 
            requestor = f'{requestor_name} <{rt_requestor["EmailAddress"]}>'.strip()
 

	
 
        balance_s = posts.end_balance.format(None)
 
        raw_balance = -posts.balance()
 
        payment_amount = raw_balance.format('¤¤ #,##0.00')
 
        if raw_balance != posts.end_balance:
 
            payment_amount += f' ({balance_s})'
 
            balance_s = f'{raw_balance} ({balance_s})'
 

	
 
        payment_to = ticket.get('CF.{payment-to}') or requestor_name
 
        contract_links = list(posts.all_meta_links('contract'))
 
        if contract_links:
 
            contract_s = ' , '.join(self.rt_wrapper.iter_urls(
 
                contract_links, missing_fmt='<BROKEN RT LINK: {}>',
 
            ))
 
        else:
 
            contract_s = "NO CONTRACT GOVERNS THIS TRANSACTION"
 
        projects = [v for v in posts.meta_values('project')
 
                    if isinstance(v, str)]
 

	
 
        yield "PAYMENT FOR APPROVAL:"
 
        yield f"REQUESTOR: {requestor}"
 
        yield f"PAYMENT TO: {payment_to}"
 
        yield f"TOTAL TO PAY: {balance_s}"
 
        yield f"AGREEMENT: {contract_s}"
 
        yield f"PROJECT: {', '.join(projects)}"
 
        yield "\nBEANCOUNT ENTRIES:\n"
 

	
 
        last_txn: Optional[Transaction] = None
 
        for post in posts:
 
            txn = post.meta.txn
 
            if txn is not last_txn:
 
                last_txn = txn
 
                txn = self.rt_wrapper.txn_with_urls(txn, '{}')
 
                # Suppress payment-method metadata from the report.
 
                txn.meta.pop('payment-method', None)
 
                for txn_post in txn.postings:
 
                    if txn_post.meta:
 
                        txn_post.meta.pop('payment-method', None)
 
                yield bc_printer.format_entry(txn)
 

	
 
        cf_targets = {
 
            'payment-amount': payment_amount,
 
            'payment-to': payment_to,
 
        }
 
        payment_methods = filters.iter_unique(
 
            post.meta['payment-method'].lower()
 
            for post in posts
 
            if isinstance(post.meta.get('payment-method'), str)
 
        )
 
        payment_method: Optional[str] = next(payment_methods, None)
 
        if payment_method is None:
 
            payment_method_count = "no"
 
        elif next(payment_methods, None) is None:
 
            pass
 
        else:
 
            payment_method_count = "multiple"
 
            payment_method = None
 
        if payment_method is None:
 
            self.logger.warning(
 
                "cannot set payment-method for rt:%s: %s metadata values found",
 
                ticket_id, payment_method_count,
 
            )
 
        else:
 
            match = self.PAYMENT_METHOD_RE.fullmatch(payment_method)
 
            if match is None:
 
                self.logger.warning(
 
                    "cannot set payment-method for rt:%s: invalid value %r",
 
                    ticket_id, payment_method,
 
                )
 
            else:
 
                cf_targets['payment-method'] = '{} {}'.format(
 
                    match.group(1).upper(),
 
                    self.PAYMENT_METHODS[match.group(2)],
 
                )
 

	
 
        cf_updates = {
 
            f'CF_{key}': value
 
            for key, value in cf_targets.items()
 
            if ticket.get(f'CF.{{{key}}}') != value
 
        }
 
        if cf_updates:
 
            try:
 
                ok = self.rt_client.edit_ticket(ticket_id, **cf_updates)
 
            except rt.RtError:
 
                self.logger.debug("RT exception on edit_ticket", exc_info=True)
 
                ok = False
 
            if not ok:
 
                self.logger.warning("failed to set custom fields for rt:%s", ticket_id)
 

	
 

	
 
class ReportType(enum.Enum):
 
    AGING = AgingReport
 
    BALANCE = BalanceReport
 
    OUTGOING = OutgoingReport
 
    AGE = AGING
 
    BAL = BALANCE
 
    OUT = OUTGOING
 
    OUTGOINGS = OUTGOING
 

	
 
    @classmethod
 
    def by_name(cls, name: str) -> 'ReportType':
 
        try:
 
            return cls[name.upper()]
 
        except KeyError:
 
            raise ValueError(f"unknown report type {name!r}") from None
 

	
 

	
 
class ReturnFlag(enum.IntFlag):
 
    LOAD_ERRORS = 1
 
    # 2 was used in the past; it can probably be reclaimed.
 
    REPORT_ERRORS = 4
 
    NOTHING_TO_REPORT = 8
 

	
 

	
 
def filter_search(postings: Iterable[data.Posting],
 
                  search_terms: Iterable[cliutil.SearchTerm],
 
) -> Iterable[data.Posting]:
 
    accounts = tuple(AccrualAccount.account_names())
 
    postings = (post for post in postings if post.account.is_under(*accounts))
 
    for query in search_terms:
 
        postings = query.filter_postings(postings)
 
    return postings
 

	
 
def parse_arguments(arglist: Optional[Sequence[str]]=None) -> argparse.Namespace:
 
    parser = argparse.ArgumentParser(prog=PROGNAME)
 
    cliutil.add_version_argument(parser)
 
    parser.add_argument(
 
        '--report-type', '-t',
 
        metavar='NAME',
 
        type=ReportType.by_name,
 
        help="""The type of report to generate, one of `aging`, `balance`, or
 
`outgoing`. If not specified, the default is `aging` when no search terms are
 
given, `outgoing` for search terms that return a single outstanding payable,
 
and `balance` any other time.
 
""")
 
    parser.add_argument(
 
        '--since',
 
        metavar='YEAR',
 
        type=int,
 
        default=0,
 
        help="""How far back to search the books for related transactions.
 
You can either specify a fiscal year, or a negative offset from the current
 
fiscal year, to start loading entries from. The default is to load the current,
 
unaudited books.
 
""")
 
    parser.add_argument(
 
        '--output-file', '-O',
 
        metavar='PATH',
 
        type=Path,
 
        help="""Write the report to this file, or stdout when PATH is `-`.
 
The default is stdout for the balance and outgoing reports, and a generated
 
filename for other reports.
 
""")
 
    cliutil.add_loglevel_argument(parser)
 
    parser.add_argument(
 
        'search_terms',
 
        metavar='FILTER',
 
        type=cliutil.SearchTerm.arg_parser('invoice', 'rt-id'),
 
        nargs=argparse.ZERO_OR_MORE,
 
        help="""Report on accruals that match this criteria. The format is
 
NAME=TERM. TERM is a link or word that must exist in a posting's NAME
 
metadata to match. A single ticket number is a shortcut for
 
`rt-id=rt:NUMBER`. Any other link, including an RT attachment link in
 
`TIK/ATT` format, is a shortcut for `invoice=LINK`.
 
""")
 
    args = parser.parse_args(arglist)
 
    if args.report_type is None and not any(
 
            term.meta_key == 'invoice' or term.meta_key == 'rt-id'
 
            for term in args.search_terms
 
    ):
 
        args.report_type = ReportType.AGING
 
    return args
 

	
 
def main(arglist: Optional[Sequence[str]]=None,
 
         stdout: TextIO=sys.stdout,
 
         stderr: TextIO=sys.stderr,
 
         config: Optional[configmod.Config]=None,
 
) -> int:
 
    args = parse_arguments(arglist)
 
    cliutil.set_loglevel(logger, args.loglevel)
 
    if config is None:
 
        config = configmod.Config()
 
        config.load_file()
 

	
 
    returncode = 0
 
    books_loader = config.books_loader()
 
    if books_loader is None:
 
        entries, load_errors, _ = books.Loader.load_none(config.config_file_path())
 
    else:
 
        load_since = None if args.report_type == ReportType.AGING else args.since
 
        entries, load_errors, _ = books_loader.load_all(load_since)
 
    filters.remove_opening_balance_txn(entries)
 
    for error in load_errors:
 
        bc_printer.print_error(error, file=stderr)
 
        returncode |= ReturnFlag.LOAD_ERRORS
 

	
 
    postings = list(filter_search(
 
        data.Posting.from_entries(entries), args.search_terms,
 
    ))
 
    if not postings:
 
        logger.warning("no matching entries found to report")
 
        returncode |= ReturnFlag.NOTHING_TO_REPORT
 
    # groups is a mapping of metadata value strings to AccrualPostings.
 
    # The keys are basically arbitrary, the report classes don't rely on them,
 
    # but they do help symbolize what's being grouped.
 
    # For the outgoing approval report, groups maps rt-id link strings to
 
    # associated accruals.
 
    # For all other reports, groups starts by grouping postings together by
 
    # invoice link string, then uses AccrualReport.make_consistent() to split
 
    # out groups that need it.
 
    # For all other reports, groups comes from AccrualPostings.make_consistent().
 
    groups: PostGroups
 
    if args.report_type is None or args.report_type is ReportType.OUTGOING:
 
        groups = dict(AccrualPostings.group_by_first_meta_link(postings, 'rt-id'))
 
        if (args.report_type is None
 
            and len(groups) == 1
 
            and all(group.accrual_type is AccrualAccount.PAYABLE
 
                    and not group.is_paid()
 
                    and key  # Make sure we have a usable rt-id
 
                    for key, group in groups.items())
 
        ):
 
            args.report_type = ReportType.OUTGOING
 
    if args.report_type is not ReportType.OUTGOING:
 
        groups = {
 
            key: group
 
            for _, source in AccrualPostings.group_by_first_meta_link(postings, 'invoice')
 
            for key, group in source.make_consistent()
 
        }
 
        groups = dict(AccrualPostings.make_consistent(postings))
 
    if args.report_type is not ReportType.AGING:
 
        groups = {
 
            key: posts for key, posts in groups.items() if not posts.is_paid()
 
        } or groups
 
    del postings
 

	
 
    report: Optional[BaseReport] = None
 
    output_path: Optional[Path] = None
 
    if args.report_type is ReportType.AGING:
 
        rt_wrapper = config.rt_wrapper()
 
        if rt_wrapper is None:
 
            logger.error("unable to generate aging report: RT client is required")
 
        else:
 
            now = datetime.datetime.now()
 
            if args.output_file is None:
 
                out_dir_path = config.repository_path() or Path()
 
                args.output_file = out_dir_path / now.strftime('AgingReport_%Y-%m-%d_%H:%M.ods')
 
                logger.info("Writing report to %s", args.output_file)
 
            out_bin = cliutil.bytes_output(args.output_file, stdout)
 
            report = AgingReport(rt_wrapper, out_bin)
 
    elif args.report_type is ReportType.OUTGOING:
 
        rt_wrapper = config.rt_wrapper()
 
        if rt_wrapper is None:
 
            logger.error("unable to generate outgoing report: RT client is required")
 
        else:
 
            out_file = cliutil.text_output(args.output_file, stdout)
 
            report = OutgoingReport(rt_wrapper, out_file)
 
    else:
 
        out_file = cliutil.text_output(args.output_file, stdout)
 
        report = BalanceReport(out_file)
 

	
 
    if report is None:
 
        returncode |= ReturnFlag.REPORT_ERRORS
 
    else:
 
        report.run(groups)
 
    return 0 if returncode == 0 else 16 + returncode
 

	
 
entry_point = cliutil.make_entry_point(__name__, PROGNAME)
 

	
 
if __name__ == '__main__':
 
    exit(entry_point())
conservancy_beancount/reports/core.py
 
"""core.py - Common data classes for reporting functionality"""
 
# Copyright © 2020  Brett Smith
 
#
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU Affero General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU Affero General Public License for more details.
 
#
 
# You should have received a copy of the GNU Affero General Public License
 
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
 

	
 
import abc
 
import collections
 
import copy
 
import datetime
 
import enum
 
import itertools
 
import operator
 
import re
 
import urllib.parse as urlparse
 

	
 
import babel.core  # type:ignore[import]
 
import babel.numbers  # type:ignore[import]
 

	
 
import odf.config  # type:ignore[import]
 
import odf.element  # type:ignore[import]
 
import odf.number  # type:ignore[import]
 
import odf.opendocument  # type:ignore[import]
 
import odf.style  # type:ignore[import]
 
import odf.table  # type:ignore[import]
 
import odf.text  # type:ignore[import]
 

	
 
from decimal import Decimal
 
from pathlib import Path
 

	
 
from beancount.core import amount as bc_amount
 

	
 
from .. import data
 
from .. import filters
 
from .. import rtutil
 

	
 
from typing import (
 
    cast,
 
    overload,
 
    Any,
 
    BinaryIO,
 
    Callable,
 
    Dict,
 
    Generic,
 
    Iterable,
 
    Iterator,
 
    List,
 
    Mapping,
 
    MutableMapping,
 
    Optional,
 
    Sequence,
 
    Set,
 
    Tuple,
 
    Type,
 
    TypeVar,
 
    Union,
 
)
 
from ..beancount_types import (
 
    MetaKey,
 
    MetaValue,
 
)
 

	
 
OPENING_BALANCE_NAME = "OPENING BALANCE"
 
ENDING_BALANCE_NAME = "ENDING BALANCE"
 

	
 
DecimalCompat = data.DecimalCompat
 
BalanceType = TypeVar('BalanceType', bound='Balance')
 
ElementType = Callable[..., odf.element.Element]
 
LinkType = Union[str, Tuple[str, Optional[str]]]
 
RelatedType = TypeVar('RelatedType', bound='RelatedPostings')
 
RT = TypeVar('RT', bound=Sequence)
 
ST = TypeVar('ST')
 
T = TypeVar('T')
 

	
 
class Balance(Mapping[str, data.Amount]):
 
    """A collection of amounts mapped by currency
 

	
 
    Each key is a Beancount currency string, and each value represents the
 
    balance in that currency.
 
    """
 
    __slots__ = ('_currency_map', 'tolerance')
 
    TOLERANCE = Decimal('0.01')
 

	
 
    def __init__(self,
 
                 source: Iterable[data.Amount]=(),
 
                 tolerance: Optional[Decimal]=None,
 
    ) -> None:
 
        if tolerance is None:
 
            tolerance = self.TOLERANCE
 
        self.tolerance = tolerance
 
        self._currency_map: Dict[str, data.Amount] = {}
 
        for amount in source:
 
            self._add_amount(self._currency_map, amount)
 

	
 
    def _add_amount(self,
 
                    currency_map: MutableMapping[str, data.Amount],
 
                    amount: data.Amount,
 
    ) -> None:
 
        code = amount.currency
 
        try:
 
            current_number = currency_map[code].number
 
        except KeyError:
 
            current_number = Decimal(0)
 
        currency_map[code] = data.Amount(current_number + amount.number, code)
 

	
 
    def _add_other(self,
 
                   currency_map: MutableMapping[str, data.Amount],
 
                   other: Union[data.Amount, 'Balance'],
 
    ) -> None:
 
        if isinstance(other, Balance):
 
            for amount in other.values():
 
                self._add_amount(currency_map, amount)
 
        else:
 
            self._add_amount(currency_map, other)
 

	
 
    def __repr__(self) -> str:
 
        values = [repr(amt) for amt in self.values()]
 
        return f"{type(self).__name__}({values!r})"
 

	
 
    def __str__(self) -> str:
 
        return self.format()
 

	
 
    def __abs__(self: BalanceType) -> BalanceType:
 
        return type(self)(bc_amount.abs(amt) for amt in self.values())
 

	
 
    def __add__(self: BalanceType, other: Union[data.Amount, 'Balance']) -> BalanceType:
 
        retval_map = self._currency_map.copy()
 
        self._add_other(retval_map, other)
 
        return type(self)(retval_map.values())
 

	
 
    def __sub__(self: BalanceType, other: Union[data.Amount, 'Balance']) -> BalanceType:
 
        return self.__add__(-other)
 

	
 
    def __eq__(self, other: Any) -> bool:
 
        if isinstance(other, Balance):
 
            clean_self = self.clean_copy()
 
            clean_other = other.clean_copy()
 
            return len(clean_self) == len(clean_other) and all(
 
                clean_self[key] == clean_other.get(key) for key in clean_self
 
            )
 
        else:
 
            return super().__eq__(other)
 

	
 
    def __neg__(self: BalanceType) -> BalanceType:
 
        return type(self)(-amt for amt in self.values())
 

	
 
    def __pos__(self: BalanceType) -> BalanceType:
 
        return self
 

	
 
    def __getitem__(self, key: str) -> data.Amount:
 
        return self._currency_map[key]
 

	
 
    def __iter__(self) -> Iterator[str]:
 
        return iter(self._currency_map)
 

	
 
    def __len__(self) -> int:
 
        return len(self._currency_map)
 

	
 
    def _all_amounts(self,
 
                     op_func: Callable[[DecimalCompat, DecimalCompat], bool],
 
                     operand: DecimalCompat,
 
    ) -> bool:
 
        return all(op_func(amt.number, operand) for amt in self.values())
 

	
 
    def copy(self: BalanceType, tolerance: Optional[Decimal]=None) -> BalanceType:
 
        if tolerance is None:
 
            tolerance = self.tolerance
 
        return type(self)(self.values(), tolerance)
 

	
 
    def clean_copy(self: BalanceType, tolerance: Optional[Decimal]=None) -> BalanceType:
 
        if tolerance is None:
 
            tolerance = self.tolerance
 
        return type(self)(
 
            (amount for amount in self.values() if abs(amount.number) >= tolerance),
 
            tolerance,
 
        )
 

	
 
    @staticmethod
 
    def within_tolerance(dec: DecimalCompat, tolerance: DecimalCompat) -> bool:
 
        dec = cast(Decimal, dec)
 
        return abs(dec) < tolerance
 

	
 
    def eq_zero(self) -> bool:
 
        """Returns true if all amounts in the balance == 0, within tolerance."""
 
        return self._all_amounts(self.within_tolerance, self.tolerance)
 

	
 
    is_zero = eq_zero
 

	
 
    def ge_zero(self) -> bool:
 
        """Returns true if all amounts in the balance >= 0, within tolerance."""
 
        op_func = operator.gt if self.tolerance else operator.ge
 
        return self._all_amounts(op_func, -self.tolerance)
 

	
 
    def le_zero(self) -> bool:
 
        """Returns true if all amounts in the balance <= 0, within tolerance."""
 
        op_func = operator.lt if self.tolerance else operator.le
 
        return self._all_amounts(op_func, self.tolerance)
 

	
 
    def format(self,
 
               fmt: Optional[str]='#,##0.00 ¤¤',
 
               sep: str=', ',
 
               empty: str="Zero balance",
 
               zero: Optional[str]=None,
 
               tolerance: Optional[Decimal]=None,
 
    ) -> str:
 
        """Formats the balance as a string with the given parameters
 

	
 
        If the balance is completely empty, return ``empty``.
 
        If the balance is zero (within tolerance) and ``zero`` is specified,
 
        return ``zero``.
 
        Otherwise, return a string with each amount in the balance formatted
 
        as ``fmt``, separated by ``sep``.
 

	
 
        If you set ``fmt`` to None, amounts will be formatted according to the
 
        user's locale. The default format is Beancount's input format.
 
        """
 
        balance = self.clean_copy(tolerance) or self.copy(tolerance)
 
        if not balance:
 
            return empty
 
        elif zero is not None and balance.is_zero():
 
            return zero
 
        else:
 
            amounts = list(balance.values())
 
            amounts.sort(key=lambda amt: (-abs(amt.number), amt.currency))
 
            return sep.join(
 
                babel.numbers.format_currency(
 
                    amt.number, amt.currency, fmt, format_type='accounting',
 
                ) for amt in amounts
 
            )
 

	
 

	
 
class MutableBalance(Balance):
 
    __slots__ = ()
 

	
 
    def __iadd__(self: BalanceType, other: Union[data.Amount, Balance]) -> BalanceType:
 
        self._add_other(self._currency_map, other)
 
        return self
 

	
 
    def __isub__(self: BalanceType, other: Union[data.Amount, Balance]) -> BalanceType:
 
        self._add_other(self._currency_map, -other)
 
        return self
 

	
 

	
 
class RelatedPostings(Sequence[data.Posting]):
 
    """Collect and query related postings
 

	
 
    This class provides common functionality for collecting related postings
 
    and running queries on them: iterating over them, tallying their balance,
 
    etc.
 

	
 
    This class doesn't know anything about how the postings are related. That's
 
    entirely up to the caller.
 

	
 
    A common pattern is to use this class with collections.defaultdict
 
    to organize postings based on some key. See the group_by_meta classmethod
 
    for an example.
 
    """
 
    __slots__ = ('_postings',)
 

	
 
    def __init__(self,
 
                 source: Iterable[data.Posting]=(),
 
                 *,
 
                 _can_own: bool=False,
 
    ) -> None:
 
        self._postings: List[data.Posting]
 
        if _can_own and isinstance(source, list):
 
        self._postings: Sequence[data.Posting]
 
        if _can_own and isinstance(source, Sequence):
 
            self._postings = source
 
        else:
 
            self._postings = list(source)
 

	
 
    @classmethod
 
    def _group_by(cls: Type[RelatedType],
 
                  postings: Iterable[data.Posting],
 
                  key: Callable[[data.Posting], T],
 
    ) -> Iterator[Tuple[T, RelatedType]]:
 
        mapping: Dict[T, List[data.Posting]] = collections.defaultdict(list)
 
        for post in postings:
 
            mapping[key(post)].append(post)
 
        for value, posts in mapping.items():
 
            yield value, cls(posts, _can_own=True)
 

	
 
    @classmethod
 
    def group_by_account(cls: Type[RelatedType],
 
                         postings: Iterable[data.Posting],
 
    ) -> Iterator[Tuple[data.Account, RelatedType]]:
 
        return cls._group_by(postings, operator.attrgetter('account'))
 

	
 
    @classmethod
 
    def group_by_meta(cls: Type[RelatedType],
 
                      postings: Iterable[data.Posting],
 
                      key: MetaKey,
 
                      default: Optional[MetaValue]=None,
 
    ) -> Iterator[Tuple[Optional[MetaValue], RelatedType]]:
 
        """Relate postings by metadata value
 

	
 
        This method takes an iterable of postings and returns a mapping.
 
        The keys of the mapping are the values of post.meta.get(key, default).
 
        The values are RelatedPostings instances that contain all the postings
 
        that had that same metadata value.
 
        """
 
        def key_func(post: data.Posting) -> Optional[MetaValue]:
 
            return post.meta.get(key, default)
 
        return cls._group_by(postings, key_func)
 

	
 
    @classmethod
 
    def group_by_first_meta_link(
 
            cls: Type[RelatedType],
 
            postings: Iterable[data.Posting],
 
            key: MetaKey,
 
    ) -> Iterator[Tuple[Optional[str], RelatedType]]:
 
        """Relate postings by the first link in metadata
 

	
 
        This method takes an iterable of postings and returns a mapping.
 
        The keys of the mapping are the values of
 
        post.meta.first_link(key, None).
 
        The values are RelatedPostings instances that contain all the postings
 
        that had that same first metadata link.
 
        """
 
        def key_func(post: data.Posting) -> Optional[MetaValue]:
 
            return post.meta.first_link(key, None)
 
        return cls._group_by(postings, key_func)
 

	
 
    def __repr__(self) -> str:
 
        return f'<{type(self).__name__} {self._postings!r}>'
 

	
 
    @overload
 
    def __getitem__(self: RelatedType, index: int) -> data.Posting: ...
 

	
 
    @overload
 
    def __getitem__(self: RelatedType, s: slice) -> RelatedType: ...
 

	
 
    def __getitem__(self: RelatedType,
 
                    index: Union[int, slice],
 
    ) -> Union[data.Posting, RelatedType]:
 
        if isinstance(index, slice):
 
            return type(self)(self._postings[index], _can_own=True)
 
        else:
 
            return self._postings[index]
 

	
 
    def __len__(self) -> int:
 
        return len(self._postings)
 

	
 
    def all_meta_links(self, key: MetaKey) -> Iterator[str]:
 
        return filters.iter_unique(
 
            link for post in self for link in post.meta.report_links(key)
 
        )
 

	
 
    @overload
 
    def first_meta_links(self, key: MetaKey, default: str='') -> Iterator[str]: ...
 

	
 
    @overload
 
    def first_meta_links(self, key: MetaKey, default: None) -> Iterator[Optional[str]]: ...
 

	
 
    def first_meta_links(self,
 
                         key: MetaKey,
 
                         default: Optional[str]='',
 
    ) -> Iterator[Optional[str]]:
 
        retval = filters.iter_unique(
 
            post.meta.first_link(key, default) for post in self
 
        )
 
        if default == '':
 
            retval = (s for s in retval if s)
 
        return retval
 

	
 
    def iter_with_balance(self) -> Iterator[Tuple[data.Posting, Balance]]:
 
        balance = MutableBalance()
 
        for post in self:
 
            balance += post.units
 
            yield post, balance
 

	
 
    def balance(self) -> Balance:
 
        return Balance(post.units for post in self)
 

	
 
    def balance_at_cost(self) -> Balance:
 
        return Balance(post.at_cost() for post in self)
 

	
 
    def balance_at_cost_by_date(self, date: datetime.date) -> Balance:
 
        for index, post in enumerate(self):
 
            if post.meta.date >= date:
 
                break
 
        else:
 
            index += 1
 
        return Balance(post.at_cost() for post in self._postings[:index])
 

	
 
    def meta_values(self,
 
                    key: MetaKey,
 
                    default: Optional[MetaValue]=None,
 
    ) -> Set[Optional[MetaValue]]:
 
        return {post.meta.get(key, default) for post in self}
 

	
 

	
 
class PeriodPostings(RelatedPostings):
 
    """Postings filtered and balanced over a date range
 

	
 
    Create a subclass with ``PeriodPostings.with_start_date(date)``.
 
    Note that there is no explicit stop date. The expectation is that the
 
    caller has already filtered out posts past the stop date from the input.
 

	
 
    Instances of that subclass will have three Balance attributes:
 

	
 
    * ``start_bal`` is the balance at cost of postings to your start date
 
    * ``period_bal`` is the balance at cost of postings from your start date
 
    * ``stop_bal`` is the balance at cost of all postings
 

	
 
    Use this subclass when your report includes a lot of balances over time to
 
    help you get the math right.
 
    """
 
    __slots__ = (
 
        'begin_bal',
 
        'end_bal',
 
        'period_bal',
 
        'start_bal',
 
        'stop_bal',
 
    )
 
    START_DATE = datetime.date(datetime.MINYEAR, 1, 1)
 

	
 
    def __init__(self,
 
                 source: Iterable[data.Posting]=(),
 
                 *,
 
                 _can_own: bool=False,
 
    ) -> None:
 
        start_posts: List[data.Posting] = []
 
        period_posts: List[data.Posting] = []
 
        for post in source:
 
            if post.meta.date < self.START_DATE:
 
                start_posts.append(post)
 
            else:
 
                period_posts.append(post)
 
        super().__init__(period_posts, _can_own=True)
 
        self.start_bal = RelatedPostings(start_posts, _can_own=True).balance_at_cost()
 
        self.period_bal = self.balance_at_cost()
 
        self.stop_bal = self.start_bal + self.period_bal
 
        # Convenience aliases
 
        self.begin_bal = self.start_bal
 
        self.end_bal = self.stop_bal
 

	
 
    @classmethod
 
    def with_start_date(cls: Type[RelatedType], start_date: datetime.date) -> Type[RelatedType]:
 
        name = f'BalancePostings{start_date.strftime("%Y%m%d")}'
 
        return type(name, (cls,), {'START_DATE': start_date})
 

	
 

	
 
class BaseSpreadsheet(Generic[RT, ST], metaclass=abc.ABCMeta):
 
    """Abstract base class to help write spreadsheets
 

	
 
    This class provides the very core logic to write an arbitrary set of data
 
    rows to arbitrary output. It calls hooks when it starts writing the
 
    spreadsheet, starts a new "section" of rows, ends a section, and ends the
 
    spreadsheet.
 

	
 
    RT is the type of the input data rows. ST is the type of the section
 
    identifier that you create from each row. If you don't want to use the
 
    section logic at all, set ST to None and define section_key to return None.
 
    """
 

	
 
    @abc.abstractmethod
 
    def section_key(self, row: RT) -> ST:
 
        """Return the section a row belongs to
 

	
 
        Given a data row, this method should return some identifier for the
 
        "section" the row belongs to. The write method uses this to
 
        determine when to call start_section and end_section.
 

	
 
        If your spreadsheet doesn't need sections, define this to return None.
 
        """
 
        ...
 

	
 
    @abc.abstractmethod
 
    def write_row(self, row: RT) -> None:
 
        """Write a data row to the output spreadsheet
 

	
 
        This method is called once for each data row in the input.
 
        """
 
        ...
 

	
 
    # The next four methods are all called by the write method, as their
 
    # names suggest. You may override them to output headers or sums, record
 
    # state, etc. The default implementations are all noops.
 

	
 
    def start_spreadsheet(self) -> None:
 
        pass
 

	
 
    def start_section(self, key: ST) -> None:
 
        pass
 

	
 
    def end_section(self, key: ST) -> None:
 
        pass
 

	
 
    def end_spreadsheet(self) -> None:
 
        pass
 

	
 
    def write(self, rows: Iterable[RT]) -> None:
 
        prev_section: Optional[ST] = None
 
        self.start_spreadsheet()
 
        for row in rows:
 
            section = self.section_key(row)
 
            if section != prev_section:
 
                if prev_section is not None:
 
                    self.end_section(prev_section)
 
                self.start_section(section)
 
                prev_section = section
 
            self.write_row(row)
 
        try:
 
            should_end = section is not None
 
        except NameError:
 
            should_end = False
 
        if should_end:
 
            self.end_section(section)
 
        self.end_spreadsheet()
 

	
 

	
 
class Border(enum.IntFlag):
 
    TOP = 1
 
    RIGHT = 2
 
    BOTTOM = 4
 
    LEFT = 8
 
    # in CSS order, clockwise from top
 

	
 

	
 
class BaseODS(BaseSpreadsheet[RT, ST], metaclass=abc.ABCMeta):
 
    """Abstract base class to help write OpenDocument spreadsheets
 

	
 
    This class provides the very core logic to write an arbitrary set of data
 
    rows to an OpenDocument spreadsheet. It provides helper methods for
 
    building sheets, rows, and cells.
 

	
 
    See also the BaseSpreadsheet base class for additional documentation about
 
    methods you must and can define, the definition of RT and ST, etc.
 
    """
 
    # Defined in the XSL spec, "Definitions of Units of Measure"
 
    MEASUREMENT_UNITS = frozenset([
 
        'cm',
 
        'em',
 
        'in',
 
        'mm',
 
        'pc',
 
        'pt',
 
        'px',
 
    ])
 
    MEASUREMENT_RE = re.compile(
 
        r'([-+]?(?:\d+\.?|\.\d+|\d+\.\d+))({})'.format('|'.join(MEASUREMENT_UNITS)),
 
        re.ASCII,
 
    )
 

	
 
    def __init__(self, rt_wrapper: Optional[rtutil.RT]=None) -> None:
 
        self.rt_wrapper = rt_wrapper
 
        self.locale = babel.core.Locale.default('LC_MONETARY')
 
        self.currency_fmt_key = 'accounting'
 
        self._name_counter = itertools.count(1)
 
        self._style_cache: MutableMapping[str, odf.style.Style] = {}
 
        self.document = odf.opendocument.OpenDocumentSpreadsheet()
 
        self.init_settings()
 
        self.init_styles()
 
        self.sheet = self.use_sheet("Report")
 

	
 
    ### Low-level document tree manipulation
 
    # The *intent* is that you only need to use these if you're adding new
 
    # methods to manipulate document settings or styles.
 

	
 
    def copy_element(self, elem: odf.element.Element) -> odf.element.Element:
 
        retval = odf.element.Element(
 
            qname=elem.qname,
 
            qattributes=copy.copy(elem.attributes),
 
        )
 
        try:
 
            orig_name = retval.getAttribute('name')
 
        except ValueError:
 
            orig_name = None
 
        if orig_name is not None:
 
            retval.setAttribute('name', f'{orig_name}{next(self._name_counter)}')
 
        for child in elem.childNodes:
 
            # Order is important: need to check the deepest subclasses first.
 
            if isinstance(child, odf.element.CDATASection):
 
                retval.addCDATA(child.data)
 
            elif isinstance(child, odf.element.Text):
 
                retval.addText(child.data)
 
            else:
 
                retval.addElement(self.copy_element(child))
 
        return retval
 

	
 
    def ensure_child(self,
 
                     parent: odf.element.Element,
 
                     child_type: ElementType,
 
                     **kwargs: Any,
 
    ) -> odf.element.Element:
 
        new_child = child_type(**kwargs)
 
        found_child = self.find_child(parent, new_child)
 
        if found_child is None:
 
            parent.addElement(new_child)
 
            return parent.lastChild
 
        else:
 
            return found_child
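
    # Illustrative use (assuming odfpy's config elements, as used elsewhere in
    # this class): the call below returns the existing matching child if one
    # is already present under ``root``, otherwise it appends and returns a
    # new one.
    #
    #     item_set = self.ensure_child(root, odf.config.ConfigItemSet,
    #                                  name='ooo:view-settings')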
 

	
 
    def ensure_config_map_entry(self,
 
                                root: odf.element.Element,
 
                                map_name: str,
 
                                entry_name: str,
 
    ) -> odf.element.Element:
 
        """Return a ``ConfigItemMapEntry`` under ``root``
 

	
 
        This method ensures there's a ``ConfigItemMapNamed`` named ``map_name``
 
        under ``root``, and a ``ConfigItemMapEntry`` named ``entry_name`` under
 
        that. Return the ``ConfigItemMapEntry`` element.
 
        """
 
        config_map = self.ensure_child(root, odf.config.ConfigItemMapNamed, name=map_name)
 
        return self.ensure_child(config_map, odf.config.ConfigItemMapEntry, name=entry_name)
 

	
 
    def find_child(self,
 
                   parent: odf.element.Element,
 
                   child: odf.element.Element,
 
    ) -> Optional[odf.element.Element]:
 
        attrs = {k: v for k, v in self.iter_attributes(child)}
 
        if not attrs:
 
            return None
 
        for elem in parent.childNodes:
 
            if (elem.qname == child.qname
 
                and all(elem.getAttribute(k) == v for k, v in attrs.items())):
 
                return elem
 
        return None
 

	
 
    def iter_attributes(self, elem: odf.element.Element) -> Iterator[Tuple[str, str]]:
 
        for (_, key), value in self.iter_qattributes(elem):
 
            yield key.lower().replace('-', ''), value
 

	
 
    def iter_qattributes(self, elem: odf.element.Element) -> Iterator[Tuple[Tuple[str, str], str]]:
 
        if elem.attributes:
 
            yield from elem.attributes.items()
 

	
 
    def replace_child(self,
 
                     parent: odf.element.Element,
 
                     child_type: ElementType,
 
                     **kwargs: Any,
 
    ) -> odf.element.Element:
 
        new_child = child_type(**kwargs)
 
        found_child = self.find_child(parent, new_child)
 
        parent.insertBefore(new_child, found_child)
 
        if found_child is not None:
 
            parent.removeChild(found_child)
 
        return new_child
 

	
 
    def set_config(self,
 
                   root: odf.element.Element,
 
                   name: str,
 
                   value: Union[bool, int, str],
 
                   config_type: Optional[str]=None,
 
    ) -> None:
 
        """Ensure ``root`` has a ``ConfigItem`` with the given name, type, and value"""
 
        value_s = str(value)
 
        if isinstance(value, bool):
 
            value_s = str(value).lower()
tests/books/accruals.beancount
Show inline comments
 
2010-01-01 open Assets:Checking
 
2010-01-01 open Assets:Receivable:Accounts
 
2010-01-01 open Expenses:FilingFees
 
2010-01-01 open Expenses:Services:Legal
 
2010-01-01 open Expenses:Travel
 
2010-01-01 open Income:Donations
 
2010-01-01 open Liabilities:Payable:Accounts
 
2010-01-01 open Equity:Funds:Opening
 

	
 
2010-03-01 * "Opening balances"
 
  Equity:Funds:Opening  -1000 USD
 
  Assets:Receivable:Accounts  6000 USD
 
  Liabilities:Payable:Accounts  -5000 USD
 

	
 
2010-03-05 * "EarlyBird" "Payment for receivable from previous FY"
 
  rt-id: "rt:40"
 
  invoice: "rt:40/400"
 
  project: "Conservancy"
 
  Assets:Receivable:Accounts  -500 USD
 
  Assets:Checking  500 USD
 

	
 
2010-03-06 * "EarlyBird" "Payment for payment from previous FY"
 
  rt-id: "rt:44"
 
  invoice: "rt:44/440"
 
  project: "Conservancy"
 
  Liabilities:Payable:Accounts  125 USD
 
  Assets:Checking  -125 USD
 

	
 
2010-03-15 * "GrantCo" "2010Q1 grant"
 
  rt-id: "rt:470"
 
  invoice: "rt:470/4700"
 
  project: "Development Grant"
 
  Assets:Receivable:Accounts  5000 USD
 
  Income:Donations           -5000 USD
 

	
 
2010-03-25 * "GrantCo" "2010Q1 grant ACH payment"
 
  rt-id: "rt:470"
 
  invoice: "rt:470/4700"
 
  project: "Development Grant"
 
  Assets:Receivable:Accounts  -5000 USD
 
  Assets:Checking              5000 USD
 

	
 
2010-03-30 * "EarlyBird" "Travel reimbursement"
 
  rt-id: "rt:490"
 
  invoice: "rt:490/4900"
 
  project: "Conservancy"
 
  Liabilities:Payable:Accounts  -75 USD
 
  Expenses:Travel  75 USD
 

	
 
2010-04-15 * "Multiparty invoice"
 
  rt-id: "rt:480"
 
  invoice: "rt:480/4800"
 
  project: "Conservancy"
 
  Expenses:Travel  250 USD
 
  Liabilities:Payable:Accounts  -125 USD
 
  entity: "MultiPartyA"
 
  Liabilities:Payable:Accounts  -125 USD
 
  entity: "MultiPartyB"
 

	
 
2010-04-18 * "MultiPartyA" "Payment for 480"
 
  rt-id: "rt:480"
 
  invoice: "rt:480/4800"
 
  project: "Conservancy"
 
  Liabilities:Payable:Accounts  125 USD
 
  Assets:Checking  -125 USD
 

	
 
2010-04-20 * "MultiPartyB" "Payment for 480"
 
  rt-id: "rt:480"
 
  invoice: "rt:480/4800"
 
  project: "Conservancy"
 
  Liabilities:Payable:Accounts  125 USD
 
  Assets:Checking  -125 USD
 

	
 
2010-04-25 ! "Vendor" "First trip travel reimbursement"
 
  rt-id: "rt:310"
 
  contract: "rt:310/3100"
 
  invoice: "FIXME"  ; still waiting on them to send it
 
  project: "Conservancy"
 
  payment-method: "USD USWire"
 
  Liabilities:Payable:Accounts  -200 USD
 
  Expenses:Travel  200 USD
 

	
 
2010-04-30 * "Vendor" "Second trip travel reimbursement"
 
  rt-id: "rt:310"
 
  contract: "rt:310/3100"
 
  invoice: "rt:310/3120"
 
  project: "Conservancy"
 
  payment-method: "USD Check"
 
  Liabilities:Payable:Accounts  -220 USD
 
  Expenses:Travel  220 USD
 

	
 
2010-05-05 * "DonorA" "Donation pledge"
 
  rt-id: "rt:505"
 
  invoice: "rt:505/5050"
 
  approval: "rt:505/5040"
 
  project: "Conservancy"
 
  Income:Donations  -2,500 EUR {1.100 USD}
 
  Assets:Receivable:Accounts  2,500 EUR {1.100 USD}
 

	
 
2010-05-10 * "Lawyer" "April legal services"
 
  rt-id: "rt:510"
 
  invoice: "rt:510/5100"
 
  contract: "rt:510/4000"
 
  project: "Conservancy"
 
  Expenses:Services:Legal  200.00 USD
 
  Liabilities:Payable:Accounts  -200.00 USD
 
  payment-method: "USD ACH"
 

	
 
2010-05-15 * "MatchingProgram" "May matched donations"
 
  invoice: "rt://ticket/515/attachments/5150"
 
  approval: "rt://ticket/515/attachments/5140"
 
  project: "Conservancy"
 
  Income:Donations  -1500.00 USD
 
  Assets:Receivable:Accounts  1500.00 USD
 

	
 
2010-05-20 * "DonorA" "Donation made"
 
  rt-id: "rt:505"
 
  invoice: "rt:505/5050"
 
  project: "Conservancy"
 
  Assets:Receivable:Accounts  -2,750.00 USD
 
  Assets:Checking  2,750.00 USD
 
  receipt: "DonorAWire.pdf"
 

	
 
2010-05-25 * "Lawyer" "May payment"
 
  rt-id: "rt:510"
 
  invoice: "rt:510/5100"
 
  project: "Conservancy"
 
  Liabilities:Payable:Accounts  200.00 USD
 
  contract: "rt:510/4000"
 
  Assets:Checking  -200.00 USD
 
  receipt: "rt:510/5105"
 

	
 
2010-06-10 * "Lawyer" "May legal services"
 
  rt-id: "rt:510"
 
  invoice: "rt:510/6100"
 
  contract: "rt:510/4000"
 
  project: "Conservancy"
 
  Expenses:Services:Legal  220.00 USD
 
  Liabilities:Payable:Accounts  -220.00 USD
 
  payment-method: "USD ACH"
 

	
 
2010-06-12 * "Lawyer" "Additional legal fees for May"
 
2010-06-10 * "Lawyer" "Additional legal fees for May"
 
  rt-id: "rt:510"
 
  invoice: "rt:510/6100"
 
  contract: "rt:510/4000"
 
  project: "Conservancy"
 
  Expenses:FilingFees  60.00 USD
 
  Liabilities:Payable:Accounts  -60.00 USD
 
  payment-method: "USD ACH"
 

	
 
2010-06-15 * "GrantCo" "2010Q2 grant"
 
  rt-id: "rt:470"
 
  invoice: "rt:470/4700"
 
  project: "Development Grant"
 
  Assets:Receivable:Accounts  5500 USD
 
  Income:Donations           -5500 USD
 

	
 
2010-06-18 * "EuroGov" "European legal fees"
 
  ; Multiple rt-ids are used to test proper handling for both
 
  ; searching and generating the outgoing report.
 
  rt-id: "rt:520 rt:525"
 
  invoice: "rt:520/5200"
 
  contract: "rt:520/5220"
 
  project: "Conservancy"
 
  Liabilities:Payable:Accounts  -1,000 EUR {1.100 USD}
 
  payment-method: "eur fxwire"
 
  Expenses:FilingFees  1,000 EUR {1.100 USD}
 

	
 
2010-06-20 * "StateGov" "Business registration"
 
  ; Intentionally has no rt-id
 
  invoice: "Invoices/2010StateRegistration.pdf"
 
  project: "Conservancy"
 
  Liabilities:Payable:Accounts  -50 USD
 
  Expenses:FilingFees  50 USD
 

	
 
2010-09-15 * "GrantCo" "2010Q3 grant"
 
  rt-id: "rt:470"
 
  invoice: "rt:470/4700"
 
  project: "Development Grant"
 
  Assets:Receivable:Accounts  6000 USD
 
  Income:Donations           -6000 USD
tests/test_reports_accrual.py
Show inline comments
 
"""test_reports_accrual - Unit tests for accrual report"""
 
# Copyright © 2020  Brett Smith
 
#
 
# This program is free software: you can redistribute it and/or modify
 
# it under the terms of the GNU Affero General Public License as published by
 
# the Free Software Foundation, either version 3 of the License, or
 
# (at your option) any later version.
 
#
 
# This program is distributed in the hope that it will be useful,
 
# but WITHOUT ANY WARRANTY; without even the implied warranty of
 
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 
# GNU Affero General Public License for more details.
 
#
 
# You should have received a copy of the GNU Affero General Public License
 
# along with this program.  If not, see <https://www.gnu.org/licenses/>.
 

	
 
import collections
 
import copy
 
import datetime
 
import io
 
import itertools
 
import operator
 
import re
 

	
 
import babel.numbers
 
import odf.opendocument
 
import odf.table
 
import odf.text
 

	
 
import pytest
 

	
 
from . import testutil
 

	
 
from decimal import Decimal
 
from typing import NamedTuple, Optional, Sequence
 

	
 
from beancount.core import data as bc_data
 
from beancount import loader as bc_loader
 
from conservancy_beancount import cliutil
 
from conservancy_beancount import data
 
from conservancy_beancount import rtutil
 
from conservancy_beancount.reports import accrual
 

	
 
_accruals_load = bc_loader.load_file(testutil.test_path('books/accruals.beancount'))
 
ACCRUAL_TXNS = [
 
    entry for entry in _accruals_load[0]
 
    if hasattr(entry, 'narration')
 
    and entry.narration != 'Opening balances'
 
]
 
ACCRUALS_COUNT = sum(
 
    1
 
    for txn in ACCRUAL_TXNS
 
    for post in txn.postings
 
    if post.account.startswith(('Assets:Receivable:', 'Liabilities:Payable:'))
 
)
 

	
 
ACCOUNTS = [
 
    'Assets:Receivable:Accounts',
 
    'Assets:Receivable:Loans',
 
    'Liabilities:Payable:Accounts',
 
    'Liabilities:Payable:Vacation',
 
]
 

	
 
class AgingRow(NamedTuple):
 
    date: datetime.date
 
    entity: Sequence[str]
 
    amount: Optional[Sequence[bc_data.Amount]]
 
    at_cost: bc_data.Amount
 
    rt_id: Sequence[str]
 
    invoice: Sequence[str]
 
    project: Sequence[str]
 

	
 
    @classmethod
 
    def make_simple(cls, date, entity, at_cost, invoice,
 
                    rt_id=None, orig_amount=None, project='Conservancy'):
 
        if isinstance(date, str):
 
            date = datetime.datetime.strptime(date, '%Y-%m-%d').date()
 
        if not isinstance(at_cost, tuple):
 
            at_cost = testutil.Amount(at_cost)
 
        if rt_id is None and invoice.startswith('rt:'):
 
            rt_id, _, _ = invoice.partition('/')
 
        return cls(date, [entity], orig_amount, at_cost, [rt_id], [invoice], [project])
 

	
 
    def check_row_match(self, sheet_row):
 
        cells = testutil.ODSCell.from_row(sheet_row)
 
        assert len(cells) >= len(self)
 
        cells = iter(cells)
 
        assert next(cells).value == self.date
 
        assert next(cells).text == '\0'.join(self.entity)
 
        assert next(cells).text == '\0'.join(
 
            babel.numbers.format_currency(number, currency, format_type='accounting')
 
            for number, currency in self.amount or ()
 
        )
 
        usd_cell = next(cells)
 
        assert usd_cell.value_type == 'currency'
 
        assert usd_cell.value == self.at_cost.number
 
        assert next(cells).text == '\0'.join(self.project)
 
        for index, cell in enumerate(cells):
 
            links = cell.getElementsByType(odf.text.A)
 
            assert len(links) == len(cell.childNodes)
 
        assert index >= 1
 

	
 

	
 
AGING_AP = [
 
    AgingRow.make_simple('2010-03-06', 'EarlyBird', -125, 'rt:44/440'),
 
    AgingRow.make_simple('2010-03-30', 'EarlyBird', 75, 'rt:490/4900'),
 
    AgingRow.make_simple('2010-04-25', 'Vendor', 200, 'FIXME'),
 
    AgingRow.make_simple('2010-04-30', 'Vendor', 220, 'rt:310/3120'),
 
    AgingRow.make_simple('2010-06-10', 'Lawyer', 280, 'rt:510/6100'),
 
    AgingRow.make_simple('2010-06-18', 'EuroGov', 1100, 'rt:520/5200',
 
                         orig_amount=[testutil.Amount(1000, 'EUR')]),
 
    AgingRow.make_simple('2010-06-20', 'StateGov', 50, 'Invoices/2010StateRegistration.pdf'),
 
]
 

	
 
AGING_AR = [
 
    AgingRow.make_simple('2010-03-05', 'EarlyBird', -500, 'rt:40/400'),
 
    AgingRow.make_simple('2010-05-15', 'MatchingProgram', 1500,
 
                         'rt://ticket/515/attachments/5150'),
 
    AgingRow.make_simple('2010-06-15', 'GrantCo', 11500, 'rt:470/4700',
 
    AgingRow.make_simple('2010-06-15', 'GrantCo', 5500, 'rt:470/4700',
 
                         project='Development Grant'),
 
    AgingRow.make_simple('2010-09-15', 'GrantCo', 6000, 'rt:470/4700',
 
                         project='Development Grant'),
 
]
 

	
 
class RTClient(testutil.RTClient):
 
    TICKET_DATA = {
 
        '40': [
 
            ('400', 'invoice feb.csv', 'text/csv', '40.4k'),
 
        ],
 
        '44': [
 
            ('440', 'invoice feb.csv', 'text/csv', '40.4k'),
 
        ],
 
        '310': [
 
            ('3100', 'VendorContract.pdf', 'application/pdf', '1.7m'),
 
            ('3120', 'VendorInvoiceB.pdf', 'application/pdf', '1.8m'),
 
        ],
 
        '490': [],
 
        '505': [],
 
        '510': [
 
            ('4000', 'contract.pdf', 'application/pdf', '1.4m'),
 
            ('5100', 'invoice april.pdf', 'application/pdf', '1.5m'),
 
            ('5105', 'payment.png', 'image/png', '51.5k'),
 
            ('6100', 'invoice may.pdf', 'application/pdf', '1.6m'),
 
        ],
 
        '515': [],
 
        '520': [],
 
    }
 

	
 

	
 
@pytest.fixture
 
def accrual_postings():
 
    return data.Posting.from_entries(copy.deepcopy(ACCRUAL_TXNS))
 

	
 
def accruals_by_meta(postings, value, key='invoice', wrap_type=iter):
 
    return wrap_type(
 
        post for post in postings
 
        if post.meta.get(key) == value
 
        and post.account.is_under('Assets:Receivable', 'Liabilities:Payable')
 
    )
 

	
 
def find_row_by_text(row_source, want_text):
 
    for row in row_source:
 
        try:
 
            found_row = row.childNodes[0].text == want_text
 
        except IndexError:
 
            found_row = False
 
        if found_row:
 
            return row
 
    return None
 

	
 
def check_aging_sheet(sheet, aging_rows, date, accrue_date):
 
    if not aging_rows:
 
        return
 
    if isinstance(accrue_date, int):
 
        accrue_date = date + datetime.timedelta(days=accrue_date)
 
    rows = iter(sheet.getElementsByType(odf.table.TableRow))
 
    for row in rows:
 
        if "Aging Report" in row.text:
 
            break
 
    else:
 
        assert None, "Header row not found"
 
    assert f"Accrued by {accrue_date.isoformat()}" in row.text
 
    assert f"Unpaid by {date.isoformat()}" in row.text
 
    expect_rows = iter(aging_rows)
 
    row0 = find_row_by_text(rows, aging_rows[0].date.isoformat())
 
    next(expect_rows).check_row_match(row0)
 
    for actual, expected in zip(rows, expect_rows):
 
        expected.check_row_match(actual)
 
    for row in rows:
 
        if row.text.startswith("Total Aged "):
 
            break
 
    else:
 
        assert None, "Totals rows not found"
 
    actual_sum = Decimal(row.childNodes[-1].value)
 
    for row in rows:
 
        if row.text.startswith("Total Aged "):
 
            actual_sum += Decimal(row.childNodes[-1].value)
 
        else:
 
            break
 
    assert actual_sum == sum(
 
        row.at_cost.number
 
        for row in aging_rows
 
        if row.date <= accrue_date
 
        and row.at_cost.number > 0
 
    )
 

	
 
def check_aging_ods(ods_file,
 
                    date=None,
 
                    recv_rows=AGING_AR,
 
                    pay_rows=AGING_AP,
 
):
 
    if date is None:
 
        date = datetime.date.today()
 
    ods_file.seek(0)
 
    ods = odf.opendocument.load(ods_file)
 
    sheets = ods.spreadsheet.getElementsByType(odf.table.Table)
 
    assert len(sheets) == 2
 
    check_aging_sheet(sheets[0], recv_rows, date, -60)
 
    check_aging_sheet(sheets[1], pay_rows, date, -30)
 

	
 
@pytest.mark.parametrize('search_terms,expect_count,check_func', [
 
    ([], ACCRUALS_COUNT, lambda post: post.account.is_under(
 
        'Assets:Receivable:', 'Liabilities:Payable:',
 
    )),
 
    ([('rt-id', '^rt:505$')], 2, lambda post: post.meta['entity'] == 'DonorA'),
 
    ([('invoice', r'^rt:\D+515/')], 1, lambda post: post.meta['entity'] == 'MatchingProgram'),
 
    ([('entity', '^Lawyer$')], 3, lambda post: post.meta['rt-id'] == 'rt:510'),
 
    ([('entity', '^Lawyer$'), ('contract', '^rt:510/')], 2,
 
     lambda post: post.meta['invoice'].startswith('rt:510/')),
 
    ([('rt-id', '^rt:510$'), ('approval', '.')], 0, lambda post: False),
 
])
 
def test_filter_search(accrual_postings, search_terms, expect_count, check_func):
 
    search_terms = [cliutil.SearchTerm._make(query) for query in search_terms]
 
    actual = list(accrual.filter_search(accrual_postings, search_terms))
 
    if expect_count < ACCRUALS_COUNT:
 
        assert ACCRUALS_COUNT > len(actual) >= expect_count
 
    else:
 
        assert len(actual) == ACCRUALS_COUNT
 
    for post in actual:
 
        assert check_func(post)
 

	
 
@pytest.mark.parametrize('arg,expected', [
 
    ('aging', accrual.AgingReport),
 
    ('balance', accrual.BalanceReport),
 
    ('outgoing', accrual.OutgoingReport),
 
    ('age', accrual.AgingReport),
 
    ('bal', accrual.BalanceReport),
 
    ('out', accrual.OutgoingReport),
 
    ('outgoings', accrual.OutgoingReport),
 
])
 
def test_report_type_by_name(arg, expected):
 
    assert accrual.ReportType.by_name(arg.lower()).value is expected
 
    assert accrual.ReportType.by_name(arg.title()).value is expected
 
    assert accrual.ReportType.by_name(arg.upper()).value is expected
 

	
 
@pytest.mark.parametrize('arg', [
 
    'unknown',
 
    'blance',
 
    'outgong',
 
])
 
def test_report_type_by_unknown_name(arg):
 
    # Raising ValueError helps argparse generate good messages.
 
    with pytest.raises(ValueError):
 
        accrual.ReportType.by_name(arg)
 

	
 
@pytest.mark.parametrize('acct_name', ACCOUNTS)
 
def test_accrual_postings_consistent_account(acct_name):
 
    meta = {'invoice': f'{acct_name} invoice.pdf'}
 
    txn = testutil.Transaction(postings=[
 
        (acct_name, 50, meta),
 
        (acct_name, 25, meta),
 
    ])
 
    related = accrual.AccrualPostings(data.Posting.from_txn(txn))
 
    assert related.account == acct_name
 

	
 
def test_accrual_postings_entity():
 
    txn = testutil.Transaction(postings=[
 
        (ACCOUNTS[0], 25, {'entity': 'Accruee'}),
 
        (ACCOUNTS[0], -15, {'entity': 'Payee15'}),
 
        (ACCOUNTS[0], -10, {'entity': 'Payee10'}),
 
    ])
 
    related = accrual.AccrualPostings(data.Posting.from_txn(txn))
 
    assert related.entity == 'Accruee'
 
    assert set(related.entities()) == {'Accruee', 'Payee10', 'Payee15'}
 

	
 
def test_accrual_postings_entities():
 
    txn = testutil.Transaction(postings=[
 
        (ACCOUNTS[0], 25, {'entity': 'Accruee'}),
 
        (ACCOUNTS[0], -15, {'entity': 'Payee15'}),
 
        (ACCOUNTS[0], -10, {'entity': 'Payee10'}),
 
    ])
 
    related = accrual.AccrualPostings(data.Posting.from_txn(txn))
 
    actual = related.entities()
 
    assert next(actual, None) == 'Accruee'
 
    assert set(actual) == {'Payee10', 'Payee15'}
 

	
 
def test_accrual_postings_entities_no_duplicates():
 
    txn = testutil.Transaction(postings=[
 
        (ACCOUNTS[0], 25, {'entity': 'Accruee'}),
 
        (ACCOUNTS[0], -15, {'entity': 'Accruee'}),
 
        (ACCOUNTS[0], -10, {'entity': 'Other'}),
 
    ])
 
    related = accrual.AccrualPostings(data.Posting.from_txn(txn))
 
    actual = related.entities()
 
    assert next(actual, None) == 'Accruee'
 
    assert next(actual, None) == 'Other'
 
    assert next(actual, None) is None
 

	
 
def test_accrual_postings_inconsistent_account():
 
    meta = {'invoice': 'invoice.pdf'}
 
    txn = testutil.Transaction(postings=[
 
        (acct_name, index, meta)
 
        for index, acct_name in enumerate(ACCOUNTS)
 
    ])
 
    related = accrual.AccrualPostings(data.Posting.from_txn(txn))
 
    assert related.account is related.INCONSISTENT
 

	
 
def test_accrual_postings_rt_id():
 
    txn = testutil.Transaction(postings=[
 
        (ACCOUNTS[0], 10, {'rt-id': 'rt:90'}),
 
        (ACCOUNTS[0], 10, {'rt-id': 'rt:90 rt:92'}),
 
        (ACCOUNTS[0], 10, {'rt-id': 'rt:90 rt:94 rt:92'}),
 
    ])
 
    related = accrual.AccrualPostings(data.Posting.from_txn(txn))
 
    assert related.rt_id == 'rt:90'
 

	
 
def test_accrual_postings_rt_id_inconsistent():
 
    txn = testutil.Transaction(postings=[
 
        (ACCOUNTS[0], 10, {'rt-id': 'rt:96'}),
 
        (ACCOUNTS[0], 10, {'rt-id': 'rt:98 rt:96'}),
 
    ])
 
    related = accrual.AccrualPostings(data.Posting.from_txn(txn))
 
    assert related.rt_id is related.INCONSISTENT
 

	
 
def test_accrual_postings_rt_id_none():
 
    txn = testutil.Transaction(postings=[
 
        (ACCOUNTS[0], 10),
 
    ])
 
    related = accrual.AccrualPostings(data.Posting.from_txn(txn))
 
    assert related.rt_id is None
 

	
 
@pytest.mark.parametrize('acct_name,invoice,day', testutil.combine_values(
 
    ACCOUNTS,
 
    ['FIXME', '', None, *testutil.NON_STRING_METADATA_VALUES],
 
    itertools.count(1),
 
))
 
def test_make_consistent_bad_invoice(acct_name, invoice, day):
 
    if acct_name.startswith('Assets:'):
 
        mult = 10
 
    else:
 
        mult = -10
 
    txn = testutil.Transaction(date=datetime.date(2019, 1, day), postings=[
 
        (acct_name, index * 10, {'invoice': invoice, 'entity': f'BadInvoice{day}'})
 
        (acct_name, index * mult, {'invoice': invoice, 'entity': f'BadInvoice{day}'})
 
        for index in range(1, 4)
 
    ])
 
    related = accrual.AccrualPostings(data.Posting.from_txn(txn))
 
    consistent = dict(related.make_consistent())
 
    consistent = dict(accrual.AccrualPostings.make_consistent(data.Posting.from_txn(txn)))
 
    assert len(consistent) == 1
 
    key = next(iter(consistent))
 
    assert acct_name in key
 
    if invoice:
 
        assert str(invoice) in key
 
    actual = consistent[key]
 
    assert actual
 
    assert len(actual) == 3
 
    for act_post, exp_post in zip(actual, txn.postings):
 
        assert act_post.units == exp_post.units
 
        assert act_post.meta.get('invoice') == invoice
 

	
 
def test_make_consistent_across_accounts():
 
    invoice = 'Invoices/CrossAccount.pdf'
 
    txn = testutil.Transaction(date=datetime.date(2019, 2, 1), postings=[
 
        (acct_name, 100, {'invoice': invoice, 'entity': 'CrossAccount'})
 
        for acct_name in ACCOUNTS
 
    ])
 
    related = accrual.AccrualPostings(data.Posting.from_txn(txn))
 
    consistent = dict(related.make_consistent())
 
    consistent = dict(accrual.AccrualPostings.make_consistent(data.Posting.from_txn(txn)))
 
    assert len(consistent) == len(ACCOUNTS)
 
    for key, posts in consistent.items():
 
        assert len(posts) == 1
 
        assert posts.account in key
 

	
 
def test_make_consistent_both_invoice_and_account():
 
    txn = testutil.Transaction(date=datetime.date(2019, 2, 2), postings=[
 
        (acct_name, 150) for acct_name in ACCOUNTS
 
    ])
 
    related = accrual.AccrualPostings(data.Posting.from_txn(txn))
 
    consistent = dict(related.make_consistent())
 
    consistent = dict(accrual.AccrualPostings.make_consistent(data.Posting.from_txn(txn)))
 
    assert len(consistent) == len(ACCOUNTS)
 
    for key, posts in consistent.items():
 
        assert len(posts) == 1
 
        assert posts.account in key
 

	
 
@pytest.mark.parametrize('acct_name', ACCOUNTS)
 
def test_make_consistent_across_entity(acct_name):
 
    amt_sign = operator.pos if acct_name.startswith('Assets') else operator.neg
 
    txn = testutil.Transaction(postings=[
 
        (acct_name, amt_sign(n), {'invoice': 'Inv/1.pdf', 'entity': f'Entity{n}'})
 
        for n in range(1, 4)
 
    ])
 
    related = accrual.AccrualPostings(data.Posting.from_txn(txn))
 
    consistent = dict(related.make_consistent())
 
    consistent = dict(accrual.AccrualPostings.make_consistent(data.Posting.from_txn(txn)))
 
    assert len(consistent) == 3
 
    for key, posts in consistent.items():
 
        assert len(posts) == 1
 
        entities = posts.entities()
 
        assert next(entities, None) == posts.entity
 
        assert next(entities, None) is None
 
        assert posts.entity in key
 

	
 
@pytest.mark.parametrize('acct_name', ACCOUNTS)
 
def test_make_consistent_entity_differs_accrual_payment(acct_name):
 
    invoice = 'Invoices/DifferPay.pdf'
 
    txn = testutil.Transaction(postings=[
 
        # Depending on the account, the order of the accrual and payment might
 
        # be swapped here, but that shouldn't matter.
 
        (acct_name, 125, {'invoice': invoice, 'entity': 'Positive'}),
 
        (acct_name, -125, {'invoice': invoice, 'entity': 'Negative'}),
 
    ])
 
    related = accrual.AccrualPostings(data.Posting.from_txn(txn))
 
    consistent = related.make_consistent()
 
    related = list(data.Posting.from_txn(txn))
 
    consistent = accrual.AccrualPostings.make_consistent(related)
 
    _, actual = next(consistent)
 
    assert actual is related
 
    assert len(actual) == len(related)
 
    assert all(post in actual for post in related)
 
    assert next(consistent, None) is None
 

	
 
def test_make_consistent_by_date_accruals_differ():
 
    meta = {'rt-id': '1', 'invoice': 'rt:1/2', 'entity': 'MultiDate'}
 
    entries = [testutil.Transaction(date=date, postings=[
 
        ('Assets:Receivable:Accounts', date.day * 100, meta),
 
    ]) for date in itertools.islice(testutil.date_seq(), 3)]
 
    actual = [group for _, group in
 
              accrual.AccrualPostings.make_consistent(data.Posting.from_entries(entries))]
 
    assert len(actual) == 3
 
    assert {post.units.number for group in actual for post in group} == {100, 200, 300}
 

	
 
def test_make_consistent_by_date_with_exact_payment():
 
    meta = {'rt-id': '1', 'invoice': 'rt:1/3', 'entity': 'OnePayment'}
 
    entries = [testutil.Transaction(date=date, postings=[(
 
        'Assets:Receivable:Accounts',
 
        35 * (1 if date.day % 2 else -1),
 
        meta,
 
    )]) for date in itertools.islice(testutil.date_seq(), 3)]
 
    actual = [group for _, group in
 
              accrual.AccrualPostings.make_consistent(data.Posting.from_entries(entries))]
 
    assert len(actual) == 2
 
    assert actual[0].is_zero()
 
    assert len(actual[1]) == 1
 
    assert actual[1][0].meta.date.day == 3
 

	
 
def test_make_consistent_by_date_with_underpayment():
 
    meta = {'rt-id': '1', 'invoice': 'rt:1/4', 'entity': 'UnderPayment'}
 
    entries = [testutil.Transaction(date=date, postings=[(
 
        'Assets:Receivable:Accounts',
 
        40 * (1 if date.day % 2 else -.5),
 
        meta,
 
    )]) for date in itertools.islice(testutil.date_seq(), 3)]
 
    actual = [group for _, group in
 
              accrual.AccrualPostings.make_consistent(data.Posting.from_entries(entries))]
 
    assert len(actual) == 2
 
    assert len(actual[0]) == 2
 
    assert actual[0][0].units.number == 40
 
    assert actual[0][1].units.number == -20
 
    assert len(actual[1]) == 1
 
    assert actual[1][0].meta.date.day == 3
 

	
 
def test_make_consistent_by_date_with_overpayment():
 
    meta = {'rt-id': '1', 'invoice': 'rt:1/5', 'entity': 'OverPayment'}
 
    entries = [testutil.Transaction(date=date, postings=[(
 
        'Assets:Receivable:Accounts',
 
        50 * (1 if date.day % 2 else -1.5),
 
        meta,
 
    )]) for date in itertools.islice(testutil.date_seq(), 3)]
 
    actual = [group for _, group in
 
              accrual.AccrualPostings.make_consistent(data.Posting.from_entries(entries))]
 
    assert len(actual) == 2
 
    assert actual[0].is_zero()
 
    assert len(actual[1]) == 2
 
    assert actual[1][0].meta.date.day == 2
 
    assert actual[1][0].units.number == -25
 
    assert actual[1][1].meta.date.day == 3
 

	
 
def test_make_consistent_by_date_with_late_payment():
 
    meta = {'rt-id': '1', 'invoice': 'rt:1/6', 'entity': 'LatePayment'}
 
    entries = [testutil.Transaction(date=date, postings=[(
 
        'Assets:Receivable:Accounts',
 
        60 * (-1 if date.day > 2 else 1),
 
        meta,
 
    )]) for date in itertools.islice(testutil.date_seq(), 3)]
 
    actual = [group for _, group in
 
              accrual.AccrualPostings.make_consistent(data.Posting.from_entries(entries))]
 
    assert len(actual) == 2
 
    assert len(actual[0]) == 2
 
    assert actual[0][0].meta.date.day == 1
 
    assert actual[0][1].meta.date.day == 3
 
    assert len(actual[1]) == 1
 
    assert actual[1][0].meta.date.day == 2
 

	
 
def test_make_consistent_by_date_with_split_payments():
 
    meta = {'rt-id': '1', 'invoice': 'rt:1/7', 'entity': 'SplitPayments'}
 
    entries = [testutil.Transaction(date=date, postings=[(
 
        'Assets:Receivable:Accounts', amount, meta,
 
    )]) for date, amount in zip(testutil.date_seq(), [70, 80, -50, -100])]
 
    actual = [group for _, group in
 
              accrual.AccrualPostings.make_consistent(data.Posting.from_entries(entries))]
 
    assert len(actual) == 2
 
    assert [post.units.number for post in actual[0]] == [70, -50, -20]
 
    assert [post.units.number for post in actual[1]] == [80, -80]
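
# The by-date tests above all rely on the same netting behavior: within a
# group, payments are matched against accruals oldest-first, and a payment
# that spans more than one accrual is split so each accrual's group balances
# on its own.  A rough standalone sketch of that idea over plain signed
# numbers (accruals positive, payments negative; it ignores posting metadata
# and liability-side sign flips, and is not the real make_consistent
# implementation):
#
#     def fifo_net(amounts):
#         groups = []    # each group is a list of signed amounts
#         unpaid = []    # groups whose accrual is not yet fully paid
#         pending = 0    # payment received before any matching accrual
#         for amount in amounts:
#             if amount > 0:
#                 group = [amount]
#                 groups.append(group)
#                 if pending:
#                     applied = min(amount, pending)
#                     group.append(-applied)
#                     pending -= applied
#                 if sum(group) > 0:
#                     unpaid.append(group)
#             else:
#                 remaining = -amount
#                 while remaining and unpaid:
#                     balance = sum(unpaid[0])
#                     applied = min(balance, remaining)
#                     unpaid[0].append(-applied)
#                     remaining -= applied
#                     if applied == balance:
#                         unpaid.pop(0)
#                 pending += remaining
#         return groups
#
#     fifo_net([70, 80, -50, -100]) == [[70, -50, -20], [80, -80]]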
 

	
 
@pytest.mark.parametrize('account,day', itertools.product(
 
    ACCOUNTS,
 
    [1, 10, 20, 30],
 
))
 
def test_make_consistent_with_three_one_split(account, day):
 
    meta = {'rt-id': '1', 'invoice': 'rt:1/8', 'entity': '3Split'}
 
    entries = [testutil.Transaction(date=datetime.date(2019, 5, dd), postings=[(
 
        account, dd, meta,
 
    )]) for dd in [5, 15, 25]]
 
    meta['entity'] = '1Split'
 
    entries.insert(day // 10, testutil.Transaction(
 
        date=datetime.date(2019, 5, day),
 
        postings=[(account, -45, meta)],
 
    ))
 
    postings = data.Posting.from_entries(entries)
 
    actual = dict(accrual.AccrualPostings.make_consistent(iter(postings)))
 
    if account.startswith('Assets:'):
 
        group_count = 3
 
        post_count = 2
 
    else:
 
        group_count = 1
 
        post_count = 4
 
    assert len(actual) == group_count
 
    for related in actual.values():
 
        assert len(related) == post_count
 
        assert sum(post.units.number for post in related) == 0
 
        assert all(post.meta['invoice'] == meta['invoice'] for post in related)
 
        assert {post.meta['entity'] for post in related} == {'1Split', '3Split'}
 

	
 
@pytest.mark.parametrize('account', ACCOUNTS)
 
def test_make_consistent_with_three_two_split(account):
 
    meta = {'rt-id': '1', 'invoice': 'rt:1/9'}
 
    entries = [testutil.Transaction(date=datetime.date(2019, 5, day), postings=[(
 
        account, day * (1 if day % 10 else -1.5), meta,
 
    )]) for day in range(5, 30, 5)]
 
    postings = data.Posting.from_entries(entries)
 
    actual = dict(accrual.AccrualPostings.make_consistent(iter(postings)))
 
    if account.startswith('Assets:'):
 
        group_count = 3
 
    else:
 
        group_count = 2
 
    assert len(actual) == group_count
 
    for related in actual.values():
 
        assert len(related) >= 2
 
        assert sum(post.units.number for post in related) == 0
 
        assert all(post.meta['invoice'] == meta['invoice'] for post in related)
 

	
 
def check_output(output, expect_patterns):
 
    output.seek(0)
 
    testutil.check_lines_match(iter(output), expect_patterns)
 

	
 
def run_outgoing(rt_id, postings, rt_client=None):
 
    if rt_client is None:
 
        rt_client = RTClient()
 
    rt_wrapper = rtutil.RT(rt_client)
 
    if not isinstance(postings, accrual.AccrualPostings):
 
        postings = accruals_by_meta(postings, rt_id, 'rt-id', wrap_type=accrual.AccrualPostings)
 
    output = io.StringIO()
 
    report = accrual.OutgoingReport(rt_wrapper, output)
 
    report.run({rt_id: postings})
 
    return output
 

	
 
@pytest.mark.parametrize('invoice,expected', [
 
    ('rt:505/5050', "Zero balance outstanding since 2010-05-05"),
 
    ('rt:510/5100', "Zero balance outstanding since 2010-05-10"),
 
    ('rt:510/6100', "-280.00 USD outstanding since 2010-06-10"),
 
    ('rt://ticket/515/attachments/5150', "1,500.00 USD outstanding since 2010-05-15",),
 
])
 
def test_balance_report(accrual_postings, invoice, expected, caplog):
 
    related = accruals_by_meta(accrual_postings, invoice, wrap_type=accrual.AccrualPostings)
 
    output = io.StringIO()
 
    report = accrual.BalanceReport(output)
 
    report.run({invoice: related})
 
    assert not caplog.records
 
    check_output(output, [invoice, expected])
 

	
 
def test_outgoing_report(accrual_postings, caplog):
 
    rt_client = RTClient()
 
    output = run_outgoing('rt:510', accrual_postings, rt_client)
 
    rt_url = RTClient.DEFAULT_URL[:-9]
 
    rt_id_url = rf'\b{re.escape(f"{rt_url}Ticket/Display.html?id=510")}\b'
 
    contract_url = rf'\b{re.escape(f"{rt_url}Ticket/Attachment/4000/4000/contract.pdf")}\b'
 
    assert not caplog.records
 
    check_output(output, [
 
        r'^PAYMENT FOR APPROVAL:$',
 
        r'^REQUESTOR: Mx\. 510 <mx510@example\.org>$',
 
        r'^PAYMENT TO: Hon\. Mx\. 510$',
 
        r'^TOTAL TO PAY: \$280\.00$',
 
        fr'^AGREEMENT: {contract_url}',
 
        r'^BEANCOUNT ENTRIES:$',
 
        # For each transaction, check for the date line, a metadata line, and the
 
        # Expenses posting.
 
        r'^\s*2010-06-10\s',
 
        fr'^\s+rt-id: "{rt_id_url}"$',
 
        r'^\s+Expenses:Services:Legal\s+220\.00 USD$',
 
        r'^\s*2010-06-12\s',
 
        r'^\s*2010-06-10\s',
 
        fr'^\s+contract: "{contract_url}"$',
 
        r'^\s+Expenses:FilingFees\s+60\.00 USD$',
 
    ])
 
    assert rt_client.edits == {'510': {
 
        'CF_payment-amount': 'USD 280.00',
 
        'CF_payment-method': 'USD ACH',
 
    }}
 
    assert 'payment-method:' not in output.getvalue()
 

	
 
def test_outgoing_report_custom_field_fallbacks(accrual_postings, caplog):
 
    rt_client = RTClient(want_cfs=False)
 
    output = run_outgoing('rt:510', accrual_postings, rt_client)
 
    assert not caplog.records
 
    check_output(output, [
 
        r'^PAYMENT FOR APPROVAL:$',
 
        r'^REQUESTOR: <mx510@example\.org>$',
 
        r'^PAYMENT TO:\s*$',
 
    ])
 

	
 
def test_outgoing_report_fx_amounts(accrual_postings, caplog):
 
    rt_client = RTClient()
 
    output = run_outgoing('rt:520 rt:525', accrual_postings, rt_client)
 
    assert not caplog.records
 
    check_output(output, [
 
        r'^PAYMENT FOR APPROVAL:$',
 
        r'^REQUESTOR: Mx\. 520 <mx520@example\.org>$',
 
        r'^TOTAL TO PAY: 1,000\.00 EUR \(\$1,100.00\)$',
 
    ])
 
    assert rt_client.edits == {'520': {
 
        'CF_payment-amount': 'EUR 1,000.00 ($1,100.00)',
 
        'CF_payment-method': 'EUR International Wire',
 
    }}
 
    assert 'payment-method:' not in output.getvalue()
 

	
 
def test_outgoing_report_multi_invoice(accrual_postings, caplog):
 
    rt_client = RTClient()
 
    output = run_outgoing('rt:310', accrual_postings, rt_client)
 
    log, = caplog.records
 
    assert log.levelname == 'WARNING'
 
    assert log.message.startswith('cannot set payment-method for rt:310: ')
 
    check_output(output, [
 
        r'^PAYMENT FOR APPROVAL:$',
 
        r'^REQUESTOR: Mx\. 310 <mx310@example\.org>$',
 
        r'^TOTAL TO PAY: \$420.00$',
 
    ])
 
    assert rt_client.edits == {'310': {
 
        'CF_payment-amount': 'USD 420.00',
 
    }}
 
    assert 'payment-method:' not in output.getvalue()
 

	
 
def test_outgoing_report_without_rt_id(accrual_postings, caplog):
 
    invoice = 'rt://ticket/515/attachments/5150'
 
    related = accruals_by_meta(
 
        accrual_postings, invoice, wrap_type=accrual.AccrualPostings,
 
    )
 
    output = run_outgoing(None, related)
 
    assert caplog.records
 
    log = caplog.records[0]
 
    assert log.message.startswith(
 
        f"can't generate outgoings report for {invoice} because no RT ticket available:",
 
    )
 
    assert not output.getvalue()
 

	
 
def run_aging_report(postings, today=None):
 
    if today is None:
 
        today = datetime.date.today()
 
    postings = (
 
        post for post in postings
 
        if post.account.is_under('Assets:Receivable', 'Liabilities:Payable')
 
    )
 
    groups = {
 
        key: group
 
        for _, related in accrual.AccrualPostings.group_by_meta(postings, 'invoice')
 
        for key, group in related.make_consistent()
 
    }
 
    groups = dict(accrual.AccrualPostings.make_consistent(postings))
 
    output = io.BytesIO()
 
    rt_wrapper = rtutil.RT(RTClient())
 
    report = accrual.AgingReport(rt_wrapper, output, today)
 
    report.run(groups)
 
    return output
 

	
 
def test_aging_report(accrual_postings):
 
    output = run_aging_report(accrual_postings)
 
    check_aging_ods(output)
 

	
 
@pytest.mark.parametrize('date,recv_end,pay_end', [
 
    # Both these dates are chosen for their off-by-one potential:
 
    # the first is exactly 30 days after the 2010-06-10 payable;
 
    # the second is exactly 60 days after the 2010-05-15 receivable.
 
    (datetime.date(2010, 7, 10), 1, 5),
 
    (datetime.date(2010, 7, 14), 2, 5),
 
])
 
def test_aging_report_date_cutoffs(accrual_postings, date, recv_end, pay_end):
 
    expect_recv = AGING_AR[:recv_end]
 
    expect_pay = AGING_AP[:pay_end]
 
    if 10 <= date.day < 12:
 
        # Take the 60 USD posting out of the invoice 510/6100 payable.
 
        expect_pay[-1] = expect_pay[-1]._replace(
 
            at_cost=testutil.Amount(expect_pay[-1].at_cost.number - 60),
 
        )
 
    output = run_aging_report(accrual_postings, date)
 
    check_aging_ods(output, date, expect_recv, expect_pay)
 

	
 
def test_aging_report_entity_consistency(accrual_postings):
 
    output = run_aging_report((
 
        post for post in accrual_postings
 
        if post.meta.get('rt-id') == 'rt:480'
 
        and post.units.number < 0
 
    ))
 
    check_aging_ods(output, None, [], [
 
        AgingRow.make_simple('2010-04-15', 'MultiPartyA', 125, 'rt:480/4800'),
 
        AgingRow.make_simple('2010-04-15', 'MultiPartyB', 125, 'rt:480/4800'),
 
    ])
 

	
 
def test_aging_report_does_not_include_too_recent_postings(accrual_postings):
 
    # This date is after the Q3 posting, but too soon after for that to be
 
    # included in the aging report.
 
    date = datetime.date(2010, 10, 1)
 
    output = run_aging_report((
 
        post for post in accrual_postings
 
        if post.meta.get('rt-id') == 'rt:470'
 
    ), date)
 
    # Date+amount are both from the Q2 posting only.
 
    check_aging_ods(output, date, [
 
        AgingRow.make_simple('2010-06-15', 'GrantCo', 5500, 'rt:470/4700',
 
                             project='Development Grant'),
 
    ], [])
 

	
 
def run_main(arglist, config=None, out_type=io.StringIO):
 
    if config is None:
 
        config = testutil.TestConfig(
 
            books_path=testutil.test_path('books/accruals.beancount'),
 
            rt_client=RTClient(),
 
        )
 
    if out_type is io.BytesIO:
 
        arglist.insert(0, '--output-file=-')
 
    output = out_type()
 
    errors = io.StringIO()
 
    retcode = accrual.main(arglist, output, errors, config)
 
    output.seek(0)
 
    errors.seek(0)
 
    return retcode, output, errors
 

	
 
def check_main_fails(arglist, config, error_flags):
 
    retcode, output, errors = run_main(arglist, config)
 
    assert retcode > 16
 
    assert (retcode - 16) & error_flags
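    # (The CLI appears to encode failure reasons as bit flags added to a base
    # exit status of 16; the specific flag values are assumed to come from
    # cliutil and are not defined in this changeset.)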
 
    assert not output.getvalue()
 
    return errors
 

	
 
@pytest.mark.parametrize('arglist', [
 
    ['--report-type=balance', 'entity=EarlyBird'],
 
    ['--report-type=outgoing', 'entity=EarlyBird'],
 
])
 
def test_output_excludes_payments(arglist):
 
    retcode, output, errors = run_main(arglist)
 
    assert not errors.getvalue()
 
    assert retcode == 0
 
    output.seek(0)
 
    for line in output:
 
        assert not re.match(r'\brt:4\d\b', line)
 

	
 
@pytest.mark.parametrize('arglist,expect_invoice', [
 
    (['40'], 'rt:40/400'),
 
    (['44/440'], 'rt:44/440'),
 
])
 
def test_output_payments_when_only_match(arglist, expect_invoice):
 
    retcode, output, errors = run_main(arglist)
 
    assert not errors.getvalue()
 
    assert retcode == 0
 
    check_output(output, [
 
        rf'^{re.escape(expect_invoice)}:$',
 
        r' outstanding since ',
 
    ])
 

	
 
@pytest.mark.parametrize('arglist,expect_amount', [
 
    (['310'], 420),
 
    (['310/3120'], 220),
 
    (['-t', 'out', 'entity=Vendor'], 420),
 
])
 
def test_main_outgoing_report(arglist, expect_amount):
 
    retcode, output, errors = run_main(arglist)
 
    assert not errors.getvalue()
 
    assert retcode == 0
 
    rt_url = RTClient.DEFAULT_URL[:-9]
 
    rt_id_url = re.escape(f'<{rt_url}Ticket/Display.html?id=310>')
 
    contract_url = re.escape(f'<{rt_url}Ticket/Attachment/3120/3120/VendorContract.pdf>')
 
    check_output(output, [
 
        r'^REQUESTOR: Mx\. 310 <mx310@example\.org>$',
 
        rf'^TOTAL TO PAY: \${expect_amount}\.00$',
 
        r'^\s*2010-04-30\s',
 
        r'^\s+Expenses:Travel\s+220 USD$',
 
    ])
 

	
 
@pytest.mark.parametrize('arglist', [
 
    ['-t', 'balance'],
 
    ['515/5150'],
 
])
 
def test_main_balance_report(arglist):
 
    retcode, output, errors = run_main(arglist)
 
    assert not errors.getvalue()
 
    assert retcode == 0
 
    check_output(output, [
 
        r'\brt://ticket/515/attachments/5150:$',
 
        r'^\s+1,500\.00 USD outstanding since 2010-05-15$',
 
    ])
 

	
 
def test_main_balance_report_because_no_rt_id():
 
    invoice = 'Invoices/2010StateRegistration.pdf'
 
    retcode, output, errors = run_main([invoice])
 
    assert not errors.getvalue()
 
    assert retcode == 0
 
    check_output(output, [
 
        rf'\b{re.escape(invoice)}:$',
 
        r'^\s+-50\.00 USD outstanding since 2010-06-20$',
 
    ])
 

	
 
@pytest.mark.parametrize('arglist', [
 
    [],
 
    ['entity=Lawyer'],
 
])
 
def test_main_aging_report(arglist):
 
    if arglist:
 
        recv_rows = [row for row in AGING_AR if 'Lawyer' in row.entity]
 
        pay_rows = [row for row in AGING_AP if 'Lawyer' in row.entity]
 
    else:
 
        recv_rows = AGING_AR
 
        pay_rows = AGING_AP
 
    retcode, output, errors = run_main(arglist, out_type=io.BytesIO)
 
    assert not errors.getvalue()
 
    assert retcode == 0
 
    check_aging_ods(output, None, recv_rows, pay_rows)
 

	
 
def test_main_no_books():
 
    errors = check_main_fails([], testutil.TestConfig(), 1 | 8)
 
    testutil.check_lines_match(iter(errors), [
 
        r':[01]: +no books to load in configuration\b',
 
    ])
 

	
 
@pytest.mark.parametrize('arglist', [
 
    ['499'],
 
    ['505/99999'],
 
    ['-t', 'balance', 'entity=NonExistent'],
 
])
 
def test_main_no_matches(arglist, caplog):
 
    check_main_fails(arglist, None, 8)
 
    testutil.check_logs_match(caplog, [
 
        ('WARNING', 'no matching entries found to report'),
 
    ])
 

	
 
def test_main_no_rt(caplog):
 
    config = testutil.TestConfig(
 
        books_path=testutil.test_path('books/accruals.beancount'),
 
    )
 
    check_main_fails(['-t', 'out'], config, 4)
 
    testutil.check_logs_match(caplog, [
 
        ('ERROR', 'unable to generate outgoing report: RT client is required'),
 
    ])
0 comments (0 inline, 0 general)