#!/bin/sh
-DIRS='src src/plomtask tests'
+DIRS='src src/taskplom tests'
TOOLS='flake8 pylint mypy'
for dir in $DIRS; do
for tool in $TOOLS; do
+++ /dev/null
-"""Non-doable elements of ProcessStep/Todo chains."""
-from __future__ import annotations
-from plomtask.db import DatabaseConnection, BaseModel
-from plomtask.versioned_attributes import VersionedAttribute
-from plomtask.exceptions import HandledException
-
-
-class Condition(BaseModel):
- """Non-Process dependency for ProcessSteps and Todos."""
- table_name = 'conditions'
- to_save_simples = ['is_active']
- versioned_defaults = {'title': 'UNNAMED', 'description': ''}
- to_search = ['title.newest', 'description.newest']
- can_create_by_id = True
- sorters = {'is_active': lambda c: c.is_active,
- 'title': lambda c: c.title.newest}
-
- def __init__(self, id_: int | None, is_active: bool = False) -> None:
- super().__init__(id_)
- self.is_active = is_active
- for name in ['title', 'description']:
- attr = VersionedAttribute(self, f'condition_{name}s',
- self.versioned_defaults[name])
- setattr(self, name, attr)
-
- def remove(self, db_conn: DatabaseConnection) -> None:
- """Remove from DB, with VersionedAttributes.
-
- Checks for Todos and Processes that depend on Condition, prohibits
- deletion if found.
- """
- if self.id_ is not None:
- for item in ('process', 'todo'):
- for attr in ('conditions', 'blockers', 'enables', 'disables'):
- table_name = f'{item}_{attr}'
- for _ in db_conn.row_where(table_name, 'condition',
- self.id_):
- msg = 'cannot remove Condition in use'
- raise HandledException(msg)
- super().remove(db_conn)
-
-
-class ConditionsRelations:
- """Methods for handling relations to Conditions, for Todo and Process."""
- # pylint: disable=too-few-public-methods
-
- def __init__(self) -> None:
- self.conditions: list[Condition] = []
- self.blockers: list[Condition] = []
- self.enables: list[Condition] = []
- self.disables: list[Condition] = []
-
- def set_condition_relations(self,
- db_conn: DatabaseConnection,
- ids_conditions: list[int],
- ids_blockers: list[int],
- ids_enables: list[int],
- ids_disables: list[int]
- ) -> None:
- """Set owned Condition lists to those identified by respective IDs."""
- # pylint: disable=too-many-arguments
- for ids, target in [(ids_conditions, 'conditions'),
- (ids_blockers, 'blockers'),
- (ids_enables, 'enables'),
- (ids_disables, 'disables')]:
- target_list = getattr(self, target)
- while len(target_list) > 0:
- target_list.pop()
- for id_ in ids:
- target_list += [Condition.by_id(db_conn, id_)]
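-
- # A minimal usage sketch (hypothetical IDs): a Todo or Process inheriting
- # from ConditionsRelations replaces all four relation lists in one call,
- #     todo.set_condition_relations(db_conn, [1, 2], [], [3], [])
- # after which .conditions holds Conditions 1 and 2, .enables holds
- # Condition 3, and .blockers and .disables are empty.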
+++ /dev/null
-"""Various utilities for handling dates."""
-from datetime import date as dt_date, timedelta
-from plomtask.exceptions import BadFormatException
-
-
-def dt_date_from_str(date_str: str) -> dt_date:
- """Validate against ISO format, colloq. terms; return as datetime.date."""
- if date_str == 'today':
- date_str = date_in_n_days(0)
- elif date_str == 'yesterday':
- date_str = date_in_n_days(-1)
- elif date_str == 'tomorrow':
- date_str = date_in_n_days(1)
- try:
- date = dt_date.fromisoformat(date_str)
- except (ValueError, TypeError) as e:
- msg = f'Given date of wrong format: {date_str}'
- raise BadFormatException(msg) from e
- return date
-
-
-def days_n_from_dt_date(date: dt_date) -> int:
- """Return number of days from Jan 1st 2000 to datetime.date."""
- return (date - dt_date(2000, 1, 1)).days
-
-
-def dt_date_from_days_n(days_n: int) -> dt_date:
- """Return datetime.date for days_n after Jan 1st 2000."""
- return dt_date(2000, 1, 1) + timedelta(days=days_n)
-
-
-def date_in_n_days(n: int) -> str:
- """Return in ISO format date from today + n days."""
- date = dt_date.today() + timedelta(days=n)
- return date.isoformat()
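-
- # A minimal usage sketch: the helpers above round-trip between ISO date
- # strings, datetime.date, and day counts relative to 2000-01-01, e.g.
- #     dt_date_from_str('2000-01-02')            # -> dt_date(2000, 1, 2)
- #     days_n_from_dt_date(dt_date(2000, 1, 2))  # -> 1
- #     dt_date_from_days_n(1).isoformat()        # -> '2000-01-02'
- # dt_date_from_str also accepts 'today', 'yesterday', and 'tomorrow'.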
+++ /dev/null
-"""Collecting Day and date-related items."""
-from __future__ import annotations
-from typing import Any, Self
-from sqlite3 import Row
-from datetime import date as dt_date, timedelta
-from plomtask.db import DatabaseConnection, BaseModel
-from plomtask.todos import Todo
-from plomtask.dating import dt_date_from_days_n, days_n_from_dt_date
-
-
-class Day(BaseModel):
- """Individual days defined by their dates."""
- table_name = 'days'
- to_save_simples = ['comment']
- add_to_dict = ['todos']
- can_create_by_id = True
-
- def __init__(self, id_: int, comment: str = '') -> None:
- super().__init__(id_)
- self.comment = comment
- self.todos: list[Todo] = []
-
- @classmethod
- def from_table_row(cls, db_conn: DatabaseConnection, row: Row | list[Any]
- ) -> Self:
- """Make from DB row, with linked Todos."""
- day = super().from_table_row(db_conn, row)
- day.todos = Todo.by_date(db_conn, day.date)
- return day
-
- @classmethod
- def by_id(cls, db_conn: DatabaseConnection, id_: int) -> Self:
- """Checks Todo.days_to_update if we need to a retrieved Day's .todos"""
- day = super().by_id(db_conn, id_)
- assert isinstance(day.id_, int)
- if day.id_ in Todo.days_to_update:
- Todo.days_to_update.remove(day.id_)
- day.todos = Todo.by_date(db_conn, day.date)
- return day
-
- @classmethod
- def with_filled_gaps(
- cls, conn: DatabaseConnection, dt_start: dt_date, dt_end: dt_date
- ) -> list[Self]:
- """Show days >= start_date, <= end_date, fill gaps with un-storeds."""
- if dt_start > dt_end:
- return []
- start_n_days = days_n_from_dt_date(dt_start)
- end_n_days = days_n_from_dt_date(dt_end)
- ranged_days = [d for d in cls.all(conn)
- if isinstance(d.id_, int)
- and d.id_ >= start_n_days and d.id_ <= end_n_days]
- ranged_days.sort()
- if (not ranged_days) or (isinstance(ranged_days[0].id_, int)
- and start_n_days < ranged_days[0].id_):
- ranged_days.insert(0, cls(start_n_days))
- assert isinstance(ranged_days[-1].id_, int)
- if end_n_days > ranged_days[-1].id_:
- ranged_days.append(cls(end_n_days))
- if len(ranged_days) > 1:
- degapped_ranged_days = []
- for i, day in enumerate(ranged_days):
- degapped_ranged_days += [day]
- if i < len(ranged_days) - 1:
- next_one = ranged_days[i+1]
- assert isinstance(day.id_, int)
- assert isinstance(next_one.id_, int)
- while day.id_ + 1 != next_one.id_:
- assert isinstance(day.id_, int)
- day = cls(day.id_ + 1)
- degapped_ranged_days += [day]
- return degapped_ranged_days
- return ranged_days
-
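- # A minimal sketch of the gap-filling (hypothetical IDs): if only Days
- # with id_ 100 and 103 are stored, then
- #     Day.with_filled_gaps(conn, dt_date_from_days_n(100),
- #                          dt_date_from_days_n(103))
- # returns four Days with id_ 100..103, where 101 and 102 are unsaved
- # instances created on the fly.
-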
- @property
- def _dt_date(self) -> dt_date:
- """Return chronological location as datetime.date."""
- assert isinstance(self.id_, int)
- return dt_date_from_days_n(self.id_)
-
- @property
- def date(self) -> str:
- """Return chronological location as ISO format date."""
- return self._dt_date.isoformat()
-
- @property
- def first_of_month(self) -> bool:
- """Return if self is first day of a month."""
- return self.date[-2:] == '01'
-
- @property
- def month_name(self) -> str:
- """Return name of month self is part of."""
- return self._dt_date.strftime('%B')
-
- @property
- def weekday(self) -> str:
- """Return weekday name matching self."""
- return self._dt_date.strftime('%A')
-
- @property
- def prev_date(self) -> str:
- """Return ISO-formatted date preceding date of self."""
- return (self._dt_date - timedelta(days=1)).isoformat()
-
- @property
- def next_date(self) -> str:
- """Return ISO-formatted date succeeding date of this Day."""
- return (self._dt_date + timedelta(days=1)).isoformat()
-
- @property
- def calendarized_todos(self) -> list[Todo]:
- """Return only those of self.todos that have .calendarize set."""
- return [t for t in self.todos if t.calendarize]
-
- @property
- def total_effort(self) -> float:
- """"Sum all .performed_effort of self.todos."""
- total_effort = 0.0
- for todo in self.todos:
- total_effort += todo.performed_effort
- return total_effort
+++ /dev/null
-"""Database management."""
-from __future__ import annotations
-from datetime import date as dt_date
-from os import listdir
-from pathlib import Path
-from sqlite3 import Row
-from typing import cast, Any, Self, Callable
-from plomtask.exceptions import (HandledException, NotFoundException,
- BadFormatException)
-from plomlib.db import (
- PlomDbConn, PlomDbFile, PlomDbMigration, TypePlomDbMigration)
-
-_EXPECTED_DB_VERSION = 7
-_MIGRATIONS_DIR = Path('migrations')
-_FILENAME_DB_SCHEMA = f'init_{_EXPECTED_DB_VERSION}.sql'
-_PATH_DB_SCHEMA = _MIGRATIONS_DIR.joinpath(_FILENAME_DB_SCHEMA)
-
-
-def _mig_6_calc_days_since_millennium(conn: PlomDbConn) -> None:
- rows = conn.exec('SELECT * FROM days').fetchall()
- for row in [list(r) for r in rows]:
- row[-1] = (dt_date.fromisoformat(row[0]) - dt_date(2000, 1, 1)).days
- conn.exec('REPLACE INTO days VALUES', tuple(row))
-
-
-MIGRATION_STEPS_POST_SQL: dict[int, Callable[[PlomDbConn], None]] = {
- 6: _mig_6_calc_days_since_millennium
-}
-
-
-class DatabaseMigration(PlomDbMigration):
- """Collects and enacts DatabaseFile migration commands."""
- migs_dir_path = _MIGRATIONS_DIR
-
- @classmethod
- def gather(cls, from_version: int, base_set: set[TypePlomDbMigration]
- ) -> list[TypePlomDbMigration]:
- msg_prefix = 'Migration directory contains'
- msg_bad_entry = f'{msg_prefix} unexpected entry: '
- migs = []
- total_migs = set()
- post_sql_steps_added = set()
- for entry in [e for e in listdir(cls.migs_dir_path)
- if e != _FILENAME_DB_SCHEMA]:
- path = cls.migs_dir_path.joinpath(entry)
- if not path.is_file():
- continue
- toks = entry.split('_', maxsplit=1)
- if len(toks) < 2 or (not toks[0].isdigit()):
- raise HandledException(f'{msg_bad_entry}{entry}')
- i = int(toks[0])
- if i <= from_version:
- continue
- if i > _EXPECTED_DB_VERSION:
- raise HandledException(f'{msg_prefix} unexpected version {i}')
- post_sql_steps = MIGRATION_STEPS_POST_SQL.get(i, None)
- if post_sql_steps:
- post_sql_steps_added.add(i)
- total_migs.add(cls(i, Path(entry), post_sql_steps))
- for k in [k for k in MIGRATION_STEPS_POST_SQL
- if k > from_version
- and k not in post_sql_steps_added]:
- total_migs.add(cls(k, None, MIGRATION_STEPS_POST_SQL[k]))
- for i in range(from_version + 1, _EXPECTED_DB_VERSION + 1):
- migs_found = [m for m in total_migs if m.target_version == i]
- if not migs_found:
- raise HandledException(f'{msg_prefix} no migration of v. {i}')
- if len(migs_found) > 1:
- raise HandledException(f'{msg_prefix} >1 migration of v. {i}')
- migs += migs_found
- return cast(list[TypePlomDbMigration], migs)
-
-
-class DatabaseFile(PlomDbFile):
- """File readable as DB of expected schema, user version."""
- target_version = _EXPECTED_DB_VERSION
- path_schema = _PATH_DB_SCHEMA
- mig_class = DatabaseMigration
-
-
-class DatabaseConnection(PlomDbConn):
- """A single connection to the database."""
- db_file_class = DatabaseFile
-
- def close(self) -> None:
- """Shortcut to sqlite3.Connection.close()."""
- self._conn.close()
-
- def rewrite_relations(self, table_name: str, key: str, target: int | str,
- rows: list[list[Any]], key_index: int = 0) -> None:
- # pylint: disable=too-many-arguments
- """Rewrite relations in table_name to target, with rows values.
-
- Note that single rows are expected without the column and value
- identified by key and target, which are inserted inside the function
- at key_index.
- """
- self.delete_where(table_name, key, target)
- for row in rows:
- values = tuple(row[:key_index] + [target] + row[key_index:])
- self.exec(f'INSERT INTO {table_name} VALUES', values)
-
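- # A minimal sketch of the expected call shape (hypothetical values):
- #     conn.rewrite_relations('process_conditions', 'process', 5, [[7], [8]])
- # first deletes all rows where process == 5, then inserts (5, 7) and
- # (5, 8); the target value 5 is spliced into each given row at key_index
- # (here 0).
-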
- def row_where(self, table_name: str, key: str,
- target: int | str) -> list[Row]:
- """Return list of Rows at table where key == target."""
- return list(self.exec(f'SELECT * FROM {table_name} WHERE {key} =',
- (target,)))
-
- # def column_where_pattern(self,
- # table_name: str,
- # column: str,
- # pattern: str,
- # keys: list[str]) -> list[Any]:
- # """Return column of rows where one of keys matches pattern."""
- # targets = tuple([f'%{pattern}%'] * len(keys))
- # haystack = ' OR '.join([f'{k} LIKE ?' for k in keys])
- # sql = f'SELECT {column} FROM {table_name} WHERE {haystack}'
- # return [row[0] for row in self.exec(sql, targets)]
-
- def column_where(self, table_name: str, column: str, key: str,
- target: int | str) -> list[Any]:
- """Return column of table where key == target."""
- return [row[0] for row in
- self.exec(f'SELECT {column} FROM {table_name} '
- f'WHERE {key} =', (target,))]
-
- def column_all(self, table_name: str, column: str) -> list[Any]:
- """Return complete column of table."""
- return [row[0] for row in
- self.exec(f'SELECT {column} FROM {table_name}')]
-
- def delete_where(self, table_name: str, key: str,
- target: int | str) -> None:
- """Delete from table where key == target."""
- self.exec(f'DELETE FROM {table_name} WHERE {key} =', (target,))
-
-
-class BaseModel:
- """Template for most of the models we use/derive from the DB."""
- table_name = ''
- to_save_simples: list[str] = []
- to_save_relations: list[tuple[str, str, str, int]] = []
- versioned_defaults: dict[str, str | float] = {}
- add_to_dict: list[str] = []
- id_: None | int
- cache_: dict[int, Self]
- to_search: list[str] = []
- can_create_by_id = False
- _exists = True
- sorters: dict[str, Callable[..., Any]] = {}
-
- def __init__(self, id_: int | None) -> None:
- if isinstance(id_, int) and id_ < 1:
- msg = f'illegal {self.__class__.__name__} ID, must be >=1: {id_}'
- raise BadFormatException(msg)
- self.id_ = id_
-
- def __hash__(self) -> int:
- hashable = [self.id_] + [getattr(self, name)
- for name in self.to_save_simples]
- for definition in self.to_save_relations:
- attr = getattr(self, definition[2])
- hashable += [tuple(rel.id_ for rel in attr)]
- for name in self.to_save_versioned():
- hashable += [hash(getattr(self, name))]
- return hash(tuple(hashable))
-
- def __eq__(self, other: object) -> bool:
- if not isinstance(other, self.__class__):
- return False
- return hash(self) == hash(other)
-
- def __lt__(self, other: Any) -> bool:
- if not isinstance(other, self.__class__):
- msg = 'cannot compare to object of different class'
- raise HandledException(msg)
- assert isinstance(self.id_, int)
- assert isinstance(other.id_, int)
- return self.id_ < other.id_
-
- @classmethod
- def to_save_versioned(cls) -> list[str]:
- """Return keys of cls.versioned_defaults assuming we wanna save 'em."""
- return list(cls.versioned_defaults.keys())
-
- @property
- def as_dict_and_refs(self) -> tuple[dict[str, object], list[Self]]:
- """Return self as json.dumps-ready dict, list of referenced objects."""
- d: dict[str, object] = {'id': self.id_}
- refs: list[Self] = []
- for to_save in self.to_save_simples:
- d[to_save] = getattr(self, to_save)
- if len(self.to_save_versioned()) > 0:
- d['_versioned'] = {}
- for k in self.to_save_versioned():
- attr = getattr(self, k)
- assert isinstance(d['_versioned'], dict)
- d['_versioned'][k] = attr.history
- rels_to_collect = [rel[2] for rel in self.to_save_relations]
- rels_to_collect += self.add_to_dict
- for attr_name in rels_to_collect:
- rel_list = []
- for item in getattr(self, attr_name):
- rel_list += [item.id_]
- if item not in refs:
- refs += [item]
- d[attr_name] = rel_list
- return d, refs
-
- @classmethod
- def name_lowercase(cls) -> str:
- """Convenience method to return cls' name in lowercase."""
- return cls.__name__.lower()
-
- @classmethod
- def sort_by(cls, seq: list[Any], sort_key: str, default: str = 'title'
- ) -> str:
- """Sort cls list by cls.sorters[sort_key] (reverse if '-'-prefixed).
-
- Before cls.sorters[sort_key] is applied, seq is sorted by .id_, to
- ensure predictability where parts of seq are of same sort value.
- """
- reverse = False
- if len(sort_key) > 1 and '-' == sort_key[0]:
- sort_key = sort_key[1:]
- reverse = True
- if sort_key not in cls.sorters:
- sort_key = default
- seq.sort(key=lambda x: x.id_, reverse=reverse)
- sorter: Callable[..., Any] = cls.sorters[sort_key]
- seq.sort(key=sorter, reverse=reverse)
- if reverse:
- sort_key = f'-{sort_key}'
- return sort_key
-
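- # A minimal usage sketch (assuming a list of Conditions):
- #     sort_key = Condition.sort_by(conditions, '-title')
- # sorts the list in place by each Condition's newest title, reversed, and
- # returns '-title'; an unknown sort_key falls back to the default.
-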
- # cache management
- # (we primarily use the cache to ensure we work on the same object in
- # memory no matter where and how we retrieve it, e.g. we don't want
- # .by_id() calls to create a new object each time, but rather a pointer
- # to the one already instantiated)
-
- def __getattribute__(self, name: str) -> Any:
- """Ensure fail if ._disappear() was called, except to check ._exists"""
- if name != '_exists' and not super().__getattribute__('_exists'):
- msg = f'Object for attribute does not exist: {name}'
- raise HandledException(msg)
- return super().__getattribute__(name)
-
- def _disappear(self) -> None:
- """Invalidate object, make future use raise exceptions."""
- assert self.id_ is not None
- if self._get_cached(self.id_):
- self._uncache()
- to_kill = list(self.__dict__.keys())
- for attr in to_kill:
- delattr(self, attr)
- self._exists = False
-
- @classmethod
- def empty_cache(cls) -> None:
- """Empty class's cache, and disappear all former inhabitants."""
- # pylint: disable=protected-access
- # (cause we remain within the class)
- if hasattr(cls, 'cache_'):
- to_disappear = list(cls.cache_.values())
- for item in to_disappear:
- item._disappear()
- cls.cache_ = {}
-
- @classmethod
- def get_cache(cls) -> dict[int, Self]:
- """Get cache dictionary, create it if not yet existing."""
- if not hasattr(cls, 'cache_'):
- d: dict[int, Self] = {}
- cls.cache_ = d
- return cls.cache_
-
- @classmethod
- def _get_cached(cls, id_: int) -> Self | None:
- """Get object of id_ from class's cache, or None if not found."""
- cache = cls.get_cache()
- if id_ in cache:
- obj = cache[id_]
- return obj
- return None
-
- def cache(self) -> None:
- """Update object in class's cache.
-
- Also calls ._disappear if cache holds older reference to object of same
- ID, but different memory address, to avoid doing anything with
- dangling leftovers.
- """
- if self.id_ is None:
- raise HandledException('Cannot cache object without ID.')
- cache = self.get_cache()
- old_cached = self._get_cached(self.id_)
- if old_cached and id(old_cached) != id(self):
- # pylint: disable=protected-access
- # (cause we remain within the class)
- old_cached._disappear()
- cache[self.id_] = self
-
- def _uncache(self) -> None:
- """Remove self from cache."""
- if self.id_ is None:
- raise HandledException('Cannot un-cache object without ID.')
- cache = self.get_cache()
- del cache[self.id_]
-
- # object retrieval and generation
-
- @classmethod
- def from_table_row(cls,
- db_conn: DatabaseConnection,
- row: Row | list[Any]) -> Self:
- """Make from DB row (sans relations), update DB cache with it."""
- obj = cls(*row)
- assert obj.id_ is not None
- for attr_name in cls.to_save_versioned():
- attr = getattr(obj, attr_name)
- table_name = attr.table_name
- for row_ in db_conn.row_where(table_name, 'parent', obj.id_):
- attr.history_from_row(row_)
- obj.cache()
- return obj
-
- @classmethod
- def by_id(cls, db_conn: DatabaseConnection, id_: int) -> Self:
- """Retrieve by id_, on failure throw NotFoundException.
-
- First try to get from cls.cache_, only then check DB; if found,
- put into cache.
- """
- obj = None
- if id_ is not None:
- if isinstance(id_, int) and id_ == 0:
- raise BadFormatException('illegal ID of value 0')
- obj = cls._get_cached(id_)
- if not obj:
- for row in db_conn.row_where(cls.table_name, 'id', id_):
- obj = cls.from_table_row(db_conn, row)
- break
- if obj:
- return obj
- raise NotFoundException(f'found no object of ID {id_}')
-
- @classmethod
- def by_id_or_create(cls, db_conn: DatabaseConnection, id_: int | None
- ) -> Self:
- """Wrapper around .by_id, creating (not caching/saving) if no find."""
- if not cls.can_create_by_id:
- raise HandledException('Class cannot .by_id_or_create.')
- if id_ is None:
- return cls(None)
- try:
- return cls.by_id(db_conn, id_)
- except NotFoundException:
- return cls(id_)
-
- @classmethod
- def all(cls, db_conn: DatabaseConnection) -> list[Self]:
- """Collect all objects of class into list.
-
- Note that this primarily returns the contents of the cache, and only
- _expands_ that by additional findings in the DB. This assumes the
- cache is always instantly cleaned of any items that would be removed
- from the DB.
- """
- items: dict[int, Self] = {}
- for k, v in cls.get_cache().items():
- items[k] = v
- already_recorded = items.keys()
- for id_ in db_conn.column_all(cls.table_name, 'id'):
- if id_ not in already_recorded:
- item = cls.by_id(db_conn, id_)
- assert item.id_ is not None
- items[item.id_] = item
- return sorted(list(items.values()))
-
- @classmethod
- def matching(cls, db_conn: DatabaseConnection, pattern: str) -> list[Self]:
- """Return all objects whose .to_search match pattern."""
- items = cls.all(db_conn)
- if pattern:
- filtered = []
- for item in items:
- for attr_name in cls.to_search:
- toks = attr_name.split('.')
- parent = item
- for tok in toks:
- attr = getattr(parent, tok)
- parent = attr
- if pattern in attr:
- filtered += [item]
- break
- return filtered
- return items
-
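- # A minimal usage sketch (hypothetical pattern): with Condition.to_search
- # set to ['title.newest', 'description.newest'],
- #     Condition.matching(db_conn, 'tax')
- # returns every Condition whose newest title or description contains
- # 'tax'; an empty pattern returns all objects.
-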
- # database writing
-
- def save(self, db_conn: DatabaseConnection) -> None:
- """Write self to DB and cache and ensure .id_.
-
- Write both to DB, and to cache. To DB, write .id_ and attributes
- listed in cls.to_save_[simples|versioned|_relations].
-
- Ensure self.id_ by setting it to what the DB command returns as the
- last saved row's ID (cursor.lastrowid), EXCEPT if self.id_ already
- exists as a 'str', which implies we do our own ID creation (so far
- only the case with the Day class, where it's to be a date string).
- """
- values = tuple([self.id_] + [getattr(self, key)
- for key in self.to_save_simples])
- table_name = self.table_name
- cursor = db_conn.exec(f'REPLACE INTO {table_name} VALUES', values)
- self.id_ = cursor.lastrowid
- self.cache()
- for attr_name in self.to_save_versioned():
- getattr(self, attr_name).save(db_conn)
- for table, column, attr_name, key_index in self.to_save_relations:
- assert isinstance(self.id_, int)
- db_conn.rewrite_relations(table, column, self.id_,
- [[i.id_] for i
- in getattr(self, attr_name)], key_index)
-
- def remove(self, db_conn: DatabaseConnection) -> None:
- """Remove from DB and cache, including dependencies."""
- if self.id_ is None or self._get_cached(self.id_) is None:
- raise HandledException('cannot remove unsaved item')
- for attr_name in self.to_save_versioned():
- getattr(self, attr_name).remove(db_conn)
- for table, column, attr_name, _ in self.to_save_relations:
- db_conn.delete_where(table, column, self.id_)
- self._uncache()
- db_conn.delete_where(self.table_name, 'id', self.id_)
- self._disappear()
+++ /dev/null
-"""Exceptions triggering different HTTP codes."""
-
-
-class HandledException(Exception):
- """To identify Exceptions based on expected (if faulty) user behavior."""
- http_code = 500
-
-
-class BadFormatException(HandledException):
- """To identify Exceptions on malformed inputs."""
- http_code = 400
-
-
-class NotFoundException(HandledException):
- """To identify Exceptions on unsuccessful queries."""
- http_code = 404
+++ /dev/null
-"""Web server stuff."""
-from __future__ import annotations
-from pathlib import Path
-from inspect import signature
-from typing import Any, Callable
-from base64 import b64encode, b64decode
-from binascii import Error as binascii_Exception
-from json import dumps as json_dumps
-from plomtask.dating import (
- days_n_from_dt_date, dt_date_from_str, date_in_n_days)
-from plomtask.days import Day
-from plomtask.exceptions import (HandledException, BadFormatException,
- NotFoundException)
-from plomtask.db import DatabaseConnection, DatabaseFile, BaseModel
-from plomtask.processes import Process, ProcessStep, ProcessStepsNode
-from plomtask.conditions import Condition
-from plomtask.todos import Todo, TodoOrProcStepNode
-from plomtask.misc import DictableNode
-from plomlib.web import PlomHttpServer, PlomHttpHandler, PlomQueryMap
-
-TEMPLATES_DIR = Path('templates')
-
-
-class TaskServer(PlomHttpServer):
- """Extends parent by DatabaseFile .db and .render_mode='html'."""
-
- def __init__(self, db_file: DatabaseFile, *args, **kwargs) -> None:
- super().__init__(TEMPLATES_DIR, *args, **kwargs)
- self.db = db_file
- self.render_mode = 'html'
-
-
-class InputsParser(PlomQueryMap):
- """Wrapper for validating and retrieving dict-like HTTP inputs."""
-
- def get_all_str(self, key: str) -> list[str]:
- """Retrieve list of string values at key (empty if no key)."""
- return self.all(key) or []
-
- def get_all_int(self, key: str, fail_on_empty: bool = False) -> list[int]:
- """Retrieve list of int values at key."""
- all_str = self.get_all_str(key)
- try:
- return [int(s) for s in all_str if fail_on_empty or s != '']
- except ValueError as e:
- msg = f'cannot int a form field value for key {key} in: {all_str}'
- raise BadFormatException(msg) from e
-
- def get_str(self, key: str, default: str | None = None) -> str | None:
- """Retrieve single/first string value of key, or default."""
- first = self.first(key)
- return default if first is None else first
-
- def get_str_or_fail(self, key: str, default: str | None = None) -> str:
- """Retrieve first string value of key, if none: fail or default."""
- vals = self.get_all_str(key)
- if not vals:
- if default is not None:
- return default
- raise BadFormatException(f'no value found for key: {key}')
- return vals[0]
-
- def get_int_or_none(self, key: str) -> int | None:
- """Retrieve single/first value of key as int, return None if empty."""
- val = self.get_str_or_fail(key, '')
- if val == '':
- return None
- try:
- return int(val)
- except (ValueError, TypeError) as e:
- msg = f'cannot int form field value for key {key}: {val}'
- raise BadFormatException(msg) from e
-
- def get_bool(self, key: str) -> bool:
- """Return if value to key truish; return False if None/no value."""
- return self.get_str(key) in {'True', 'true', '1', 'on'}
-
- def get_all_of_key_prefixed(self, key_prefix: str) -> dict[str, list[str]]:
- """Retrieve dict of strings at keys starting with key_prefix."""
- ret = {}
- for key in self.keys_prefixed(key_prefix):
- ret[key[len(key_prefix):]] = self.as_dict[key]
- return ret
-
- def get_float_or_fail(self, key: str) -> float:
- """Retrieve float value of key from self.postvars, fail if none."""
- val = self.get_str_or_fail(key)
- try:
- return float(val)
- except ValueError as e:
- msg = f'cannot float form field value for key {key}: {val}'
- raise BadFormatException(msg) from e
-
- def get_all_floats_or_nones(self, key: str) -> list[float | None]:
- """Retrieve list of float value at key, None if empty strings."""
- ret: list[float | None] = []
- for val in self.get_all_str(key):
- if '' == val:
- ret += [None]
- else:
- try:
- ret += [float(val)]
- except ValueError as e:
- msg = f'cannot float form field value for key {key}: {val}'
- raise BadFormatException(msg) from e
- return ret
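-
- # A minimal behavior sketch (hypothetical input): for form/query values
- # like id=['3', ''] and comment=['foo'] wrapped in an InputsParser p,
- #     p.get_all_int('id')        # -> [3] (empty strings skipped)
- #     p.get_int_or_none('id')    # -> 3
- #     p.get_str('missing', 'x')  # -> 'x'
- #     p.get_bool('comment')      # -> False ('foo' not in the truthy set)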
-
-
-class TaskHandler(PlomHttpHandler):
- """Handles single HTTP request."""
- # pylint: disable=too-many-public-methods
- server: TaskServer
- params: InputsParser
- postvars: InputsParser
- mapper = InputsParser
- _conn: DatabaseConnection
- _site: str
-
- def _send_page(
- self, ctx: dict[str, Any], tmpl_name: str, code: int = 200
- ) -> None:
- """HTTP-send ctx as HTML or JSON, as defined by .server.render_mode.
-
- The differentiation by .server.render_mode serves to allow easily
- comparable JSON responses for automatic testing.
- """
- if 'html' == self.server.render_mode:
- self.send_rendered(Path(f'{tmpl_name}.html'), ctx, code)
- else:
- self.send_http(self._ctx_to_json(ctx).encode(),
- [('Content-Type', 'application/json')],
- code)
-
- def _ctx_to_json(self, ctx: dict[str, object]) -> str:
- """Render ctx into JSON string.
-
- Flattens any objects that json.dumps might not want to serialize, and
- turns occurrences of BaseModel objects into listings of their .id_, to
- be resolved to a full dict inside a top-level '_library' dictionary,
- to avoid endless and circular nesting.
- """
-
- def flatten(node: object) -> object:
-
- def update_library_with(item: BaseModel) -> None:
- cls_name = item.__class__.__name__
- if cls_name not in library:
- library[cls_name] = {}
- if item.id_ not in library[cls_name]:
- d, refs = item.as_dict_and_refs
- id_key = -1 if item.id_ is None else item.id_
- library[cls_name][id_key] = d
- for ref in refs:
- update_library_with(ref)
-
- if isinstance(node, BaseModel):
- update_library_with(node)
- return node.id_
- if isinstance(node, DictableNode):
- d, refs = node.as_dict_and_refs
- for ref in refs:
- update_library_with(ref)
- return d
- if isinstance(node, (list, tuple)):
- return [flatten(item) for item in node]
- if isinstance(node, dict):
- d = {}
- for k, v in node.items():
- d[k] = flatten(v)
- return d
- if isinstance(node, HandledException):
- return str(node)
- return node
-
- library: dict[str, dict[int, object]] = {}
- for k, v in ctx.items():
- ctx[k] = flatten(v)
- ctx['_library'] = library
- return json_dumps(ctx)
-
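- # A minimal sketch of the result shape (hypothetical ID): a ctx such as
- #     {'day': <Day with id_ 42>}
- # is flattened to roughly
- #     {'day': 42, '_library': {'Day': {42: {...}}}}
- # so values stay flat and referenced objects appear once under '_library'.
-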
- @staticmethod
- def _request_wrapper(http_method: str, not_found_msg: str
- ) -> Callable[..., Callable[[TaskHandler], None]]:
- """Wrapper for do_GET… and do_POST… handlers, to init and clean up.
-
- Among other things, conditionally cleans all caches, but only on POST
- requests, as only those are expected to change the states of objects
- that may be cached, and certainly only those are expected to write any
- changes to the database. We want to trigger those cache clears as early
- as possible here, though: either right after the specific request
- handler returns successfully, or right after any exception is raised;
- otherwise, race conditions become plausible.
-
- Note that otherwise any POST attempt, even a failed one, may end in
- problematic inconsistencies:
-
- - if the POST handler experiences an Exception, changes to objects
- won't get written to the DB, but the changed objects may remain in
- the cache and affect other objects despite their possibly illegal
- state
-
- - even if an object was just saved to the DB, we cannot be sure its
- current state is completely identical to what we'd get if loading it
- fresh from the DB (e.g. currently Process.n_owners is only updated
- when loaded anew via .from_table_row, nor is its state written to
- the DB by .save; a questionable design choice, but proof that we
- have no guarantee that objects' .save stores all their states we'd
- prefer at their most up-to-date).
- """
-
- def clear_caches() -> None:
- for cls in (Day, Todo, Condition, Process, ProcessStep):
- cls.empty_cache()
-
- def decorator(f: Callable[..., str | None]
- ) -> Callable[[TaskHandler], None]:
- def wrapper(self: TaskHandler) -> None:
- # pylint: disable=protected-access
- # (because pylint here fails to detect the use of wrapper as a
- # method to self with respective access privileges)
- try:
- self._conn = DatabaseConnection(self.server.db)
- handler_name = f'do_{http_method}_{self.pagename}'
- if hasattr(self, handler_name):
- handler = getattr(self, handler_name)
- redir_target = f(self, handler)
- if 'POST' == http_method:
- clear_caches()
- if redir_target:
- self.redirect(Path(redir_target))
- else:
- msg = f'{not_found_msg}: {self.pagename}'
- raise NotFoundException(msg)
- except HandledException as error:
- if 'POST' == http_method:
- clear_caches()
- ctx = {'msg': error}
- self._send_page(ctx, 'msg', error.http_code)
- finally:
- self._conn.close()
- return wrapper
- return decorator
-
- @_request_wrapper('GET', 'Unknown page')
- def do_GET(self, handler: Callable[[], str | dict[str, object]]
- ) -> str | None:
- """Render page with result of handler, or redirect if result is str."""
- tmpl_name = f'{self.pagename}'
- ctx_or_redir_target = handler()
- if isinstance(ctx_or_redir_target, str):
- return ctx_or_redir_target
- self._send_page(ctx_or_redir_target, tmpl_name)
- return None
-
- @_request_wrapper('POST', 'Unknown POST target')
- def do_POST(self, handler: Callable[[], str]) -> str:
- """Handle POST with handler, prepare redirection to result."""
- redir_target = handler()
- self._conn.commit()
- return redir_target
-
- # GET handlers
-
- @staticmethod
- def _get_item(target_class: Any
- ) -> Callable[..., Callable[[TaskHandler],
- dict[str, object]]]:
- def decorator(f: Callable[..., dict[str, object]]
- ) -> Callable[[TaskHandler], dict[str, object]]:
- def wrapper(self: TaskHandler) -> dict[str, object]:
- # pylint: disable=protected-access
- # (because pylint here fails to detect the use of wrapper as a
- # method to self with respective access privileges)
- id_ = None
- for val in self.params.get_all_int('id', fail_on_empty=True):
- id_ = val
- if target_class.can_create_by_id:
- item = target_class.by_id_or_create(self._conn, id_)
- else:
- item = target_class.by_id(self._conn, id_)
- if 'exists' in signature(f).parameters:
- exists = id_ is not None and target_class._get_cached(id_)
- return f(self, item, exists)
- return f(self, item)
- return wrapper
- return decorator
-
- def do_GET_(self) -> str:
- """Return redirect target on GET /."""
- return '/day'
-
- def _do_GET_calendar(self) -> dict[str, object]:
- """Show Days from ?start= to ?end=.
-
- Both .do_GET_calendar and .do_GET_calendar_txt refer to this to do the
- same, the only difference being the HTML template they are rendered to,
- which .do_GET selects from their method name.
- """
- start = self.params.get_str_or_fail('start', '')
- end = self.params.get_str_or_fail('end', '')
- dt_start = dt_date_from_str(start if start else date_in_n_days(-1))
- dt_end = dt_date_from_str(end if end else date_in_n_days(366))
- days = Day.with_filled_gaps(self._conn, dt_start, dt_end)
- today = date_in_n_days(0)
- return {'start': dt_start.isoformat(), 'end': dt_end.isoformat(),
- 'today': today, 'days': days}
-
- def do_GET_calendar(self) -> dict[str, object]:
- """Show Days from ?start= to ?end= – normal view."""
- return self._do_GET_calendar()
-
- def do_GET_calendar_txt(self) -> dict[str, object]:
- """Show Days from ?start= to ?end= – minimalist view."""
- return self._do_GET_calendar()
-
- def do_GET_day(self) -> dict[str, object]:
- """Show single Day of ?date=."""
- date = self.params.get_str('date', date_in_n_days(0))
- make_type = self.params.get_str_or_fail('make_type', 'full')
- #
- assert isinstance(date, str)
- day = Day.by_id_or_create(self._conn,
- days_n_from_dt_date(dt_date_from_str(date)))
- conditions_present = []
- enablers_for = {}
- disablers_for = {}
- for todo in day.todos:
- for condition in todo.conditions + todo.blockers:
- if condition not in conditions_present:
- conditions_present += [condition]
- enablers_for[condition.id_] = [p for p in
- Process.all(self._conn)
- if condition in p.enables]
- disablers_for[condition.id_] = [p for p in
- Process.all(self._conn)
- if condition in p.disables]
- seen_todos: set[int] = set()
- top_nodes = [t.get_step_tree(seen_todos)
- for t in day.todos if not t.parents]
- return {'day': day,
- 'top_nodes': top_nodes,
- 'make_type': make_type,
- 'enablers_for': enablers_for,
- 'disablers_for': disablers_for,
- 'conditions_present': conditions_present,
- 'processes': Process.all(self._conn)}
-
- @_get_item(Todo)
- def do_GET_todo(self, todo: Todo) -> dict[str, object]:
- """Show single Todo of ?id=."""
-
- def walk_process_steps(node_id: int,
- process_step_nodes: list[ProcessStepsNode],
- steps_nodes: list[TodoOrProcStepNode]) -> int:
- for process_step_node in process_step_nodes:
- node_id += 1
- proc = Process.by_id(self._conn,
- process_step_node.step.step_process_id)
- node = TodoOrProcStepNode(node_id, None, proc, [])
- steps_nodes += [node]
- node_id = walk_process_steps(
- node_id, process_step_node.steps, node.children)
- return node_id
-
- def walk_todo_steps(node_id: int, todos: list[Todo],
- steps_nodes: list[TodoOrProcStepNode]) -> int:
- for todo in todos:
- matched = False
- for match in [item for item in steps_nodes
- if item.process
- and item.process == todo.process]:
- match.todo = todo
- matched = True
- for child in match.children:
- child.fillable = True
- node_id = walk_todo_steps(
- node_id, todo.children, match.children)
- if not matched:
- node_id += 1
- node = TodoOrProcStepNode(node_id, todo, None, [])
- steps_nodes += [node]
- node_id = walk_todo_steps(
- node_id, todo.children, node.children)
- return node_id
-
- def collect_adoptables_keys(
- steps_nodes: list[TodoOrProcStepNode]) -> set[int]:
- ids = set()
- for node in steps_nodes:
- if not node.todo:
- assert isinstance(node.process, Process)
- assert isinstance(node.process.id_, int)
- ids.add(node.process.id_)
- ids = ids | collect_adoptables_keys(node.children)
- return ids
-
- todo_steps = [step.todo for step in todo.get_step_tree(set()).children]
- process_tree = todo.process.get_steps(self._conn, None)
- steps_todo_to_process: list[TodoOrProcStepNode] = []
- last_node_id = walk_process_steps(0, process_tree,
- steps_todo_to_process)
- for steps_node in steps_todo_to_process:
- steps_node.fillable = True
- walk_todo_steps(last_node_id, todo_steps, steps_todo_to_process)
- adoptables: dict[int, list[Todo]] = {}
- any_adoptables = [Todo.by_id(self._conn, t.id_)
- for t in Todo.by_date(self._conn, todo.date)
- if t.id_ is not None
- and t != todo]
- for id_ in collect_adoptables_keys(steps_todo_to_process):
- adoptables[id_] = [t for t in any_adoptables
- if t.process.id_ == id_]
- return {'todo': todo,
- 'steps_todo_to_process': steps_todo_to_process,
- 'adoption_candidates_for': adoptables,
- 'process_candidates': sorted(Process.all(self._conn)),
- 'todo_candidates': any_adoptables,
- 'condition_candidates': Condition.all(self._conn)}
-
- def do_GET_todos(self) -> dict[str, object]:
- """Show Todos from ?start= to ?end=, of ?process=, ?comment= pattern"""
- sort_by = self.params.get_str_or_fail('sort_by', 'title')
- start = self.params.get_str_or_fail('start', '')
- end = self.params.get_str_or_fail('end', '')
- process_id = self.params.get_int_or_none('process_id')
- comment_pattern = self.params.get_str_or_fail('comment_pattern', '')
- #
- ret = Todo.by_date_range_with_limits(self._conn, (start, end))
- todos_by_date_range, start, end = ret
- todos = [t for t in todos_by_date_range
- if comment_pattern in t.comment
- and ((not process_id) or t.process.id_ == process_id)]
- sort_by = Todo.sort_by(todos, sort_by)
- return {'start': start, 'end': end, 'process_id': process_id,
- 'comment_pattern': comment_pattern, 'todos': todos,
- 'all_processes': Process.all(self._conn), 'sort_by': sort_by}
-
- def do_GET_conditions(self) -> dict[str, object]:
- """Show all Conditions."""
- pattern = self.params.get_str_or_fail('pattern', '')
- sort_by = self.params.get_str_or_fail('sort_by', 'title')
- #
- conditions = Condition.matching(self._conn, pattern)
- sort_by = Condition.sort_by(conditions, sort_by)
- return {'conditions': conditions,
- 'sort_by': sort_by,
- 'pattern': pattern}
-
- @_get_item(Condition)
- def do_GET_condition(self,
- c: Condition,
- exists: bool
- ) -> dict[str, object]:
- """Show Condition of ?id=."""
- ps = Process.all(self._conn)
- return {'condition': c,
- 'is_new': not exists,
- 'enabled_processes': [p for p in ps if c in p.conditions],
- 'disabled_processes': [p for p in ps if c in p.blockers],
- 'enabling_processes': [p for p in ps if c in p.enables],
- 'disabling_processes': [p for p in ps if c in p.disables]}
-
- @_get_item(Condition)
- def do_GET_condition_titles(self, c: Condition) -> dict[str, object]:
- """Show title history of Condition of ?id=."""
- return {'condition': c}
-
- @_get_item(Condition)
- def do_GET_condition_descriptions(self, c: Condition) -> dict[str, object]:
- """Show description historys of Condition of ?id=."""
- return {'condition': c}
-
- @_get_item(Process)
- def do_GET_process(self,
- process: Process,
- exists: bool
- ) -> dict[str, object]:
- """Show Process of ?id=."""
- owner_ids = self.params.get_all_int('step_to')
- owned_ids = self.params.get_all_int('has_step')
- title_64 = self.params.get_str('title_b64')
- title_new = None
- if title_64:
- try:
- title_new = b64decode(title_64.encode()).decode()
- except binascii_Exception as exc:
- msg = 'invalid base64 for ?title_b64='
- raise BadFormatException(msg) from exc
- #
- if title_new:
- process.title.set(title_new)
- preset_top_step = None
- owners = process.used_as_step_by(self._conn)
- for step_id in owner_ids:
- owners += [Process.by_id(self._conn, step_id)]
- for process_id in owned_ids:
- Process.by_id(self._conn, process_id) # to ensure ID exists
- preset_top_step = process_id
- return {'process': process,
- 'is_new': not exists,
- 'preset_top_step': preset_top_step,
- 'steps': process.get_steps(self._conn),
- 'owners': owners,
- 'n_todos': len(Todo.by_process_id(self._conn, process.id_)),
- 'process_candidates': Process.all(self._conn),
- 'condition_candidates': Condition.all(self._conn)}
-
- @_get_item(Process)
- def do_GET_process_titles(self, p: Process) -> dict[str, object]:
- """Show title history of Process of ?id=."""
- return {'process': p}
-
- @_get_item(Process)
- def do_GET_process_descriptions(self, p: Process) -> dict[str, object]:
- """Show description historys of Process of ?id=."""
- return {'process': p}
-
- @_get_item(Process)
- def do_GET_process_efforts(self, p: Process) -> dict[str, object]:
- """Show default effort history of Process of ?id=."""
- return {'process': p}
-
- def do_GET_processes(self) -> dict[str, object]:
- """Show all Processes."""
- pattern = self.params.get_str_or_fail('pattern', '')
- sort_by = self.params.get_str_or_fail('sort_by', 'title')
- #
- processes = Process.matching(self._conn, pattern)
- sort_by = Process.sort_by(processes, sort_by)
- return {'processes': processes, 'sort_by': sort_by, 'pattern': pattern}
-
- # POST handlers
-
- @staticmethod
- def _delete_or_post(target_class: Any, redir_target: str = '/'
- ) -> Callable[..., Callable[[TaskHandler], str]]:
- def decorator(f: Callable[..., str]
- ) -> Callable[[TaskHandler], str]:
- def wrapper(self: TaskHandler) -> str:
- # pylint: disable=protected-access
- # (because pylint here fails to detect the use of wrapper as a
- # method to self with respective access privileges)
- id_ = self.params.get_int_or_none('id')
- for _ in self.postvars.get_all_str('delete'):
- if id_ is None:
- msg = 'trying to delete non-saved ' +\
- f'{target_class.__name__}'
- raise NotFoundException(msg)
- item = target_class.by_id(self._conn, id_)
- item.remove(self._conn)
- return redir_target
- if target_class.can_create_by_id:
- item = target_class.by_id_or_create(self._conn, id_)
- else:
- item = target_class.by_id(self._conn, id_)
- return f(self, item)
- return wrapper
- return decorator
-
- def _change_versioned_timestamps(self, cls: Any, attr_name: str) -> str:
- """Update history timestamps for VersionedAttribute."""
- id_ = self.params.get_int_or_none('id')
- item = cls.by_id(self._conn, id_)
- attr = getattr(item, attr_name)
- for k, vals in self.postvars.get_all_of_key_prefixed('at:').items():
- if k[19:] != vals[0]:
- attr.reset_timestamp(k, f'{vals[0]}.0')
- attr.save(self._conn)
- return f'/{cls.name_lowercase()}_{attr_name}s?id={item.id_}'
-
- def do_POST_day(self) -> str:
- """Update or insert Day of date and Todos mapped to it."""
- # pylint: disable=too-many-locals
- date = self.params.get_str_or_fail('date')
- day_comment = self.postvars.get_str_or_fail('day_comment')
- make_type = self.postvars.get_str_or_fail('make_type')
- old_todos = self.postvars.get_all_int('todo_id')
- new_todos_by_process = self.postvars.get_all_int('new_todo')
- comments = self.postvars.get_all_str('comment')
- efforts = self.postvars.get_all_floats_or_nones('effort')
- done_todos = self.postvars.get_all_int('done')
- is_done = [t_id in done_todos for t_id in old_todos]
- if not (len(old_todos) == len(is_done) == len(comments)
- == len(efforts)):
- msg = 'unequal number of todo_id, comment, and effort inputs'
- raise BadFormatException(msg)
- for _ in [id_ for id_ in done_todos if id_ not in old_todos]:
- raise BadFormatException('"done" field refers to unknown Todo')
- #
- day_id = days_n_from_dt_date(dt_date_from_str(date))
- day = Day.by_id_or_create(self._conn, day_id)
- day.comment = day_comment
- day.save(self._conn)
- new_todos = []
- for process_id in sorted(new_todos_by_process):
- process = Process.by_id(self._conn, process_id)
- todo = Todo(None, process, False, day_id)
- todo.save(self._conn)
- new_todos += [todo]
- if 'full' == make_type:
- for todo in new_todos:
- todo.ensure_children(self._conn)
- for i, todo_id in enumerate(old_todos):
- todo = Todo.by_id(self._conn, todo_id)
- todo.is_done = is_done[i]
- todo.comment = comments[i]
- todo.effort = efforts[i]
- todo.save(self._conn)
- return f'/day?date={date}&make_type={make_type}'
-
- @_delete_or_post(Todo, '/')
- def do_POST_todo(self, todo: Todo) -> str:
- """Update Todo and its children."""
- # pylint: disable=too-many-locals
- # pylint: disable=too-many-branches
- # pylint: disable=too-many-statements
- assert isinstance(todo.id_, int)
- adoptees = [(id_, todo.id_) for id_
- in self.postvars.get_all_int('adopt')]
- to_make = {'full': [(id_, todo.id_) for id_
- in self.postvars.get_all_int('make_full')],
- 'empty': [(id_, todo.id_) for id_
- in self.postvars.get_all_int('make_empty')]}
- step_fillers_to = self.postvars.get_all_of_key_prefixed(
- 'step_filler_to_')
- to_update: dict[str, Any] = {
- 'comment': self.postvars.get_str_or_fail('comment', ''),
- 'is_done': self.postvars.get_bool('is_done'),
- 'calendarize': self.postvars.get_bool('calendarize')}
- cond_rels = [self.postvars.get_all_int(name) for name in
- ['conditions', 'blockers', 'enables', 'disables']]
- effort_or_not = self.postvars.get_str('effort')
- if effort_or_not is not None:
- if effort_or_not == '':
- to_update['effort'] = None
- else:
- try:
- to_update['effort'] = float(effort_or_not)
- except ValueError as e:
- msg = 'cannot float form field value for key: effort'
- raise BadFormatException(msg) from e
- for k, fillers in step_fillers_to.items():
- try:
- parent_id = int(k)
- except ValueError as e:
- msg = f'bad step_filler_to_ key: {k}'
- raise BadFormatException(msg) from e
- for filler in [f for f in fillers if f != 'ignore']:
- target_id: int
- prefix = 'make_'
- to_int = filler[5:] if filler.startswith(prefix) else filler
- try:
- target_id = int(to_int)
- except ValueError as e:
- msg = f'bad step_filler_to_ value: {filler}'
- raise BadFormatException(msg) from e
- if filler.startswith(prefix):
- to_make['empty'] += [(target_id, parent_id)]
- else:
- adoptees += [(target_id, parent_id)]
- #
- todo.set_condition_relations(self._conn, *cond_rels)
- for parent in [Todo.by_id(self._conn, a[1])
- for a in adoptees] + [todo]:
- for child in parent.children:
- if child not in [t[0] for t in adoptees
- if t[0] == child.id_ and t[1] == parent.id_]:
- parent.remove_child(child)
- parent.save(self._conn)
- for child_id, parent_id in adoptees:
- parent = Todo.by_id(self._conn, parent_id)
- if child_id not in [c.id_ for c in parent.children]:
- parent.add_child(Todo.by_id(self._conn, child_id))
- parent.save(self._conn)
- todo.update_attrs(**to_update)
- for approach, make_data in to_make.items():
- for process_id, parent_id in make_data:
- parent = Todo.by_id(self._conn, parent_id)
- process = Process.by_id(self._conn, process_id)
- made = Todo(None, process, False, todo.day_id)
- made.save(self._conn)
- if 'full' == approach:
- made.ensure_children(self._conn)
- parent.add_child(made)
- parent.save(self._conn)
- # todo.save() may destroy Todo if .effort < 0, so retrieve .id_ early
- url = f'/todo?id={todo.id_}'
- todo.save(self._conn)
- return url
-
- def do_POST_process_descriptions(self) -> str:
- """Update history timestamps for Process.description."""
- return self._change_versioned_timestamps(Process, 'description')
-
- def do_POST_process_efforts(self) -> str:
- """Update history timestamps for Process.effort."""
- return self._change_versioned_timestamps(Process, 'effort')
-
- def do_POST_process_titles(self) -> str:
- """Update history timestamps for Process.title."""
- return self._change_versioned_timestamps(Process, 'title')
-
- @_delete_or_post(Process, '/processes')
- def do_POST_process(self, process: Process) -> str:
- """Update or insert Process of ?id= and fields defined in postvars."""
- # pylint: disable=too-many-locals
-
- def id_or_title(l_id_or_title: list[str]) -> tuple[str, list[int]]:
- l_ids, title = [], ''
- for id_or_title in l_id_or_title:
- try:
- l_ids += [int(id_or_title)]
- except ValueError:
- title = id_or_title
- return title, l_ids
-
- versioned = {
- 'title': self.postvars.get_str_or_fail('title'),
- 'description': self.postvars.get_str_or_fail('description'),
- 'effort': self.postvars.get_float_or_fail('effort')}
- cond_rels = [self.postvars.get_all_int(s) for s
- in ['conditions', 'blockers', 'enables', 'disables']]
- calendarize = self.postvars.get_bool('calendarize')
- step_of = self.postvars.get_all_str('step_of')
- suppressions = self.postvars.get_all_int('suppressed_steps')
- kept_steps = self.postvars.get_all_int('kept_steps')
- new_top_step_procs = self.postvars.get_all_str('new_top_step')
- new_steps_to = {
- int(k): [int(n) for n in v] for (k, v)
- in self.postvars.get_all_of_key_prefixed('new_step_to_').items()}
- new_owner_title, owners_to_set = id_or_title(step_of)
- new_step_title, new_top_step_proc_ids = id_or_title(new_top_step_procs)
- #
- for k, v in versioned.items():
- getattr(process, k).set(v)
- process.calendarize = calendarize
- process.save(self._conn)
- assert isinstance(process.id_, int)
- # set relations to Conditions and ProcessSteps / other Processes
- process.set_condition_relations(self._conn, *cond_rels)
- owned_steps = [ProcessStep.by_id(self._conn, step_id)
- for step_id in kept_steps]
- for parent_step_id, step_process_ids in new_steps_to.items():
- owned_steps += [ProcessStep(None, process.id_, step_process_id,
- parent_step_id)
- for step_process_id in step_process_ids]
- owned_steps += [ProcessStep(None, process.id_, step_process_id, None)
- for step_process_id in new_top_step_proc_ids]
- process.set_step_relations(self._conn, owners_to_set, suppressions,
- owned_steps)
- # encode titles for potential newly-to-create Processes up or down
- params = f'id={process.id_}'
- if new_step_title:
- title_b64_encoded = b64encode(new_step_title.encode()).decode()
- params = f'step_to={process.id_}&title_b64={title_b64_encoded}'
- elif new_owner_title:
- title_b64_encoded = b64encode(new_owner_title.encode()).decode()
- params = f'has_step={process.id_}&title_b64={title_b64_encoded}'
- process.save(self._conn)
- return f'/process?{params}'
-
- def do_POST_condition_descriptions(self) -> str:
- """Update history timestamps for Condition.description."""
- return self._change_versioned_timestamps(Condition, 'description')
-
- def do_POST_condition_titles(self) -> str:
- """Update history timestamps for Condition.title."""
- return self._change_versioned_timestamps(Condition, 'title')
-
- @_delete_or_post(Condition, '/conditions')
- def do_POST_condition(self, condition: Condition) -> str:
- """Update/insert Condition of ?id= and fields defined in postvars."""
- title = self.postvars.get_str_or_fail('title')
- description = self.postvars.get_str_or_fail('description')
- is_active = self.postvars.get_bool('is_active')
- condition.is_active = is_active
- #
- condition.title.set(title)
- condition.description.set(description)
- condition.save(self._conn)
- return f'/condition?id={condition.id_}'
+++ /dev/null
-"""What doesn't fit elsewhere so far."""
-from typing import Any
-
-
-class DictableNode:
- """Template for display chain nodes providing .as_dict_and_refs."""
- # pylint: disable=too-few-public-methods
- _to_dict: list[str] = []
-
- def __init__(self, *args: Any) -> None:
- for i, arg in enumerate(args):
- setattr(self, self._to_dict[i], arg)
-
- @property
- def as_dict_and_refs(self) -> tuple[dict[str, object], list[Any]]:
- """Return self as json.dumps-ready dict, list of referenced objects."""
- d = {}
- refs = []
- for name in self._to_dict:
- attr = getattr(self, name)
- if hasattr(attr, 'id_'):
- d[name] = attr.id_
- continue
- if isinstance(attr, list):
- d[name] = []
- for item in attr:
- item_d, item_refs = item.as_dict_and_refs
- d[name] += [item_d]
- for item_ref in [r for r in item_refs if r not in refs]:
- refs += [item_ref]
- continue
- d[name] = attr
- return d, refs
+++ /dev/null
-"""Collecting Processes and Process-related items."""
-from __future__ import annotations
-from typing import Set, Self, Any
-from sqlite3 import Row
-from plomtask.misc import DictableNode
-from plomtask.db import DatabaseConnection, BaseModel
-from plomtask.versioned_attributes import VersionedAttribute
-from plomtask.conditions import Condition, ConditionsRelations
-from plomtask.exceptions import (NotFoundException, BadFormatException,
- HandledException)
-
-
-class ProcessStepsNode(DictableNode):
- """Collects what's useful to know for ProcessSteps tree display."""
- # pylint: disable=too-few-public-methods
- step: ProcessStep
- process: Process
- is_explicit: bool
- steps: list[ProcessStepsNode]
- seen: bool = False
- is_suppressed: bool = False
- _to_dict = ['step', 'process', 'is_explicit', 'steps', 'seen',
- 'is_suppressed']
-
-
-class Process(BaseModel, ConditionsRelations):
- """Template for, and metadata for, Todos, and their arrangements."""
- # pylint: disable=too-many-instance-attributes
- table_name = 'processes'
- to_save_simples = ['calendarize']
- to_save_relations = [('process_conditions', 'process', 'conditions', 0),
- ('process_blockers', 'process', 'blockers', 0),
- ('process_enables', 'process', 'enables', 0),
- ('process_disables', 'process', 'disables', 0),
- ('process_step_suppressions', 'process',
- 'suppressed_steps', 0)]
- add_to_dict = ['explicit_steps']
- versioned_defaults = {'title': 'UNNAMED', 'description': '', 'effort': 1.0}
- to_search = ['title.newest', 'description.newest']
- can_create_by_id = True
- sorters = {'steps': lambda p: len(p.explicit_steps),
- 'owners': lambda p: p.n_owners,
- 'effort': lambda p: p.effort.newest,
- 'title': lambda p: p.title.newest}
-
- def __init__(self, id_: int | None, calendarize: bool = False) -> None:
- super().__init__(id_)
- ConditionsRelations.__init__(self)
- for name in ['title', 'description', 'effort']:
- attr = VersionedAttribute(self, f'process_{name}s',
- self.versioned_defaults[name])
- setattr(self, name, attr)
- self.explicit_steps: list[ProcessStep] = []
- self.suppressed_steps: list[ProcessStep] = []
- self.calendarize = calendarize
- self.n_owners: int | None = None # only set by from_table_row
-
- @classmethod
- def from_table_row(cls, db_conn: DatabaseConnection, row: Row | list[Any]
- ) -> Self:
- """Make from DB row, with dependencies."""
- process = super().from_table_row(db_conn, row)
- assert process.id_ is not None
- for name in ('conditions', 'blockers', 'enables', 'disables'):
- table = f'process_{name}'
- for c_id in db_conn.column_where(table, 'condition',
- 'process', process.id_):
- target = getattr(process, name)
- target += [Condition.by_id(db_conn, c_id)]
- for row_ in db_conn.row_where('process_steps', 'owner', process.id_):
- # NB: It's tempting to call ProcessStep.from_table_row(row_) directly,
- # but we don't want to unnecessarily invalidate cached ProcessSteps
- # elsewhere (notably, other Processes' .suppressed_steps), as a
- # complete reload like this would do.
- step = ProcessStep.by_id(db_conn, row_[0])
- process.explicit_steps += [step]
- for row_ in db_conn.row_where('process_step_suppressions', 'process',
- process.id_):
- step = ProcessStep.by_id(db_conn, row_[1])
- process.suppressed_steps += [step]
- process.n_owners = len(process.used_as_step_by(db_conn))
- return process
-
- def used_as_step_by(self, db_conn: DatabaseConnection) -> list[Self]:
- """Return Processes using self for a ProcessStep."""
- if not self.id_:
- return []
- owner_ids = set()
- for id_ in db_conn.column_where('process_steps', 'owner',
- 'step_process', self.id_):
- owner_ids.add(id_)
- return [self.__class__.by_id(db_conn, id_) for id_ in owner_ids]
-
- def get_steps(self,
- db_conn: DatabaseConnection,
- external_owner: Self | None = None
- ) -> list[ProcessStepsNode]:
- """Return tree of depended-on explicit and implicit ProcessSteps."""
-
- def make_node(step: ProcessStep, suppressed: bool) -> ProcessStepsNode:
- is_explicit = step.owner_id == top_owner.id_
- process = self.__class__.by_id(db_conn, step.step_process_id)
- step_steps = []
- if not suppressed:
- # exclude implicit siblings to explicit steps of same process
- step_steps = [n for n in process.get_steps(db_conn, top_owner)
- if not [s for s in top_owner.explicit_steps
- if s.parent_step_id == step.id_
- and s.step_process_id == n.process.id_]]
- return ProcessStepsNode(step, process, is_explicit, step_steps,
- False, suppressed)
-
- def walk_steps(node: ProcessStepsNode) -> None:
- node.seen = node.step.id_ in seen_step_ids
- assert isinstance(node.step.id_, int)
- seen_step_ids.add(node.step.id_)
- if node.is_suppressed:
- return
- explicit_children = [s for s in self.explicit_steps
- if s.parent_step_id == node.step.id_]
- for child in explicit_children:
- node.steps += [make_node(child, False)]
- for step in node.steps:
- walk_steps(step)
-
- step_nodes: list[ProcessStepsNode] = []
- seen_step_ids: Set[int] = set()
- top_owner = external_owner or self
- for step in [s for s in self.explicit_steps
- if s.parent_step_id is None]:
- new_node = make_node(step, step in top_owner.suppressed_steps)
- step_nodes += [new_node]
- for step_node in step_nodes:
- walk_steps(step_node)
- return step_nodes
-
- def set_step_relations(self,
- db_conn: DatabaseConnection,
- owners: list[int],
- suppressions: list[int],
- owned_steps: list[ProcessStep]
- ) -> None:
- """Set step owners, suppressions, and owned steps."""
- self._set_owners(db_conn, owners)
- self._set_step_suppressions(db_conn, suppressions)
- self.set_steps(db_conn, owned_steps)
-
- def _set_step_suppressions(self,
- db_conn: DatabaseConnection,
- step_ids: list[int]
- ) -> None:
- """Set self.suppressed_steps from step_ids."""
- assert isinstance(self.id_, int)
- db_conn.delete_where('process_step_suppressions', 'process', self.id_)
- self.suppressed_steps = [ProcessStep.by_id(db_conn, s)
- for s in step_ids]
-
- def _set_owners(self,
- db_conn: DatabaseConnection,
- owner_ids: list[int]
- ) -> None:
- """Re-set owners to those identified in owner_ids."""
- owners_old = self.used_as_step_by(db_conn)
- losers = [o for o in owners_old if o.id_ not in owner_ids]
- owners_old_ids = [o.id_ for o in owners_old]
- winners = [self.by_id(db_conn, id_) for id_ in owner_ids
- if id_ not in owners_old_ids]
- steps_to_remove = []
- for loser in losers:
- steps_to_remove += [s for s in loser.explicit_steps
- if s.step_process_id == self.id_]
- for step in steps_to_remove:
- step.remove(db_conn)
- for winner in winners:
- assert isinstance(winner.id_, int)
- assert isinstance(self.id_, int)
- new_step = ProcessStep(None, winner.id_, self.id_, None)
- new_explicit_steps = winner.explicit_steps + [new_step]
- winner.set_steps(db_conn, new_explicit_steps)
-
- def set_steps(self,
- db_conn: DatabaseConnection,
- steps: list[ProcessStep]
- ) -> None:
- """Set self.explicit_steps in bulk.
-
- Checks against recursion, and turns any steps of unknown or non-owned
- parent into top-level steps.
- """
- def walk_steps(node: ProcessStep) -> None:
- if node.step_process_id == self.id_:
- raise BadFormatException('bad step selection causes recursion')
- step_process = self.by_id(db_conn, node.step_process_id)
- for step in step_process.explicit_steps:
- walk_steps(step)
-
- # NB: separate the collection of steps to save/remove from the action
- # because the latter may modify the collection / self.explicit_steps
- to_remove = []
- for step in [s for s in self.explicit_steps if s not in steps]:
- to_remove += [step]
- for step in to_remove:
- step.remove(db_conn)
- to_save = []
- for step in [s for s in steps if s not in self.explicit_steps]:
- if step.parent_step_id is not None:
- try:
- parent_step = ProcessStep.by_id(db_conn,
- step.parent_step_id)
- if parent_step.owner_id != self.id_:
- step.parent_step_id = None
- except NotFoundException:
- step.parent_step_id = None
- walk_steps(step)
- to_save += [step]
- for step in to_save:
- step.save(db_conn)
-
- def save(self, db_conn: DatabaseConnection) -> None:
- """Add (or re-write) self and connected items to DB."""
- super().save(db_conn)
- assert isinstance(self.id_, int)
- db_conn.delete_where('process_steps', 'owner', self.id_)
- # NB: we separate the collection of steps to save from step.save()
- # because the latter may modify the collection / self.explicit_steps
- to_save = []
- for step in self.explicit_steps:
- to_save += [step]
- for step in to_save:
- step.save(db_conn)
-
- def remove(self, db_conn: DatabaseConnection) -> None:
- """Remove from DB, with dependencies.
-
- Guard against removal of Processes in use.
- """
- assert isinstance(self.id_, int)
- for _ in db_conn.row_where('process_steps', 'step_process', self.id_):
- raise HandledException('cannot remove Process in use')
- for _ in db_conn.row_where('todos', 'process', self.id_):
- raise HandledException('cannot remove Process in use')
- for step in self.explicit_steps:
- step.remove(db_conn)
- super().remove(db_conn)
-
-
-class ProcessStep(BaseModel):
- """Sub-unit of Processes."""
- table_name = 'process_steps'
- to_save_simples = ['owner_id', 'step_process_id', 'parent_step_id']
-
- def __init__(self, id_: int | None, owner_id: int, step_process_id: int,
- parent_step_id: int | None) -> None:
- super().__init__(id_)
- self.owner_id = owner_id
- self.step_process_id = step_process_id
- self.parent_step_id = parent_step_id
-
- def save(self, db_conn: DatabaseConnection) -> None:
- """Update into DB/cache, and owner's .explicit_steps."""
- super().save(db_conn)
- owner = Process.by_id(db_conn, self.owner_id)
- if self not in owner.explicit_steps:
- for s in [s for s in owner.explicit_steps if s.id_ == self.id_]:
- s.remove(db_conn)
- owner.explicit_steps += [self]
- owner.explicit_steps.sort(key=hash)
-
- def remove(self, db_conn: DatabaseConnection) -> None:
- """Remove from DB, and owner's .explicit_steps."""
- owner = Process.by_id(db_conn, self.owner_id)
- owner.explicit_steps.remove(self)
- super().remove(db_conn)
+++ /dev/null
-"""Actionables."""
-from __future__ import annotations
-from datetime import date as dt_date
-from typing import Any, Self, Set
-from sqlite3 import Row
-from plomtask.misc import DictableNode
-from plomtask.db import DatabaseConnection, BaseModel
-from plomtask.processes import Process, ProcessStepsNode
-from plomtask.versioned_attributes import VersionedAttribute
-from plomtask.conditions import Condition, ConditionsRelations
-from plomtask.exceptions import (NotFoundException, BadFormatException,
- HandledException)
-from plomtask.dating import (
- days_n_from_dt_date, dt_date_from_str, dt_date_from_days_n)
-
-
-class TodoNode(DictableNode):
- """Collects what's useful to know for Todo/Condition tree display."""
- # pylint: disable=too-few-public-methods
- todo: Todo
- seen: bool
- children: list[TodoNode]
- _to_dict = ['todo', 'seen', 'children']
-
-
-class TodoOrProcStepNode(DictableNode):
- """Collect what's useful for Todo-or-ProcessStep tree display."""
- # pylint: disable=too-few-public-methods
- node_id: int
- todo: Todo | None
- process: Process | None
- children: list[TodoOrProcStepNode] # pylint: disable=undefined-variable
- fillable: bool = False
- _to_dict = ['node_id', 'todo', 'process', 'children', 'fillable']
-
-
-class Todo(BaseModel, ConditionsRelations):
- """Individual actionable."""
- # pylint: disable=too-many-instance-attributes
- # pylint: disable=too-many-public-methods
- table_name = 'todos'
- to_save_simples = ['process_id', 'is_done', 'day_id', 'comment', 'effort',
- 'calendarize']
- to_save_relations = [('todo_conditions', 'todo', 'conditions', 0),
- ('todo_blockers', 'todo', 'blockers', 0),
- ('todo_enables', 'todo', 'enables', 0),
- ('todo_disables', 'todo', 'disables', 0),
- ('todo_children', 'parent', 'children', 0),
- ('todo_children', 'child', 'parents', 1)]
- to_search = ['comment']
- days_to_update: Set[int] = set()
- children: list[Todo]
- parents: list[Todo]
- sorters = {'doneness': lambda t: t.is_done,
- 'title': lambda t: t.title_then,
- 'comment': lambda t: t.comment,
- 'date': lambda t: t.day_id}
-
- # pylint: disable=too-many-arguments
- def __init__(self, id_: int | None,
- process: Process,
- is_done: bool,
- day_id: int,
- comment: str = '',
- effort: None | float = None,
- calendarize: bool = False
- ) -> None:
- super().__init__(id_)
- ConditionsRelations.__init__(self)
- if process.id_ is None:
- raise NotFoundException('Process of Todo without ID (not saved?)')
- self.process = process
- self._is_done = is_done
- self.day_id = day_id
- self.comment = comment
- self.effort = effort
- self.children = []
- self.parents = []
- self.calendarize = calendarize
- if not self.id_:
- self.calendarize = self.process.calendarize
- self.conditions = self.process.conditions[:]
- self.blockers = self.process.blockers[:]
- self.enables = self.process.enables[:]
- self.disables = self.process.disables[:]
-
- @property
- def date(self) -> str:
- """Return ISO formatted date matching .day_id."""
- return dt_date_from_days_n(self.day_id).isoformat()
-
- @classmethod
- def by_date_range_with_limits(cls,
- db_conn: DatabaseConnection,
- date_range: tuple[str, str],
- ) -> tuple[list[Self], str, str]:
- """Return Todos within (closed) date_range interval.
-
- If no range values provided, defaults them to 'yesterday' and
- 'tomorrow'. Knows to properly interpret these and 'today' as values.
- """
- dt_date_limits: list[dt_date] = []
- for i in range(2):
- dt_date_limits += [
- dt_date_from_str(date_range[i] if date_range[i]
- else ('yesterday', 'tomorrow')[i])]
- items: list[Self] = []
- for row in db_conn.exec(
- f'SELECT id FROM {cls.table_name} WHERE day >= ? AND day <= ?',
- tuple(days_n_from_dt_date(d) for d in dt_date_limits),
- build_q_marks=False):
- items += [cls.by_id(db_conn, row[0])]
- return (items,
- dt_date_limits[0].isoformat(), dt_date_limits[1].isoformat())
-
- def ensure_children(self, db_conn: DatabaseConnection) -> None:
- """Ensure Todo children (create or adopt) demanded by Process chain."""
-
- def walk_steps(parent: Self, step_node: ProcessStepsNode) -> Todo:
- adoptables = [t for t in self.by_date(db_conn, parent.date)
- if (t not in parent.children)
- and (t != parent)
- and step_node.process.id_ == t.process_id]
- satisfier = None
- for adoptable in adoptables:
- satisfier = adoptable
- break
- if not satisfier:
- satisfier = self.__class__(None, step_node.process, False,
- parent.day_id)
- satisfier.save(db_conn)
- sub_step_nodes = sorted(
- step_node.steps,
- key=lambda s: s.process.id_ if s.process.id_ else 0)
- for sub_node in sub_step_nodes:
- if sub_node.is_suppressed:
- continue
- n_slots = len([n for n in sub_step_nodes
- if n.process == sub_node.process])
- filled_slots = len([t for t in satisfier.children
- if t.process.id_ == sub_node.process.id_])
- # if we did not newly create satisfier, it may already fill
- # some step dependencies, so only fill what remains open
- if n_slots - filled_slots > 0:
- satisfier.add_child(walk_steps(satisfier, sub_node))
- satisfier.save(db_conn)
- return satisfier
-
- process = Process.by_id(db_conn, self.process_id)
- steps_tree = process.get_steps(db_conn)
- for step_node in steps_tree:
- if step_node.is_suppressed:
- continue
- self.add_child(walk_steps(self, step_node))
- self.save(db_conn)
-
- @classmethod
- def from_table_row(cls, db_conn: DatabaseConnection,
- row: Row | list[Any]) -> Self:
- """Make from DB row, with dependencies."""
- if row[1] == 0:
- raise NotFoundException('calling Todo of '
- 'unsaved Process')
- row_as_list = list(row)
- row_as_list[1] = Process.by_id(db_conn, row[1])
- todo = super().from_table_row(db_conn, row_as_list)
- assert isinstance(todo.id_, int)
- for t_id in db_conn.column_where('todo_children', 'child',
- 'parent', todo.id_):
- todo.children += [cls.by_id(db_conn, t_id)]
- for t_id in db_conn.column_where('todo_children', 'parent',
- 'child', todo.id_):
- todo.parents += [cls.by_id(db_conn, t_id)]
- for name in ('conditions', 'blockers', 'enables', 'disables'):
- table = f'todo_{name}'
- for cond_id in db_conn.column_where(table, 'condition',
- 'todo', todo.id_):
- target = getattr(todo, name)
- target += [Condition.by_id(db_conn, cond_id)]
- return todo
-
- @classmethod
- def by_process_id(cls, db_conn: DatabaseConnection,
- process_id: int | None) -> list[Self]:
- """Collect all Todos of Process of process_id."""
- return [t for t in cls.all(db_conn) if t.process.id_ == process_id]
-
- @classmethod
- def by_date(cls, db_conn: DatabaseConnection, date: str) -> list[Self]:
- """Collect all Todos for Day of date."""
- return cls.by_date_range_with_limits(db_conn, (date, date))[0]
-
- @property
- def is_doable(self) -> bool:
- """Decide whether .is_done settable based on children, Conditions."""
- for child in self.children:
- if not child.is_done:
- return False
- for condition in self.conditions:
- if not condition.is_active:
- return False
- for condition in self.blockers:
- if condition.is_active:
- return False
- return True
-
- @property
- def is_deletable(self) -> bool:
- """Decide whether self be deletable (not if preserve-worthy values)."""
- if self.comment:
- return False
- if self.effort and self.effort >= 0:
- return False
- return True
-
- @property
- def performed_effort(self) -> float:
- """Return performed effort, i.e. self.effort or default if done.."""
- if self.effort is not None:
- return self.effort
- if self.is_done:
- return self.effort_then
- return 0
-
- @property
- def process_id(self) -> int:
- """Needed for super().save to save Processes as attributes."""
- assert isinstance(self.process.id_, int)
- return self.process.id_
-
- @property
- def is_done(self) -> bool:
- """Wrapper around self._is_done so we can control its setter."""
- return self._is_done
-
- @is_done.setter
- def is_done(self, value: bool) -> None:
- if value != self.is_done and not self.is_doable:
- raise BadFormatException('cannot change doneness of undoable Todo')
- if self._is_done != value:
- self._is_done = value
- if value is True:
- for condition in self.enables:
- condition.is_active = True
- for condition in self.disables:
- condition.is_active = False
-
- @property
- def title(self) -> VersionedAttribute:
- """Shortcut to .process.title."""
- assert isinstance(self.process.title, VersionedAttribute)
- return self.process.title
-
- @property
- def title_then(self) -> str:
- """Shortcut to .process.title.at(self.date)."""
- title_then = self.process.title.at(self.date)
- assert isinstance(title_then, str)
- return title_then
-
- @property
- def effort_then(self) -> float:
- """Shortcut to .process.effort.at(self.date)"""
- effort_then = self.process.effort.at(self.date)
- assert isinstance(effort_then, float)
- return effort_then
-
- @property
- def has_doneness_in_path(self) -> bool:
- """Check whether self is done or has any children that are."""
- if self.is_done:
- return True
- for child in self.children:
- if child.is_done:
- return True
- if child.has_doneness_in_path:
- return True
- return False
-
- def get_step_tree(self, seen_todos: set[int]) -> TodoNode:
- """Return tree of depended-on Todos."""
-
- def make_node(todo: Self) -> TodoNode:
- children = []
- seen = todo.id_ in seen_todos
- assert isinstance(todo.id_, int)
- seen_todos.add(todo.id_)
- for child in todo.children:
- children += [make_node(child)]
- return TodoNode(todo, seen, children)
-
- return make_node(self)
-
- @property
- def tree_effort(self) -> float:
- """Return sum of performed efforts of self and all descendants."""
-
- def walk_tree(node: Self) -> float:
- local_effort = 0.0
- for child in node.children:
- local_effort += walk_tree(child)
- return node.performed_effort + local_effort
-
- return walk_tree(self)
-
- def add_child(self, child: Self) -> None:
- """Add child to self.children, avoid recursion, update parenthoods."""
-
- def walk_steps(node: Self) -> None:
- if node.id_ == self.id_:
- raise BadFormatException('bad child choice causes recursion')
- for child in node.children:
- walk_steps(child)
-
- if self.id_ is None:
- raise HandledException('Can only add children to saved Todos.')
- if child.id_ is None:
- raise HandledException('Can only add saved children to Todos.')
- if child in self.children:
- raise BadFormatException('cannot adopt same child twice')
- walk_steps(child)
- self.children += [child]
- child.parents += [self]
-
- def remove_child(self, child: Self) -> None:
- """Remove child from self.children, update counter relations."""
- if child not in self.children:
- raise HandledException('Cannot remove un-parented child.')
- self.children.remove(child)
- child.parents.remove(self)
-
- def update_attrs(self, **kwargs: Any) -> None:
- """Update self's attributes listed in kwargs."""
- for k, v in kwargs.items():
- setattr(self, k, v)
-
- def save(self, db_conn: DatabaseConnection) -> None:
- """On save calls, also check if auto-deletion by effort < 0."""
- if self.effort and self.effort < 0 and self.is_deletable:
- self.remove(db_conn)
- return
- if self.id_ is None:
- self.__class__.days_to_update.add(self.day_id)
- super().save(db_conn)
- for condition in self.enables + self.disables + self.conditions:
- condition.save(db_conn)
-
- def remove(self, db_conn: DatabaseConnection) -> None:
- """Remove from DB, including relations."""
- if not self.is_deletable:
- raise HandledException('Cannot remove non-deletable Todo.')
- self.__class__.days_to_update.add(self.day_id)
- children_to_remove = self.children[:]
- parents_to_remove = self.parents[:]
- for child in children_to_remove:
- self.remove_child(child)
- for parent in parents_to_remove:
- parent.remove_child(self)
- super().remove(db_conn)
+++ /dev/null
-"""Attributes whose values are recorded as a timestamped history."""
-from datetime import datetime
-from typing import Any
-from sqlite3 import Row
-from time import sleep
-from plomtask.db import DatabaseConnection
-from plomtask.exceptions import (HandledException, BadFormatException,
- NotFoundException)
-
-TIMESTAMP_FMT = '%Y-%m-%d %H:%M:%S.%f'
-
-
-class VersionedAttribute:
- """Attributes whose values are recorded as a timestamped history."""
-
- def __init__(self,
- parent: Any, table_name: str, default: str | float) -> None:
- self.parent = parent
- self.table_name = table_name
- self._default = default
- self.history: dict[str, str | float] = {}
- # NB: For tighter mypy testing, we might prefer self.history to be
- # dict[str, float] | dict[str, str] instead, but my current coding
- # knowledge only manages to make that work by adding much further
- # complexity, so let's leave it at that for now …
-
- def __hash__(self) -> int:
- history_tuples = tuple((k, v) for k, v in self.history.items())
- hashable = (self.parent.id_, self.table_name, self._default,
- history_tuples)
- return hash(hashable)
-
- @property
- def _newest_timestamp(self) -> str:
- """Return most recent timestamp."""
- return sorted(self.history.keys())[-1]
-
- @property
- def value_type_name(self) -> str:
- """Return string of name of attribute value type."""
- return type(self._default).__name__
-
- @property
- def newest(self) -> str | float:
- """Return most recent value, or self._default if self.history empty."""
- if 0 == len(self.history):
- return self._default
- return self.history[self._newest_timestamp]
-
- def reset_timestamp(self, old_str: str, new_str: str) -> None:
- """Rename self.history key (timestamp) old to new.
-
- Chronological sequence of keys must be preserved, i.e. cannot move
- key before earlier or after later timestamp.
- """
- try:
- new = datetime.strptime(new_str, TIMESTAMP_FMT)
- old = datetime.strptime(old_str, TIMESTAMP_FMT)
- except ValueError as exc:
- raise BadFormatException('Timestamp of illegal format.') from exc
- timestamps = list(self.history.keys())
- if old_str not in timestamps:
- raise HandledException(f'Timestamp {old} not found in history.')
- sorted_timestamps = sorted([datetime.strptime(t, TIMESTAMP_FMT)
- for t in timestamps])
- expected_position = sorted_timestamps.index(old)
- sorted_timestamps.remove(old)
- sorted_timestamps += [new]
- sorted_timestamps.sort()
- if sorted_timestamps.index(new) != expected_position:
- raise HandledException('Timestamp not respecting chronology.')
- value = self.history[old_str]
- del self.history[old_str]
- self.history[new_str] = value
-
- def set(self, value: str | float) -> None:
- """Add to self.history if and only if not same value as newest one.
-
- Note that we wait one micro-second, as timestamp comparison to check
- most recent elements only goes up to that precision.
-
- Also note that we don't check against .newest because that may make us
- compare value against .default even if not set. We want to be able to
- explicitly set .default as the first element.
- """
- sleep(0.00001)
- if 0 == len(self.history) \
- or value != self.history[self._newest_timestamp]:
- self.history[datetime.now().strftime(TIMESTAMP_FMT)] = value
-
- def history_from_row(self, row: Row) -> None:
- """Extend self.history from expected table row format."""
- self.history[row[1]] = row[2]
-
- def at(self, queried_time: str) -> str | float:
- """Retrieve value of timestamp nearest queried_time from the past."""
- if len(queried_time) == 10:
- queried_time += ' 23:59:59.999'
- sorted_timestamps = sorted(self.history.keys())
- if 0 == len(sorted_timestamps):
- return self._default
- selected_timestamp = sorted_timestamps[0]
- for timestamp in sorted_timestamps[1:]:
- if timestamp > queried_time:
- break
- selected_timestamp = timestamp
- return self.history[selected_timestamp]
-
- def save(self, db_conn: DatabaseConnection) -> None:
- """Save as self.history entries, but first wipe old ones."""
- if self.parent.id_ is None:
- raise NotFoundException('cannot save attribute to parent if no ID')
- db_conn.rewrite_relations(self.table_name, 'parent', self.parent.id_,
- [[item[0], item[1]]
- for item in self.history.items()])
-
- def remove(self, db_conn: DatabaseConnection) -> None:
- """Remove from DB."""
- db_conn.delete_where(self.table_name, 'parent', self.parent.id_)
# non-standard libs
try:
from plomlib.db import PlomDbException
- from plomtask.exceptions import HandledException
- from plomtask.http import TaskHandler, TaskServer
- from plomtask.db import DatabaseFile
+ from taskplom.exceptions import HandledException
+ from taskplom.http import TaskHandler, TaskServer
+ from taskplom.db import DatabaseFile
except ModuleNotFoundError as e:
print(f"Missing dependency: {e}. Please run with 'install_deps' argument.")
sys_exit(1)
--- /dev/null
+"""Non-doable elements of ProcessStep/Todo chains."""
+from __future__ import annotations
+from taskplom.db import DatabaseConnection, BaseModel
+from taskplom.versioned_attributes import VersionedAttribute
+from taskplom.exceptions import HandledException
+
+
+class Condition(BaseModel):
+ """Non-Process dependency for ProcessSteps and Todos."""
+ table_name = 'conditions'
+ to_save_simples = ['is_active']
+ versioned_defaults = {'title': 'UNNAMED', 'description': ''}
+ to_search = ['title.newest', 'description.newest']
+ can_create_by_id = True
+ sorters = {'is_active': lambda c: c.is_active,
+ 'title': lambda c: c.title.newest}
+
+ def __init__(self, id_: int | None, is_active: bool = False) -> None:
+ super().__init__(id_)
+ self.is_active = is_active
+ for name in ['title', 'description']:
+ attr = VersionedAttribute(self, f'condition_{name}s',
+ self.versioned_defaults[name])
+ setattr(self, name, attr)
+
+ def remove(self, db_conn: DatabaseConnection) -> None:
+ """Remove from DB, with VersionedAttributes.
+
+ Checks for Todos and Processes that depend on Condition, prohibits
+ deletion if found.
+ """
+ if self.id_ is not None:
+ for item in ('process', 'todo'):
+ for attr in ('conditions', 'blockers', 'enables', 'disables'):
+ table_name = f'{item}_{attr}'
+ for _ in db_conn.row_where(table_name, 'condition',
+ self.id_):
+ msg = 'cannot remove Condition in use'
+ raise HandledException(msg)
+ super().remove(db_conn)
+
+
+class ConditionsRelations:
+ """Methods for handling relations to Conditions, for Todo and Process."""
+ # pylint: disable=too-few-public-methods
+
+ def __init__(self) -> None:
+ self.conditions: list[Condition] = []
+ self.blockers: list[Condition] = []
+ self.enables: list[Condition] = []
+ self.disables: list[Condition] = []
+
+ def set_condition_relations(self,
+ db_conn: DatabaseConnection,
+ ids_conditions: list[int],
+ ids_blockers: list[int],
+ ids_enables: list[int],
+ ids_disables: list[int]
+ ) -> None:
+ """Set owned Condition lists to those identified by respective IDs."""
+ # pylint: disable=too-many-arguments
+ for ids, target in [(ids_conditions, 'conditions'),
+ (ids_blockers, 'blockers'),
+ (ids_enables, 'enables'),
+ (ids_disables, 'disables')]:
+ target_list = getattr(self, target)
+ while len(target_list) > 0:
+ target_list.pop()
+ for id_ in ids:
+ target_list += [Condition.by_id(db_conn, id_)]
--- /dev/null
+"""Various utilities for handling dates."""
+from datetime import date as dt_date, timedelta
+from taskplom.exceptions import BadFormatException
+
+
+def dt_date_from_str(date_str: str) -> dt_date:
+ """Validate against ISO format, colloq. terms; return as datetime.date."""
+ if date_str == 'today':
+ date_str = date_in_n_days(0)
+ elif date_str == 'yesterday':
+ date_str = date_in_n_days(-1)
+ elif date_str == 'tomorrow':
+ date_str = date_in_n_days(1)
+ try:
+ date = dt_date.fromisoformat(date_str)
+ except (ValueError, TypeError) as e:
+ msg = f'Given date of wrong format: {date_str}'
+ raise BadFormatException(msg) from e
+ return date
+
+
+def days_n_from_dt_date(date: dt_date) -> int:
+ """Return number of days from Jan 1st 2000 to datetime.date."""
+ return (date - dt_date(2000, 1, 1)).days
+
+
+def dt_date_from_days_n(days_n: int) -> dt_date:
+ """Return datetime.date for days_n after Jan 1st 2000."""
+ return dt_date(2000, 1, 1) + timedelta(days=days_n)
+
+
+def date_in_n_days(n: int) -> str:
+ """Return in ISO format date from today + n days."""
+ date = dt_date.today() + timedelta(days=n)
+ return date.isoformat()
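+
+
+# A short worked example of the helpers above (illustrative comment only;
+# the values follow directly from the Jan 1st 2000 epoch used throughout):
+#     days_n_from_dt_date(dt_date(2000, 1, 2))  # -> 1
+#     dt_date_from_days_n(1)                    # -> datetime.date(2000, 1, 2)
+#     dt_date_from_str('2000-01-02')            # -> datetime.date(2000, 1, 2)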
--- /dev/null
+"""Collecting Day and date-related items."""
+from __future__ import annotations
+from typing import Any, Self
+from sqlite3 import Row
+from datetime import date as dt_date, timedelta
+from taskplom.db import DatabaseConnection, BaseModel
+from taskplom.todos import Todo
+from taskplom.dating import dt_date_from_days_n, days_n_from_dt_date
+
+
+class Day(BaseModel):
+ """Individual days defined by their dates."""
+ table_name = 'days'
+ to_save_simples = ['comment']
+ add_to_dict = ['todos']
+ can_create_by_id = True
+
+ def __init__(self, id_: int, comment: str = '') -> None:
+ super().__init__(id_)
+ self.comment = comment
+ self.todos: list[Todo] = []
+
+ @classmethod
+ def from_table_row(cls, db_conn: DatabaseConnection, row: Row | list[Any]
+ ) -> Self:
+ """Make from DB row, with linked Todos."""
+ day = super().from_table_row(db_conn, row)
+ day.todos = Todo.by_date(db_conn, day.date)
+ return day
+
+ @classmethod
+ def by_id(cls, db_conn: DatabaseConnection, id_: int) -> Self:
+ """Checks Todo.days_to_update if we need to a retrieved Day's .todos"""
+ day = super().by_id(db_conn, id_)
+ assert isinstance(day.id_, int)
+ if day.id_ in Todo.days_to_update:
+ Todo.days_to_update.remove(day.id_)
+ day.todos = Todo.by_date(db_conn, day.date)
+ return day
+
+ @classmethod
+ def with_filled_gaps(
+ cls, conn: DatabaseConnection, dt_start: dt_date, dt_end: dt_date
+ ) -> list[Self]:
+ """Show days >= start_date, <= end_date, fill gaps with un-storeds."""
+ if dt_start > dt_end:
+ return []
+ start_n_days = days_n_from_dt_date(dt_start)
+ end_n_days = days_n_from_dt_date(dt_end)
+ ranged_days = [d for d in cls.all(conn)
+ if isinstance(d.id_, int)
+ and d.id_ >= start_n_days and d.id_ <= end_n_days]
+ ranged_days.sort()
+ if (not ranged_days) or (isinstance(ranged_days[0].id_, int)
+ and start_n_days < ranged_days[0].id_):
+ ranged_days.insert(0, cls(start_n_days))
+ assert isinstance(ranged_days[-1].id_, int)
+ if end_n_days > ranged_days[-1].id_:
+ ranged_days.append(cls(end_n_days))
+ if len(ranged_days) > 1:
+ degapped_ranged_days = []
+ for i, day in enumerate(ranged_days):
+ degapped_ranged_days += [day]
+ if i < len(ranged_days) - 1:
+ next_one = ranged_days[i+1]
+ assert isinstance(day.id_, int)
+ assert isinstance(next_one.id_, int)
+ while day.id_ + 1 != next_one.id_:
+ assert isinstance(day.id_, int)
+ day = cls(day.id_ + 1)
+ degapped_ranged_days += [day]
+ return degapped_ranged_days
+ return ranged_days
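+ # Illustrative sketch of the gap-filling above (hypothetical data,
+ # assuming only the middle day of the range was ever saved):
+ #     days = Day.with_filled_gaps(conn, dt_date_from_days_n(1),
+ #                                 dt_date_from_days_n(3))
+ #     [d.id_ for d in days]  # -> [1, 2, 3]; 1 and 3 are unsaved
+ #                            #    placeholder Days, 2 the stored one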
+
+ @property
+ def _dt_date(self) -> dt_date:
+ """Return chronological location as datetime.date."""
+ assert isinstance(self.id_, int)
+ return dt_date_from_days_n(self.id_)
+
+ @property
+ def date(self) -> str:
+ """Return chronological location as ISO format date."""
+ return self._dt_date.isoformat()
+
+ @property
+ def first_of_month(self) -> bool:
+ """Return if self is first day of a month."""
+ return self.date[-2:] == '01'
+
+ @property
+ def month_name(self) -> str:
+ """Return name of month self is part of."""
+ return self._dt_date.strftime('%B')
+
+ @property
+ def weekday(self) -> str:
+ """Return weekday name matching self."""
+ return self._dt_date.strftime('%A')
+
+ @property
+ def prev_date(self) -> str:
+ """Return ISO-formatted date preceding date of self."""
+ return (self._dt_date - timedelta(days=1)).isoformat()
+
+ @property
+ def next_date(self) -> str:
+ """Return ISO-formatted date succeeding date of this Day."""
+ return (self._dt_date + timedelta(days=1)).isoformat()
+
+ @property
+ def calendarized_todos(self) -> list[Todo]:
+ """Return only those of self.todos that have .calendarize set."""
+ return [t for t in self.todos if t.calendarize]
+
+ @property
+ def total_effort(self) -> float:
+ """"Sum all .performed_effort of self.todos."""
+ total_effort = 0.0
+ for todo in self.todos:
+ total_effort += todo.performed_effort
+ return total_effort
--- /dev/null
+"""Database management."""
+from __future__ import annotations
+from datetime import date as dt_date
+from os import listdir
+from pathlib import Path
+from sqlite3 import Row
+from typing import cast, Any, Self, Callable
+from taskplom.exceptions import (HandledException, NotFoundException,
+ BadFormatException)
+from plomlib.db import (
+ PlomDbConn, PlomDbFile, PlomDbMigration, TypePlomDbMigration)
+
+_EXPECTED_DB_VERSION = 7
+_MIGRATIONS_DIR = Path('migrations')
+_FILENAME_DB_SCHEMA = f'init_{_EXPECTED_DB_VERSION}.sql'
+_PATH_DB_SCHEMA = _MIGRATIONS_DIR.joinpath(_FILENAME_DB_SCHEMA)
+
+
+def _mig_6_calc_days_since_millennium(conn: PlomDbConn) -> None:
+ rows = conn.exec('SELECT * FROM days').fetchall()
+ for row in [list(r) for r in rows]:
+ row[-1] = (dt_date.fromisoformat(row[0]) - dt_date(2000, 1, 1)).days
+ conn.exec('REPLACE INTO days VALUES', tuple(row))
+
+
+MIGRATION_STEPS_POST_SQL: dict[int, Callable[[PlomDbConn], None]] = {
+ 6: _mig_6_calc_days_since_millennium
+}
+
+
+class DatabaseMigration(PlomDbMigration):
+ """Collects and enacts DatabaseFile migration commands."""
+ migs_dir_path = _MIGRATIONS_DIR
+
+ @classmethod
+ def gather(cls, from_version: int, base_set: set[TypePlomDbMigration]
+ ) -> list[TypePlomDbMigration]:
+ msg_prefix = 'Migration directory contains'
+ msg_bad_entry = f'{msg_prefix} unexpected entry: '
+ migs = []
+ total_migs = set()
+ post_sql_steps_added = set()
+ for entry in [e for e in listdir(cls.migs_dir_path)
+ if e != _FILENAME_DB_SCHEMA]:
+ path = cls.migs_dir_path.joinpath(entry)
+ if not path.is_file():
+ continue
+ toks = entry.split('_', maxsplit=1)
+ if len(toks) < 2 or (not toks[0].isdigit()):
+ raise HandledException(f'{msg_bad_entry}{entry}')
+ i = int(toks[0])
+ if i <= from_version:
+ continue
+ if i > _EXPECTED_DB_VERSION:
+ raise HandledException(f'{msg_prefix} unexpected version {i}')
+ post_sql_steps = MIGRATION_STEPS_POST_SQL.get(i, None)
+ if post_sql_steps:
+ post_sql_steps_added.add(i)
+ total_migs.add(cls(i, Path(entry), post_sql_steps))
+ for k in [k for k in MIGRATION_STEPS_POST_SQL
+ if k > from_version
+ and k not in post_sql_steps_added]:
+ total_migs.add(cls(k, None, MIGRATION_STEPS_POST_SQL[k]))
+ for i in range(from_version + 1, _EXPECTED_DB_VERSION + 1):
+ migs_found = [m for m in total_migs if m.target_version == i]
+ if not migs_found:
+ raise HandledException(f'{msg_prefix} no migration of v. {i}')
+ if len(migs_found) > 1:
+ raise HandledException(f'{msg_prefix} >1 migration of v. {i}')
+ migs += migs_found
+ return cast(list[TypePlomDbMigration], migs)
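+ # Illustrative note on the naming convention gather() assumes, with a
+ # hypothetical filename: a migration file '5_add_foo_table.sql' in the
+ # migrations directory targets DB version 5, so a gather() call with
+ # from_version=4 picks it up (plus any MIGRATION_STEPS_POST_SQL entry
+ # for 5), while from_version=5 skips it as already applied.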
+
+
+class DatabaseFile(PlomDbFile):
+ """File readable as DB of expected schema, user version."""
+ target_version = _EXPECTED_DB_VERSION
+ path_schema = _PATH_DB_SCHEMA
+ mig_class = DatabaseMigration
+
+
+class DatabaseConnection(PlomDbConn):
+ """A single connection to the database."""
+ db_file_class = DatabaseFile
+
+ def close(self) -> None:
+ """Shortcut to sqlite3.Connection.close()."""
+ self._conn.close()
+
+ def rewrite_relations(self, table_name: str, key: str, target: int | str,
+ rows: list[list[Any]], key_index: int = 0) -> None:
+ # pylint: disable=too-many-arguments
+ """Rewrite relations in table_name to target, with rows values.
+
+ Note that individual rows are expected without the value for the key
+ column (i.e. target), which this function splices into each row at
+ key_index.
+ """
+ self.delete_where(table_name, key, target)
+ for row in rows:
+ values = tuple(row[:key_index] + [target] + row[key_index:])
+ self.exec(f'INSERT INTO {table_name} VALUES', values)
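+ # Sketch of the key_index splicing above (hypothetical values): a call
+ # rewrite_relations('todo_children', 'child', 3, [[7], [8]], key_index=1)
+ # writes the rows (7, 3) and (8, 3), i.e. target lands at position 1 of
+ # each row; with the default key_index=0 it would lead each row instead.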
+
+ def row_where(self, table_name: str, key: str,
+ target: int | str) -> list[Row]:
+ """Return list of Rows at table where key == target."""
+ return list(self.exec(f'SELECT * FROM {table_name} WHERE {key} =',
+ (target,)))
+
+ # def column_where_pattern(self,
+ # table_name: str,
+ # column: str,
+ # pattern: str,
+ # keys: list[str]) -> list[Any]:
+ # """Return column of rows where one of keys matches pattern."""
+ # targets = tuple([f'%{pattern}%'] * len(keys))
+ # haystack = ' OR '.join([f'{k} LIKE ?' for k in keys])
+ # sql = f'SELECT {column} FROM {table_name} WHERE {haystack}'
+ # return [row[0] for row in self.exec(sql, targets)]
+
+ def column_where(self, table_name: str, column: str, key: str,
+ target: int | str) -> list[Any]:
+ """Return column of table where key == target."""
+ return [row[0] for row in
+ self.exec(f'SELECT {column} FROM {table_name} '
+ f'WHERE {key} =', (target,))]
+
+ def column_all(self, table_name: str, column: str) -> list[Any]:
+ """Return complete column of table."""
+ return [row[0] for row in
+ self.exec(f'SELECT {column} FROM {table_name}')]
+
+ def delete_where(self, table_name: str, key: str,
+ target: int | str) -> None:
+ """Delete from table where key == target."""
+ self.exec(f'DELETE FROM {table_name} WHERE {key} =', (target,))
+
+
+class BaseModel:
+ """Template for most of the models we use/derive from the DB."""
+ table_name = ''
+ to_save_simples: list[str] = []
+ to_save_relations: list[tuple[str, str, str, int]] = []
+ versioned_defaults: dict[str, str | float] = {}
+ add_to_dict: list[str] = []
+ id_: None | int
+ cache_: dict[int, Self]
+ to_search: list[str] = []
+ can_create_by_id = False
+ _exists = True
+ sorters: dict[str, Callable[..., Any]] = {}
+
+ def __init__(self, id_: int | None) -> None:
+ if isinstance(id_, int) and id_ < 1:
+ msg = f'illegal {self.__class__.__name__} ID, must be >=1: {id_}'
+ raise BadFormatException(msg)
+ self.id_ = id_
+
+ def __hash__(self) -> int:
+ hashable = [self.id_] + [getattr(self, name)
+ for name in self.to_save_simples]
+ for definition in self.to_save_relations:
+ attr = getattr(self, definition[2])
+ hashable += [tuple(rel.id_ for rel in attr)]
+ for name in self.to_save_versioned():
+ hashable += [hash(getattr(self, name))]
+ return hash(tuple(hashable))
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, self.__class__):
+ return False
+ return hash(self) == hash(other)
+
+ def __lt__(self, other: Any) -> bool:
+ if not isinstance(other, self.__class__):
+ msg = 'cannot compare to object of different class'
+ raise HandledException(msg)
+ assert isinstance(self.id_, int)
+ assert isinstance(other.id_, int)
+ return self.id_ < other.id_
+
+ @classmethod
+ def to_save_versioned(cls) -> list[str]:
+ """Return keys of cls.versioned_defaults assuming we wanna save 'em."""
+ return list(cls.versioned_defaults.keys())
+
+ @property
+ def as_dict_and_refs(self) -> tuple[dict[str, object], list[Self]]:
+ """Return self as json.dumps-ready dict, list of referenced objects."""
+ d: dict[str, object] = {'id': self.id_}
+ refs: list[Self] = []
+ for to_save in self.to_save_simples:
+ d[to_save] = getattr(self, to_save)
+ if len(self.to_save_versioned()) > 0:
+ d['_versioned'] = {}
+ for k in self.to_save_versioned():
+ attr = getattr(self, k)
+ assert isinstance(d['_versioned'], dict)
+ d['_versioned'][k] = attr.history
+ rels_to_collect = [rel[2] for rel in self.to_save_relations]
+ rels_to_collect += self.add_to_dict
+ for attr_name in rels_to_collect:
+ rel_list = []
+ for item in getattr(self, attr_name):
+ rel_list += [item.id_]
+ if item not in refs:
+ refs += [item]
+ d[attr_name] = rel_list
+ return d, refs
+
+ @classmethod
+ def name_lowercase(cls) -> str:
+ """Convenience method to return cls' name in lowercase."""
+ return cls.__name__.lower()
+
+ @classmethod
+ def sort_by(cls, seq: list[Any], sort_key: str, default: str = 'title'
+ ) -> str:
+ """Sort cls list by cls.sorters[sort_key] (reverse if '-'-prefixed).
+
+ Before cls.sorters[sort_key] is applied, seq is sorted by .id_, to
+ ensure predictability where parts of seq are of same sort value.
+ """
+ reverse = False
+ if len(sort_key) > 1 and '-' == sort_key[0]:
+ sort_key = sort_key[1:]
+ reverse = True
+ if sort_key not in cls.sorters:
+ sort_key = default
+ seq.sort(key=lambda x: x.id_, reverse=reverse)
+ sorter: Callable[..., Any] = cls.sorters[sort_key]
+ seq.sort(key=sorter, reverse=reverse)
+ if reverse:
+ sort_key = f'-{sort_key}'
+ return sort_key
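+ # Sketch (assuming a list of Condition objects): Condition.sort_by(
+ # conditions, '-title') sorts the list in place by each title.newest in
+ # reverse order and returns '-title'; an unknown sort_key falls back to
+ # the default ('title').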
+
+ # cache management
+ # (we primarily use the cache to ensure we work on the same object in
+ # memory no matter where and how we retrieve it, e.g. we don't want
+ # .by_id() calls to create a new object each time, but rather a pointer
+ # to the one already instantiated)
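+ # (Sketch of the guarantee this aims at, for any BaseModel subclass
+ # SomeModel with a stored row of ID 1; names hypothetical:
+ #     a = SomeModel.by_id(db_conn, 1)
+ #     b = SomeModel.by_id(db_conn, 1)
+ #     assert a is b
+ # i.e. edits through one reference are visible through the other.)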
+
+ def __getattribute__(self, name: str) -> Any:
+ """Ensure fail if ._disappear() was called, except to check ._exists"""
+ if name != '_exists' and not super().__getattribute__('_exists'):
+ msg = f'Object for attribute does not exist: {name}'
+ raise HandledException(msg)
+ return super().__getattribute__(name)
+
+ def _disappear(self) -> None:
+ """Invalidate object, make future use raise exceptions."""
+ assert self.id_ is not None
+ if self._get_cached(self.id_):
+ self._uncache()
+ to_kill = list(self.__dict__.keys())
+ for attr in to_kill:
+ delattr(self, attr)
+ self._exists = False
+
+ @classmethod
+ def empty_cache(cls) -> None:
+ """Empty class's cache, and disappear all former inhabitants."""
+ # pylint: disable=protected-access
+ # (cause we remain within the class)
+ if hasattr(cls, 'cache_'):
+ to_disappear = list(cls.cache_.values())
+ for item in to_disappear:
+ item._disappear()
+ cls.cache_ = {}
+
+ @classmethod
+ def get_cache(cls) -> dict[int, Self]:
+ """Get cache dictionary, create it if not yet existing."""
+ if not hasattr(cls, 'cache_'):
+ d: dict[int, Self] = {}
+ cls.cache_ = d
+ return cls.cache_
+
+ @classmethod
+ def _get_cached(cls, id_: int) -> Self | None:
+ """Get object of id_ from class's cache, or None if not found."""
+ cache = cls.get_cache()
+ if id_ in cache:
+ obj = cache[id_]
+ return obj
+ return None
+
+ def cache(self) -> None:
+ """Update object in class's cache.
+
+ Also calls ._disappear if cache holds older reference to object of same
+ ID, but different memory address, to avoid doing anything with
+ dangling leftovers.
+ """
+ if self.id_ is None:
+ raise HandledException('Cannot cache object without ID.')
+ cache = self.get_cache()
+ old_cached = self._get_cached(self.id_)
+ if old_cached and id(old_cached) != id(self):
+ # pylint: disable=protected-access
+ # (cause we remain within the class)
+ old_cached._disappear()
+ cache[self.id_] = self
+
+ def _uncache(self) -> None:
+ """Remove self from cache."""
+ if self.id_ is None:
+ raise HandledException('Cannot un-cache object without ID.')
+ cache = self.get_cache()
+ del cache[self.id_]
+
+ # object retrieval and generation
+
+ @classmethod
+ def from_table_row(cls,
+ db_conn: DatabaseConnection,
+ row: Row | list[Any]) -> Self:
+ """Make from DB row (sans relations), update DB cache with it."""
+ obj = cls(*row)
+ assert obj.id_ is not None
+ for attr_name in cls.to_save_versioned():
+ attr = getattr(obj, attr_name)
+ table_name = attr.table_name
+ for row_ in db_conn.row_where(table_name, 'parent', obj.id_):
+ attr.history_from_row(row_)
+ obj.cache()
+ return obj
+
+ @classmethod
+ def by_id(cls, db_conn: DatabaseConnection, id_: int) -> Self:
+ """Retrieve by id_, on failure throw NotFoundException.
+
+ First try to get from cls.cache_, only then check DB; if found,
+ put into cache.
+ """
+ obj = None
+ if id_ is not None:
+ if isinstance(id_, int) and id_ == 0:
+ raise BadFormatException('illegal ID of value 0')
+ obj = cls._get_cached(id_)
+ if not obj:
+ for row in db_conn.row_where(cls.table_name, 'id', id_):
+ obj = cls.from_table_row(db_conn, row)
+ break
+ if obj:
+ return obj
+ raise NotFoundException(f'found no object of ID {id_}')
+
+ @classmethod
+ def by_id_or_create(cls, db_conn: DatabaseConnection, id_: int | None
+ ) -> Self:
+ """Wrapper around .by_id, creating (not caching/saving) if no find."""
+ if not cls.can_create_by_id:
+ raise HandledException('Class cannot .by_id_or_create.')
+ if id_ is None:
+ return cls(None)
+ try:
+ return cls.by_id(db_conn, id_)
+ except NotFoundException:
+ return cls(id_)
+
+ @classmethod
+ def all(cls, db_conn: DatabaseConnection) -> list[Self]:
+ """Collect all objects of class into list.
+
+ Note that this primarily returns the contents of the cache, and only
+ _expands_ that by additional findings in the DB. This assumes the
+ cache is always instantly cleaned of any items that would be removed
+ from the DB.
+ """
+ items: dict[int, Self] = {}
+ for k, v in cls.get_cache().items():
+ items[k] = v
+ already_recorded = items.keys()
+ for id_ in db_conn.column_all(cls.table_name, 'id'):
+ if id_ not in already_recorded:
+ item = cls.by_id(db_conn, id_)
+ assert item.id_ is not None
+ items[item.id_] = item
+ return sorted(list(items.values()))
+
+ @classmethod
+ def matching(cls, db_conn: DatabaseConnection, pattern: str) -> list[Self]:
+ """Return all objects whose .to_search match pattern."""
+ items = cls.all(db_conn)
+ if pattern:
+ filtered = []
+ for item in items:
+ for attr_name in cls.to_search:
+ toks = attr_name.split('.')
+ parent = item
+ for tok in toks:
+ attr = getattr(parent, tok)
+ parent = attr
+ if pattern in attr:
+ filtered += [item]
+ break
+ return filtered
+ return items
+
+ # database writing
+
+ def save(self, db_conn: DatabaseConnection) -> None:
+ """Write self to DB and cache and ensure .id_.
+
+ Write both to DB, and to cache. To DB, write .id_ and attributes
+ listed in cls.to_save_[simples|versioned|relations].
+
+ Ensure self.id_ by setting it to what the DB command returns as the
+ last saved row's ID (cursor.lastrowid), EXCEPT if self.id_ already
+ exists as a 'str', which implies we do our own ID creation (so far
+ only the case with the Day class, where it's to be a date string).
+ """
+ values = tuple([self.id_] + [getattr(self, key)
+ for key in self.to_save_simples])
+ table_name = self.table_name
+ cursor = db_conn.exec(f'REPLACE INTO {table_name} VALUES', values)
+ self.id_ = cursor.lastrowid
+ self.cache()
+ for attr_name in self.to_save_versioned():
+ getattr(self, attr_name).save(db_conn)
+ for table, column, attr_name, key_index in self.to_save_relations:
+ assert isinstance(self.id_, int)
+ db_conn.rewrite_relations(table, column, self.id_,
+ [[i.id_] for i
+ in getattr(self, attr_name)], key_index)
+
+ def remove(self, db_conn: DatabaseConnection) -> None:
+ """Remove from DB and cache, including dependencies."""
+ if self.id_ is None or self._get_cached(self.id_) is None:
+ raise HandledException('cannot remove unsaved item')
+ for attr_name in self.to_save_versioned():
+ getattr(self, attr_name).remove(db_conn)
+ for table, column, attr_name, _ in self.to_save_relations:
+ db_conn.delete_where(table, column, self.id_)
+ self._uncache()
+ db_conn.delete_where(self.table_name, 'id', self.id_)
+ self._disappear()
--- /dev/null
+"""Exceptions triggering different HTTP codes."""
+
+
+class HandledException(Exception):
+ """To identify Exceptions based on expected (if faulty) user behavior."""
+ http_code = 500
+
+
+class BadFormatException(HandledException):
+ """To identify Exceptions on malformed inputs."""
+ http_code = 400
+
+
+class NotFoundException(HandledException):
+ """To identify Exceptions on unsuccessful queries."""
+ http_code = 404
--- /dev/null
+"""Web server stuff."""
+from __future__ import annotations
+from pathlib import Path
+from inspect import signature
+from typing import Any, Callable
+from base64 import b64encode, b64decode
+from binascii import Error as binascii_Exception
+from json import dumps as json_dumps
+from taskplom.dating import (
+ days_n_from_dt_date, dt_date_from_str, date_in_n_days)
+from taskplom.days import Day
+from taskplom.exceptions import (HandledException, BadFormatException,
+ NotFoundException)
+from taskplom.db import DatabaseConnection, DatabaseFile, BaseModel
+from taskplom.processes import Process, ProcessStep, ProcessStepsNode
+from taskplom.conditions import Condition
+from taskplom.todos import Todo, TodoOrProcStepNode
+from taskplom.misc import DictableNode
+from plomlib.web import PlomHttpServer, PlomHttpHandler, PlomQueryMap
+
+TEMPLATES_DIR = Path('templates')
+
+
+class TaskServer(PlomHttpServer):
+ """Extends parent by DatabaseFile .db and .render_mode='html'."""
+
+ def __init__(self, db_file: DatabaseFile, *args, **kwargs) -> None:
+ super().__init__(TEMPLATES_DIR, *args, **kwargs)
+ self.db = db_file
+ self.render_mode = 'html'
+
+
+class InputsParser(PlomQueryMap):
+ """Wrapper for validating and retrieving dict-like HTTP inputs."""
+
+ def get_all_str(self, key: str) -> list[str]:
+ """Retrieve list of string values at key (empty if no key)."""
+ return self.all(key) or []
+
+ def get_all_int(self, key: str, fail_on_empty: bool = False) -> list[int]:
+ """Retrieve list of int values at key."""
+ all_str = self.get_all_str(key)
+ try:
+ return [int(s) for s in all_str if fail_on_empty or s != '']
+ except ValueError as e:
+ msg = f'cannot int a form field value for key {key} in: {all_str}'
+ raise BadFormatException(msg) from e
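+ # Sketch: for a hypothetical query string '?id=1&id=2&id=',
+ # get_all_int('id') returns [1, 2] and silently drops the empty value,
+ # while get_all_int('id', fail_on_empty=True) raises BadFormatException
+ # on the empty string.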
+
+ def get_str(self, key: str, default: str | None = None) -> str | None:
+ """Retrieve single/first string value of key, or default."""
+ first = self.first(key)
+ return default if first is None else first
+
+ def get_str_or_fail(self, key: str, default: str | None = None) -> str:
+ """Retrieve first string value of key, if none: fail or default."""
+ vals = self.get_all_str(key)
+ if not vals:
+ if default is not None:
+ return default
+ raise BadFormatException(f'no value found for key: {key}')
+ return vals[0]
+
+ def get_int_or_none(self, key: str) -> int | None:
+ """Retrieve single/first value of key as int, return None if empty."""
+ val = self.get_str_or_fail(key, '')
+ if val == '':
+ return None
+ try:
+ return int(val)
+ except (ValueError, TypeError) as e:
+ msg = f'cannot int form field value for key {key}: {val}'
+ raise BadFormatException(msg) from e
+
+ def get_bool(self, key: str) -> bool:
+ """Return if value to key truish; return False if None/no value."""
+ return self.get_str(key) in {'True', 'true', '1', 'on'}
+
+ def get_all_of_key_prefixed(self, key_prefix: str) -> dict[str, list[str]]:
+ """Retrieve dict of strings at keys starting with key_prefix."""
+ ret = {}
+ for key in self.keys_prefixed(key_prefix):
+ ret[key[len(key_prefix):]] = self.as_dict[key]
+ return ret
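+ # Sketch (hypothetical form fields, assuming the usual multi-value
+ # parsing of repeated fields): posted values step_3=a, step_3=b,
+ # step_7=c make get_all_of_key_prefixed('step_') return
+ # {'3': ['a', 'b'], '7': ['c']}, i.e. the prefix is stripped and each
+ # remaining key maps to all of its values.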
+
+ def get_float_or_fail(self, key: str) -> float:
+ """Retrieve float value of key from self.postvars, fail if none."""
+ val = self.get_str_or_fail(key)
+ try:
+ return float(val)
+ except ValueError as e:
+ msg = f'cannot float form field value for key {key}: {val}'
+ raise BadFormatException(msg) from e
+
+ def get_all_floats_or_nones(self, key: str) -> list[float | None]:
+ """Retrieve list of float value at key, None if empty strings."""
+ ret: list[float | None] = []
+ for val in self.get_all_str(key):
+ if '' == val:
+ ret += [None]
+ else:
+ try:
+ ret += [float(val)]
+ except ValueError as e:
+ msg = f'cannot float form field value for key {key}: {val}'
+ raise BadFormatException(msg) from e
+ return ret
+
+
+class TaskHandler(PlomHttpHandler):
+ """Handles single HTTP request."""
+ # pylint: disable=too-many-public-methods
+ server: TaskServer
+ params: InputsParser
+ postvars: InputsParser
+ mapper = InputsParser
+ _conn: DatabaseConnection
+ _site: str
+
+ def _send_page(
+ self, ctx: dict[str, Any], tmpl_name: str, code: int = 200
+ ) -> None:
+ """HTTP-send ctx as HTML or JSON, as defined by .server.render_mode.
+
+ The differentiation by .server.render_mode serves to allow easily
+ comparable JSON responses for automatic testing.
+ """
+ if 'html' == self.server.render_mode:
+ self.send_rendered(Path(f'{tmpl_name}.html'), ctx, code)
+ else:
+ self.send_http(self._ctx_to_json(ctx).encode(),
+ [('Content-Type', 'application/json')],
+ code)
+
+ def _ctx_to_json(self, ctx: dict[str, object]) -> str:
+ """Render ctx into JSON string.
+
+ Flattens any objects that json.dumps might not want to serialize, and
+ turns occurrences of BaseModel objects into listings of their .id_, to
+ be resolved to a full dict inside a top-level '_library' dictionary,
+ to avoid endless and circular nesting.
+ """
+
+ def flatten(node: object) -> object:
+
+ def update_library_with(item: BaseModel) -> None:
+ cls_name = item.__class__.__name__
+ if cls_name not in library:
+ library[cls_name] = {}
+ if item.id_ not in library[cls_name]:
+ d, refs = item.as_dict_and_refs
+ id_key = -1 if item.id_ is None else item.id_
+ library[cls_name][id_key] = d
+ for ref in refs:
+ update_library_with(ref)
+
+ if isinstance(node, BaseModel):
+ update_library_with(node)
+ return node.id_
+ if isinstance(node, DictableNode):
+ d, refs = node.as_dict_and_refs
+ for ref in refs:
+ update_library_with(ref)
+ return d
+ if isinstance(node, (list, tuple)):
+ return [flatten(item) for item in node]
+ if isinstance(node, dict):
+ d = {}
+ for k, v in node.items():
+ d[k] = flatten(v)
+ return d
+ if isinstance(node, HandledException):
+ return str(node)
+ return node
+
+ library: dict[str, dict[int, object]] = {}
+ for k, v in ctx.items():
+ ctx[k] = flatten(v)
+ ctx['_library'] = library
+ return json_dumps(ctx)
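+ # Rough sketch of the resulting shape (hypothetical values): a ctx like
+ # {'todo': <Todo with id_=3>} flattens to
+ # {"todo": 3, "_library": {"Todo": {"3": {...}}, ...}}, with every
+ # referenced BaseModel spelled out exactly once under '_library'.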
+
+ @staticmethod
+ def _request_wrapper(http_method: str, not_found_msg: str
+ ) -> Callable[..., Callable[[TaskHandler], None]]:
+ """Wrapper for do_GET… and do_POST… handlers, to init and clean up.
+
+ Among other things, conditionally cleans all caches, but only on POST
+ requests, as only those are expected to change the states of objects
+ that may be cached, and certainly only those are expected to write any
+ changes to the database. We want to clear them as early as possible
+ here, though: either right after the specific request handler returns
+ successfully, or right after any exception is triggered; otherwise,
+ race conditions become plausible.
+
+ Note that otherwise any POST attempt, even a failed one, may end in
+ problematic inconsistencies:
+
+ - if the POST handler experiences an Exception, changes to objects
+ won't get written to the DB, but the changed objects may remain in
+ the cache and affect other objects despite their possibly illegal
+ state
+
+ - even if an object was just saved to the DB, we cannot be sure its
+ current state is completely identical to what we'd get if loading it
+ fresh from the DB (e.g. currently Process.n_owners is only updated
+ when loaded anew via .from_table_row, and is not written to the DB by
+ .save; a questionable design choice, but proof that we have no
+ guarantee that objects' .save stores all the state we'd prefer at its
+ most up-to-date).
+ """
+
+ def clear_caches() -> None:
+ for cls in (Day, Todo, Condition, Process, ProcessStep):
+ cls.empty_cache()
+
+ def decorator(f: Callable[..., str | None]
+ ) -> Callable[[TaskHandler], None]:
+ def wrapper(self: TaskHandler) -> None:
+ # pylint: disable=protected-access
+ # (because pylint here fails to detect the use of wrapper as a
+ # method to self with respective access privileges)
+ try:
+ self._conn = DatabaseConnection(self.server.db)
+ handler_name = f'do_{http_method}_{self.pagename}'
+ if hasattr(self, handler_name):
+ handler = getattr(self, handler_name)
+ redir_target = f(self, handler)
+ if 'POST' == http_method:
+ clear_caches()
+ if redir_target:
+ self.redirect(Path(redir_target))
+ else:
+ msg = f'{not_found_msg}: {self.pagename}'
+ raise NotFoundException(msg)
+ except HandledException as error:
+ if 'POST' == http_method:
+ clear_caches()
+ ctx = {'msg': error}
+ self._send_page(ctx, 'msg', error.http_code)
+ finally:
+ self._conn.close()
+ return wrapper
+ return decorator
+
+ @_request_wrapper('GET', 'Unknown page')
+ def do_GET(self, handler: Callable[[], str | dict[str, object]]
+ ) -> str | None:
+ """Render page with result of handler, or redirect if result is str."""
+ tmpl_name = f'{self.pagename}'
+ ctx_or_redir_target = handler()
+ if isinstance(ctx_or_redir_target, str):
+ return ctx_or_redir_target
+ self._send_page(ctx_or_redir_target, tmpl_name)
+ return None
+
+ @_request_wrapper('POST', 'Unknown POST target')
+ def do_POST(self, handler: Callable[[], str]) -> str:
+ """Handle POST with handler, prepare redirection to result."""
+ redir_target = handler()
+ self._conn.commit()
+ return redir_target
+
+ # GET handlers
+
+ @staticmethod
+ def _get_item(target_class: Any
+ ) -> Callable[..., Callable[[TaskHandler],
+ dict[str, object]]]:
+ def decorator(f: Callable[..., dict[str, object]]
+ ) -> Callable[[TaskHandler], dict[str, object]]:
+ def wrapper(self: TaskHandler) -> dict[str, object]:
+ # pylint: disable=protected-access
+ # (because pylint here fails to detect the use of wrapper as a
+ # method to self with respective access privileges)
+ id_ = None
+ for val in self.params.get_all_int('id', fail_on_empty=True):
+ id_ = val
+ if target_class.can_create_by_id:
+ item = target_class.by_id_or_create(self._conn, id_)
+ else:
+ item = target_class.by_id(self._conn, id_)
+ if 'exists' in signature(f).parameters:
+ exists = id_ is not None and target_class._get_cached(id_)
+ return f(self, item, exists)
+ return f(self, item)
+ return wrapper
+ return decorator
+
+ def do_GET_(self) -> str:
+ """Return redirect target on GET /."""
+ return '/day'
+
+ def _do_GET_calendar(self) -> dict[str, object]:
+ """Show Days from ?start= to ?end=.
+
+ Both .do_GET_calendar and .do_GET_calendar_txt delegate here to do the
+ same, the only difference being the HTML template they are rendered
+ with, which .do_GET selects from their method name.
+ """
+ start = self.params.get_str_or_fail('start', '')
+ end = self.params.get_str_or_fail('end', '')
+ dt_start = dt_date_from_str(start if start else date_in_n_days(-1))
+ dt_end = dt_date_from_str(end if end else date_in_n_days(366))
+ days = Day.with_filled_gaps(self._conn, dt_start, dt_end)
+ today = date_in_n_days(0)
+ return {'start': dt_start.isoformat(), 'end': dt_end.isoformat(),
+ 'today': today, 'days': days}
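+ # Sketch of the defaulting above: a bare GET /calendar shows yesterday
+ # through 366 days ahead, while e.g. ?start=today&end=2001-01-01
+ # (hypothetical values) narrows the view to that range.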
+
+ def do_GET_calendar(self) -> dict[str, object]:
+ """Show Days from ?start= to ?end= – normal view."""
+ return self._do_GET_calendar()
+
+ def do_GET_calendar_txt(self) -> dict[str, object]:
+ """Show Days from ?start= to ?end= – minimalist view."""
+ return self._do_GET_calendar()
+
+ def do_GET_day(self) -> dict[str, object]:
+ """Show single Day of ?date=."""
+ date = self.params.get_str('date', date_in_n_days(0))
+ make_type = self.params.get_str_or_fail('make_type', 'full')
+ #
+ assert isinstance(date, str)
+ day = Day.by_id_or_create(self._conn,
+ days_n_from_dt_date(dt_date_from_str(date)))
+ conditions_present = []
+ enablers_for = {}
+ disablers_for = {}
+ for todo in day.todos:
+ for condition in todo.conditions + todo.blockers:
+ if condition not in conditions_present:
+ conditions_present += [condition]
+ enablers_for[condition.id_] = [p for p in
+ Process.all(self._conn)
+ if condition in p.enables]
+ disablers_for[condition.id_] = [p for p in
+ Process.all(self._conn)
+ if condition in p.disables]
+ seen_todos: set[int] = set()
+ top_nodes = [t.get_step_tree(seen_todos)
+ for t in day.todos if not t.parents]
+ return {'day': day,
+ 'top_nodes': top_nodes,
+ 'make_type': make_type,
+ 'enablers_for': enablers_for,
+ 'disablers_for': disablers_for,
+ 'conditions_present': conditions_present,
+ 'processes': Process.all(self._conn)}
+
+ @_get_item(Todo)
+ def do_GET_todo(self, todo: Todo) -> dict[str, object]:
+ """Show single Todo of ?id=."""
+
+ def walk_process_steps(node_id: int,
+ process_step_nodes: list[ProcessStepsNode],
+ steps_nodes: list[TodoOrProcStepNode]) -> int:
+ for process_step_node in process_step_nodes:
+ node_id += 1
+ proc = Process.by_id(self._conn,
+ process_step_node.step.step_process_id)
+ node = TodoOrProcStepNode(node_id, None, proc, [])
+ steps_nodes += [node]
+ node_id = walk_process_steps(
+ node_id, process_step_node.steps, node.children)
+ return node_id
+
+ def walk_todo_steps(node_id: int, todos: list[Todo],
+ steps_nodes: list[TodoOrProcStepNode]) -> int:
+ for todo in todos:
+ matched = False
+ for match in [item for item in steps_nodes
+ if item.process
+ and item.process == todo.process]:
+ match.todo = todo
+ matched = True
+ for child in match.children:
+ child.fillable = True
+ node_id = walk_todo_steps(
+ node_id, todo.children, match.children)
+ if not matched:
+ node_id += 1
+ node = TodoOrProcStepNode(node_id, todo, None, [])
+ steps_nodes += [node]
+ node_id = walk_todo_steps(
+ node_id, todo.children, node.children)
+ return node_id
+
+ def collect_adoptables_keys(
+ steps_nodes: list[TodoOrProcStepNode]) -> set[int]:
+ ids = set()
+ for node in steps_nodes:
+ if not node.todo:
+ assert isinstance(node.process, Process)
+ assert isinstance(node.process.id_, int)
+ ids.add(node.process.id_)
+ ids = ids | collect_adoptables_keys(node.children)
+ return ids
+
+ todo_steps = [step.todo for step in todo.get_step_tree(set()).children]
+ process_tree = todo.process.get_steps(self._conn, None)
+ steps_todo_to_process: list[TodoOrProcStepNode] = []
+ last_node_id = walk_process_steps(0, process_tree,
+ steps_todo_to_process)
+ for steps_node in steps_todo_to_process:
+ steps_node.fillable = True
+ walk_todo_steps(last_node_id, todo_steps, steps_todo_to_process)
+ adoptables: dict[int, list[Todo]] = {}
+ any_adoptables = [Todo.by_id(self._conn, t.id_)
+ for t in Todo.by_date(self._conn, todo.date)
+ if t.id_ is not None
+ and t != todo]
+ for id_ in collect_adoptables_keys(steps_todo_to_process):
+ adoptables[id_] = [t for t in any_adoptables
+ if t.process.id_ == id_]
+ return {'todo': todo,
+ 'steps_todo_to_process': steps_todo_to_process,
+ 'adoption_candidates_for': adoptables,
+ 'process_candidates': sorted(Process.all(self._conn)),
+ 'todo_candidates': any_adoptables,
+ 'condition_candidates': Condition.all(self._conn)}
+
+ def do_GET_todos(self) -> dict[str, object]:
+ """Show Todos from ?start= to ?end=, of ?process=, ?comment= pattern"""
+ sort_by = self.params.get_str_or_fail('sort_by', 'title')
+ start = self.params.get_str_or_fail('start', '')
+ end = self.params.get_str_or_fail('end', '')
+ process_id = self.params.get_int_or_none('process_id')
+ comment_pattern = self.params.get_str_or_fail('comment_pattern', '')
+ #
+ ret = Todo.by_date_range_with_limits(self._conn, (start, end))
+ todos_by_date_range, start, end = ret
+ todos = [t for t in todos_by_date_range
+ if comment_pattern in t.comment
+ and ((not process_id) or t.process.id_ == process_id)]
+ sort_by = Todo.sort_by(todos, sort_by)
+ return {'start': start, 'end': end, 'process_id': process_id,
+ 'comment_pattern': comment_pattern, 'todos': todos,
+ 'all_processes': Process.all(self._conn), 'sort_by': sort_by}
+
+ def do_GET_conditions(self) -> dict[str, object]:
+ """Show all Conditions."""
+ pattern = self.params.get_str_or_fail('pattern', '')
+ sort_by = self.params.get_str_or_fail('sort_by', 'title')
+ #
+ conditions = Condition.matching(self._conn, pattern)
+ sort_by = Condition.sort_by(conditions, sort_by)
+ return {'conditions': conditions,
+ 'sort_by': sort_by,
+ 'pattern': pattern}
+
+ @_get_item(Condition)
+ def do_GET_condition(self,
+ c: Condition,
+ exists: bool
+ ) -> dict[str, object]:
+ """Show Condition of ?id=."""
+ ps = Process.all(self._conn)
+ return {'condition': c,
+ 'is_new': not exists,
+ 'enabled_processes': [p for p in ps if c in p.conditions],
+ 'disabled_processes': [p for p in ps if c in p.blockers],
+ 'enabling_processes': [p for p in ps if c in p.enables],
+ 'disabling_processes': [p for p in ps if c in p.disables]}
+
+ @_get_item(Condition)
+ def do_GET_condition_titles(self, c: Condition) -> dict[str, object]:
+ """Show title history of Condition of ?id=."""
+ return {'condition': c}
+
+ @_get_item(Condition)
+ def do_GET_condition_descriptions(self, c: Condition) -> dict[str, object]:
+ """Show description historys of Condition of ?id=."""
+ return {'condition': c}
+
+ @_get_item(Process)
+ def do_GET_process(self,
+ process: Process,
+ exists: bool
+ ) -> dict[str, object]:
+ """Show Process of ?id=."""
+ owner_ids = self.params.get_all_int('step_to')
+ owned_ids = self.params.get_all_int('has_step')
+ title_64 = self.params.get_str('title_b64')
+ title_new = None
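+        # NB: ?title_b64= carries a title typed into another Process's
+        # form as a new step/owner; do_POST_process encodes it into its
+        # redirect so the title of this fresh Process gets pre-filled.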
+ if title_64:
+ try:
+ title_new = b64decode(title_64.encode()).decode()
+ except binascii_Exception as exc:
+ msg = 'invalid base64 for ?title_b64='
+ raise BadFormatException(msg) from exc
+ #
+ if title_new:
+ process.title.set(title_new)
+ preset_top_step = None
+ owners = process.used_as_step_by(self._conn)
+ for step_id in owner_ids:
+ owners += [Process.by_id(self._conn, step_id)]
+ for process_id in owned_ids:
+ Process.by_id(self._conn, process_id) # to ensure ID exists
+ preset_top_step = process_id
+ return {'process': process,
+ 'is_new': not exists,
+ 'preset_top_step': preset_top_step,
+ 'steps': process.get_steps(self._conn),
+ 'owners': owners,
+ 'n_todos': len(Todo.by_process_id(self._conn, process.id_)),
+ 'process_candidates': Process.all(self._conn),
+ 'condition_candidates': Condition.all(self._conn)}
+
+ @_get_item(Process)
+ def do_GET_process_titles(self, p: Process) -> dict[str, object]:
+ """Show title history of Process of ?id=."""
+ return {'process': p}
+
+ @_get_item(Process)
+ def do_GET_process_descriptions(self, p: Process) -> dict[str, object]:
+ """Show description historys of Process of ?id=."""
+ return {'process': p}
+
+ @_get_item(Process)
+ def do_GET_process_efforts(self, p: Process) -> dict[str, object]:
+ """Show default effort history of Process of ?id=."""
+ return {'process': p}
+
+ def do_GET_processes(self) -> dict[str, object]:
+ """Show all Processes."""
+ pattern = self.params.get_str_or_fail('pattern', '')
+ sort_by = self.params.get_str_or_fail('sort_by', 'title')
+ #
+ processes = Process.matching(self._conn, pattern)
+ sort_by = Process.sort_by(processes, sort_by)
+ return {'processes': processes, 'sort_by': sort_by, 'pattern': pattern}
+
+ # POST handlers
+
+ @staticmethod
+ def _delete_or_post(target_class: Any, redir_target: str = '/'
+ ) -> Callable[..., Callable[[TaskHandler], str]]:
+ def decorator(f: Callable[..., str]
+ ) -> Callable[[TaskHandler], str]:
+ def wrapper(self: TaskHandler) -> str:
+                # pylint: disable=protected-access
+                # (because pylint here fails to detect the use of wrapper as a
+                # method to self with respective access privileges)
+ id_ = self.params.get_int_or_none('id')
+ for _ in self.postvars.get_all_str('delete'):
+ if id_ is None:
+ msg = 'trying to delete non-saved ' +\
+ f'{target_class.__name__}'
+ raise NotFoundException(msg)
+ item = target_class.by_id(self._conn, id_)
+ item.remove(self._conn)
+ return redir_target
+ if target_class.can_create_by_id:
+ item = target_class.by_id_or_create(self._conn, id_)
+ else:
+ item = target_class.by_id(self._conn, id_)
+ return f(self, item)
+ return wrapper
+ return decorator
+
+ def _change_versioned_timestamps(self, cls: Any, attr_name: str) -> str:
+ """Update history timestamps for VersionedAttribute."""
+ id_ = self.params.get_int_or_none('id')
+ item = cls.by_id(self._conn, id_)
+ attr = getattr(item, attr_name)
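+        # NB: postvar keys arrive with the 'at:' prefix stripped, so k
+        # names the stored timestamp and vals[0] carries its (possibly
+        # edited) replacement; '.0' restores sub-second precision.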
+ for k, vals in self.postvars.get_all_of_key_prefixed('at:').items():
+ if k[19:] != vals[0]:
+ attr.reset_timestamp(k, f'{vals[0]}.0')
+ attr.save(self._conn)
+ return f'/{cls.name_lowercase()}_{attr_name}s?id={item.id_}'
+
+ def do_POST_day(self) -> str:
+ """Update or insert Day of date and Todos mapped to it."""
+ # pylint: disable=too-many-locals
+ date = self.params.get_str_or_fail('date')
+ day_comment = self.postvars.get_str_or_fail('day_comment')
+ make_type = self.postvars.get_str_or_fail('make_type')
+ old_todos = self.postvars.get_all_int('todo_id')
+ new_todos_by_process = self.postvars.get_all_int('new_todo')
+ comments = self.postvars.get_all_str('comment')
+ efforts = self.postvars.get_all_floats_or_nones('effort')
+ done_todos = self.postvars.get_all_int('done')
+ is_done = [t_id in done_todos for t_id in old_todos]
+ if not (len(old_todos) == len(is_done) == len(comments)
+ == len(efforts)):
+            msg = 'unequal numbers of todo_id, comment, and effort inputs'
+ raise BadFormatException(msg)
+ for _ in [id_ for id_ in done_todos if id_ not in old_todos]:
+ raise BadFormatException('"done" field refers to unknown Todo')
+ #
+ day_id = days_n_from_dt_date(dt_date_from_str(date))
+ day = Day.by_id_or_create(self._conn, day_id)
+ day.comment = day_comment
+ day.save(self._conn)
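+        # NB: each value of new_todo is a Process ID to create a Todo of;
+        # with make_type 'full', ensure_children below also fills in each
+        # new Todo's step tree (adopting or creating child Todos).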
+ new_todos = []
+ for process_id in sorted(new_todos_by_process):
+ process = Process.by_id(self._conn, process_id)
+ todo = Todo(None, process, False, day_id)
+ todo.save(self._conn)
+ new_todos += [todo]
+ if 'full' == make_type:
+ for todo in new_todos:
+ todo.ensure_children(self._conn)
+ for i, todo_id in enumerate(old_todos):
+ todo = Todo.by_id(self._conn, todo_id)
+ todo.is_done = is_done[i]
+ todo.comment = comments[i]
+ todo.effort = efforts[i]
+ todo.save(self._conn)
+ return f'/day?date={date}&make_type={make_type}'
+
+ @_delete_or_post(Todo, '/')
+ def do_POST_todo(self, todo: Todo) -> str:
+ """Update Todo and its children."""
+ # pylint: disable=too-many-locals
+ # pylint: disable=too-many-branches
+ # pylint: disable=too-many-statements
+ assert isinstance(todo.id_, int)
+ adoptees = [(id_, todo.id_) for id_
+ in self.postvars.get_all_int('adopt')]
+ to_make = {'full': [(id_, todo.id_) for id_
+ in self.postvars.get_all_int('make_full')],
+ 'empty': [(id_, todo.id_) for id_
+ in self.postvars.get_all_int('make_empty')]}
+ step_fillers_to = self.postvars.get_all_of_key_prefixed(
+ 'step_filler_to_')
+ to_update: dict[str, Any] = {
+ 'comment': self.postvars.get_str_or_fail('comment', ''),
+ 'is_done': self.postvars.get_bool('is_done'),
+ 'calendarize': self.postvars.get_bool('calendarize')}
+ cond_rels = [self.postvars.get_all_int(name) for name in
+ ['conditions', 'blockers', 'enables', 'disables']]
+ effort_or_not = self.postvars.get_str('effort')
+ if effort_or_not is not None:
+ if effort_or_not == '':
+ to_update['effort'] = None
+ else:
+ try:
+ to_update['effort'] = float(effort_or_not)
+ except ValueError as e:
+ msg = 'cannot float form field value for key: effort'
+ raise BadFormatException(msg) from e
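+        # NB: step_filler_to_<parent Todo ID> values are either 'ignore',
+        # a Todo ID to adopt under that parent, or 'make_<Process ID>' to
+        # create a new empty Todo of that Process as its child.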
+ for k, fillers in step_fillers_to.items():
+ try:
+ parent_id = int(k)
+ except ValueError as e:
+ msg = f'bad step_filler_to_ key: {k}'
+ raise BadFormatException(msg) from e
+ for filler in [f for f in fillers if f != 'ignore']:
+ target_id: int
+ prefix = 'make_'
+ to_int = filler[5:] if filler.startswith(prefix) else filler
+ try:
+ target_id = int(to_int)
+ except ValueError as e:
+ msg = f'bad fill_for target: {filler}'
+ raise BadFormatException(msg) from e
+ if filler.startswith(prefix):
+ to_make['empty'] += [(target_id, parent_id)]
+ else:
+ adoptees += [(target_id, parent_id)]
+ #
+ todo.set_condition_relations(self._conn, *cond_rels)
+ for parent in [Todo.by_id(self._conn, a[1])
+ for a in adoptees] + [todo]:
+            kept_ids = [t[0] for t in adoptees if t[1] == parent.id_]
+            # iterate over a copy, since remove_child mutates .children
+            for child in parent.children[:]:
+                if child.id_ not in kept_ids:
+                    parent.remove_child(child)
+ parent.save(self._conn)
+ for child_id, parent_id in adoptees:
+ parent = Todo.by_id(self._conn, parent_id)
+ if child_id not in [c.id_ for c in parent.children]:
+ parent.add_child(Todo.by_id(self._conn, child_id))
+ parent.save(self._conn)
+ todo.update_attrs(**to_update)
+ for approach, make_data in to_make.items():
+ for process_id, parent_id in make_data:
+ parent = Todo.by_id(self._conn, parent_id)
+ process = Process.by_id(self._conn, process_id)
+ made = Todo(None, process, False, todo.day_id)
+ made.save(self._conn)
+ if 'full' == approach:
+ made.ensure_children(self._conn)
+ parent.add_child(made)
+ parent.save(self._conn)
+ # todo.save() may destroy Todo if .effort < 0, so retrieve .id_ early
+ url = f'/todo?id={todo.id_}'
+ todo.save(self._conn)
+ return url
+
+ def do_POST_process_descriptions(self) -> str:
+ """Update history timestamps for Process.description."""
+ return self._change_versioned_timestamps(Process, 'description')
+
+ def do_POST_process_efforts(self) -> str:
+ """Update history timestamps for Process.effort."""
+ return self._change_versioned_timestamps(Process, 'effort')
+
+ def do_POST_process_titles(self) -> str:
+ """Update history timestamps for Process.title."""
+ return self._change_versioned_timestamps(Process, 'title')
+
+ @_delete_or_post(Process, '/processes')
+ def do_POST_process(self, process: Process) -> str:
+ """Update or insert Process of ?id= and fields defined in postvars."""
+ # pylint: disable=too-many-locals
+
+ def id_or_title(l_id_or_title: list[str]) -> tuple[str, list[int]]:
+ l_ids, title = [], ''
+ for id_or_title in l_id_or_title:
+ try:
+ l_ids += [int(id_or_title)]
+ except ValueError:
+ title = id_or_title
+ return title, l_ids
+
+ versioned = {
+ 'title': self.postvars.get_str_or_fail('title'),
+ 'description': self.postvars.get_str_or_fail('description'),
+ 'effort': self.postvars.get_float_or_fail('effort')}
+ cond_rels = [self.postvars.get_all_int(s) for s
+ in ['conditions', 'blockers', 'enables', 'disables']]
+ calendarize = self.postvars.get_bool('calendarize')
+ step_of = self.postvars.get_all_str('step_of')
+ suppressions = self.postvars.get_all_int('suppressed_steps')
+ kept_steps = self.postvars.get_all_int('kept_steps')
+ new_top_step_procs = self.postvars.get_all_str('new_top_step')
+ new_steps_to = {
+ int(k): [int(n) for n in v] for (k, v)
+ in self.postvars.get_all_of_key_prefixed('new_step_to_').items()}
+ new_owner_title, owners_to_set = id_or_title(step_of)
+ new_step_title, new_top_step_proc_ids = id_or_title(new_top_step_procs)
+ #
+ for k, v in versioned.items():
+ getattr(process, k).set(v)
+ process.calendarize = calendarize
+ process.save(self._conn)
+ assert isinstance(process.id_, int)
+ # set relations to Conditions and ProcessSteps / other Processes
+ process.set_condition_relations(self._conn, *cond_rels)
+ owned_steps = [ProcessStep.by_id(self._conn, step_id)
+ for step_id in kept_steps]
+ for parent_step_id, step_process_ids in new_steps_to.items():
+ owned_steps += [ProcessStep(None, process.id_, step_process_id,
+ parent_step_id)
+ for step_process_id in step_process_ids]
+ owned_steps += [ProcessStep(None, process.id_, step_process_id, None)
+ for step_process_id in new_top_step_proc_ids]
+ process.set_step_relations(self._conn, owners_to_set, suppressions,
+ owned_steps)
+ # encode titles for potential newly-to-create Processes up or down
+ params = f'id={process.id_}'
+ if new_step_title:
+ title_b64_encoded = b64encode(new_step_title.encode()).decode()
+ params = f'step_to={process.id_}&title_b64={title_b64_encoded}'
+ elif new_owner_title:
+ title_b64_encoded = b64encode(new_owner_title.encode()).decode()
+ params = f'has_step={process.id_}&title_b64={title_b64_encoded}'
+ process.save(self._conn)
+ return f'/process?{params}'
+
+ def do_POST_condition_descriptions(self) -> str:
+ """Update history timestamps for Condition.description."""
+ return self._change_versioned_timestamps(Condition, 'description')
+
+ def do_POST_condition_titles(self) -> str:
+ """Update history timestamps for Condition.title."""
+ return self._change_versioned_timestamps(Condition, 'title')
+
+ @_delete_or_post(Condition, '/conditions')
+ def do_POST_condition(self, condition: Condition) -> str:
+ """Update/insert Condition of ?id= and fields defined in postvars."""
+ title = self.postvars.get_str_or_fail('title')
+ description = self.postvars.get_str_or_fail('description')
+ is_active = self.postvars.get_bool('is_active')
+ condition.is_active = is_active
+ #
+ condition.title.set(title)
+ condition.description.set(description)
+ condition.save(self._conn)
+ return f'/condition?id={condition.id_}'
--- /dev/null
+"""What doesn't fit elsewhere so far."""
+from typing import Any
+
+
+class DictableNode:
+ """Template for display chain nodes providing .as_dict_and_refs."""
+ # pylint: disable=too-few-public-methods
+ _to_dict: list[str] = []
+
+ def __init__(self, *args: Any) -> None:
+ for i, arg in enumerate(args):
+ setattr(self, self._to_dict[i], arg)
+
+ @property
+ def as_dict_and_refs(self) -> tuple[dict[str, object], list[Any]]:
+ """Return self as json.dumps-ready dict, list of referenced objects."""
+ d = {}
+ refs = []
+ for name in self._to_dict:
+ attr = getattr(self, name)
+ if hasattr(attr, 'id_'):
+ d[name] = attr.id_
+ continue
+ if isinstance(attr, list):
+ d[name] = []
+ for item in attr:
+ item_d, item_refs = item.as_dict_and_refs
+ d[name] += [item_d]
+ for item_ref in [r for r in item_refs if r not in refs]:
+ refs += [item_ref]
+ continue
+ d[name] = attr
+ return d, refs
--- /dev/null
+"""Collecting Processes and Process-related items."""
+from __future__ import annotations
+from typing import Set, Self, Any
+from sqlite3 import Row
+from taskplom.misc import DictableNode
+from taskplom.db import DatabaseConnection, BaseModel
+from taskplom.versioned_attributes import VersionedAttribute
+from taskplom.conditions import Condition, ConditionsRelations
+from taskplom.exceptions import (NotFoundException, BadFormatException,
+ HandledException)
+
+
+class ProcessStepsNode(DictableNode):
+ """Collects what's useful to know for ProcessSteps tree display."""
+ # pylint: disable=too-few-public-methods
+ step: ProcessStep
+ process: Process
+ is_explicit: bool
+ steps: list[ProcessStepsNode]
+ seen: bool = False
+ is_suppressed: bool = False
+ _to_dict = ['step', 'process', 'is_explicit', 'steps', 'seen',
+ 'is_suppressed']
+
+
+class Process(BaseModel, ConditionsRelations):
+ """Template for, and metadata for, Todos, and their arrangements."""
+ # pylint: disable=too-many-instance-attributes
+ table_name = 'processes'
+ to_save_simples = ['calendarize']
+ to_save_relations = [('process_conditions', 'process', 'conditions', 0),
+ ('process_blockers', 'process', 'blockers', 0),
+ ('process_enables', 'process', 'enables', 0),
+ ('process_disables', 'process', 'disables', 0),
+ ('process_step_suppressions', 'process',
+ 'suppressed_steps', 0)]
+ add_to_dict = ['explicit_steps']
+ versioned_defaults = {'title': 'UNNAMED', 'description': '', 'effort': 1.0}
+ to_search = ['title.newest', 'description.newest']
+ can_create_by_id = True
+ sorters = {'steps': lambda p: len(p.explicit_steps),
+ 'owners': lambda p: p.n_owners,
+ 'effort': lambda p: p.effort.newest,
+ 'title': lambda p: p.title.newest}
+
+ def __init__(self, id_: int | None, calendarize: bool = False) -> None:
+ super().__init__(id_)
+ ConditionsRelations.__init__(self)
+ for name in ['title', 'description', 'effort']:
+ attr = VersionedAttribute(self, f'process_{name}s',
+ self.versioned_defaults[name])
+ setattr(self, name, attr)
+ self.explicit_steps: list[ProcessStep] = []
+ self.suppressed_steps: list[ProcessStep] = []
+ self.calendarize = calendarize
+ self.n_owners: int | None = None # only set by from_table_row
+
+ @classmethod
+ def from_table_row(cls, db_conn: DatabaseConnection, row: Row | list[Any]
+ ) -> Self:
+ """Make from DB row, with dependencies."""
+ process = super().from_table_row(db_conn, row)
+ assert process.id_ is not None
+ for name in ('conditions', 'blockers', 'enables', 'disables'):
+ table = f'process_{name}'
+ for c_id in db_conn.column_where(table, 'condition',
+ 'process', process.id_):
+ target = getattr(process, name)
+ target += [Condition.by_id(db_conn, c_id)]
+ for row_ in db_conn.row_where('process_steps', 'owner', process.id_):
+ # NB: It's tempting to ProcessStep.from_table_row(row_) directly,
+ # but we don't want to unnecessarily invalidate cached ProcessSteps
+ # elsewhere (notably, other Processes .suppressed_steps), as a
+ # complete reload like this would do
+ step = ProcessStep.by_id(db_conn, row_[0])
+ process.explicit_steps += [step]
+ for row_ in db_conn.row_where('process_step_suppressions', 'process',
+ process.id_):
+ step = ProcessStep.by_id(db_conn, row_[1])
+ process.suppressed_steps += [step]
+ process.n_owners = len(process.used_as_step_by(db_conn))
+ return process
+
+ def used_as_step_by(self, db_conn: DatabaseConnection) -> list[Self]:
+ """Return Processes using self for a ProcessStep."""
+ if not self.id_:
+ return []
+ owner_ids = set()
+ for id_ in db_conn.column_where('process_steps', 'owner',
+ 'step_process', self.id_):
+ owner_ids.add(id_)
+ return [self.__class__.by_id(db_conn, id_) for id_ in owner_ids]
+
+ def get_steps(self,
+ db_conn: DatabaseConnection,
+ external_owner: Self | None = None
+ ) -> list[ProcessStepsNode]:
+ """Return tree of depended-on explicit and implicit ProcessSteps."""
+
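+        # NB: top-level nodes come from self's parentless explicit steps;
+        # each node pulls in the implicit steps of the Process it points
+        # to (unless suppressed), skipping those that explicit steps of
+        # top_owner override, and walk_steps then attaches explicit child
+        # steps and marks already-encountered steps as seen.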
+ def make_node(step: ProcessStep, suppressed: bool) -> ProcessStepsNode:
+ is_explicit = step.owner_id == top_owner.id_
+ process = self.__class__.by_id(db_conn, step.step_process_id)
+ step_steps = []
+ if not suppressed:
+ # exclude implicit siblings to explicit steps of same process
+ step_steps = [n for n in process.get_steps(db_conn, top_owner)
+ if not [s for s in top_owner.explicit_steps
+ if s.parent_step_id == step.id_
+ and s.step_process_id == n.process.id_]]
+ return ProcessStepsNode(step, process, is_explicit, step_steps,
+ False, suppressed)
+
+ def walk_steps(node: ProcessStepsNode) -> None:
+ node.seen = node.step.id_ in seen_step_ids
+ assert isinstance(node.step.id_, int)
+ seen_step_ids.add(node.step.id_)
+ if node.is_suppressed:
+ return
+ explicit_children = [s for s in self.explicit_steps
+ if s.parent_step_id == node.step.id_]
+ for child in explicit_children:
+ node.steps += [make_node(child, False)]
+ for step in node.steps:
+ walk_steps(step)
+
+ step_nodes: list[ProcessStepsNode] = []
+ seen_step_ids: Set[int] = set()
+ top_owner = external_owner or self
+ for step in [s for s in self.explicit_steps
+ if s.parent_step_id is None]:
+ new_node = make_node(step, step in top_owner.suppressed_steps)
+ step_nodes += [new_node]
+ for step_node in step_nodes:
+ walk_steps(step_node)
+ return step_nodes
+
+ def set_step_relations(self,
+ db_conn: DatabaseConnection,
+ owners: list[int],
+ suppressions: list[int],
+ owned_steps: list[ProcessStep]
+ ) -> None:
+ """Set step owners, suppressions, and owned steps."""
+ self._set_owners(db_conn, owners)
+ self._set_step_suppressions(db_conn, suppressions)
+ self.set_steps(db_conn, owned_steps)
+
+ def _set_step_suppressions(self,
+ db_conn: DatabaseConnection,
+ step_ids: list[int]
+ ) -> None:
+ """Set self.suppressed_steps from step_ids."""
+ assert isinstance(self.id_, int)
+ db_conn.delete_where('process_step_suppressions', 'process', self.id_)
+ self.suppressed_steps = [ProcessStep.by_id(db_conn, s)
+ for s in step_ids]
+
+ def _set_owners(self,
+ db_conn: DatabaseConnection,
+ owner_ids: list[int]
+ ) -> None:
+ """Re-set owners to those identified in owner_ids."""
+ owners_old = self.used_as_step_by(db_conn)
+ losers = [o for o in owners_old if o.id_ not in owner_ids]
+ owners_old_ids = [o.id_ for o in owners_old]
+ winners = [self.by_id(db_conn, id_) for id_ in owner_ids
+ if id_ not in owners_old_ids]
+ steps_to_remove = []
+ for loser in losers:
+ steps_to_remove += [s for s in loser.explicit_steps
+ if s.step_process_id == self.id_]
+ for step in steps_to_remove:
+ step.remove(db_conn)
+ for winner in winners:
+ assert isinstance(winner.id_, int)
+ assert isinstance(self.id_, int)
+ new_step = ProcessStep(None, winner.id_, self.id_, None)
+ new_explicit_steps = winner.explicit_steps + [new_step]
+ winner.set_steps(db_conn, new_explicit_steps)
+
+ def set_steps(self,
+ db_conn: DatabaseConnection,
+ steps: list[ProcessStep]
+ ) -> None:
+ """Set self.explicit_steps in bulk.
+
+        Checks against recursion, and turns any step with an unknown or
+        non-owned parent into a top-level step.
+ """
+ def walk_steps(node: ProcessStep) -> None:
+ if node.step_process_id == self.id_:
+ raise BadFormatException('bad step selection causes recursion')
+ step_process = self.by_id(db_conn, node.step_process_id)
+ for step in step_process.explicit_steps:
+ walk_steps(step)
+
+ # NB: separate the collection of steps to save/remove from the action
+ # because the latter may modify the collection / self.explicit_steps
+ to_remove = []
+ for step in [s for s in self.explicit_steps if s not in steps]:
+ to_remove += [step]
+ for step in to_remove:
+ step.remove(db_conn)
+ to_save = []
+ for step in [s for s in steps if s not in self.explicit_steps]:
+ if step.parent_step_id is not None:
+ try:
+ parent_step = ProcessStep.by_id(db_conn,
+ step.parent_step_id)
+ if parent_step.owner_id != self.id_:
+ step.parent_step_id = None
+ except NotFoundException:
+ step.parent_step_id = None
+ walk_steps(step)
+ to_save += [step]
+ for step in to_save:
+ step.save(db_conn)
+
+ def save(self, db_conn: DatabaseConnection) -> None:
+ """Add (or re-write) self and connected items to DB."""
+ super().save(db_conn)
+ assert isinstance(self.id_, int)
+ db_conn.delete_where('process_steps', 'owner', self.id_)
+ # NB: we separate the collection of steps to save from step.save()
+ # because the latter may modify the collection / self.explicit_steps
+ to_save = []
+ for step in self.explicit_steps:
+ to_save += [step]
+ for step in to_save:
+ step.save(db_conn)
+
+ def remove(self, db_conn: DatabaseConnection) -> None:
+ """Remove from DB, with dependencies.
+
+ Guard against removal of Processes in use.
+ """
+ assert isinstance(self.id_, int)
+ for _ in db_conn.row_where('process_steps', 'step_process', self.id_):
+ raise HandledException('cannot remove Process in use')
+ for _ in db_conn.row_where('todos', 'process', self.id_):
+ raise HandledException('cannot remove Process in use')
+ for step in self.explicit_steps:
+ step.remove(db_conn)
+ super().remove(db_conn)
+
+
+class ProcessStep(BaseModel):
+ """Sub-unit of Processes."""
+ table_name = 'process_steps'
+ to_save_simples = ['owner_id', 'step_process_id', 'parent_step_id']
+
+ def __init__(self, id_: int | None, owner_id: int, step_process_id: int,
+ parent_step_id: int | None) -> None:
+ super().__init__(id_)
+ self.owner_id = owner_id
+ self.step_process_id = step_process_id
+ self.parent_step_id = parent_step_id
+
+ def save(self, db_conn: DatabaseConnection) -> None:
+ """Update into DB/cache, and owner's .explicit_steps."""
+ super().save(db_conn)
+ owner = Process.by_id(db_conn, self.owner_id)
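+        # NB: a stale step object of the same ID may already sit in the
+        # owner's .explicit_steps; drop it before (re-)appending self.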
+ if self not in owner.explicit_steps:
+ for s in [s for s in owner.explicit_steps if s.id_ == self.id_]:
+ s.remove(db_conn)
+ owner.explicit_steps += [self]
+ owner.explicit_steps.sort(key=hash)
+
+ def remove(self, db_conn: DatabaseConnection) -> None:
+ """Remove from DB, and owner's .explicit_steps."""
+ owner = Process.by_id(db_conn, self.owner_id)
+ owner.explicit_steps.remove(self)
+ super().remove(db_conn)
--- /dev/null
+"""Actionables."""
+from __future__ import annotations
+from datetime import date as dt_date
+from typing import Any, Self, Set
+from sqlite3 import Row
+from taskplom.misc import DictableNode
+from taskplom.db import DatabaseConnection, BaseModel
+from taskplom.processes import Process, ProcessStepsNode
+from taskplom.versioned_attributes import VersionedAttribute
+from taskplom.conditions import Condition, ConditionsRelations
+from taskplom.exceptions import (NotFoundException, BadFormatException,
+ HandledException)
+from taskplom.dating import (
+ days_n_from_dt_date, dt_date_from_str, dt_date_from_days_n)
+
+
+class TodoNode(DictableNode):
+ """Collects what's useful to know for Todo/Condition tree display."""
+ # pylint: disable=too-few-public-methods
+ todo: Todo
+ seen: bool
+ children: list[TodoNode]
+ _to_dict = ['todo', 'seen', 'children']
+
+
+class TodoOrProcStepNode(DictableNode):
+ """Collect what's useful for Todo-or-ProcessStep tree display."""
+ # pylint: disable=too-few-public-methods
+ node_id: int
+ todo: Todo | None
+ process: Process | None
+ children: list[TodoOrProcStepNode] # pylint: disable=undefined-variable
+ fillable: bool = False
+ _to_dict = ['node_id', 'todo', 'process', 'children', 'fillable']
+
+
+class Todo(BaseModel, ConditionsRelations):
+ """Individual actionable."""
+ # pylint: disable=too-many-instance-attributes
+ # pylint: disable=too-many-public-methods
+ table_name = 'todos'
+ to_save_simples = ['process_id', 'is_done', 'day_id', 'comment', 'effort',
+ 'calendarize']
+ to_save_relations = [('todo_conditions', 'todo', 'conditions', 0),
+ ('todo_blockers', 'todo', 'blockers', 0),
+ ('todo_enables', 'todo', 'enables', 0),
+ ('todo_disables', 'todo', 'disables', 0),
+ ('todo_children', 'parent', 'children', 0),
+ ('todo_children', 'child', 'parents', 1)]
+ to_search = ['comment']
+ days_to_update: Set[int] = set()
+ children: list[Todo]
+ parents: list[Todo]
+ sorters = {'doneness': lambda t: t.is_done,
+ 'title': lambda t: t.title_then,
+ 'comment': lambda t: t.comment,
+ 'date': lambda t: t.day_id}
+
+ # pylint: disable=too-many-arguments
+ def __init__(self, id_: int | None,
+ process: Process,
+ is_done: bool,
+ day_id: int,
+ comment: str = '',
+ effort: None | float = None,
+ calendarize: bool = False
+ ) -> None:
+ super().__init__(id_)
+ ConditionsRelations.__init__(self)
+ if process.id_ is None:
+ raise NotFoundException('Process of Todo without ID (not saved?)')
+ self.process = process
+ self._is_done = is_done
+ self.day_id = day_id
+ self.comment = comment
+ self.effort = effort
+ self.children = []
+ self.parents = []
+ self.calendarize = calendarize
+ if not self.id_:
+ self.calendarize = self.process.calendarize
+ self.conditions = self.process.conditions[:]
+ self.blockers = self.process.blockers[:]
+ self.enables = self.process.enables[:]
+ self.disables = self.process.disables[:]
+
+ @property
+ def date(self) -> str:
+ """Return ISO formatted date matching .day_id."""
+ return dt_date_from_days_n(self.day_id).isoformat()
+
+ @classmethod
+ def by_date_range_with_limits(cls,
+ db_conn: DatabaseConnection,
+ date_range: tuple[str, str],
+ ) -> tuple[list[Self], str, str]:
+ """Return Todos within (closed) date_range interval.
+
+        If no range values are provided, they default to 'yesterday' and
+        'tomorrow'; these and 'today' are properly resolved to dates.
+ """
+ dt_date_limits: list[dt_date] = []
+ for i in range(2):
+ dt_date_limits += [
+ dt_date_from_str(date_range[i] if date_range[i]
+ else ('yesterday', 'tomorrow')[i])]
+ items: list[Self] = []
+ for row in db_conn.exec(
+ f'SELECT id FROM {cls.table_name} WHERE day >= ? AND day <= ?',
+ tuple(days_n_from_dt_date(d) for d in dt_date_limits),
+ build_q_marks=False):
+ items += [cls.by_id(db_conn, row[0])]
+ return (items,
+ dt_date_limits[0].isoformat(), dt_date_limits[1].isoformat())
+
+ def ensure_children(self, db_conn: DatabaseConnection) -> None:
+ """Ensure Todo children (create or adopt) demanded by Process chain."""
+
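+        # NB: for each non-suppressed step of the Todo's Process, adopt an
+        # existing same-day Todo of the step's Process if available, else
+        # create one; recurse into sub-steps, only creating what the
+        # adopted Todo's existing children do not already cover.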
+ def walk_steps(parent: Self, step_node: ProcessStepsNode) -> Todo:
+ adoptables = [t for t in self.by_date(db_conn, parent.date)
+ if (t not in parent.children)
+ and (t != parent)
+ and step_node.process.id_ == t.process_id]
+ satisfier = None
+ for adoptable in adoptables:
+ satisfier = adoptable
+ break
+ if not satisfier:
+ satisfier = self.__class__(None, step_node.process, False,
+ parent.day_id)
+ satisfier.save(db_conn)
+ sub_step_nodes = sorted(
+ step_node.steps,
+ key=lambda s: s.process.id_ if s.process.id_ else 0)
+ for sub_node in sub_step_nodes:
+ if sub_node.is_suppressed:
+ continue
+ n_slots = len([n for n in sub_step_nodes
+ if n.process == sub_node.process])
+ filled_slots = len([t for t in satisfier.children
+ if t.process.id_ == sub_node.process.id_])
+ # if we did not newly create satisfier, it may already fill
+ # some step dependencies, so only fill what remains open
+ if n_slots - filled_slots > 0:
+ satisfier.add_child(walk_steps(satisfier, sub_node))
+ satisfier.save(db_conn)
+ return satisfier
+
+ process = Process.by_id(db_conn, self.process_id)
+ steps_tree = process.get_steps(db_conn)
+ for step_node in steps_tree:
+ if step_node.is_suppressed:
+ continue
+ self.add_child(walk_steps(self, step_node))
+ self.save(db_conn)
+
+ @classmethod
+ def from_table_row(cls, db_conn: DatabaseConnection,
+ row: Row | list[Any]) -> Self:
+ """Make from DB row, with dependencies."""
+ if row[1] == 0:
+ raise NotFoundException('calling Todo of '
+ 'unsaved Process')
+ row_as_list = list(row)
+ row_as_list[1] = Process.by_id(db_conn, row[1])
+ todo = super().from_table_row(db_conn, row_as_list)
+ assert isinstance(todo.id_, int)
+ for t_id in db_conn.column_where('todo_children', 'child',
+ 'parent', todo.id_):
+ todo.children += [cls.by_id(db_conn, t_id)]
+ for t_id in db_conn.column_where('todo_children', 'parent',
+ 'child', todo.id_):
+ todo.parents += [cls.by_id(db_conn, t_id)]
+ for name in ('conditions', 'blockers', 'enables', 'disables'):
+ table = f'todo_{name}'
+ for cond_id in db_conn.column_where(table, 'condition',
+ 'todo', todo.id_):
+ target = getattr(todo, name)
+ target += [Condition.by_id(db_conn, cond_id)]
+ return todo
+
+ @classmethod
+ def by_process_id(cls, db_conn: DatabaseConnection,
+ process_id: int | None) -> list[Self]:
+ """Collect all Todos of Process of process_id."""
+ return [t for t in cls.all(db_conn) if t.process.id_ == process_id]
+
+ @classmethod
+ def by_date(cls, db_conn: DatabaseConnection, date: str) -> list[Self]:
+ """Collect all Todos for Day of date."""
+ return cls.by_date_range_with_limits(db_conn, (date, date))[0]
+
+ @property
+ def is_doable(self) -> bool:
+ """Decide whether .is_done settable based on children, Conditions."""
+ for child in self.children:
+ if not child.is_done:
+ return False
+ for condition in self.conditions:
+ if not condition.is_active:
+ return False
+ for condition in self.blockers:
+ if condition.is_active:
+ return False
+ return True
+
+ @property
+ def is_deletable(self) -> bool:
+ """Decide whether self be deletable (not if preserve-worthy values)."""
+ if self.comment:
+ return False
+ if self.effort and self.effort >= 0:
+ return False
+ return True
+
+ @property
+ def performed_effort(self) -> float:
+ """Return performed effort, i.e. self.effort or default if done.."""
+ if self.effort is not None:
+ return self.effort
+ if self.is_done:
+ return self.effort_then
+ return 0
+
+ @property
+ def process_id(self) -> int:
+ """Needed for super().save to save Processes as attributes."""
+ assert isinstance(self.process.id_, int)
+ return self.process.id_
+
+ @property
+ def is_done(self) -> bool:
+ """Wrapper around self._is_done so we can control its setter."""
+ return self._is_done
+
+ @is_done.setter
+ def is_done(self, value: bool) -> None:
+ if value != self.is_done and not self.is_doable:
+ raise BadFormatException('cannot change doneness of undoable Todo')
+ if self._is_done != value:
+ self._is_done = value
+ if value is True:
+ for condition in self.enables:
+ condition.is_active = True
+ for condition in self.disables:
+ condition.is_active = False
+
+ @property
+ def title(self) -> VersionedAttribute:
+ """Shortcut to .process.title."""
+ assert isinstance(self.process.title, VersionedAttribute)
+ return self.process.title
+
+ @property
+ def title_then(self) -> str:
+ """Shortcut to .process.title.at(self.date)."""
+ title_then = self.process.title.at(self.date)
+ assert isinstance(title_then, str)
+ return title_then
+
+ @property
+ def effort_then(self) -> float:
+ """Shortcut to .process.effort.at(self.date)"""
+ effort_then = self.process.effort.at(self.date)
+ assert isinstance(effort_then, float)
+ return effort_then
+
+ @property
+ def has_doneness_in_path(self) -> bool:
+ """Check whether self is done or has any children that are."""
+ if self.is_done:
+ return True
+ for child in self.children:
+ if child.is_done:
+ return True
+ if child.has_doneness_in_path:
+ return True
+ return False
+
+ def get_step_tree(self, seen_todos: set[int]) -> TodoNode:
+ """Return tree of depended-on Todos."""
+
+ def make_node(todo: Self) -> TodoNode:
+ children = []
+ seen = todo.id_ in seen_todos
+ assert isinstance(todo.id_, int)
+ seen_todos.add(todo.id_)
+ for child in todo.children:
+ children += [make_node(child)]
+ return TodoNode(todo, seen, children)
+
+ return make_node(self)
+
+ @property
+ def tree_effort(self) -> float:
+ """Return sum of performed efforts of self and all descendants."""
+
+ def walk_tree(node: Self) -> float:
+ local_effort = 0.0
+ for child in node.children:
+ local_effort += walk_tree(child)
+ return node.performed_effort + local_effort
+
+ return walk_tree(self)
+
+ def add_child(self, child: Self) -> None:
+ """Add child to self.children, avoid recursion, update parenthoods."""
+
+ def walk_steps(node: Self) -> None:
+ if node.id_ == self.id_:
+ raise BadFormatException('bad child choice causes recursion')
+ for child in node.children:
+ walk_steps(child)
+
+ if self.id_ is None:
+ raise HandledException('Can only add children to saved Todos.')
+ if child.id_ is None:
+ raise HandledException('Can only add saved children to Todos.')
+ if child in self.children:
+ raise BadFormatException('cannot adopt same child twice')
+ walk_steps(child)
+ self.children += [child]
+ child.parents += [self]
+
+ def remove_child(self, child: Self) -> None:
+ """Remove child from self.children, update counter relations."""
+ if child not in self.children:
+ raise HandledException('Cannot remove un-parented child.')
+ self.children.remove(child)
+ child.parents.remove(self)
+
+ def update_attrs(self, **kwargs: Any) -> None:
+ """Update self's attributes listed in kwargs."""
+ for k, v in kwargs.items():
+ setattr(self, k, v)
+
+ def save(self, db_conn: DatabaseConnection) -> None:
+ """On save calls, also check if auto-deletion by effort < 0."""
+ if self.effort and self.effort < 0 and self.is_deletable:
+ self.remove(db_conn)
+ return
+ if self.id_ is None:
+ self.__class__.days_to_update.add(self.day_id)
+ super().save(db_conn)
+ for condition in self.enables + self.disables + self.conditions:
+ condition.save(db_conn)
+
+ def remove(self, db_conn: DatabaseConnection) -> None:
+ """Remove from DB, including relations."""
+ if not self.is_deletable:
+ raise HandledException('Cannot remove non-deletable Todo.')
+ self.__class__.days_to_update.add(self.day_id)
+ children_to_remove = self.children[:]
+ parents_to_remove = self.parents[:]
+ for child in children_to_remove:
+ self.remove_child(child)
+ for parent in parents_to_remove:
+ parent.remove_child(self)
+ super().remove(db_conn)
--- /dev/null
+"""Attributes whose values are recorded as a timestamped history."""
+from datetime import datetime
+from typing import Any
+from sqlite3 import Row
+from time import sleep
+from taskplom.db import DatabaseConnection
+from taskplom.exceptions import (HandledException, BadFormatException,
+ NotFoundException)
+
+TIMESTAMP_FMT = '%Y-%m-%d %H:%M:%S.%f'
+
+
+class VersionedAttribute:
+ """Attributes whose values are recorded as a timestamped history."""
+
+ def __init__(self,
+ parent: Any, table_name: str, default: str | float) -> None:
+ self.parent = parent
+ self.table_name = table_name
+ self._default = default
+ self.history: dict[str, str | float] = {}
+ # NB: For tighter mypy testing, we might prefer self.history to be
+ # dict[str, float] | dict[str, str] instead, but my current coding
+ # knowledge only manages to make that work by adding much further
+ # complexity, so let's leave it at that for now …
+
+ def __hash__(self) -> int:
+ history_tuples = tuple((k, v) for k, v in self.history.items())
+ hashable = (self.parent.id_, self.table_name, self._default,
+ history_tuples)
+ return hash(hashable)
+
+ @property
+ def _newest_timestamp(self) -> str:
+ """Return most recent timestamp."""
+ return sorted(self.history.keys())[-1]
+
+ @property
+ def value_type_name(self) -> str:
+ """Return string of name of attribute value type."""
+ return type(self._default).__name__
+
+ @property
+ def newest(self) -> str | float:
+ """Return most recent value, or self._default if self.history empty."""
+ if 0 == len(self.history):
+ return self._default
+ return self.history[self._newest_timestamp]
+
+ def reset_timestamp(self, old_str: str, new_str: str) -> None:
+ """Rename self.history key (timestamp) old to new.
+
+ Chronological sequence of keys must be preserved, i.e. cannot move
+ key before earlier or after later timestamp.
+ """
+ try:
+ new = datetime.strptime(new_str, TIMESTAMP_FMT)
+ old = datetime.strptime(old_str, TIMESTAMP_FMT)
+ except ValueError as exc:
+ raise BadFormatException('Timestamp of illegal format.') from exc
+ timestamps = list(self.history.keys())
+ if old_str not in timestamps:
+ raise HandledException(f'Timestamp {old} not found in history.')
+ sorted_timestamps = sorted([datetime.strptime(t, TIMESTAMP_FMT)
+ for t in timestamps])
+ expected_position = sorted_timestamps.index(old)
+ sorted_timestamps.remove(old)
+ sorted_timestamps += [new]
+ sorted_timestamps.sort()
+ if sorted_timestamps.index(new) != expected_position:
+ raise HandledException('Timestamp not respecting chronology.')
+ value = self.history[old_str]
+ del self.history[old_str]
+ self.history[new_str] = value
+
+ def set(self, value: str | float) -> None:
+ """Add to self.history if and only if not same value as newest one.
+
+        Note that we sleep briefly first, so that consecutive set() calls
+        get distinct (strictly increasing) timestamp keys, whose precision
+        is limited to microseconds.
+
+ Also note that we don't check against .newest because that may make us
+ compare value against .default even if not set. We want to be able to
+ explicitly set .default as the first element.
+ """
+ sleep(0.00001)
+ if 0 == len(self.history) \
+ or value != self.history[self._newest_timestamp]:
+ self.history[datetime.now().strftime(TIMESTAMP_FMT)] = value
+
+ def history_from_row(self, row: Row) -> None:
+ """Extend self.history from expected table row format."""
+ self.history[row[1]] = row[2]
+
+ def at(self, queried_time: str) -> str | float:
+ """Retrieve value of timestamp nearest queried_time from the past."""
+ if len(queried_time) == 10:
+ queried_time += ' 23:59:59.999'
+ sorted_timestamps = sorted(self.history.keys())
+ if 0 == len(sorted_timestamps):
+ return self._default
+ selected_timestamp = sorted_timestamps[0]
+ for timestamp in sorted_timestamps[1:]:
+ if timestamp > queried_time:
+ break
+ selected_timestamp = timestamp
+ return self.history[selected_timestamp]
+
+ def save(self, db_conn: DatabaseConnection) -> None:
+ """Save as self.history entries, but first wipe old ones."""
+ if self.parent.id_ is None:
+ raise NotFoundException('cannot save attribute to parent if no ID')
+ db_conn.rewrite_relations(self.table_name, 'parent', self.parent.id_,
+ [[item[0], item[1]]
+ for item in self.history.items()])
+
+ def remove(self, db_conn: DatabaseConnection) -> None:
+ """Remove from DB."""
+ db_conn.delete_where(self.table_name, 'parent', self.parent.id_)
from typing import Any
from tests.utils import (TestCaseSansDB, TestCaseWithDB, TestCaseWithServer,
Expected)
-from plomtask.conditions import Condition
+from taskplom.conditions import Condition
class TestsSansDB(TestCaseSansDB):
from typing import Any
from tests.utils import (TestCaseSansDB, TestCaseWithDB, TestCaseWithServer,
Expected, date_and_day_id, dt_date_from_day_id)
-from plomtask.dating import date_in_n_days as tested_date_in_n_days
-from plomtask.days import Day
+from taskplom.dating import date_in_n_days as tested_date_in_n_days
+from taskplom.days import Day
-# Simply the ISO format for dates as used in plomtask.dating, but for testing
+# Simply the ISO format for dates as used in taskplom.dating, but for testing
# purposes we state our expectations here independently and explicitly
TESTING_DATE_FORMAT = '%Y-%m-%d'
"""Return in ISO format / TEST_DATE_FORMAT date from today + n days.
As with TESTING_DATE_FORMAT, we assume this equal the original's code
- at plomtask.dating.date_in_n_days, but want to state our expectations
+ at taskplom.dating.date_in_n_days, but want to state our expectations
explicitly to rule out importing issues from the original.
"""
date = dt_date.today() + timedelta(days=n)
from typing import Callable
from unittest import TestCase
from tests.utils import TestCaseWithServer
-from plomtask.http import InputsParser
-from plomtask.exceptions import BadFormatException
+from taskplom.http import InputsParser
+from taskplom.exceptions import BadFormatException
class TestsSansServer(TestCase):
from typing import Any
from tests.utils import (TestCaseSansDB, TestCaseWithDB, TestCaseWithServer,
Expected)
-from plomtask.processes import Process, ProcessStep
-from plomtask.exceptions import NotFoundException
+from taskplom.processes import Process, ProcessStep
+from taskplom.exceptions import NotFoundException
class TestsSansDB(TestCaseSansDB):
from datetime import date as dt_date, timedelta
from tests.utils import (TestCaseSansDB, TestCaseWithDB, TestCaseWithServer,
Expected, date_and_day_id)
-from plomtask.todos import Todo
-from plomtask.processes import Process
-from plomtask.exceptions import BadFormatException, HandledException
+from taskplom.todos import Todo
+from taskplom.processes import Process
+from taskplom.exceptions import BadFormatException, HandledException
class TestsWithDB(TestCaseWithDB, TestCaseSansDB):
NAME_SRC_DIR = 'src'
sys_path[0:0] = [NAME_SRC_DIR]
# pylint: disable=wrong-import-position
-from plomtask.db import DatabaseFile, DatabaseConnection # noqa: E402
-from plomtask.http import TaskHandler, TaskServer # noqa: E402
-from plomtask.processes import Process, ProcessStep # noqa: E402
-from plomtask.conditions import Condition # noqa: E402
-from plomtask.days import Day # noqa: E402
-from plomtask.todos import Todo # noqa: E402
-from plomtask.versioned_attributes import ( # noqa: E402
+from taskplom.db import DatabaseFile, DatabaseConnection # noqa: E402
+from taskplom.http import TaskHandler, TaskServer # noqa: E402
+from taskplom.processes import Process, ProcessStep # noqa: E402
+from taskplom.conditions import Condition # noqa: E402
+from taskplom.days import Day # noqa: E402
+from taskplom.todos import Todo # noqa: E402
+from taskplom.versioned_attributes import ( # noqa: E402
VersionedAttribute, TIMESTAMP_FMT)
-from plomtask.exceptions import ( # noqa: E402
+from taskplom.exceptions import ( # noqa: E402
NotFoundException, HandledException)