[submodule "plomlib"]
- path = plomlib
+ path = src/plomlib
url = https://plomlompom.com/repos/clone/plomlib
+++ /dev/null
-[BASIC]
-init-hook='import sys; sys.path.append(".")'
-good-names-rgxs=(.*_)?(GET|POST)(_.+)?,,test_[A-Z]+
--- /dev/null
+#!/usr/bin/sh
+set -e
+
+PATH_APP_SHARE=~/.local/share/taskplom
+PATH_LOCAL_BIN=~/.local/bin
+NAME_EXECUTABLE=taskplom
+
+mkdir -p "${PATH_APP_SHARE}" "${PATH_LOCAL_BIN}"
+
+cp -r ./src/* "${PATH_APP_SHARE}/"
+cp "${NAME_EXECUTABLE}" "${PATH_LOCAL_BIN}/"
+
+echo "Installed executable to ${PATH_LOCAL_BIN}/${NAME_EXECUTABLE}, app files to ${PATH_APP_SHARE}."
+++ /dev/null
-CREATE TABLE condition_descriptions (
- parent INTEGER NOT NULL,
- timestamp TEXT NOT NULL,
- description TEXT NOT NULL,
- PRIMARY KEY (parent, timestamp),
- FOREIGN KEY (parent) REFERENCES conditions(id)
-);
-CREATE TABLE condition_titles (
- parent INTEGER NOT NULL,
- timestamp TEXT NOT NULL,
- title TEXT NOT NULL,
- PRIMARY KEY (parent, timestamp),
- FOREIGN KEY (parent) REFERENCES conditions(id)
-);
-CREATE TABLE conditions (
- id INTEGER PRIMARY KEY,
- is_active BOOLEAN NOT NULL
-);
-CREATE TABLE days (
- id TEXT PRIMARY KEY,
- comment TEXT NOT NULL
-);
-CREATE TABLE process_conditions (
- process INTEGER NOT NULL,
- condition INTEGER NOT NULL,
- PRIMARY KEY (process, condition),
- FOREIGN KEY (process) REFERENCES processes(id),
- FOREIGN KEY (condition) REFERENCES conditions(id)
-);
-CREATE TABLE process_descriptions (
- parent INTEGER NOT NULL,
- timestamp TEXT NOT NULL,
- description TEXT NOT NULL,
- PRIMARY KEY (parent, timestamp),
- FOREIGN KEY (parent) REFERENCES processes(id)
-);
-CREATE TABLE process_disables (
- process INTEGER NOT NULL,
- condition INTEGER NOT NULL,
- PRIMARY KEY(process, condition),
- FOREIGN KEY (process) REFERENCES processes(id),
- FOREIGN KEY (condition) REFERENCES conditions(id)
-);
-CREATE TABLE process_efforts (
- parent INTEGER NOT NULL,
- timestamp TEXT NOT NULL,
- effort REAL NOT NULL,
- PRIMARY KEY (parent, timestamp),
- FOREIGN KEY (parent) REFERENCES processes(id)
-);
-CREATE TABLE process_enables (
- process INTEGER NOT NULL,
- condition INTEGER NOT NULL,
- PRIMARY KEY(process, condition),
- FOREIGN KEY (process) REFERENCES processes(id),
- FOREIGN KEY (condition) REFERENCES conditions(id)
-);
-CREATE TABLE process_steps (
- id INTEGER PRIMARY KEY,
- owner INTEGER NOT NULL,
- step_process INTEGER NOT NULL,
- parent_step INTEGER,
- FOREIGN KEY (owner) REFERENCES processes(id),
- FOREIGN KEY (step_process) REFERENCES processes(id),
- FOREIGN KEY (parent_step) REFERENCES process_steps(step_id)
-);
-CREATE TABLE process_titles (
- parent INTEGER NOT NULL,
- timestamp TEXT NOT NULL,
- title TEXT NOT NULL,
- PRIMARY KEY (parent, timestamp),
- FOREIGN KEY (parent) REFERENCES processes(id)
-);
-CREATE TABLE processes (
- id INTEGER PRIMARY KEY
-);
-CREATE TABLE todo_children (
- parent INTEGER NOT NULL,
- child INTEGER NOT NULL,
- PRIMARY KEY (parent, child),
- FOREIGN KEY (parent) REFERENCES todos(id),
- FOREIGN KEY (child) REFERENCES todos(id)
-);
-CREATE TABLE todo_conditions (
- todo INTEGER NOT NULL,
- condition INTEGER NOT NULL,
- PRIMARY KEY(todo, condition),
- FOREIGN KEY (todo) REFERENCES todos(id),
- FOREIGN KEY (condition) REFERENCES conditions(id)
-);
-CREATE TABLE todo_disables (
- todo INTEGER NOT NULL,
- condition INTEGER NOT NULL,
- PRIMARY KEY(todo, condition),
- FOREIGN KEY (todo) REFERENCES todos(id),
- FOREIGN KEY (condition) REFERENCES conditions(id)
-);
-CREATE TABLE todo_enables (
- todo INTEGER NOT NULL,
- condition INTEGER NOT NULL,
- PRIMARY KEY(todo, condition),
- FOREIGN KEY (todo) REFERENCES todos(id),
- FOREIGN KEY (condition) REFERENCES conditions(id)
-);
-CREATE TABLE todos (
- id INTEGER PRIMARY KEY,
- process INTEGER NOT NULL,
- is_done BOOLEAN NOT NULL,
- day TEXT NOT NULL,
- FOREIGN KEY (process) REFERENCES processes(id),
- FOREIGN KEY (day) REFERENCES days(id)
-);
+++ /dev/null
-ALTER TABLE todos ADD COLUMN comment TEXT NOT NULL DEFAULT "";
+++ /dev/null
-ALTER TABLE todos ADD COLUMN effort REAL;
+++ /dev/null
-ALTER TABLE todos ADD COLUMN calendarize BOOLEAN NOT NULL DEFAULT FALSE;
-ALTER TABLE processes ADD COLUMN calendarize BOOLEAN NOT NULL DEFAULT FALSE;
+++ /dev/null
-CREATE TABLE process_blockers (
- process INTEGER NOT NULL,
- condition INTEGER NOT NULL,
- PRIMARY KEY (process, condition),
- FOREIGN KEY (process) REFERENCES processes(id),
- FOREIGN KEY (condition) REFERENCES conditions(id)
-);
-CREATE TABLE todo_blockers (
- todo INTEGER NOT NULL,
- condition INTEGER NOT NULL,
- PRIMARY KEY (todo, condition),
- FOREIGN KEY (todo) REFERENCES todos(id),
- FOREIGN KEY (condition) REFERENCES conditions(id)
-);
+++ /dev/null
-CREATE TABLE process_step_suppressions (
- process INTEGER NOT NULL,
- process_step INTEGER NOT NULL,
- PRIMARY KEY (process, process_step),
- FOREIGN KEY (process) REFERENCES processes(id),
- FOREIGN KEY (process_step) REFERENCES process_steps(id)
-);
-
+++ /dev/null
-ALTER TABLE days ADD COLUMN days_since_millennium INTEGER NOT NULL DEFAULT 0;
-
+++ /dev/null
-ALTER TABLE todos ADD COLUMN new_day_id INTEGER;
-UPDATE todos SET new_day_id = (
- SELECT days.days_since_millennium
- FROM days
- WHERE days.id = todos.day);
-
-CREATE TABLE days_new (
- id INTEGER PRIMARY KEY,
- comment TEXT NOT NULL
-);
-INSERT INTO days_new SELECT
- days_since_millennium,
- comment
-FROM days;
-DROP TABLE days;
-ALTER TABLE days_new RENAME TO days;
-
-CREATE TABLE todos_new (
- id INTEGER PRIMARY KEY,
- process INTEGER NOT NULL,
- is_done BOOLEAN NOT NULL,
- day INTEGER NOT NULL,
- comment TEXT NOT NULL DEFAULT "",
- effort REAL,
- calendarize BOOLEAN NOT NULL DEFAULT FALSE,
- FOREIGN KEY (process) REFERENCES processes(id),
- FOREIGN KEY (day) REFERENCES days(id)
-);
-INSERT INTO todos_new SELECT
- id,
- process,
- is_done,
- new_day_id,
- comment,
- effort,
- calendarize
-FROM todos;
-DROP TABLE todos;
-ALTER TABLE todos_new RENAME TO todos;
+++ /dev/null
-CREATE TABLE "days" (
- id INTEGER PRIMARY KEY,
- comment TEXT NOT NULL
-);
-CREATE TABLE "todos" (
- id INTEGER PRIMARY KEY,
- process INTEGER NOT NULL,
- is_done BOOLEAN NOT NULL,
- day INTEGER NOT NULL,
- comment TEXT NOT NULL DEFAULT "",
- effort REAL,
- calendarize BOOLEAN NOT NULL DEFAULT FALSE,
- FOREIGN KEY (process) REFERENCES processes(id),
- FOREIGN KEY (day) REFERENCES days(id)
-);
-CREATE TABLE condition_descriptions (
- parent INTEGER NOT NULL,
- timestamp TEXT NOT NULL,
- description TEXT NOT NULL,
- PRIMARY KEY (parent, timestamp),
- FOREIGN KEY (parent) REFERENCES conditions(id)
-);
-CREATE TABLE condition_titles (
- parent INTEGER NOT NULL,
- timestamp TEXT NOT NULL,
- title TEXT NOT NULL,
- PRIMARY KEY (parent, timestamp),
- FOREIGN KEY (parent) REFERENCES conditions(id)
-);
-CREATE TABLE conditions (
- id INTEGER PRIMARY KEY,
- is_active BOOLEAN NOT NULL
-);
-CREATE TABLE process_blockers (
- process INTEGER NOT NULL,
- condition INTEGER NOT NULL,
- PRIMARY KEY (process, condition),
- FOREIGN KEY (process) REFERENCES processes(id),
- FOREIGN KEY (condition) REFERENCES conditions(id)
-);
-CREATE TABLE process_conditions (
- process INTEGER NOT NULL,
- condition INTEGER NOT NULL,
- PRIMARY KEY (process, condition),
- FOREIGN KEY (process) REFERENCES processes(id),
- FOREIGN KEY (condition) REFERENCES conditions(id)
-);
-CREATE TABLE process_descriptions (
- parent INTEGER NOT NULL,
- timestamp TEXT NOT NULL,
- description TEXT NOT NULL,
- PRIMARY KEY (parent, timestamp),
- FOREIGN KEY (parent) REFERENCES processes(id)
-);
-CREATE TABLE process_disables (
- process INTEGER NOT NULL,
- condition INTEGER NOT NULL,
- PRIMARY KEY(process, condition),
- FOREIGN KEY (process) REFERENCES processes(id),
- FOREIGN KEY (condition) REFERENCES conditions(id)
-);
-CREATE TABLE process_efforts (
- parent INTEGER NOT NULL,
- timestamp TEXT NOT NULL,
- effort REAL NOT NULL,
- PRIMARY KEY (parent, timestamp),
- FOREIGN KEY (parent) REFERENCES processes(id)
-);
-CREATE TABLE process_enables (
- process INTEGER NOT NULL,
- condition INTEGER NOT NULL,
- PRIMARY KEY(process, condition),
- FOREIGN KEY (process) REFERENCES processes(id),
- FOREIGN KEY (condition) REFERENCES conditions(id)
-);
-CREATE TABLE process_step_suppressions (
- process INTEGER NOT NULL,
- process_step INTEGER NOT NULL,
- PRIMARY KEY (process, process_step),
- FOREIGN KEY (process) REFERENCES processes(id),
- FOREIGN KEY (process_step) REFERENCES process_steps(id)
-);
-CREATE TABLE process_steps (
- id INTEGER PRIMARY KEY,
- owner INTEGER NOT NULL,
- step_process INTEGER NOT NULL,
- parent_step INTEGER,
- FOREIGN KEY (owner) REFERENCES processes(id),
- FOREIGN KEY (step_process) REFERENCES processes(id),
- FOREIGN KEY (parent_step) REFERENCES process_steps(step_id)
-);
-CREATE TABLE process_titles (
- parent INTEGER NOT NULL,
- timestamp TEXT NOT NULL,
- title TEXT NOT NULL,
- PRIMARY KEY (parent, timestamp),
- FOREIGN KEY (parent) REFERENCES processes(id)
-);
-CREATE TABLE processes (
- id INTEGER PRIMARY KEY,
- calendarize BOOLEAN NOT NULL DEFAULT FALSE
-);
-CREATE TABLE todo_blockers (
- todo INTEGER NOT NULL,
- condition INTEGER NOT NULL,
- PRIMARY KEY (todo, condition),
- FOREIGN KEY (todo) REFERENCES todos(id),
- FOREIGN KEY (condition) REFERENCES conditions(id)
-);
-CREATE TABLE todo_children (
- parent INTEGER NOT NULL,
- child INTEGER NOT NULL,
- PRIMARY KEY (parent, child),
- FOREIGN KEY (parent) REFERENCES todos(id),
- FOREIGN KEY (child) REFERENCES todos(id)
-);
-CREATE TABLE todo_conditions (
- todo INTEGER NOT NULL,
- condition INTEGER NOT NULL,
- PRIMARY KEY(todo, condition),
- FOREIGN KEY (todo) REFERENCES todos(id),
- FOREIGN KEY (condition) REFERENCES conditions(id)
-);
-CREATE TABLE todo_disables (
- todo INTEGER NOT NULL,
- condition INTEGER NOT NULL,
- PRIMARY KEY(todo, condition),
- FOREIGN KEY (todo) REFERENCES todos(id),
- FOREIGN KEY (condition) REFERENCES conditions(id)
-);
-CREATE TABLE todo_enables (
- todo INTEGER NOT NULL,
- condition INTEGER NOT NULL,
- PRIMARY KEY(todo, condition),
- FOREIGN KEY (todo) REFERENCES todos(id),
- FOREIGN KEY (condition) REFERENCES conditions(id)
-);
+++ /dev/null
-Subproject commit dee7c0f6218e6bdd07b477dc5d9e4b5540ffcf4a
+++ /dev/null
-"""Non-doable elements of ProcessStep/Todo chains."""
-from __future__ import annotations
-from plomtask.db import DatabaseConnection, BaseModel
-from plomtask.versioned_attributes import VersionedAttribute
-from plomtask.exceptions import HandledException
-
-
-class Condition(BaseModel):
- """Non-Process dependency for ProcessSteps and Todos."""
- table_name = 'conditions'
- to_save_simples = ['is_active']
- versioned_defaults = {'title': 'UNNAMED', 'description': ''}
- to_search = ['title.newest', 'description.newest']
- can_create_by_id = True
- sorters = {'is_active': lambda c: c.is_active,
- 'title': lambda c: c.title.newest}
-
- def __init__(self, id_: int | None, is_active: bool = False) -> None:
- super().__init__(id_)
- self.is_active = is_active
- for name in ['title', 'description']:
- attr = VersionedAttribute(self, f'condition_{name}s',
- self.versioned_defaults[name])
- setattr(self, name, attr)
-
- def remove(self, db_conn: DatabaseConnection) -> None:
- """Remove from DB, with VersionedAttributes.
-
- Checks for Todos and Processes that depend on Condition, prohibits
- deletion if found.
- """
- if self.id_ is not None:
- for item in ('process', 'todo'):
- for attr in ('conditions', 'blockers', 'enables', 'disables'):
- table_name = f'{item}_{attr}'
- for _ in db_conn.row_where(table_name, 'condition',
- self.id_):
- msg = 'cannot remove Condition in use'
- raise HandledException(msg)
- super().remove(db_conn)
-
-
-class ConditionsRelations:
- """Methods for handling relations to Conditions, for Todo and Process."""
- # pylint: disable=too-few-public-methods
-
- def __init__(self) -> None:
- self.conditions: list[Condition] = []
- self.blockers: list[Condition] = []
- self.enables: list[Condition] = []
- self.disables: list[Condition] = []
-
- def set_condition_relations(self,
- db_conn: DatabaseConnection,
- ids_conditions: list[int],
- ids_blockers: list[int],
- ids_enables: list[int],
- ids_disables: list[int]
- ) -> None:
- """Set owned Condition lists to those identified by respective IDs."""
- # pylint: disable=too-many-arguments
- for ids, target in [(ids_conditions, 'conditions'),
- (ids_blockers, 'blockers'),
- (ids_enables, 'enables'),
- (ids_disables, 'disables')]:
- target_list = getattr(self, target)
- while len(target_list) > 0:
- target_list.pop()
- for id_ in ids:
- target_list += [Condition.by_id(db_conn, id_)]
+++ /dev/null
-"""Various utilities for handling dates."""
-from datetime import date as dt_date, timedelta
-from plomtask.exceptions import BadFormatException
-
-
-def dt_date_from_str(date_str: str) -> dt_date:
- """Validate against ISO format, colloq. terms; return as datetime.date."""
- if date_str == 'today':
- date_str = date_in_n_days(0)
- elif date_str == 'yesterday':
- date_str = date_in_n_days(-1)
- elif date_str == 'tomorrow':
- date_str = date_in_n_days(1)
- try:
- date = dt_date.fromisoformat(date_str)
- except (ValueError, TypeError) as e:
- msg = f'Given date of wrong format: {date_str}'
- raise BadFormatException(msg) from e
- return date
-
-
-def days_n_from_dt_date(date: dt_date) -> int:
- """Return number of days from Jan 1st 2000 to datetime.date."""
- return (date - dt_date(2000, 1, 1)).days
-
-
-def dt_date_from_days_n(days_n: int) -> dt_date:
- """Return datetime.date for days_n after Jan 1st 2000."""
- return dt_date(2000, 1, 1) + timedelta(days=days_n)
-
-
-def date_in_n_days(n: int) -> str:
- """Return in ISO format date from today + n days."""
- date = dt_date.today() + timedelta(days=n)
- return date.isoformat()
+++ /dev/null
-"""Collecting Day and date-related items."""
-from __future__ import annotations
-from typing import Any, Self
-from sqlite3 import Row
-from datetime import date as dt_date, timedelta
-from plomtask.db import DatabaseConnection, BaseModel
-from plomtask.todos import Todo
-from plomtask.dating import dt_date_from_days_n, days_n_from_dt_date
-
-
-class Day(BaseModel):
- """Individual days defined by their dates."""
- table_name = 'days'
- to_save_simples = ['comment']
- add_to_dict = ['todos']
- can_create_by_id = True
-
- def __init__(self, id_: int, comment: str = '') -> None:
- super().__init__(id_)
- self.comment = comment
- self.todos: list[Todo] = []
-
- @classmethod
- def from_table_row(cls, db_conn: DatabaseConnection, row: Row | list[Any]
- ) -> Self:
- """Make from DB row, with linked Todos."""
- day = super().from_table_row(db_conn, row)
- day.todos = Todo.by_date(db_conn, day.date)
- return day
-
- @classmethod
- def by_id(cls, db_conn: DatabaseConnection, id_: int) -> Self:
- """Checks Todo.days_to_update if we need to a retrieved Day's .todos"""
- day = super().by_id(db_conn, id_)
- assert isinstance(day.id_, int)
- if day.id_ in Todo.days_to_update:
- Todo.days_to_update.remove(day.id_)
- day.todos = Todo.by_date(db_conn, day.date)
- return day
-
- @classmethod
- def with_filled_gaps(
- cls, conn: DatabaseConnection, dt_start: dt_date, dt_end: dt_date
- ) -> list[Self]:
- """Show days >= start_date, <= end_date, fill gaps with un-storeds."""
- if dt_start > dt_end:
- return []
- start_n_days = days_n_from_dt_date(dt_start)
- end_n_days = days_n_from_dt_date(dt_end)
- ranged_days = [d for d in cls.all(conn)
- if isinstance(d.id_, int)
- and d.id_ >= start_n_days and d.id_ <= end_n_days]
- ranged_days.sort()
- if (not ranged_days) or (isinstance(ranged_days[0].id_, int)
- and start_n_days < ranged_days[0].id_):
- ranged_days.insert(0, cls(start_n_days))
- assert isinstance(ranged_days[-1].id_, int)
- if end_n_days > ranged_days[-1].id_:
- ranged_days.append(cls(end_n_days))
- if len(ranged_days) > 1:
- degapped_ranged_days = []
- for i, day in enumerate(ranged_days):
- degapped_ranged_days += [day]
- if i < len(ranged_days) - 1:
- next_one = ranged_days[i+1]
- assert isinstance(day.id_, int)
- assert isinstance(next_one.id_, int)
- while day.id_ + 1 != next_one.id_:
- assert isinstance(day.id_, int)
- day = cls(day.id_ + 1)
- degapped_ranged_days += [day]
- return degapped_ranged_days
- return ranged_days
-
- @property
- def _dt_date(self) -> dt_date:
- """Return chronological location as datetime.date."""
- assert isinstance(self.id_, int)
- return dt_date_from_days_n(self.id_)
-
- @property
- def date(self) -> str:
- """Return chronological location as ISO format date."""
- return self._dt_date.isoformat()
-
- @property
- def first_of_month(self) -> bool:
- """Return if self is first day of a month."""
- return self.date[-2:] == '01'
-
- @property
- def month_name(self) -> str:
- """Return name of month self is part of."""
- return self._dt_date.strftime('%B')
-
- @property
- def weekday(self) -> str:
- """Return weekday name matching self."""
- return self._dt_date.strftime('%A')
-
- @property
- def prev_date(self) -> str:
- """Return ISO-formatted date preceding date of self."""
- return (self._dt_date - timedelta(days=1)).isoformat()
-
- @property
- def next_date(self) -> str:
- """Return ISO-formatted date succeeding date of this Day."""
- return (self._dt_date + timedelta(days=1)).isoformat()
-
- @property
- def calendarized_todos(self) -> list[Todo]:
- """Return only those of self.todos that have .calendarize set."""
- return [t for t in self.todos if t.calendarize]
-
- @property
- def total_effort(self) -> float:
- """"Sum all .performed_effort of self.todos."""
- total_effort = 0.0
- for todo in self.todos:
- total_effort += todo.performed_effort
- return total_effort
+++ /dev/null
-"""Database management."""
-from __future__ import annotations
-from datetime import date as dt_date
-from os import listdir
-from pathlib import Path
-from sqlite3 import Row
-from typing import cast, Any, Self, Callable
-from plomtask.exceptions import (HandledException, NotFoundException,
- BadFormatException)
-from plomlib.db import (
- PlomDbConn, PlomDbFile, PlomDbMigration, TypePlomDbMigration)
-
-_EXPECTED_DB_VERSION = 7
-_MIGRATIONS_DIR = Path('migrations')
-_FILENAME_DB_SCHEMA = f'init_{_EXPECTED_DB_VERSION}.sql'
-_PATH_DB_SCHEMA = _MIGRATIONS_DIR.joinpath(_FILENAME_DB_SCHEMA)
-
-
-def _mig_6_calc_days_since_millennium(conn: PlomDbConn) -> None:
- rows = conn.exec('SELECT * FROM days').fetchall()
- for row in [list(r) for r in rows]:
- row[-1] = (dt_date.fromisoformat(row[0]) - dt_date(2000, 1, 1)).days
- conn.exec('REPLACE INTO days VALUES', tuple(row))
-
-
-MIGRATION_STEPS_POST_SQL: dict[int, Callable[[PlomDbConn], None]] = {
- 6: _mig_6_calc_days_since_millennium
-}
-
-
-class DatabaseMigration(PlomDbMigration):
- """Collects and enacts DatabaseFile migration commands."""
- migs_dir_path = _MIGRATIONS_DIR
-
- @classmethod
- def gather(cls, from_version: int, base_set: set[TypePlomDbMigration]
- ) -> list[TypePlomDbMigration]:
- msg_prefix = 'Migration directory contains'
- msg_bad_entry = f'{msg_prefix} unexpected entry: '
- migs = []
- total_migs = set()
- post_sql_steps_added = set()
- for entry in [e for e in listdir(cls.migs_dir_path)
- if e != _FILENAME_DB_SCHEMA]:
- path = cls.migs_dir_path.joinpath(entry)
- if not path.is_file():
- continue
- toks = entry.split('_', maxsplit=1)
- if len(toks) < 2 or (not toks[0].isdigit()):
- raise HandledException(f'{msg_bad_entry}{entry}')
- i = int(toks[0])
- if i <= from_version:
- continue
- if i > _EXPECTED_DB_VERSION:
- raise HandledException(f'{msg_prefix} unexpected version {i}')
- post_sql_steps = MIGRATION_STEPS_POST_SQL.get(i, None)
- if post_sql_steps:
- post_sql_steps_added.add(i)
- total_migs.add(cls(i, Path(entry), post_sql_steps))
- for k in [k for k in MIGRATION_STEPS_POST_SQL
- if k > from_version
- and k not in post_sql_steps_added]:
- total_migs.add(cls(k, None, MIGRATION_STEPS_POST_SQL[k]))
- for i in range(from_version + 1, _EXPECTED_DB_VERSION + 1):
- migs_found = [m for m in total_migs if m.target_version == i]
- if not migs_found:
- raise HandledException(f'{msg_prefix} no migration of v. {i}')
- if len(migs_found) > 1:
- raise HandledException(f'{msg_prefix} >1 migration of v. {i}')
- migs += migs_found
- return cast(list[TypePlomDbMigration], migs)
-
-
-class DatabaseFile(PlomDbFile):
- """File readable as DB of expected schema, user version."""
- target_version = _EXPECTED_DB_VERSION
- path_schema = _PATH_DB_SCHEMA
- mig_class = DatabaseMigration
-
-
-class DatabaseConnection(PlomDbConn):
- """A single connection to the database."""
-
- def close(self) -> None:
- """Shortcut to sqlite3.Connection.close()."""
- self._conn.close()
-
- def rewrite_relations(self, table_name: str, key: str, target: int | str,
- rows: list[list[Any]], key_index: int = 0) -> None:
- # pylint: disable=too-many-arguments
- """Rewrite relations in table_name to target, with rows values.
-
- Note that single rows are expected without the column and value
- identified by key and target, which are inserted inside the function
- at key_index.
- """
- self.delete_where(table_name, key, target)
- for row in rows:
- values = tuple(row[:key_index] + [target] + row[key_index:])
- self.exec(f'INSERT INTO {table_name} VALUES', values)
-
- def row_where(self, table_name: str, key: str,
- target: int | str) -> list[Row]:
- """Return list of Rows at table where key == target."""
- return list(self.exec(f'SELECT * FROM {table_name} WHERE {key} =',
- (target,)))
-
- # def column_where_pattern(self,
- # table_name: str,
- # column: str,
- # pattern: str,
- # keys: list[str]) -> list[Any]:
- # """Return column of rows where one of keys matches pattern."""
- # targets = tuple([f'%{pattern}%'] * len(keys))
- # haystack = ' OR '.join([f'{k} LIKE ?' for k in keys])
- # sql = f'SELECT {column} FROM {table_name} WHERE {haystack}'
- # return [row[0] for row in self.exec(sql, targets)]
-
- def column_where(self, table_name: str, column: str, key: str,
- target: int | str) -> list[Any]:
- """Return column of table where key == target."""
- return [row[0] for row in
- self.exec(f'SELECT {column} FROM {table_name} '
- f'WHERE {key} =', (target,))]
-
- def column_all(self, table_name: str, column: str) -> list[Any]:
- """Return complete column of table."""
- return [row[0] for row in
- self.exec(f'SELECT {column} FROM {table_name}')]
-
- def delete_where(self, table_name: str, key: str,
- target: int | str) -> None:
- """Delete from table where key == target."""
- self.exec(f'DELETE FROM {table_name} WHERE {key} =', (target,))
-
-
-class BaseModel:
- """Template for most of the models we use/derive from the DB."""
- table_name = ''
- to_save_simples: list[str] = []
- to_save_relations: list[tuple[str, str, str, int]] = []
- versioned_defaults: dict[str, str | float] = {}
- add_to_dict: list[str] = []
- id_: None | int
- cache_: dict[int, Self]
- to_search: list[str] = []
- can_create_by_id = False
- _exists = True
- sorters: dict[str, Callable[..., Any]] = {}
-
- def __init__(self, id_: int | None) -> None:
- if isinstance(id_, int) and id_ < 1:
- msg = f'illegal {self.__class__.__name__} ID, must be >=1: {id_}'
- raise BadFormatException(msg)
- self.id_ = id_
-
- def __hash__(self) -> int:
- hashable = [self.id_] + [getattr(self, name)
- for name in self.to_save_simples]
- for definition in self.to_save_relations:
- attr = getattr(self, definition[2])
- hashable += [tuple(rel.id_ for rel in attr)]
- for name in self.to_save_versioned():
- hashable += [hash(getattr(self, name))]
- return hash(tuple(hashable))
-
- def __eq__(self, other: object) -> bool:
- if not isinstance(other, self.__class__):
- return False
- return hash(self) == hash(other)
-
- def __lt__(self, other: Any) -> bool:
- if not isinstance(other, self.__class__):
- msg = 'cannot compare to object of different class'
- raise HandledException(msg)
- assert isinstance(self.id_, int)
- assert isinstance(other.id_, int)
- return self.id_ < other.id_
-
- @classmethod
- def to_save_versioned(cls) -> list[str]:
- """Return keys of cls.versioned_defaults assuming we wanna save 'em."""
- return list(cls.versioned_defaults.keys())
-
- @property
- def as_dict_and_refs(self) -> tuple[dict[str, object], list[Self]]:
- """Return self as json.dumps-ready dict, list of referenced objects."""
- d: dict[str, object] = {'id': self.id_}
- refs: list[Self] = []
- for to_save in self.to_save_simples:
- d[to_save] = getattr(self, to_save)
- if len(self.to_save_versioned()) > 0:
- d['_versioned'] = {}
- for k in self.to_save_versioned():
- attr = getattr(self, k)
- assert isinstance(d['_versioned'], dict)
- d['_versioned'][k] = attr.history
- rels_to_collect = [rel[2] for rel in self.to_save_relations]
- rels_to_collect += self.add_to_dict
- for attr_name in rels_to_collect:
- rel_list = []
- for item in getattr(self, attr_name):
- rel_list += [item.id_]
- if item not in refs:
- refs += [item]
- d[attr_name] = rel_list
- return d, refs
-
- @classmethod
- def name_lowercase(cls) -> str:
- """Convenience method to return cls' name in lowercase."""
- return cls.__name__.lower()
-
- @classmethod
- def sort_by(cls, seq: list[Any], sort_key: str, default: str = 'title'
- ) -> str:
- """Sort cls list by cls.sorters[sort_key] (reverse if '-'-prefixed).
-
- Before cls.sorters[sort_key] is applied, seq is sorted by .id_, to
- ensure predictability where parts of seq are of same sort value.
- """
- reverse = False
- if len(sort_key) > 1 and '-' == sort_key[0]:
- sort_key = sort_key[1:]
- reverse = True
- if sort_key not in cls.sorters:
- sort_key = default
- seq.sort(key=lambda x: x.id_, reverse=reverse)
- sorter: Callable[..., Any] = cls.sorters[sort_key]
- seq.sort(key=sorter, reverse=reverse)
- if reverse:
- sort_key = f'-{sort_key}'
- return sort_key
-
- # cache management
- # (we primarily use the cache to ensure we work on the same object in
- # memory no matter where and how we retrieve it, e.g. we don't want
- # .by_id() calls to create a new object each time, but rather a pointer
- # to the one already instantiated)
-
- def __getattribute__(self, name: str) -> Any:
- """Ensure fail if ._disappear() was called, except to check ._exists"""
- if name != '_exists' and not super().__getattribute__('_exists'):
- msg = f'Object for attribute does not exist: {name}'
- raise HandledException(msg)
- return super().__getattribute__(name)
-
- def _disappear(self) -> None:
- """Invalidate object, make future use raise exceptions."""
- assert self.id_ is not None
- if self._get_cached(self.id_):
- self._uncache()
- to_kill = list(self.__dict__.keys())
- for attr in to_kill:
- delattr(self, attr)
- self._exists = False
-
- @classmethod
- def empty_cache(cls) -> None:
- """Empty class's cache, and disappear all former inhabitants."""
- # pylint: disable=protected-access
- # (cause we remain within the class)
- if hasattr(cls, 'cache_'):
- to_disappear = list(cls.cache_.values())
- for item in to_disappear:
- item._disappear()
- cls.cache_ = {}
-
- @classmethod
- def get_cache(cls) -> dict[int, Self]:
- """Get cache dictionary, create it if not yet existing."""
- if not hasattr(cls, 'cache_'):
- d: dict[int, Self] = {}
- cls.cache_ = d
- return cls.cache_
-
- @classmethod
- def _get_cached(cls, id_: int) -> Self | None:
- """Get object of id_ from class's cache, or None if not found."""
- cache = cls.get_cache()
- if id_ in cache:
- obj = cache[id_]
- return obj
- return None
-
- def cache(self) -> None:
- """Update object in class's cache.
-
- Also calls ._disappear if cache holds older reference to object of same
- ID, but different memory address, to avoid doing anything with
- dangling leftovers.
- """
- if self.id_ is None:
- raise HandledException('Cannot cache object without ID.')
- cache = self.get_cache()
- old_cached = self._get_cached(self.id_)
- if old_cached and id(old_cached) != id(self):
- # pylint: disable=protected-access
- # (cause we remain within the class)
- old_cached._disappear()
- cache[self.id_] = self
-
- def _uncache(self) -> None:
- """Remove self from cache."""
- if self.id_ is None:
- raise HandledException('Cannot un-cache object without ID.')
- cache = self.get_cache()
- del cache[self.id_]
-
- # object retrieval and generation
-
- @classmethod
- def from_table_row(cls,
- db_conn: DatabaseConnection,
- row: Row | list[Any]) -> Self:
- """Make from DB row (sans relations), update DB cache with it."""
- obj = cls(*row)
- assert obj.id_ is not None
- for attr_name in cls.to_save_versioned():
- attr = getattr(obj, attr_name)
- table_name = attr.table_name
- for row_ in db_conn.row_where(table_name, 'parent', obj.id_):
- attr.history_from_row(row_)
- obj.cache()
- return obj
-
- @classmethod
- def by_id(cls, db_conn: DatabaseConnection, id_: int) -> Self:
- """Retrieve by id_, on failure throw NotFoundException.
-
- First try to get from cls.cache_, only then check DB; if found,
- put into cache.
- """
- obj = None
- if id_ is not None:
- if isinstance(id_, int) and id_ == 0:
- raise BadFormatException('illegal ID of value 0')
- obj = cls._get_cached(id_)
- if not obj:
- for row in db_conn.row_where(cls.table_name, 'id', id_):
- obj = cls.from_table_row(db_conn, row)
- break
- if obj:
- return obj
- raise NotFoundException(f'found no object of ID {id_}')
-
- @classmethod
- def by_id_or_create(cls, db_conn: DatabaseConnection, id_: int | None
- ) -> Self:
- """Wrapper around .by_id, creating (not caching/saving) if no find."""
- if not cls.can_create_by_id:
- raise HandledException('Class cannot .by_id_or_create.')
- if id_ is None:
- return cls(None)
- try:
- return cls.by_id(db_conn, id_)
- except NotFoundException:
- return cls(id_)
-
- @classmethod
- def all(cls, db_conn: DatabaseConnection) -> list[Self]:
- """Collect all objects of class into list.
-
- Note that this primarily returns the contents of the cache, and only
- _expands_ that by additional findings in the DB. This assumes the
- cache is always instantly cleaned of any items that would be removed
- from the DB.
- """
- items: dict[int, Self] = {}
- for k, v in cls.get_cache().items():
- items[k] = v
- already_recorded = items.keys()
- for id_ in db_conn.column_all(cls.table_name, 'id'):
- if id_ not in already_recorded:
- item = cls.by_id(db_conn, id_)
- assert item.id_ is not None
- items[item.id_] = item
- return sorted(list(items.values()))
-
- @classmethod
- def matching(cls, db_conn: DatabaseConnection, pattern: str) -> list[Self]:
- """Return all objects whose .to_search match pattern."""
- items = cls.all(db_conn)
- if pattern:
- filtered = []
- for item in items:
- for attr_name in cls.to_search:
- toks = attr_name.split('.')
- parent = item
- for tok in toks:
- attr = getattr(parent, tok)
- parent = attr
- if pattern in attr:
- filtered += [item]
- break
- return filtered
- return items
-
- # database writing
-
- def save(self, db_conn: DatabaseConnection) -> None:
- """Write self to DB and cache and ensure .id_.
-
- Write both to DB, and to cache. To DB, write .id_ and attributes
- listed in cls.to_save_[simples|versioned|_relations].
-
- Ensure self.id_ by setting it to what the DB command returns as the
- last saved row's ID (cursor.lastrowid), EXCEPT if self.id_ already
- exists as a 'str', which implies we do our own ID creation (so far
- only the case with the Day class, where it's to be a date string.
- """
- values = tuple([self.id_] + [getattr(self, key)
- for key in self.to_save_simples])
- table_name = self.table_name
- cursor = db_conn.exec(f'REPLACE INTO {table_name} VALUES', values)
- self.id_ = cursor.lastrowid
- self.cache()
- for attr_name in self.to_save_versioned():
- getattr(self, attr_name).save(db_conn)
- for table, column, attr_name, key_index in self.to_save_relations:
- assert isinstance(self.id_, int)
- db_conn.rewrite_relations(table, column, self.id_,
- [[i.id_] for i
- in getattr(self, attr_name)], key_index)
-
- def remove(self, db_conn: DatabaseConnection) -> None:
- """Remove from DB and cache, including dependencies."""
- if self.id_ is None or self._get_cached(self.id_) is None:
- raise HandledException('cannot remove unsaved item')
- for attr_name in self.to_save_versioned():
- getattr(self, attr_name).remove(db_conn)
- for table, column, attr_name, _ in self.to_save_relations:
- db_conn.delete_where(table, column, self.id_)
- self._uncache()
- db_conn.delete_where(self.table_name, 'id', self.id_)
- self._disappear()
+++ /dev/null
-"""Exceptions triggering different HTTP codes."""
-
-
-class HandledException(Exception):
- """To identify Exceptions based on expected (if faulty) user behavior."""
- http_code = 500
-
-
-class BadFormatException(HandledException):
- """To identify Exceptions on malformed inputs."""
- http_code = 400
-
-
-class NotFoundException(HandledException):
- """To identify Exceptions on unsuccessful queries."""
- http_code = 404
+++ /dev/null
-"""Web server stuff."""
-from __future__ import annotations
-from pathlib import Path
-from inspect import signature
-from typing import Any, Callable
-from base64 import b64encode, b64decode
-from binascii import Error as binascii_Exception
-from json import dumps as json_dumps
-from plomtask.dating import (
- days_n_from_dt_date, dt_date_from_str, date_in_n_days)
-from plomtask.days import Day
-from plomtask.exceptions import (HandledException, BadFormatException,
- NotFoundException)
-from plomtask.db import DatabaseConnection, DatabaseFile, BaseModel
-from plomtask.processes import Process, ProcessStep, ProcessStepsNode
-from plomtask.conditions import Condition
-from plomtask.todos import Todo, TodoOrProcStepNode
-from plomtask.misc import DictableNode
-from plomlib.web import PlomHttpServer, PlomHttpHandler, PlomQueryMap
-
-TEMPLATES_DIR = Path('templates')
-
-
-class TaskServer(PlomHttpServer):
- """Extends parent by DatabaseFile .db and .render_mode='html'."""
-
- def __init__(self, db_file: DatabaseFile, *args, **kwargs) -> None:
- super().__init__(TEMPLATES_DIR, *args, **kwargs)
- self.db = db_file
- self.render_mode = 'html'
-
-
-class InputsParser(PlomQueryMap):
- """Wrapper for validating and retrieving dict-like HTTP inputs."""
-
- def get_all_str(self, key: str) -> list[str]:
- """Retrieve list of string values at key (empty if no key)."""
- return self.all(key) or []
-
- def get_all_int(self, key: str, fail_on_empty: bool = False) -> list[int]:
- """Retrieve list of int values at key."""
- all_str = self.get_all_str(key)
- try:
- return [int(s) for s in all_str if fail_on_empty or s != '']
- except ValueError as e:
- msg = f'cannot int a form field value for key {key} in: {all_str}'
- raise BadFormatException(msg) from e
-
- def get_str(self, key: str, default: str | None = None) -> str | None:
- """Retrieve single/first string value of key, or default."""
- first = self.first(key)
- return default if first is None else first
-
- def get_str_or_fail(self, key: str, default: str | None = None) -> str:
- """Retrieve first string value of key, if none: fail or default."""
- vals = self.get_all_str(key)
- if not vals:
- if default is not None:
- return default
- raise BadFormatException(f'no value found for key: {key}')
- return vals[0]
-
- def get_int_or_none(self, key: str) -> int | None:
- """Retrieve single/first value of key as int, return None if empty."""
- val = self.get_str_or_fail(key, '')
- if val == '':
- return None
- try:
- return int(val)
- except (ValueError, TypeError) as e:
- msg = f'cannot int form field value for key {key}: {val}'
- raise BadFormatException(msg) from e
-
- def get_bool(self, key: str) -> bool:
- """Return if value to key truish; return False if None/no value."""
- return self.get_str(key) in {'True', 'true', '1', 'on'}
-
- def get_all_of_key_prefixed(self, key_prefix: str) -> dict[str, list[str]]:
- """Retrieve dict of strings at keys starting with key_prefix."""
- ret = {}
- for key in self.keys_prefixed(key_prefix):
- ret[key[len(key_prefix):]] = self.as_dict[key]
- return ret
-
- def get_float_or_fail(self, key: str) -> float:
- """Retrieve float value of key from self.postvars, fail if none."""
- val = self.get_str_or_fail(key)
- try:
- return float(val)
- except ValueError as e:
- msg = f'cannot float form field value for key {key}: {val}'
- raise BadFormatException(msg) from e
-
- def get_all_floats_or_nones(self, key: str) -> list[float | None]:
- """Retrieve list of float value at key, None if empty strings."""
- ret: list[float | None] = []
- for val in self.get_all_str(key):
- if '' == val:
- ret += [None]
- else:
- try:
- ret += [float(val)]
- except ValueError as e:
- msg = f'cannot float form field value for key {key}: {val}'
- raise BadFormatException(msg) from e
- return ret
-
-
-class TaskHandler(PlomHttpHandler):
- """Handles single HTTP request."""
- # pylint: disable=too-many-public-methods
- server: TaskServer
- params: InputsParser
- postvars: InputsParser
- mapper = InputsParser
- _conn: DatabaseConnection
- _site: str
-
- def _send_page(
- self, ctx: dict[str, Any], tmpl_name: str, code: int = 200
- ) -> None:
- """HTTP-send ctx as HTML or JSON, as defined by .server.render_mode.
-
- The differentiation by .server.render_mode serves to allow easily
- comparable JSON responses for automatic testing.
- """
- if 'html' == self.server.render_mode:
- self.send_rendered(Path(f'{tmpl_name}.html'), ctx, code)
- else:
- self.send_http(self._ctx_to_json(ctx).encode(),
- [('Content-Type', 'application/json')],
- code)
-
- def _ctx_to_json(self, ctx: dict[str, object]) -> str:
- """Render ctx into JSON string.
-
- Flattens any objects that json.dumps might not want to serialize, and
- turns occurrences of BaseModel objects into listings of their .id_, to
- be resolved to a full dict inside a top-level '_library' dictionary,
- to avoid endless and circular nesting.
- """
-
- def flatten(node: object) -> object:
-
- def update_library_with(item: BaseModel) -> None:
- cls_name = item.__class__.__name__
- if cls_name not in library:
- library[cls_name] = {}
- if item.id_ not in library[cls_name]:
- d, refs = item.as_dict_and_refs
- id_key = -1 if item.id_ is None else item.id_
- library[cls_name][id_key] = d
- for ref in refs:
- update_library_with(ref)
-
- if isinstance(node, BaseModel):
- update_library_with(node)
- return node.id_
- if isinstance(node, DictableNode):
- d, refs = node.as_dict_and_refs
- for ref in refs:
- update_library_with(ref)
- return d
- if isinstance(node, (list, tuple)):
- return [flatten(item) for item in node]
- if isinstance(node, dict):
- d = {}
- for k, v in node.items():
- d[k] = flatten(v)
- return d
- if isinstance(node, HandledException):
- return str(node)
- return node
-
- library: dict[str, dict[int, object]] = {}
- for k, v in ctx.items():
- ctx[k] = flatten(v)
- ctx['_library'] = library
- return json_dumps(ctx)
-
- @staticmethod
- def _request_wrapper(http_method: str, not_found_msg: str
- ) -> Callable[..., Callable[[TaskHandler], None]]:
- """Wrapper for do_GET… and do_POST… handlers, to init and clean up.
-
- Among other things, conditionally cleans all caches, but only on POST
- requests, as only those are expected to change the states of objects
- that may be cached, and certainly only those are expected to write any
- changes to the database. We want to call them as early though as
- possible here, either exactly after the specific request handler
- returns successfully, or right after any exception is triggered –
- otherwise, race conditions become plausible.
-
- Note that otherwise any POST attempt, even a failed one, may end in
- problematic inconsistencies:
-
- - if the POST handler experiences an Exception, changes to objects
- won't get written to the DB, but the changed objects may remain in
- the cache and affect other objects despite their possibly illegal
- state
-
- - even if an object was just saved to the DB, we cannot be sure its
- current state is completely identical to what we'd get if loading it
- fresh from the DB (e.g. currently Process.n_owners is only updated
- when loaded anew via .from_table_row, nor is its state written to
- the DB by .save; a questionable design choice, but proof that we
- have no guarantee that objects' .save stores all their states we'd
- prefer at their most up-to-date.
- """
-
- def clear_caches() -> None:
- for cls in (Day, Todo, Condition, Process, ProcessStep):
- cls.empty_cache()
-
- def decorator(f: Callable[..., str | None]
- ) -> Callable[[TaskHandler], None]:
- def wrapper(self: TaskHandler) -> None:
- # pylint: disable=protected-access
- # (because pylint here fails to detect the use of wrapper as a
- # method to self with respective access privileges)
- try:
- self._conn = DatabaseConnection(self.server.db)
- handler_name = f'do_{http_method}_{self.pagename}'
- if hasattr(self, handler_name):
- handler = getattr(self, handler_name)
- redir_target = f(self, handler)
- if 'POST' == http_method:
- clear_caches()
- if redir_target:
- self.redirect(Path(redir_target))
- else:
- msg = f'{not_found_msg}: {self.pagename}'
- raise NotFoundException(msg)
- except HandledException as error:
- if 'POST' == http_method:
- clear_caches()
- ctx = {'msg': error}
- self._send_page(ctx, 'msg', error.http_code)
- finally:
- self._conn.close()
- return wrapper
- return decorator
-
- @_request_wrapper('GET', 'Unknown page')
- def do_GET(self, handler: Callable[[], str | dict[str, object]]
- ) -> str | None:
- """Render page with result of handler, or redirect if result is str."""
- tmpl_name = f'{self.pagename}'
- ctx_or_redir_target = handler()
- if isinstance(ctx_or_redir_target, str):
- return ctx_or_redir_target
- self._send_page(ctx_or_redir_target, tmpl_name)
- return None
-
- @_request_wrapper('POST', 'Unknown POST target')
- def do_POST(self, handler: Callable[[], str]) -> str:
- """Handle POST with handler, prepare redirection to result."""
- redir_target = handler()
- self._conn.commit()
- return redir_target
-
- # GET handlers
-
- @staticmethod
- def _get_item(target_class: Any
- ) -> Callable[..., Callable[[TaskHandler],
- dict[str, object]]]:
- def decorator(f: Callable[..., dict[str, object]]
- ) -> Callable[[TaskHandler], dict[str, object]]:
- def wrapper(self: TaskHandler) -> dict[str, object]:
- # pylint: disable=protected-access
- # (because pylint here fails to detect the use of wrapper as a
- # method to self with respective access privileges)
- id_ = None
- for val in self.params.get_all_int('id', fail_on_empty=True):
- id_ = val
- if target_class.can_create_by_id:
- item = target_class.by_id_or_create(self._conn, id_)
- else:
- item = target_class.by_id(self._conn, id_)
- if 'exists' in signature(f).parameters:
- exists = id_ is not None and target_class._get_cached(id_)
- return f(self, item, exists)
- return f(self, item)
- return wrapper
- return decorator
-
- def do_GET_(self) -> str:
- """Return redirect target on GET /."""
- return '/day'
-
- def _do_GET_calendar(self) -> dict[str, object]:
- """Show Days from ?start= to ?end=.
-
- Both .do_GET_calendar and .do_GET_calendar_txt refer to this to do the
- same, the only difference being the HTML template they are rendered to,
- which .do_GET selects from their method name.
- """
- start = self.params.get_str_or_fail('start', '')
- end = self.params.get_str_or_fail('end', '')
- dt_start = dt_date_from_str(start if start else date_in_n_days(-1))
- dt_end = dt_date_from_str(end if end else date_in_n_days(366))
- days = Day.with_filled_gaps(self._conn, dt_start, dt_end)
- today = date_in_n_days(0)
- return {'start': dt_start.isoformat(), 'end': dt_end.isoformat(),
- 'today': today, 'days': days}
-
- def do_GET_calendar(self) -> dict[str, object]:
- """Show Days from ?start= to ?end= – normal view."""
- return self._do_GET_calendar()
-
- def do_GET_calendar_txt(self) -> dict[str, object]:
- """Show Days from ?start= to ?end= – minimalist view."""
- return self._do_GET_calendar()
-
- def do_GET_day(self) -> dict[str, object]:
- """Show single Day of ?date=."""
- date = self.params.get_str('date', date_in_n_days(0))
- make_type = self.params.get_str_or_fail('make_type', 'full')
- #
- assert isinstance(date, str)
- day = Day.by_id_or_create(self._conn,
- days_n_from_dt_date(dt_date_from_str(date)))
- conditions_present = []
- enablers_for = {}
- disablers_for = {}
- for todo in day.todos:
- for condition in todo.conditions + todo.blockers:
- if condition not in conditions_present:
- conditions_present += [condition]
- enablers_for[condition.id_] = [p for p in
- Process.all(self._conn)
- if condition in p.enables]
- disablers_for[condition.id_] = [p for p in
- Process.all(self._conn)
- if condition in p.disables]
- seen_todos: set[int] = set()
- top_nodes = [t.get_step_tree(seen_todos)
- for t in day.todos if not t.parents]
- return {'day': day,
- 'top_nodes': top_nodes,
- 'make_type': make_type,
- 'enablers_for': enablers_for,
- 'disablers_for': disablers_for,
- 'conditions_present': conditions_present,
- 'processes': Process.all(self._conn)}
-
- @_get_item(Todo)
- def do_GET_todo(self, todo: Todo) -> dict[str, object]:
- """Show single Todo of ?id=."""
-
- def walk_process_steps(node_id: int,
- process_step_nodes: list[ProcessStepsNode],
- steps_nodes: list[TodoOrProcStepNode]) -> int:
- for process_step_node in process_step_nodes:
- node_id += 1
- proc = Process.by_id(self._conn,
- process_step_node.step.step_process_id)
- node = TodoOrProcStepNode(node_id, None, proc, [])
- steps_nodes += [node]
- node_id = walk_process_steps(
- node_id, process_step_node.steps, node.children)
- return node_id
-
- def walk_todo_steps(node_id: int, todos: list[Todo],
- steps_nodes: list[TodoOrProcStepNode]) -> int:
- for todo in todos:
- matched = False
- for match in [item for item in steps_nodes
- if item.process
- and item.process == todo.process]:
- match.todo = todo
- matched = True
- for child in match.children:
- child.fillable = True
- node_id = walk_todo_steps(
- node_id, todo.children, match.children)
- if not matched:
- node_id += 1
- node = TodoOrProcStepNode(node_id, todo, None, [])
- steps_nodes += [node]
- node_id = walk_todo_steps(
- node_id, todo.children, node.children)
- return node_id
-
- def collect_adoptables_keys(
- steps_nodes: list[TodoOrProcStepNode]) -> set[int]:
- ids = set()
- for node in steps_nodes:
- if not node.todo:
- assert isinstance(node.process, Process)
- assert isinstance(node.process.id_, int)
- ids.add(node.process.id_)
- ids = ids | collect_adoptables_keys(node.children)
- return ids
-
- todo_steps = [step.todo for step in todo.get_step_tree(set()).children]
- process_tree = todo.process.get_steps(self._conn, None)
- steps_todo_to_process: list[TodoOrProcStepNode] = []
- last_node_id = walk_process_steps(0, process_tree,
- steps_todo_to_process)
- for steps_node in steps_todo_to_process:
- steps_node.fillable = True
- walk_todo_steps(last_node_id, todo_steps, steps_todo_to_process)
- adoptables: dict[int, list[Todo]] = {}
- any_adoptables = [Todo.by_id(self._conn, t.id_)
- for t in Todo.by_date(self._conn, todo.date)
- if t.id_ is not None
- and t != todo]
- for id_ in collect_adoptables_keys(steps_todo_to_process):
- adoptables[id_] = [t for t in any_adoptables
- if t.process.id_ == id_]
- return {'todo': todo,
- 'steps_todo_to_process': steps_todo_to_process,
- 'adoption_candidates_for': adoptables,
- 'process_candidates': sorted(Process.all(self._conn)),
- 'todo_candidates': any_adoptables,
- 'condition_candidates': Condition.all(self._conn)}
-
- def do_GET_todos(self) -> dict[str, object]:
- """Show Todos from ?start= to ?end=, of ?process=, ?comment= pattern"""
- sort_by = self.params.get_str_or_fail('sort_by', 'title')
- start = self.params.get_str_or_fail('start', '')
- end = self.params.get_str_or_fail('end', '')
- process_id = self.params.get_int_or_none('process_id')
- comment_pattern = self.params.get_str_or_fail('comment_pattern', '')
- #
- ret = Todo.by_date_range_with_limits(self._conn, (start, end))
- todos_by_date_range, start, end = ret
- todos = [t for t in todos_by_date_range
- if comment_pattern in t.comment
- and ((not process_id) or t.process.id_ == process_id)]
- sort_by = Todo.sort_by(todos, sort_by)
- return {'start': start, 'end': end, 'process_id': process_id,
- 'comment_pattern': comment_pattern, 'todos': todos,
- 'all_processes': Process.all(self._conn), 'sort_by': sort_by}
-
- def do_GET_conditions(self) -> dict[str, object]:
- """Show all Conditions."""
- pattern = self.params.get_str_or_fail('pattern', '')
- sort_by = self.params.get_str_or_fail('sort_by', 'title')
- #
- conditions = Condition.matching(self._conn, pattern)
- sort_by = Condition.sort_by(conditions, sort_by)
- return {'conditions': conditions,
- 'sort_by': sort_by,
- 'pattern': pattern}
-
- @_get_item(Condition)
- def do_GET_condition(self,
- c: Condition,
- exists: bool
- ) -> dict[str, object]:
- """Show Condition of ?id=."""
- ps = Process.all(self._conn)
- return {'condition': c,
- 'is_new': not exists,
- 'enabled_processes': [p for p in ps if c in p.conditions],
- 'disabled_processes': [p for p in ps if c in p.blockers],
- 'enabling_processes': [p for p in ps if c in p.enables],
- 'disabling_processes': [p for p in ps if c in p.disables]}
-
- @_get_item(Condition)
- def do_GET_condition_titles(self, c: Condition) -> dict[str, object]:
- """Show title history of Condition of ?id=."""
- return {'condition': c}
-
- @_get_item(Condition)
- def do_GET_condition_descriptions(self, c: Condition) -> dict[str, object]:
- """Show description historys of Condition of ?id=."""
- return {'condition': c}
-
- @_get_item(Process)
- def do_GET_process(self,
- process: Process,
- exists: bool
- ) -> dict[str, object]:
- """Show Process of ?id=."""
- owner_ids = self.params.get_all_int('step_to')
- owned_ids = self.params.get_all_int('has_step')
- title_64 = self.params.get_str('title_b64')
- title_new = None
- if title_64:
- try:
- title_new = b64decode(title_64.encode()).decode()
- except binascii_Exception as exc:
- msg = 'invalid base64 for ?title_b64='
- raise BadFormatException(msg) from exc
- #
- if title_new:
- process.title.set(title_new)
- preset_top_step = None
- owners = process.used_as_step_by(self._conn)
- for step_id in owner_ids:
- owners += [Process.by_id(self._conn, step_id)]
- for process_id in owned_ids:
- Process.by_id(self._conn, process_id) # to ensure ID exists
- preset_top_step = process_id
- return {'process': process,
- 'is_new': not exists,
- 'preset_top_step': preset_top_step,
- 'steps': process.get_steps(self._conn),
- 'owners': owners,
- 'n_todos': len(Todo.by_process_id(self._conn, process.id_)),
- 'process_candidates': Process.all(self._conn),
- 'condition_candidates': Condition.all(self._conn)}
-
- @_get_item(Process)
- def do_GET_process_titles(self, p: Process) -> dict[str, object]:
- """Show title history of Process of ?id=."""
- return {'process': p}
-
- @_get_item(Process)
- def do_GET_process_descriptions(self, p: Process) -> dict[str, object]:
- """Show description historys of Process of ?id=."""
- return {'process': p}
-
- @_get_item(Process)
- def do_GET_process_efforts(self, p: Process) -> dict[str, object]:
- """Show default effort history of Process of ?id=."""
- return {'process': p}
-
- def do_GET_processes(self) -> dict[str, object]:
- """Show all Processes."""
- pattern = self.params.get_str_or_fail('pattern', '')
- sort_by = self.params.get_str_or_fail('sort_by', 'title')
- #
- processes = Process.matching(self._conn, pattern)
- sort_by = Process.sort_by(processes, sort_by)
- return {'processes': processes, 'sort_by': sort_by, 'pattern': pattern}
-
- # POST handlers
-
- @staticmethod
- def _delete_or_post(target_class: Any, redir_target: str = '/'
- ) -> Callable[..., Callable[[TaskHandler], str]]:
- def decorator(f: Callable[..., str]
- ) -> Callable[[TaskHandler], str]:
- def wrapper(self: TaskHandler) -> str:
- # pylint: disable=protected-access
- # (because pylint here fails to detect the use of wrapper as a
- # method to self with respective access privileges)
- id_ = self.params.get_int_or_none('id')
- for _ in self.postvars.get_all_str('delete'):
- if id_ is None:
- msg = 'trying to delete non-saved ' +\
- f'{target_class.__name__}'
- raise NotFoundException(msg)
- item = target_class.by_id(self._conn, id_)
- item.remove(self._conn)
- return redir_target
- if target_class.can_create_by_id:
- item = target_class.by_id_or_create(self._conn, id_)
- else:
- item = target_class.by_id(self._conn, id_)
- return f(self, item)
- return wrapper
- return decorator
-
- def _change_versioned_timestamps(self, cls: Any, attr_name: str) -> str:
- """Update history timestamps for VersionedAttribute."""
- id_ = self.params.get_int_or_none('id')
- item = cls.by_id(self._conn, id_)
- attr = getattr(item, attr_name)
- for k, vals in self.postvars.get_all_of_key_prefixed('at:').items():
- if k[19:] != vals[0]:
- attr.reset_timestamp(k, f'{vals[0]}.0')
- attr.save(self._conn)
- return f'/{cls.name_lowercase()}_{attr_name}s?id={item.id_}'
-
- def do_POST_day(self) -> str:
- """Update or insert Day of date and Todos mapped to it."""
- # pylint: disable=too-many-locals
- date = self.params.get_str_or_fail('date')
- day_comment = self.postvars.get_str_or_fail('day_comment')
- make_type = self.postvars.get_str_or_fail('make_type')
- old_todos = self.postvars.get_all_int('todo_id')
- new_todos_by_process = self.postvars.get_all_int('new_todo')
- comments = self.postvars.get_all_str('comment')
- efforts = self.postvars.get_all_floats_or_nones('effort')
- done_todos = self.postvars.get_all_int('done')
- is_done = [t_id in done_todos for t_id in old_todos]
- if not (len(old_todos) == len(is_done) == len(comments)
- == len(efforts)):
- msg = 'not equal number each of number of todo_id, comments, ' +\
- 'and efforts inputs'
- raise BadFormatException(msg)
- for _ in [id_ for id_ in done_todos if id_ not in old_todos]:
- raise BadFormatException('"done" field refers to unknown Todo')
- #
- day_id = days_n_from_dt_date(dt_date_from_str(date))
- day = Day.by_id_or_create(self._conn, day_id)
- day.comment = day_comment
- day.save(self._conn)
- new_todos = []
- for process_id in sorted(new_todos_by_process):
- process = Process.by_id(self._conn, process_id)
- todo = Todo(None, process, False, day_id)
- todo.save(self._conn)
- new_todos += [todo]
- if 'full' == make_type:
- for todo in new_todos:
- todo.ensure_children(self._conn)
- for i, todo_id in enumerate(old_todos):
- todo = Todo.by_id(self._conn, todo_id)
- todo.is_done = is_done[i]
- todo.comment = comments[i]
- todo.effort = efforts[i]
- todo.save(self._conn)
- return f'/day?date={date}&make_type={make_type}'
-
- @_delete_or_post(Todo, '/')
- def do_POST_todo(self, todo: Todo) -> str:
- """Update Todo and its children."""
- # pylint: disable=too-many-locals
- # pylint: disable=too-many-branches
- # pylint: disable=too-many-statements
- assert isinstance(todo.id_, int)
- adoptees = [(id_, todo.id_) for id_
- in self.postvars.get_all_int('adopt')]
- to_make = {'full': [(id_, todo.id_) for id_
- in self.postvars.get_all_int('make_full')],
- 'empty': [(id_, todo.id_) for id_
- in self.postvars.get_all_int('make_empty')]}
- step_fillers_to = self.postvars.get_all_of_key_prefixed(
- 'step_filler_to_')
- to_update: dict[str, Any] = {
- 'comment': self.postvars.get_str_or_fail('comment', ''),
- 'is_done': self.postvars.get_bool('is_done'),
- 'calendarize': self.postvars.get_bool('calendarize')}
- cond_rels = [self.postvars.get_all_int(name) for name in
- ['conditions', 'blockers', 'enables', 'disables']]
- effort_or_not = self.postvars.get_str('effort')
- if effort_or_not is not None:
- if effort_or_not == '':
- to_update['effort'] = None
- else:
- try:
- to_update['effort'] = float(effort_or_not)
- except ValueError as e:
- msg = 'cannot float form field value for key: effort'
- raise BadFormatException(msg) from e
- for k, fillers in step_fillers_to.items():
- try:
- parent_id = int(k)
- except ValueError as e:
- msg = f'bad step_filler_to_ key: {k}'
- raise BadFormatException(msg) from e
- for filler in [f for f in fillers if f != 'ignore']:
- target_id: int
- prefix = 'make_'
- to_int = filler[5:] if filler.startswith(prefix) else filler
- try:
- target_id = int(to_int)
- except ValueError as e:
- msg = f'bad fill_for target: {filler}'
- raise BadFormatException(msg) from e
- if filler.startswith(prefix):
- to_make['empty'] += [(target_id, parent_id)]
- else:
- adoptees += [(target_id, parent_id)]
- #
- todo.set_condition_relations(self._conn, *cond_rels)
- for parent in [Todo.by_id(self._conn, a[1])
- for a in adoptees] + [todo]:
- for child in parent.children:
- if child not in [t[0] for t in adoptees
- if t[0] == child.id_ and t[1] == parent.id_]:
- parent.remove_child(child)
- parent.save(self._conn)
- for child_id, parent_id in adoptees:
- parent = Todo.by_id(self._conn, parent_id)
- if child_id not in [c.id_ for c in parent.children]:
- parent.add_child(Todo.by_id(self._conn, child_id))
- parent.save(self._conn)
- todo.update_attrs(**to_update)
- for approach, make_data in to_make.items():
- for process_id, parent_id in make_data:
- parent = Todo.by_id(self._conn, parent_id)
- process = Process.by_id(self._conn, process_id)
- made = Todo(None, process, False, todo.day_id)
- made.save(self._conn)
- if 'full' == approach:
- made.ensure_children(self._conn)
- parent.add_child(made)
- parent.save(self._conn)
- # todo.save() may destroy Todo if .effort < 0, so retrieve .id_ early
- url = f'/todo?id={todo.id_}'
- todo.save(self._conn)
- return url
-
- def do_POST_process_descriptions(self) -> str:
- """Update history timestamps for Process.description."""
- return self._change_versioned_timestamps(Process, 'description')
-
- def do_POST_process_efforts(self) -> str:
- """Update history timestamps for Process.effort."""
- return self._change_versioned_timestamps(Process, 'effort')
-
- def do_POST_process_titles(self) -> str:
- """Update history timestamps for Process.title."""
- return self._change_versioned_timestamps(Process, 'title')
-
- @_delete_or_post(Process, '/processes')
- def do_POST_process(self, process: Process) -> str:
- """Update or insert Process of ?id= and fields defined in postvars."""
- # pylint: disable=too-many-locals
-
- def id_or_title(l_id_or_title: list[str]) -> tuple[str, list[int]]:
- l_ids, title = [], ''
- for id_or_title in l_id_or_title:
- try:
- l_ids += [int(id_or_title)]
- except ValueError:
- title = id_or_title
- return title, l_ids
-
- versioned = {
- 'title': self.postvars.get_str_or_fail('title'),
- 'description': self.postvars.get_str_or_fail('description'),
- 'effort': self.postvars.get_float_or_fail('effort')}
- cond_rels = [self.postvars.get_all_int(s) for s
- in ['conditions', 'blockers', 'enables', 'disables']]
- calendarize = self.postvars.get_bool('calendarize')
- step_of = self.postvars.get_all_str('step_of')
- suppressions = self.postvars.get_all_int('suppressed_steps')
- kept_steps = self.postvars.get_all_int('kept_steps')
- new_top_step_procs = self.postvars.get_all_str('new_top_step')
- new_steps_to = {
- int(k): [int(n) for n in v] for (k, v)
- in self.postvars.get_all_of_key_prefixed('new_step_to_').items()}
- new_owner_title, owners_to_set = id_or_title(step_of)
- new_step_title, new_top_step_proc_ids = id_or_title(new_top_step_procs)
- #
- for k, v in versioned.items():
- getattr(process, k).set(v)
- process.calendarize = calendarize
- process.save(self._conn)
- assert isinstance(process.id_, int)
- # set relations to Conditions and ProcessSteps / other Processes
- process.set_condition_relations(self._conn, *cond_rels)
- owned_steps = [ProcessStep.by_id(self._conn, step_id)
- for step_id in kept_steps]
- for parent_step_id, step_process_ids in new_steps_to.items():
- owned_steps += [ProcessStep(None, process.id_, step_process_id,
- parent_step_id)
- for step_process_id in step_process_ids]
- owned_steps += [ProcessStep(None, process.id_, step_process_id, None)
- for step_process_id in new_top_step_proc_ids]
- process.set_step_relations(self._conn, owners_to_set, suppressions,
- owned_steps)
- # encode titles for potential newly-to-create Processes up or down
- params = f'id={process.id_}'
- if new_step_title:
- title_b64_encoded = b64encode(new_step_title.encode()).decode()
- params = f'step_to={process.id_}&title_b64={title_b64_encoded}'
- elif new_owner_title:
- title_b64_encoded = b64encode(new_owner_title.encode()).decode()
- params = f'has_step={process.id_}&title_b64={title_b64_encoded}'
- process.save(self._conn)
- return f'/process?{params}'
-
- def do_POST_condition_descriptions(self) -> str:
- """Update history timestamps for Condition.description."""
- return self._change_versioned_timestamps(Condition, 'description')
-
- def do_POST_condition_titles(self) -> str:
- """Update history timestamps for Condition.title."""
- return self._change_versioned_timestamps(Condition, 'title')
-
- @_delete_or_post(Condition, '/conditions')
- def do_POST_condition(self, condition: Condition) -> str:
- """Update/insert Condition of ?id= and fields defined in postvars."""
- title = self.postvars.get_str_or_fail('title')
- description = self.postvars.get_str_or_fail('description')
- is_active = self.postvars.get_bool('is_active')
- condition.is_active = is_active
- #
- condition.title.set(title)
- condition.description.set(description)
- condition.save(self._conn)
- return f'/condition?id={condition.id_}'
+++ /dev/null
-"""What doesn't fit elsewhere so far."""
-from typing import Any
-
-
-class DictableNode:
- """Template for display chain nodes providing .as_dict_and_refs."""
- # pylint: disable=too-few-public-methods
- _to_dict: list[str] = []
-
- def __init__(self, *args: Any) -> None:
- for i, arg in enumerate(args):
- setattr(self, self._to_dict[i], arg)
-
- @property
- def as_dict_and_refs(self) -> tuple[dict[str, object], list[Any]]:
- """Return self as json.dumps-ready dict, list of referenced objects."""
- d = {}
- refs = []
- for name in self._to_dict:
- attr = getattr(self, name)
- if hasattr(attr, 'id_'):
- d[name] = attr.id_
- continue
- if isinstance(attr, list):
- d[name] = []
- for item in attr:
- item_d, item_refs = item.as_dict_and_refs
- d[name] += [item_d]
- for item_ref in [r for r in item_refs if r not in refs]:
- refs += [item_ref]
- continue
- d[name] = attr
- return d, refs
+++ /dev/null
-"""Collecting Processes and Process-related items."""
-from __future__ import annotations
-from typing import Set, Self, Any
-from sqlite3 import Row
-from plomtask.misc import DictableNode
-from plomtask.db import DatabaseConnection, BaseModel
-from plomtask.versioned_attributes import VersionedAttribute
-from plomtask.conditions import Condition, ConditionsRelations
-from plomtask.exceptions import (NotFoundException, BadFormatException,
- HandledException)
-
-
-class ProcessStepsNode(DictableNode):
- """Collects what's useful to know for ProcessSteps tree display."""
- # pylint: disable=too-few-public-methods
- step: ProcessStep
- process: Process
- is_explicit: bool
- steps: list[ProcessStepsNode]
- seen: bool = False
- is_suppressed: bool = False
- _to_dict = ['step', 'process', 'is_explicit', 'steps', 'seen',
- 'is_suppressed']
-
-
-class Process(BaseModel, ConditionsRelations):
- """Template for, and metadata for, Todos, and their arrangements."""
- # pylint: disable=too-many-instance-attributes
- table_name = 'processes'
- to_save_simples = ['calendarize']
- to_save_relations = [('process_conditions', 'process', 'conditions', 0),
- ('process_blockers', 'process', 'blockers', 0),
- ('process_enables', 'process', 'enables', 0),
- ('process_disables', 'process', 'disables', 0),
- ('process_step_suppressions', 'process',
- 'suppressed_steps', 0)]
- add_to_dict = ['explicit_steps']
- versioned_defaults = {'title': 'UNNAMED', 'description': '', 'effort': 1.0}
- to_search = ['title.newest', 'description.newest']
- can_create_by_id = True
- sorters = {'steps': lambda p: len(p.explicit_steps),
- 'owners': lambda p: p.n_owners,
- 'effort': lambda p: p.effort.newest,
- 'title': lambda p: p.title.newest}
-
- def __init__(self, id_: int | None, calendarize: bool = False) -> None:
- super().__init__(id_)
- ConditionsRelations.__init__(self)
- for name in ['title', 'description', 'effort']:
- attr = VersionedAttribute(self, f'process_{name}s',
- self.versioned_defaults[name])
- setattr(self, name, attr)
- self.explicit_steps: list[ProcessStep] = []
- self.suppressed_steps: list[ProcessStep] = []
- self.calendarize = calendarize
- self.n_owners: int | None = None # only set by from_table_row
-
- @classmethod
- def from_table_row(cls, db_conn: DatabaseConnection, row: Row | list[Any]
- ) -> Self:
- """Make from DB row, with dependencies."""
- process = super().from_table_row(db_conn, row)
- assert process.id_ is not None
- for name in ('conditions', 'blockers', 'enables', 'disables'):
- table = f'process_{name}'
- for c_id in db_conn.column_where(table, 'condition',
- 'process', process.id_):
- target = getattr(process, name)
- target += [Condition.by_id(db_conn, c_id)]
- for row_ in db_conn.row_where('process_steps', 'owner', process.id_):
- # NB: It's tempting to ProcessStep.from_table_row(row_) directly,
- # but we don't want to unnecessarily invalidate cached ProcessSteps
- # elsewhere (notably, other Processes .suppressed_steps), as a
- # complete reload like this would do
- step = ProcessStep.by_id(db_conn, row_[0])
- process.explicit_steps += [step]
- for row_ in db_conn.row_where('process_step_suppressions', 'process',
- process.id_):
- step = ProcessStep.by_id(db_conn, row_[1])
- process.suppressed_steps += [step]
- process.n_owners = len(process.used_as_step_by(db_conn))
- return process
-
- def used_as_step_by(self, db_conn: DatabaseConnection) -> list[Self]:
- """Return Processes using self for a ProcessStep."""
- if not self.id_:
- return []
- owner_ids = set()
- for id_ in db_conn.column_where('process_steps', 'owner',
- 'step_process', self.id_):
- owner_ids.add(id_)
- return [self.__class__.by_id(db_conn, id_) for id_ in owner_ids]
-
- def get_steps(self,
- db_conn: DatabaseConnection,
- external_owner: Self | None = None
- ) -> list[ProcessStepsNode]:
- """Return tree of depended-on explicit and implicit ProcessSteps."""
-
- def make_node(step: ProcessStep, suppressed: bool) -> ProcessStepsNode:
- is_explicit = step.owner_id == top_owner.id_
- process = self.__class__.by_id(db_conn, step.step_process_id)
- step_steps = []
- if not suppressed:
- # exclude implicit siblings to explicit steps of same process
- step_steps = [n for n in process.get_steps(db_conn, top_owner)
- if not [s for s in top_owner.explicit_steps
- if s.parent_step_id == step.id_
- and s.step_process_id == n.process.id_]]
- return ProcessStepsNode(step, process, is_explicit, step_steps,
- False, suppressed)
-
- def walk_steps(node: ProcessStepsNode) -> None:
- node.seen = node.step.id_ in seen_step_ids
- assert isinstance(node.step.id_, int)
- seen_step_ids.add(node.step.id_)
- if node.is_suppressed:
- return
- explicit_children = [s for s in self.explicit_steps
- if s.parent_step_id == node.step.id_]
- for child in explicit_children:
- node.steps += [make_node(child, False)]
- for step in node.steps:
- walk_steps(step)
-
- step_nodes: list[ProcessStepsNode] = []
- seen_step_ids: Set[int] = set()
- top_owner = external_owner or self
- for step in [s for s in self.explicit_steps
- if s.parent_step_id is None]:
- new_node = make_node(step, step in top_owner.suppressed_steps)
- step_nodes += [new_node]
- for step_node in step_nodes:
- walk_steps(step_node)
- return step_nodes
-
- def set_step_relations(self,
- db_conn: DatabaseConnection,
- owners: list[int],
- suppressions: list[int],
- owned_steps: list[ProcessStep]
- ) -> None:
- """Set step owners, suppressions, and owned steps."""
- self._set_owners(db_conn, owners)
- self._set_step_suppressions(db_conn, suppressions)
- self.set_steps(db_conn, owned_steps)
-
- def _set_step_suppressions(self,
- db_conn: DatabaseConnection,
- step_ids: list[int]
- ) -> None:
- """Set self.suppressed_steps from step_ids."""
- assert isinstance(self.id_, int)
- db_conn.delete_where('process_step_suppressions', 'process', self.id_)
- self.suppressed_steps = [ProcessStep.by_id(db_conn, s)
- for s in step_ids]
-
- def _set_owners(self,
- db_conn: DatabaseConnection,
- owner_ids: list[int]
- ) -> None:
- """Re-set owners to those identified in owner_ids."""
- owners_old = self.used_as_step_by(db_conn)
- losers = [o for o in owners_old if o.id_ not in owner_ids]
- owners_old_ids = [o.id_ for o in owners_old]
- winners = [self.by_id(db_conn, id_) for id_ in owner_ids
- if id_ not in owners_old_ids]
- steps_to_remove = []
- for loser in losers:
- steps_to_remove += [s for s in loser.explicit_steps
- if s.step_process_id == self.id_]
- for step in steps_to_remove:
- step.remove(db_conn)
- for winner in winners:
- assert isinstance(winner.id_, int)
- assert isinstance(self.id_, int)
- new_step = ProcessStep(None, winner.id_, self.id_, None)
- new_explicit_steps = winner.explicit_steps + [new_step]
- winner.set_steps(db_conn, new_explicit_steps)
-
- def set_steps(self,
- db_conn: DatabaseConnection,
- steps: list[ProcessStep]
- ) -> None:
- """Set self.explicit_steps in bulk.
-
- Checks against recursion, and turns into top-level steps any of
- unknown or non-owned parent.
- """
- def walk_steps(node: ProcessStep) -> None:
- if node.step_process_id == self.id_:
- raise BadFormatException('bad step selection causes recursion')
- step_process = self.by_id(db_conn, node.step_process_id)
- for step in step_process.explicit_steps:
- walk_steps(step)
-
- # NB: separate the collection of steps to save/remove from the action
- # because the latter may modify the collection / self.explicit_steps
- to_remove = []
- for step in [s for s in self.explicit_steps if s not in steps]:
- to_remove += [step]
- for step in to_remove:
- step.remove(db_conn)
- to_save = []
- for step in [s for s in steps if s not in self.explicit_steps]:
- if step.parent_step_id is not None:
- try:
- parent_step = ProcessStep.by_id(db_conn,
- step.parent_step_id)
- if parent_step.owner_id != self.id_:
- step.parent_step_id = None
- except NotFoundException:
- step.parent_step_id = None
- walk_steps(step)
- to_save += [step]
- for step in to_save:
- step.save(db_conn)
-
- def save(self, db_conn: DatabaseConnection) -> None:
- """Add (or re-write) self and connected items to DB."""
- super().save(db_conn)
- assert isinstance(self.id_, int)
- db_conn.delete_where('process_steps', 'owner', self.id_)
- # NB: we separate the collection of steps to save from step.save()
- # because the latter may modify the collection / self.explicit_steps
- to_save = []
- for step in self.explicit_steps:
- to_save += [step]
- for step in to_save:
- step.save(db_conn)
-
- def remove(self, db_conn: DatabaseConnection) -> None:
- """Remove from DB, with dependencies.
-
- Guard against removal of Processes in use.
- """
- assert isinstance(self.id_, int)
- for _ in db_conn.row_where('process_steps', 'step_process', self.id_):
- raise HandledException('cannot remove Process in use')
- for _ in db_conn.row_where('todos', 'process', self.id_):
- raise HandledException('cannot remove Process in use')
- for step in self.explicit_steps:
- step.remove(db_conn)
- super().remove(db_conn)
-
-
-class ProcessStep(BaseModel):
- """Sub-unit of Processes."""
- table_name = 'process_steps'
- to_save_simples = ['owner_id', 'step_process_id', 'parent_step_id']
-
- def __init__(self, id_: int | None, owner_id: int, step_process_id: int,
- parent_step_id: int | None) -> None:
- super().__init__(id_)
- self.owner_id = owner_id
- self.step_process_id = step_process_id
- self.parent_step_id = parent_step_id
-
- def save(self, db_conn: DatabaseConnection) -> None:
- """Update into DB/cache, and owner's .explicit_steps."""
- super().save(db_conn)
- owner = Process.by_id(db_conn, self.owner_id)
- if self not in owner.explicit_steps:
- for s in [s for s in owner.explicit_steps if s.id_ == self.id_]:
- s.remove(db_conn)
- owner.explicit_steps += [self]
- owner.explicit_steps.sort(key=hash)
-
- def remove(self, db_conn: DatabaseConnection) -> None:
- """Remove from DB, and owner's .explicit_steps."""
- owner = Process.by_id(db_conn, self.owner_id)
- owner.explicit_steps.remove(self)
- super().remove(db_conn)
+++ /dev/null
-"""Actionables."""
-from __future__ import annotations
-from datetime import date as dt_date
-from typing import Any, Self, Set
-from sqlite3 import Row
-from plomtask.misc import DictableNode
-from plomtask.db import DatabaseConnection, BaseModel
-from plomtask.processes import Process, ProcessStepsNode
-from plomtask.versioned_attributes import VersionedAttribute
-from plomtask.conditions import Condition, ConditionsRelations
-from plomtask.exceptions import (NotFoundException, BadFormatException,
- HandledException)
-from plomtask.dating import (
- days_n_from_dt_date, dt_date_from_str, dt_date_from_days_n)
-
-
-class TodoNode(DictableNode):
- """Collects what's useful to know for Todo/Condition tree display."""
- # pylint: disable=too-few-public-methods
- todo: Todo
- seen: bool
- children: list[TodoNode]
- _to_dict = ['todo', 'seen', 'children']
-
-
-class TodoOrProcStepNode(DictableNode):
- """Collect what's useful for Todo-or-ProcessStep tree display."""
- # pylint: disable=too-few-public-methods
- node_id: int
- todo: Todo | None
- process: Process | None
- children: list[TodoOrProcStepNode] # pylint: disable=undefined-variable
- fillable: bool = False
- _to_dict = ['node_id', 'todo', 'process', 'children', 'fillable']
-
-
-class Todo(BaseModel, ConditionsRelations):
- """Individual actionable."""
- # pylint: disable=too-many-instance-attributes
- # pylint: disable=too-many-public-methods
- table_name = 'todos'
- to_save_simples = ['process_id', 'is_done', 'day_id', 'comment', 'effort',
- 'calendarize']
- to_save_relations = [('todo_conditions', 'todo', 'conditions', 0),
- ('todo_blockers', 'todo', 'blockers', 0),
- ('todo_enables', 'todo', 'enables', 0),
- ('todo_disables', 'todo', 'disables', 0),
- ('todo_children', 'parent', 'children', 0),
- ('todo_children', 'child', 'parents', 1)]
- to_search = ['comment']
- days_to_update: Set[int] = set()
- children: list[Todo]
- parents: list[Todo]
- sorters = {'doneness': lambda t: t.is_done,
- 'title': lambda t: t.title_then,
- 'comment': lambda t: t.comment,
- 'date': lambda t: t.day_id}
-
- # pylint: disable=too-many-arguments
- def __init__(self, id_: int | None,
- process: Process,
- is_done: bool,
- day_id: int,
- comment: str = '',
- effort: None | float = None,
- calendarize: bool = False
- ) -> None:
- super().__init__(id_)
- ConditionsRelations.__init__(self)
- if process.id_ is None:
- raise NotFoundException('Process of Todo without ID (not saved?)')
- self.process = process
- self._is_done = is_done
- self.day_id = day_id
- self.comment = comment
- self.effort = effort
- self.children = []
- self.parents = []
- self.calendarize = calendarize
- if not self.id_:
- self.calendarize = self.process.calendarize
- self.conditions = self.process.conditions[:]
- self.blockers = self.process.blockers[:]
- self.enables = self.process.enables[:]
- self.disables = self.process.disables[:]
-
- @property
- def date(self) -> str:
- """Return ISO formatted date matching .day_id."""
- return dt_date_from_days_n(self.day_id).isoformat()
-
- @classmethod
- def by_date_range_with_limits(cls,
- db_conn: DatabaseConnection,
- date_range: tuple[str, str],
- ) -> tuple[list[Self], str, str]:
- """Return Todos within (closed) date_range interval.
-
- If no range values provided, defaults them to 'yesterday' and
- 'tomorrow'. Knows to properly interpret these and 'today' as value.
- """
- dt_date_limits: list[dt_date] = []
- for i in range(2):
- dt_date_limits += [
- dt_date_from_str(date_range[i] if date_range[i]
- else ('yesterday', 'tomorrow')[i])]
- items: list[Self] = []
- for row in db_conn.exec(
- f'SELECT id FROM {cls.table_name} WHERE day >= ? AND day <= ?',
- tuple(days_n_from_dt_date(d) for d in dt_date_limits),
- build_q_marks=False):
- items += [cls.by_id(db_conn, row[0])]
- return (items,
- dt_date_limits[0].isoformat(), dt_date_limits[1].isoformat())
-
- def ensure_children(self, db_conn: DatabaseConnection) -> None:
- """Ensure Todo children (create or adopt) demanded by Process chain."""
-
- def walk_steps(parent: Self, step_node: ProcessStepsNode) -> Todo:
- adoptables = [t for t in self.by_date(db_conn, parent.date)
- if (t not in parent.children)
- and (t != parent)
- and step_node.process.id_ == t.process_id]
- satisfier = None
- for adoptable in adoptables:
- satisfier = adoptable
- break
- if not satisfier:
- satisfier = self.__class__(None, step_node.process, False,
- parent.day_id)
- satisfier.save(db_conn)
- sub_step_nodes = sorted(
- step_node.steps,
- key=lambda s: s.process.id_ if s.process.id_ else 0)
- for sub_node in sub_step_nodes:
- if sub_node.is_suppressed:
- continue
- n_slots = len([n for n in sub_step_nodes
- if n.process == sub_node.process])
- filled_slots = len([t for t in satisfier.children
- if t.process.id_ == sub_node.process.id_])
- # if we did not newly create satisfier, it may already fill
- # some step dependencies, so only fill what remains open
- if n_slots - filled_slots > 0:
- satisfier.add_child(walk_steps(satisfier, sub_node))
- satisfier.save(db_conn)
- return satisfier
-
- process = Process.by_id(db_conn, self.process_id)
- steps_tree = process.get_steps(db_conn)
- for step_node in steps_tree:
- if step_node.is_suppressed:
- continue
- self.add_child(walk_steps(self, step_node))
- self.save(db_conn)
-
- @classmethod
- def from_table_row(cls, db_conn: DatabaseConnection,
- row: Row | list[Any]) -> Self:
- """Make from DB row, with dependencies."""
- if row[1] == 0:
- raise NotFoundException('calling Todo of '
- 'unsaved Process')
- row_as_list = list(row)
- row_as_list[1] = Process.by_id(db_conn, row[1])
- todo = super().from_table_row(db_conn, row_as_list)
- assert isinstance(todo.id_, int)
- for t_id in db_conn.column_where('todo_children', 'child',
- 'parent', todo.id_):
- todo.children += [cls.by_id(db_conn, t_id)]
- for t_id in db_conn.column_where('todo_children', 'parent',
- 'child', todo.id_):
- todo.parents += [cls.by_id(db_conn, t_id)]
- for name in ('conditions', 'blockers', 'enables', 'disables'):
- table = f'todo_{name}'
- for cond_id in db_conn.column_where(table, 'condition',
- 'todo', todo.id_):
- target = getattr(todo, name)
- target += [Condition.by_id(db_conn, cond_id)]
- return todo
-
- @classmethod
- def by_process_id(cls, db_conn: DatabaseConnection,
- process_id: int | None) -> list[Self]:
- """Collect all Todos of Process of process_id."""
- return [t for t in cls.all(db_conn) if t.process.id_ == process_id]
-
- @classmethod
- def by_date(cls, db_conn: DatabaseConnection, date: str) -> list[Self]:
- """Collect all Todos for Day of date."""
- return cls.by_date_range_with_limits(db_conn, (date, date))[0]
-
- @property
- def is_doable(self) -> bool:
- """Decide whether .is_done settable based on children, Conditions."""
- for child in self.children:
- if not child.is_done:
- return False
- for condition in self.conditions:
- if not condition.is_active:
- return False
- for condition in self.blockers:
- if condition.is_active:
- return False
- return True
-
- @property
- def is_deletable(self) -> bool:
- """Decide whether self be deletable (not if preserve-worthy values)."""
- if self.comment:
- return False
- if self.effort and self.effort >= 0:
- return False
- return True
-
- @property
- def performed_effort(self) -> float:
- """Return performed effort, i.e. self.effort or default if done.."""
- if self.effort is not None:
- return self.effort
- if self.is_done:
- return self.effort_then
- return 0
-
- @property
- def process_id(self) -> int:
- """Needed for super().save to save Processes as attributes."""
- assert isinstance(self.process.id_, int)
- return self.process.id_
-
- @property
- def is_done(self) -> bool:
- """Wrapper around self._is_done so we can control its setter."""
- return self._is_done
-
- @is_done.setter
- def is_done(self, value: bool) -> None:
- if value != self.is_done and not self.is_doable:
- raise BadFormatException('cannot change doneness of undoable Todo')
- if self._is_done != value:
- self._is_done = value
- if value is True:
- for condition in self.enables:
- condition.is_active = True
- for condition in self.disables:
- condition.is_active = False
-
- @property
- def title(self) -> VersionedAttribute:
- """Shortcut to .process.title."""
- assert isinstance(self.process.title, VersionedAttribute)
- return self.process.title
-
- @property
- def title_then(self) -> str:
- """Shortcut to .process.title.at(self.date)."""
- title_then = self.process.title.at(self.date)
- assert isinstance(title_then, str)
- return title_then
-
- @property
- def effort_then(self) -> float:
- """Shortcut to .process.effort.at(self.date)"""
- effort_then = self.process.effort.at(self.date)
- assert isinstance(effort_then, float)
- return effort_then
-
- @property
- def has_doneness_in_path(self) -> bool:
- """Check whether self is done or has any children that are."""
- if self.is_done:
- return True
- for child in self.children:
- if child.is_done:
- return True
- if child.has_doneness_in_path:
- return True
- return False
-
- def get_step_tree(self, seen_todos: set[int]) -> TodoNode:
- """Return tree of depended-on Todos."""
-
- def make_node(todo: Self) -> TodoNode:
- children = []
- seen = todo.id_ in seen_todos
- assert isinstance(todo.id_, int)
- seen_todos.add(todo.id_)
- for child in todo.children:
- children += [make_node(child)]
- return TodoNode(todo, seen, children)
-
- return make_node(self)
-
- @property
- def tree_effort(self) -> float:
- """Return sum of performed efforts of self and all descendants."""
-
- def walk_tree(node: Self) -> float:
- local_effort = 0.0
- for child in node.children:
- local_effort += walk_tree(child)
- return node.performed_effort + local_effort
-
- return walk_tree(self)
-
- def add_child(self, child: Self) -> None:
- """Add child to self.children, avoid recursion, update parenthoods."""
-
- def walk_steps(node: Self) -> None:
- if node.id_ == self.id_:
- raise BadFormatException('bad child choice causes recursion')
- for child in node.children:
- walk_steps(child)
-
- if self.id_ is None:
- raise HandledException('Can only add children to saved Todos.')
- if child.id_ is None:
- raise HandledException('Can only add saved children to Todos.')
- if child in self.children:
- raise BadFormatException('cannot adopt same child twice')
- walk_steps(child)
- self.children += [child]
- child.parents += [self]
-
- def remove_child(self, child: Self) -> None:
- """Remove child from self.children, update counter relations."""
- if child not in self.children:
- raise HandledException('Cannot remove un-parented child.')
- self.children.remove(child)
- child.parents.remove(self)
-
- def update_attrs(self, **kwargs: Any) -> None:
- """Update self's attributes listed in kwargs."""
- for k, v in kwargs.items():
- setattr(self, k, v)
-
- def save(self, db_conn: DatabaseConnection) -> None:
- """On save calls, also check if auto-deletion by effort < 0."""
- if self.effort and self.effort < 0 and self.is_deletable:
- self.remove(db_conn)
- return
- if self.id_ is None:
- self.__class__.days_to_update.add(self.day_id)
- super().save(db_conn)
- for condition in self.enables + self.disables + self.conditions:
- condition.save(db_conn)
-
- def remove(self, db_conn: DatabaseConnection) -> None:
- """Remove from DB, including relations."""
- if not self.is_deletable:
- raise HandledException('Cannot remove non-deletable Todo.')
- self.__class__.days_to_update.add(self.day_id)
- children_to_remove = self.children[:]
- parents_to_remove = self.parents[:]
- for child in children_to_remove:
- self.remove_child(child)
- for parent in parents_to_remove:
- parent.remove_child(self)
- super().remove(db_conn)
+++ /dev/null
-"""Attributes whose values are recorded as a timestamped history."""
-from datetime import datetime
-from typing import Any
-from sqlite3 import Row
-from time import sleep
-from plomtask.db import DatabaseConnection
-from plomtask.exceptions import (HandledException, BadFormatException,
- NotFoundException)
-
-TIMESTAMP_FMT = '%Y-%m-%d %H:%M:%S.%f'
-
-
-class VersionedAttribute:
- """Attributes whose values are recorded as a timestamped history."""
-
- def __init__(self,
- parent: Any, table_name: str, default: str | float) -> None:
- self.parent = parent
- self.table_name = table_name
- self._default = default
- self.history: dict[str, str | float] = {}
- # NB: For tighter mypy testing, we might prefer self.history to be
- # dict[str, float] | dict[str, str] instead, but my current coding
- # knowledge only manages to make that work by adding much further
- # complexity, so let's leave it at that for now …
-
- def __hash__(self) -> int:
- history_tuples = tuple((k, v) for k, v in self.history.items())
- hashable = (self.parent.id_, self.table_name, self._default,
- history_tuples)
- return hash(hashable)
-
- @property
- def _newest_timestamp(self) -> str:
- """Return most recent timestamp."""
- return sorted(self.history.keys())[-1]
-
- @property
- def value_type_name(self) -> str:
- """Return string of name of attribute value type."""
- return type(self._default).__name__
-
- @property
- def newest(self) -> str | float:
- """Return most recent value, or self._default if self.history empty."""
- if 0 == len(self.history):
- return self._default
- return self.history[self._newest_timestamp]
-
- def reset_timestamp(self, old_str: str, new_str: str) -> None:
- """Rename self.history key (timestamp) old to new.
-
- Chronological sequence of keys must be preserved, i.e. cannot move
- key before earlier or after later timestamp.
- """
- try:
- new = datetime.strptime(new_str, TIMESTAMP_FMT)
- old = datetime.strptime(old_str, TIMESTAMP_FMT)
- except ValueError as exc:
- raise BadFormatException('Timestamp of illegal format.') from exc
- timestamps = list(self.history.keys())
- if old_str not in timestamps:
- raise HandledException(f'Timestamp {old} not found in history.')
- sorted_timestamps = sorted([datetime.strptime(t, TIMESTAMP_FMT)
- for t in timestamps])
- expected_position = sorted_timestamps.index(old)
- sorted_timestamps.remove(old)
- sorted_timestamps += [new]
- sorted_timestamps.sort()
- if sorted_timestamps.index(new) != expected_position:
- raise HandledException('Timestamp not respecting chronology.')
- value = self.history[old_str]
- del self.history[old_str]
- self.history[new_str] = value
-
- def set(self, value: str | float) -> None:
- """Add to self.history if and only if not same value as newest one.
-
- Note that we wait one micro-second, as timestamp comparison to check
- most recent elements only goes up to that precision.
-
- Also note that we don't check against .newest because that may make us
- compare value against .default even if not set. We want to be able to
- explicitly set .default as the first element.
- """
- sleep(0.00001)
- if 0 == len(self.history) \
- or value != self.history[self._newest_timestamp]:
- self.history[datetime.now().strftime(TIMESTAMP_FMT)] = value
-
- def history_from_row(self, row: Row) -> None:
- """Extend self.history from expected table row format."""
- self.history[row[1]] = row[2]
-
- def at(self, queried_time: str) -> str | float:
- """Retrieve value of timestamp nearest queried_time from the past."""
- if len(queried_time) == 10:
- queried_time += ' 23:59:59.999'
- sorted_timestamps = sorted(self.history.keys())
- if 0 == len(sorted_timestamps):
- return self._default
- selected_timestamp = sorted_timestamps[0]
- for timestamp in sorted_timestamps[1:]:
- if timestamp > queried_time:
- break
- selected_timestamp = timestamp
- return self.history[selected_timestamp]
-
- def save(self, db_conn: DatabaseConnection) -> None:
- """Save as self.history entries, but first wipe old ones."""
- if self.parent.id_ is None:
- raise NotFoundException('cannot save attribute to parent if no ID')
- db_conn.rewrite_relations(self.table_name, 'parent', self.parent.id_,
- [[item[0], item[1]]
- for item in self.history.items()])
-
- def remove(self, db_conn: DatabaseConnection) -> None:
- """Remove from DB."""
- db_conn.delete_where(self.table_name, 'parent', self.parent.id_)
+++ /dev/null
-Jinja2==3.1.3
-unittest-parallel==1.6.1
+++ /dev/null
-#!/usr/bin/env python3
-"""Call this to start the application."""
-from sys import exit as sys_exit
-from os import environ
-from pathlib import Path
-from plomtask.exceptions import HandledException
-from plomtask.http import TaskHandler, TaskServer
-from plomtask.db import DatabaseFile
-from plomlib.db import PlomDbException
-
-PLOMTASK_DB_PATH = environ.get('PLOMTASK_DB_PATH')
-HTTP_PORT = 8082
-DB_CREATION_ASK = 'Database file not found. Create? Y/n\n'
-DB_MIGRATE_ASK = 'Database file needs migration. Migrate? Y/n\n'
-
-
-def yes_or_fail(question: str, fail_msg: str) -> None:
- """Ask question, raise HandledException(fail_msg) if reply not yes."""
- reply = input(question)
- if not reply.lower() in {'y', 'yes', 'yes.', 'yes!'}:
- print('Not recognizing reply as "yes".')
- raise HandledException(fail_msg)
-
-
-if __name__ == '__main__':
- try:
- if not PLOMTASK_DB_PATH:
- raise HandledException('PLOMTASK_DB_PATH not set.')
- db_path = Path(PLOMTASK_DB_PATH)
- try:
- db_file = DatabaseFile(db_path)
- except PlomDbException as e:
- if e.name == 'no_is_file':
- yes_or_fail(DB_CREATION_ASK, 'Cannot run without DB.')
- DatabaseFile.create(db_path)
- elif e.name == 'bad_version':
- yes_or_fail(DB_MIGRATE_ASK, 'Cannot run with unmigrated DB.')
- db_file = DatabaseFile(db_path, skip_validations=True)
- db_file.migrate(set())
- else:
- raise e
- else:
- server = TaskServer(db_file, ('localhost', HTTP_PORT), TaskHandler)
- print(f'running at port {HTTP_PORT}')
- try:
- server.serve_forever()
- except KeyboardInterrupt:
- print('aborting due to keyboard interrupt')
- server.server_close()
- except HandledException as e:
- print(f'Aborting because: {e}')
- sys_exit(1)
#!/bin/sh
-set -e
+cd src
for dir in $(echo '.' 'plomtask' 'tests'); do
echo "Running mypy on ${dir}/ …."
python3 -m mypy ${dir}/*.py
done
echo "Running unittest-parallel on tests/."
unittest-parallel -t . -s tests/ -p '*.py'
-set +e
rm test_db:*
-set -e
-exit 0
--- /dev/null
+[BASIC]
+init-hook='import sys; sys.path.append(".")'
+good-names-rgxs=(.*_)?(GET|POST)(_.+)?,,test_[A-Z]+
--- /dev/null
+CREATE TABLE condition_descriptions (
+ parent INTEGER NOT NULL,
+ timestamp TEXT NOT NULL,
+ description TEXT NOT NULL,
+ PRIMARY KEY (parent, timestamp),
+ FOREIGN KEY (parent) REFERENCES conditions(id)
+);
+CREATE TABLE condition_titles (
+ parent INTEGER NOT NULL,
+ timestamp TEXT NOT NULL,
+ title TEXT NOT NULL,
+ PRIMARY KEY (parent, timestamp),
+ FOREIGN KEY (parent) REFERENCES conditions(id)
+);
+CREATE TABLE conditions (
+ id INTEGER PRIMARY KEY,
+ is_active BOOLEAN NOT NULL
+);
+CREATE TABLE days (
+ id TEXT PRIMARY KEY,
+ comment TEXT NOT NULL
+);
+CREATE TABLE process_conditions (
+ process INTEGER NOT NULL,
+ condition INTEGER NOT NULL,
+ PRIMARY KEY (process, condition),
+ FOREIGN KEY (process) REFERENCES processes(id),
+ FOREIGN KEY (condition) REFERENCES conditions(id)
+);
+CREATE TABLE process_descriptions (
+ parent INTEGER NOT NULL,
+ timestamp TEXT NOT NULL,
+ description TEXT NOT NULL,
+ PRIMARY KEY (parent, timestamp),
+ FOREIGN KEY (parent) REFERENCES processes(id)
+);
+CREATE TABLE process_disables (
+ process INTEGER NOT NULL,
+ condition INTEGER NOT NULL,
+ PRIMARY KEY(process, condition),
+ FOREIGN KEY (process) REFERENCES processes(id),
+ FOREIGN KEY (condition) REFERENCES conditions(id)
+);
+CREATE TABLE process_efforts (
+ parent INTEGER NOT NULL,
+ timestamp TEXT NOT NULL,
+ effort REAL NOT NULL,
+ PRIMARY KEY (parent, timestamp),
+ FOREIGN KEY (parent) REFERENCES processes(id)
+);
+CREATE TABLE process_enables (
+ process INTEGER NOT NULL,
+ condition INTEGER NOT NULL,
+ PRIMARY KEY(process, condition),
+ FOREIGN KEY (process) REFERENCES processes(id),
+ FOREIGN KEY (condition) REFERENCES conditions(id)
+);
+CREATE TABLE process_steps (
+ id INTEGER PRIMARY KEY,
+ owner INTEGER NOT NULL,
+ step_process INTEGER NOT NULL,
+ parent_step INTEGER,
+ FOREIGN KEY (owner) REFERENCES processes(id),
+ FOREIGN KEY (step_process) REFERENCES processes(id),
+ FOREIGN KEY (parent_step) REFERENCES process_steps(step_id)
+);
+CREATE TABLE process_titles (
+ parent INTEGER NOT NULL,
+ timestamp TEXT NOT NULL,
+ title TEXT NOT NULL,
+ PRIMARY KEY (parent, timestamp),
+ FOREIGN KEY (parent) REFERENCES processes(id)
+);
+CREATE TABLE processes (
+ id INTEGER PRIMARY KEY
+);
+CREATE TABLE todo_children (
+ parent INTEGER NOT NULL,
+ child INTEGER NOT NULL,
+ PRIMARY KEY (parent, child),
+ FOREIGN KEY (parent) REFERENCES todos(id),
+ FOREIGN KEY (child) REFERENCES todos(id)
+);
+CREATE TABLE todo_conditions (
+ todo INTEGER NOT NULL,
+ condition INTEGER NOT NULL,
+ PRIMARY KEY(todo, condition),
+ FOREIGN KEY (todo) REFERENCES todos(id),
+ FOREIGN KEY (condition) REFERENCES conditions(id)
+);
+CREATE TABLE todo_disables (
+ todo INTEGER NOT NULL,
+ condition INTEGER NOT NULL,
+ PRIMARY KEY(todo, condition),
+ FOREIGN KEY (todo) REFERENCES todos(id),
+ FOREIGN KEY (condition) REFERENCES conditions(id)
+);
+CREATE TABLE todo_enables (
+ todo INTEGER NOT NULL,
+ condition INTEGER NOT NULL,
+ PRIMARY KEY(todo, condition),
+ FOREIGN KEY (todo) REFERENCES todos(id),
+ FOREIGN KEY (condition) REFERENCES conditions(id)
+);
+CREATE TABLE todos (
+ id INTEGER PRIMARY KEY,
+ process INTEGER NOT NULL,
+ is_done BOOLEAN NOT NULL,
+ day TEXT NOT NULL,
+ FOREIGN KEY (process) REFERENCES processes(id),
+ FOREIGN KEY (day) REFERENCES days(id)
+);
--- /dev/null
+ALTER TABLE todos ADD COLUMN comment TEXT NOT NULL DEFAULT "";
--- /dev/null
+ALTER TABLE todos ADD COLUMN effort REAL;
--- /dev/null
+ALTER TABLE todos ADD COLUMN calendarize BOOLEAN NOT NULL DEFAULT FALSE;
+ALTER TABLE processes ADD COLUMN calendarize BOOLEAN NOT NULL DEFAULT FALSE;
--- /dev/null
+CREATE TABLE process_blockers (
+ process INTEGER NOT NULL,
+ condition INTEGER NOT NULL,
+ PRIMARY KEY (process, condition),
+ FOREIGN KEY (process) REFERENCES processes(id),
+ FOREIGN KEY (condition) REFERENCES conditions(id)
+);
+CREATE TABLE todo_blockers (
+ todo INTEGER NOT NULL,
+ condition INTEGER NOT NULL,
+ PRIMARY KEY (todo, condition),
+ FOREIGN KEY (todo) REFERENCES todos(id),
+ FOREIGN KEY (condition) REFERENCES conditions(id)
+);
--- /dev/null
+CREATE TABLE process_step_suppressions (
+ process INTEGER NOT NULL,
+ process_step INTEGER NOT NULL,
+ PRIMARY KEY (process, process_step),
+ FOREIGN KEY (process) REFERENCES processes(id),
+ FOREIGN KEY (process_step) REFERENCES process_steps(id)
+);
+
--- /dev/null
+ALTER TABLE days ADD COLUMN days_since_millennium INTEGER NOT NULL DEFAULT 0;
+
--- /dev/null
+ALTER TABLE todos ADD COLUMN new_day_id INTEGER;
+UPDATE todos SET new_day_id = (
+ SELECT days.days_since_millennium
+ FROM days
+ WHERE days.id = todos.day);
+
+CREATE TABLE days_new (
+ id INTEGER PRIMARY KEY,
+ comment TEXT NOT NULL
+);
+INSERT INTO days_new SELECT
+ days_since_millennium,
+ comment
+FROM days;
+DROP TABLE days;
+ALTER TABLE days_new RENAME TO days;
+
+CREATE TABLE todos_new (
+ id INTEGER PRIMARY KEY,
+ process INTEGER NOT NULL,
+ is_done BOOLEAN NOT NULL,
+ day INTEGER NOT NULL,
+ comment TEXT NOT NULL DEFAULT "",
+ effort REAL,
+ calendarize BOOLEAN NOT NULL DEFAULT FALSE,
+ FOREIGN KEY (process) REFERENCES processes(id),
+ FOREIGN KEY (day) REFERENCES days(id)
+);
+INSERT INTO todos_new SELECT
+ id,
+ process,
+ is_done,
+ new_day_id,
+ comment,
+ effort,
+ calendarize
+FROM todos;
+DROP TABLE todos;
+ALTER TABLE todos_new RENAME TO todos;
--- /dev/null
+-- Full schema init (presumably init_7.sql, cf. _EXPECTED_DB_VERSION = 7 in
+-- plomtask/db.py). "days"/"todos" carry quoted names as left by the
+-- table-rebuild migration.
+CREATE TABLE "days" (
+ id INTEGER PRIMARY KEY,
+ comment TEXT NOT NULL
+);
+-- NOTE(review): DEFAULT "" relies on SQLite's double-quoted-string
+-- misfeature; single quotes ('') would be the portable spelling.
+CREATE TABLE "todos" (
+ id INTEGER PRIMARY KEY,
+ process INTEGER NOT NULL,
+ is_done BOOLEAN NOT NULL,
+ day INTEGER NOT NULL,
+ comment TEXT NOT NULL DEFAULT "",
+ effort REAL,
+ calendarize BOOLEAN NOT NULL DEFAULT FALSE,
+ FOREIGN KEY (process) REFERENCES processes(id),
+ FOREIGN KEY (day) REFERENCES days(id)
+);
+-- Versioned attributes: one (parent, timestamp) row per historical value.
+CREATE TABLE condition_descriptions (
+ parent INTEGER NOT NULL,
+ timestamp TEXT NOT NULL,
+ description TEXT NOT NULL,
+ PRIMARY KEY (parent, timestamp),
+ FOREIGN KEY (parent) REFERENCES conditions(id)
+);
+CREATE TABLE condition_titles (
+ parent INTEGER NOT NULL,
+ timestamp TEXT NOT NULL,
+ title TEXT NOT NULL,
+ PRIMARY KEY (parent, timestamp),
+ FOREIGN KEY (parent) REFERENCES conditions(id)
+);
+CREATE TABLE conditions (
+ id INTEGER PRIMARY KEY,
+ is_active BOOLEAN NOT NULL
+);
+-- Process/Condition relation tables (blockers, conditions, disables,
+-- enables) all share the same two-column composite-key shape.
+CREATE TABLE process_blockers (
+ process INTEGER NOT NULL,
+ condition INTEGER NOT NULL,
+ PRIMARY KEY (process, condition),
+ FOREIGN KEY (process) REFERENCES processes(id),
+ FOREIGN KEY (condition) REFERENCES conditions(id)
+);
+CREATE TABLE process_conditions (
+ process INTEGER NOT NULL,
+ condition INTEGER NOT NULL,
+ PRIMARY KEY (process, condition),
+ FOREIGN KEY (process) REFERENCES processes(id),
+ FOREIGN KEY (condition) REFERENCES conditions(id)
+);
+CREATE TABLE process_descriptions (
+ parent INTEGER NOT NULL,
+ timestamp TEXT NOT NULL,
+ description TEXT NOT NULL,
+ PRIMARY KEY (parent, timestamp),
+ FOREIGN KEY (parent) REFERENCES processes(id)
+);
+CREATE TABLE process_disables (
+ process INTEGER NOT NULL,
+ condition INTEGER NOT NULL,
+ PRIMARY KEY(process, condition),
+ FOREIGN KEY (process) REFERENCES processes(id),
+ FOREIGN KEY (condition) REFERENCES conditions(id)
+);
+CREATE TABLE process_efforts (
+ parent INTEGER NOT NULL,
+ timestamp TEXT NOT NULL,
+ effort REAL NOT NULL,
+ PRIMARY KEY (parent, timestamp),
+ FOREIGN KEY (parent) REFERENCES processes(id)
+);
+CREATE TABLE process_enables (
+ process INTEGER NOT NULL,
+ condition INTEGER NOT NULL,
+ PRIMARY KEY(process, condition),
+ FOREIGN KEY (process) REFERENCES processes(id),
+ FOREIGN KEY (condition) REFERENCES conditions(id)
+);
+CREATE TABLE process_step_suppressions (
+ process INTEGER NOT NULL,
+ process_step INTEGER NOT NULL,
+ PRIMARY KEY (process, process_step),
+ FOREIGN KEY (process) REFERENCES processes(id),
+ FOREIGN KEY (process_step) REFERENCES process_steps(id)
+);
+-- Steps of a Process: each row links an owning Process to a sub-Process,
+-- optionally nested under a parent step.
+-- FIX: the parent_step FK previously referenced process_steps(step_id),
+-- a column that does not exist on this table (its PK is "id"); with
+-- PRAGMA foreign_keys=ON every insert would fail on the bad reference.
+CREATE TABLE process_steps (
+ id INTEGER PRIMARY KEY,
+ owner INTEGER NOT NULL,
+ step_process INTEGER NOT NULL,
+ parent_step INTEGER,
+ FOREIGN KEY (owner) REFERENCES processes(id),
+ FOREIGN KEY (step_process) REFERENCES processes(id),
+ FOREIGN KEY (parent_step) REFERENCES process_steps(id)
+);
+CREATE TABLE process_titles (
+ parent INTEGER NOT NULL,
+ timestamp TEXT NOT NULL,
+ title TEXT NOT NULL,
+ PRIMARY KEY (parent, timestamp),
+ FOREIGN KEY (parent) REFERENCES processes(id)
+);
+CREATE TABLE processes (
+ id INTEGER PRIMARY KEY,
+ calendarize BOOLEAN NOT NULL DEFAULT FALSE
+);
+-- Todo/Condition relation tables mirror the process_* relation tables.
+CREATE TABLE todo_blockers (
+ todo INTEGER NOT NULL,
+ condition INTEGER NOT NULL,
+ PRIMARY KEY (todo, condition),
+ FOREIGN KEY (todo) REFERENCES todos(id),
+ FOREIGN KEY (condition) REFERENCES conditions(id)
+);
+-- Parent/child structure between Todos.
+CREATE TABLE todo_children (
+ parent INTEGER NOT NULL,
+ child INTEGER NOT NULL,
+ PRIMARY KEY (parent, child),
+ FOREIGN KEY (parent) REFERENCES todos(id),
+ FOREIGN KEY (child) REFERENCES todos(id)
+);
+CREATE TABLE todo_conditions (
+ todo INTEGER NOT NULL,
+ condition INTEGER NOT NULL,
+ PRIMARY KEY(todo, condition),
+ FOREIGN KEY (todo) REFERENCES todos(id),
+ FOREIGN KEY (condition) REFERENCES conditions(id)
+);
+CREATE TABLE todo_disables (
+ todo INTEGER NOT NULL,
+ condition INTEGER NOT NULL,
+ PRIMARY KEY(todo, condition),
+ FOREIGN KEY (todo) REFERENCES todos(id),
+ FOREIGN KEY (condition) REFERENCES conditions(id)
+);
+CREATE TABLE todo_enables (
+ todo INTEGER NOT NULL,
+ condition INTEGER NOT NULL,
+ PRIMARY KEY(todo, condition),
+ FOREIGN KEY (todo) REFERENCES todos(id),
+ FOREIGN KEY (condition) REFERENCES conditions(id)
+);
--- /dev/null
+Subproject commit dee7c0f6218e6bdd07b477dc5d9e4b5540ffcf4a
--- /dev/null
+"""Non-doable elements of ProcessStep/Todo chains."""
+from __future__ import annotations
+from plomtask.db import DatabaseConnection, BaseModel
+from plomtask.versioned_attributes import VersionedAttribute
+from plomtask.exceptions import HandledException
+
+
+class Condition(BaseModel):
+ """Non-Process dependency for ProcessSteps and Todos."""
+ table_name = 'conditions'
+ to_save_simples = ['is_active']
+ versioned_defaults = {'title': 'UNNAMED', 'description': ''}
+ to_search = ['title.newest', 'description.newest']
+ can_create_by_id = True
+ sorters = {'is_active': lambda c: c.is_active,
+ 'title': lambda c: c.title.newest}
+
+ def __init__(self, id_: int | None, is_active: bool = False) -> None:
+ super().__init__(id_)
+ self.is_active = is_active
+ # Wrap title/description as VersionedAttributes backed by the
+ # condition_titles / condition_descriptions tables.
+ for name in ['title', 'description']:
+ attr = VersionedAttribute(self, f'condition_{name}s',
+ self.versioned_defaults[name])
+ setattr(self, name, attr)
+
+ def remove(self, db_conn: DatabaseConnection) -> None:
+ """Remove from DB, with VersionedAttributes.
+
+ Checks for Todos and Processes that depend on Condition, prohibits
+ deletion if found.
+ """
+ if self.id_ is not None:
+ # Scan all eight relation tables ({process,todo} x {conditions,
+ # blockers, enables, disables}) for any use of this Condition.
+ for item in ('process', 'todo'):
+ for attr in ('conditions', 'blockers', 'enables', 'disables'):
+ table_name = f'{item}_{attr}'
+ for _ in db_conn.row_where(table_name, 'condition',
+ self.id_):
+ msg = 'cannot remove Condition in use'
+ raise HandledException(msg)
+ super().remove(db_conn)
+
+
+class ConditionsRelations:
+ """Methods for handling relations to Conditions, for Todo and Process."""
+ # pylint: disable=too-few-public-methods
+
+ def __init__(self) -> None:
+ # Four owned Condition lists: dependencies (conditions/blockers)
+ # and effects (enables/disables).
+ self.conditions: list[Condition] = []
+ self.blockers: list[Condition] = []
+ self.enables: list[Condition] = []
+ self.disables: list[Condition] = []
+
+ def set_condition_relations(self,
+ db_conn: DatabaseConnection,
+ ids_conditions: list[int],
+ ids_blockers: list[int],
+ ids_enables: list[int],
+ ids_disables: list[int]
+ ) -> None:
+ """Set owned Condition lists to those identified by respective IDs."""
+ # pylint: disable=too-many-arguments
+ for ids, target in [(ids_conditions, 'conditions'),
+ (ids_blockers, 'blockers'),
+ (ids_enables, 'enables'),
+ (ids_disables, 'disables')]:
+ target_list = getattr(self, target)
+ # Empty in place (rather than rebind) so external references to
+ # the list object stay valid, then refill from the DB by ID.
+ while len(target_list) > 0:
+ target_list.pop()
+ for id_ in ids:
+ target_list += [Condition.by_id(db_conn, id_)]
--- /dev/null
+"""Various utilities for handling dates."""
+from datetime import date as dt_date, timedelta
+from plomtask.exceptions import BadFormatException
+
+
+def dt_date_from_str(date_str: str) -> dt_date:
+ """Validate against ISO format, colloq. terms; return as datetime.date.
+
+ 'today'/'yesterday'/'tomorrow' are resolved relative to the current
+ date; anything else must parse as an ISO-format date, otherwise
+ BadFormatException is raised.
+ """
+ if date_str == 'today':
+ date_str = date_in_n_days(0)
+ elif date_str == 'yesterday':
+ date_str = date_in_n_days(-1)
+ elif date_str == 'tomorrow':
+ date_str = date_in_n_days(1)
+ try:
+ date = dt_date.fromisoformat(date_str)
+ except (ValueError, TypeError) as e:
+ msg = f'Given date of wrong format: {date_str}'
+ raise BadFormatException(msg) from e
+ return date
+
+
+def days_n_from_dt_date(date: dt_date) -> int:
+ """Return number of days from Jan 1st 2000 to datetime.date.
+
+ Negative for dates before the millennium; inverse of
+ dt_date_from_days_n.
+ """
+ return (date - dt_date(2000, 1, 1)).days
+
+
+def dt_date_from_days_n(days_n: int) -> dt_date:
+ """Return datetime.date for days_n after Jan 1st 2000.
+
+ Inverse of days_n_from_dt_date.
+ """
+ return dt_date(2000, 1, 1) + timedelta(days=days_n)
+
+
+def date_in_n_days(n: int) -> str:
+ """Return in ISO format date from today + n days.
+
+ n may be negative for past dates.
+ """
+ date = dt_date.today() + timedelta(days=n)
+ return date.isoformat()
--- /dev/null
+"""Collecting Day and date-related items."""
+from __future__ import annotations
+from typing import Any, Self
+from sqlite3 import Row
+from datetime import date as dt_date, timedelta
+from plomtask.db import DatabaseConnection, BaseModel
+from plomtask.todos import Todo
+from plomtask.dating import dt_date_from_days_n, days_n_from_dt_date
+
+
+class Day(BaseModel):
+ """Individual days defined by their dates."""
+ table_name = 'days'
+ to_save_simples = ['comment']
+ add_to_dict = ['todos']
+ can_create_by_id = True
+
+ def __init__(self, id_: int, comment: str = '') -> None:
+ # id_ is the number of days since 2000-01-01, cf. dt_date_from_days_n.
+ super().__init__(id_)
+ self.comment = comment
+ self.todos: list[Todo] = []
+
+ @classmethod
+ def from_table_row(cls, db_conn: DatabaseConnection, row: Row | list[Any]
+ ) -> Self:
+ """Make from DB row, with linked Todos."""
+ day = super().from_table_row(db_conn, row)
+ day.todos = Todo.by_date(db_conn, day.date)
+ return day
+
+ @classmethod
+ def by_id(cls, db_conn: DatabaseConnection, id_: int) -> Self:
+ """Check Todo.days_to_update on whether to update a retrieved
+ Day's .todos."""
+ day = super().by_id(db_conn, id_)
+ assert isinstance(day.id_, int)
+ if day.id_ in Todo.days_to_update:
+ Todo.days_to_update.remove(day.id_)
+ day.todos = Todo.by_date(db_conn, day.date)
+ return day
+
+ @classmethod
+ def with_filled_gaps(
+ cls, conn: DatabaseConnection, dt_start: dt_date, dt_end: dt_date
+ ) -> list[Self]:
+ """Show days >= start_date, <= end_date, fill gaps with un-storeds."""
+ if dt_start > dt_end:
+ return []
+ start_n_days = days_n_from_dt_date(dt_start)
+ end_n_days = days_n_from_dt_date(dt_end)
+ # Stored Days inside the requested range, ascending by id.
+ ranged_days = [d for d in cls.all(conn)
+ if isinstance(d.id_, int)
+ and d.id_ >= start_n_days and d.id_ <= end_n_days]
+ ranged_days.sort()
+ # Pad the range's edges with fresh (unsaved) Days where needed.
+ if (not ranged_days) or (isinstance(ranged_days[0].id_, int)
+ and start_n_days < ranged_days[0].id_):
+ ranged_days.insert(0, cls(start_n_days))
+ assert isinstance(ranged_days[-1].id_, int)
+ if end_n_days > ranged_days[-1].id_:
+ ranged_days.append(cls(end_n_days))
+ if len(ranged_days) > 1:
+ # Walk stored Days pairwise, inserting fresh Days for every
+ # missing id between consecutive entries.
+ degapped_ranged_days = []
+ for i, day in enumerate(ranged_days):
+ degapped_ranged_days += [day]
+ if i < len(ranged_days) - 1:
+ next_one = ranged_days[i+1]
+ assert isinstance(day.id_, int)
+ assert isinstance(next_one.id_, int)
+ while day.id_ + 1 != next_one.id_:
+ assert isinstance(day.id_, int)
+ day = cls(day.id_ + 1)
+ degapped_ranged_days += [day]
+ return degapped_ranged_days
+ return ranged_days
+
+ @property
+ def _dt_date(self) -> dt_date:
+ """Return chronological location as datetime.date."""
+ assert isinstance(self.id_, int)
+ return dt_date_from_days_n(self.id_)
+
+ @property
+ def date(self) -> str:
+ """Return chronological location as ISO format date."""
+ return self._dt_date.isoformat()
+
+ @property
+ def first_of_month(self) -> bool:
+ """Return if self is first day of a month."""
+ # ISO dates end in the two-digit day-of-month.
+ return self.date[-2:] == '01'
+
+ @property
+ def month_name(self) -> str:
+ """Return name of month self is part of."""
+ return self._dt_date.strftime('%B')
+
+ @property
+ def weekday(self) -> str:
+ """Return weekday name matching self."""
+ return self._dt_date.strftime('%A')
+
+ @property
+ def prev_date(self) -> str:
+ """Return ISO-formatted date preceding date of self."""
+ return (self._dt_date - timedelta(days=1)).isoformat()
+
+ @property
+ def next_date(self) -> str:
+ """Return ISO-formatted date succeeding date of this Day."""
+ return (self._dt_date + timedelta(days=1)).isoformat()
+
+ @property
+ def calendarized_todos(self) -> list[Todo]:
+ """Return only those of self.todos that have .calendarize set."""
+ return [t for t in self.todos if t.calendarize]
+
+ @property
+ def total_effort(self) -> float:
+ """Sum all .performed_effort of self.todos."""
+ total_effort = 0.0
+ for todo in self.todos:
+ total_effort += todo.performed_effort
+ return total_effort
--- /dev/null
+"""Database management."""
+from __future__ import annotations
+from datetime import date as dt_date
+from os import listdir
+from pathlib import Path
+from sqlite3 import Row
+from typing import cast, Any, Self, Callable
+from plomtask.exceptions import (HandledException, NotFoundException,
+ BadFormatException)
+from plomlib.db import (
+ PlomDbConn, PlomDbFile, PlomDbMigration, TypePlomDbMigration)
+
+# Current schema version; bump together with a new init_<N>.sql full-schema
+# file and (if needed) migration files under _MIGRATIONS_DIR.
+_EXPECTED_DB_VERSION = 7
+_MIGRATIONS_DIR = Path('migrations')
+_FILENAME_DB_SCHEMA = f'init_{_EXPECTED_DB_VERSION}.sql'
+_PATH_DB_SCHEMA = _MIGRATIONS_DIR.joinpath(_FILENAME_DB_SCHEMA)
+
+
+def _mig_6_calc_days_since_millennium(conn: PlomDbConn) -> None:
+ """Post-SQL step for migration 6: backfill days_since_millennium.
+
+ Reads each days row, computes the day count from its ISO-date primary
+ key (row[0]), and writes it into the freshly added last column
+ (row[-1], previously DEFAULT 0).
+ """
+ rows = conn.exec('SELECT * FROM days').fetchall()
+ for row in [list(r) for r in rows]:
+ row[-1] = (dt_date.fromisoformat(row[0]) - dt_date(2000, 1, 1)).days
+ conn.exec('REPLACE INTO days VALUES', tuple(row))
+
+
+# Maps a target schema version to a Python step run after that version's SQL.
+MIGRATION_STEPS_POST_SQL: dict[int, Callable[[PlomDbConn], None]] = {
+ 6: _mig_6_calc_days_since_millennium
+}
+
+
+class DatabaseMigration(PlomDbMigration):
+ """Collects and enacts DatabaseFile migration commands."""
+ migs_dir_path = _MIGRATIONS_DIR
+
+ @classmethod
+ def gather(cls, from_version: int, base_set: set[TypePlomDbMigration]
+ ) -> list[TypePlomDbMigration]:
+ """Collect migrations from_version+1 .. _EXPECTED_DB_VERSION.
+
+ Builds one migration per version from the SQL files found in
+ migs_dir_path (named '<version>_<rest>'), attaching any matching
+ Python post-SQL step from MIGRATION_STEPS_POST_SQL; versions with
+ only a Python step and no SQL file get a file-less migration.
+ Raises HandledException on unexpected, missing, or duplicate
+ migrations.
+ """
+ msg_prefix = 'Migration directory contains'
+ msg_bad_entry = f'{msg_prefix} unexpected entry: '
+ migs = []
+ total_migs = set()
+ post_sql_steps_added = set()
+ for entry in [e for e in listdir(cls.migs_dir_path)
+ if e != _FILENAME_DB_SCHEMA]:
+ path = cls.migs_dir_path.joinpath(entry)
+ if not path.is_file():
+ continue
+ toks = entry.split('_', maxsplit=1)
+ if len(toks) < 2 or (not toks[0].isdigit()):
+ raise HandledException(f'{msg_bad_entry}{entry}')
+ i = int(toks[0])
+ if i <= from_version:
+ continue
+ if i > _EXPECTED_DB_VERSION:
+ raise HandledException(f'{msg_prefix} unexpected version {i}')
+ post_sql_steps = MIGRATION_STEPS_POST_SQL.get(i, None)
+ if post_sql_steps:
+ post_sql_steps_added.add(i)
+ total_migs.add(cls(i, Path(entry), post_sql_steps))
+ # Versions with a Python step but no SQL file still need a migration.
+ for k in [k for k in MIGRATION_STEPS_POST_SQL
+ if k > from_version
+ and k not in post_sql_steps_added]:
+ total_migs.add(cls(k, None, MIGRATION_STEPS_POST_SQL[k]))
+ # Validate: exactly one migration per intermediate version, in order.
+ for i in range(from_version + 1, _EXPECTED_DB_VERSION + 1):
+ migs_found = [m for m in total_migs if m.target_version == i]
+ if not migs_found:
+ raise HandledException(f'{msg_prefix} no migration of v. {i}')
+ if len(migs_found) > 1:
+ raise HandledException(f'{msg_prefix} >1 migration of v. {i}')
+ migs += migs_found
+ return cast(list[TypePlomDbMigration], migs)
+
+
+class DatabaseFile(PlomDbFile):
+ """File readable as DB of expected schema, user version."""
+ # Parent class presumably uses these to validate/migrate the DB file –
+ # behavior defined in plomlib.db, not visible here.
+ target_version = _EXPECTED_DB_VERSION
+ path_schema = _PATH_DB_SCHEMA
+ mig_class = DatabaseMigration
+
+
+class DatabaseConnection(PlomDbConn):
+ """A single connection to the database."""
+
+ def close(self) -> None:
+ """Shortcut to sqlite3.Connection.close()."""
+ self._conn.close()
+
+ def rewrite_relations(self, table_name: str, key: str, target: int | str,
+ rows: list[list[Any]], key_index: int = 0) -> None:
+ # pylint: disable=too-many-arguments
+ """Rewrite relations in table_name to target, with rows values.
+
+ Note that single rows are expected without the column and value
+ identified by key and target, which are inserted inside the function
+ at key_index.
+ """
+ # Delete-then-reinsert: simplest way to sync the relation set.
+ self.delete_where(table_name, key, target)
+ for row in rows:
+ values = tuple(row[:key_index] + [target] + row[key_index:])
+ self.exec(f'INSERT INTO {table_name} VALUES', values)
+
+ def row_where(self, table_name: str, key: str,
+ target: int | str) -> list[Row]:
+ """Return list of Rows at table where key == target."""
+ return list(self.exec(f'SELECT * FROM {table_name} WHERE {key} =',
+ (target,)))
+
+ # Kept as reference for a possible SQL-side search; current search is
+ # done in Python via BaseModel.matching instead.
+ # def column_where_pattern(self,
+ # table_name: str,
+ # column: str,
+ # pattern: str,
+ # keys: list[str]) -> list[Any]:
+ # """Return column of rows where one of keys matches pattern."""
+ # targets = tuple([f'%{pattern}%'] * len(keys))
+ # haystack = ' OR '.join([f'{k} LIKE ?' for k in keys])
+ # sql = f'SELECT {column} FROM {table_name} WHERE {haystack}'
+ # return [row[0] for row in self.exec(sql, targets)]
+
+ def column_where(self, table_name: str, column: str, key: str,
+ target: int | str) -> list[Any]:
+ """Return column of table where key == target."""
+ return [row[0] for row in
+ self.exec(f'SELECT {column} FROM {table_name} '
+ f'WHERE {key} =', (target,))]
+
+ def column_all(self, table_name: str, column: str) -> list[Any]:
+ """Return complete column of table."""
+ return [row[0] for row in
+ self.exec(f'SELECT {column} FROM {table_name}')]
+
+ def delete_where(self, table_name: str, key: str,
+ target: int | str) -> None:
+ """Delete from table where key == target."""
+ self.exec(f'DELETE FROM {table_name} WHERE {key} =', (target,))
+
+
+class BaseModel:
+ """Template for most of the models we use/derive from the DB."""
+ table_name = ''
+ to_save_simples: list[str] = []
+ # Entries are (table_name, column, attr_name, key_index) – cf. .save().
+ to_save_relations: list[tuple[str, str, str, int]] = []
+ versioned_defaults: dict[str, str | float] = {}
+ add_to_dict: list[str] = []
+ id_: None | int
+ cache_: dict[int, Self]
+ to_search: list[str] = []
+ can_create_by_id = False
+ _exists = True
+ sorters: dict[str, Callable[..., Any]] = {}
+
+ def __init__(self, id_: int | None) -> None:
+ if isinstance(id_, int) and id_ < 1:
+ msg = f'illegal {self.__class__.__name__} ID, must be >=1: {id_}'
+ raise BadFormatException(msg)
+ self.id_ = id_
+
+ def __hash__(self) -> int:
+ # Hash covers ID, simple fields, related objects' IDs, and versioned
+ # attributes' histories, so __eq__ compares full savable state.
+ hashable = [self.id_] + [getattr(self, name)
+ for name in self.to_save_simples]
+ for definition in self.to_save_relations:
+ attr = getattr(self, definition[2])
+ hashable += [tuple(rel.id_ for rel in attr)]
+ for name in self.to_save_versioned():
+ hashable += [hash(getattr(self, name))]
+ return hash(tuple(hashable))
+
+ def __eq__(self, other: object) -> bool:
+ if not isinstance(other, self.__class__):
+ return False
+ return hash(self) == hash(other)
+
+ def __lt__(self, other: Any) -> bool:
+ # Default sort order is by ID; both IDs must be set.
+ if not isinstance(other, self.__class__):
+ msg = 'cannot compare to object of different class'
+ raise HandledException(msg)
+ assert isinstance(self.id_, int)
+ assert isinstance(other.id_, int)
+ return self.id_ < other.id_
+
+ @classmethod
+ def to_save_versioned(cls) -> list[str]:
+ """Return keys of cls.versioned_defaults assuming we wanna save 'em."""
+ return list(cls.versioned_defaults.keys())
+
+ @property
+ def as_dict_and_refs(self) -> tuple[dict[str, object], list[Self]]:
+ """Return self as json.dumps-ready dict, list of referenced objects."""
+ d: dict[str, object] = {'id': self.id_}
+ refs: list[Self] = []
+ for to_save in self.to_save_simples:
+ d[to_save] = getattr(self, to_save)
+ if len(self.to_save_versioned()) > 0:
+ d['_versioned'] = {}
+ for k in self.to_save_versioned():
+ attr = getattr(self, k)
+ assert isinstance(d['_versioned'], dict)
+ d['_versioned'][k] = attr.history
+ # Related objects appear as ID lists in d; the objects themselves
+ # are collected into refs for the caller to serialize separately.
+ rels_to_collect = [rel[2] for rel in self.to_save_relations]
+ rels_to_collect += self.add_to_dict
+ for attr_name in rels_to_collect:
+ rel_list = []
+ for item in getattr(self, attr_name):
+ rel_list += [item.id_]
+ if item not in refs:
+ refs += [item]
+ d[attr_name] = rel_list
+ return d, refs
+
+ @classmethod
+ def name_lowercase(cls) -> str:
+ """Convenience method to return cls' name in lowercase."""
+ return cls.__name__.lower()
+
+ @classmethod
+ def sort_by(cls, seq: list[Any], sort_key: str, default: str = 'title'
+ ) -> str:
+ """Sort cls list by cls.sorters[sort_key] (reverse if '-'-prefixed).
+
+ Before cls.sorters[sort_key] is applied, seq is sorted by .id_, to
+ ensure predictability where parts of seq are of same sort value.
+ """
+ reverse = False
+ if len(sort_key) > 1 and '-' == sort_key[0]:
+ sort_key = sort_key[1:]
+ reverse = True
+ if sort_key not in cls.sorters:
+ sort_key = default
+ seq.sort(key=lambda x: x.id_, reverse=reverse)
+ sorter: Callable[..., Any] = cls.sorters[sort_key]
+ seq.sort(key=sorter, reverse=reverse)
+ if reverse:
+ sort_key = f'-{sort_key}'
+ return sort_key
+
+ # cache management
+ # (we primarily use the cache to ensure we work on the same object in
+ # memory no matter where and how we retrieve it, e.g. we don't want
+ # .by_id() calls to create a new object each time, but rather a pointer
+ # to the one already instantiated)
+
+ def __getattribute__(self, name: str) -> Any:
+ """Ensure fail if ._disappear() was called, except to check ._exists"""
+ if name != '_exists' and not super().__getattribute__('_exists'):
+ msg = f'Object for attribute does not exist: {name}'
+ raise HandledException(msg)
+ return super().__getattribute__(name)
+
+ def _disappear(self) -> None:
+ """Invalidate object, make future use raise exceptions."""
+ assert self.id_ is not None
+ if self._get_cached(self.id_):
+ self._uncache()
+ to_kill = list(self.__dict__.keys())
+ for attr in to_kill:
+ delattr(self, attr)
+ # From here on, __getattribute__ raises for any other attribute.
+ self._exists = False
+
+ @classmethod
+ def empty_cache(cls) -> None:
+ """Empty class's cache, and disappear all former inhabitants."""
+ # pylint: disable=protected-access
+ # (cause we remain within the class)
+ if hasattr(cls, 'cache_'):
+ to_disappear = list(cls.cache_.values())
+ for item in to_disappear:
+ item._disappear()
+ cls.cache_ = {}
+
+ @classmethod
+ def get_cache(cls) -> dict[int, Self]:
+ """Get cache dictionary, create it if not yet existing."""
+ if not hasattr(cls, 'cache_'):
+ d: dict[int, Self] = {}
+ cls.cache_ = d
+ return cls.cache_
+
+ @classmethod
+ def _get_cached(cls, id_: int) -> Self | None:
+ """Get object of id_ from class's cache, or None if not found."""
+ cache = cls.get_cache()
+ if id_ in cache:
+ obj = cache[id_]
+ return obj
+ return None
+
+ def cache(self) -> None:
+ """Update object in class's cache.
+
+ Also calls ._disappear if cache holds older reference to object of same
+ ID, but different memory address, to avoid doing anything with
+ dangling leftovers.
+ """
+ if self.id_ is None:
+ raise HandledException('Cannot cache object without ID.')
+ cache = self.get_cache()
+ old_cached = self._get_cached(self.id_)
+ if old_cached and id(old_cached) != id(self):
+ # pylint: disable=protected-access
+ # (cause we remain within the class)
+ old_cached._disappear()
+ cache[self.id_] = self
+
+ def _uncache(self) -> None:
+ """Remove self from cache."""
+ if self.id_ is None:
+ raise HandledException('Cannot un-cache object without ID.')
+ cache = self.get_cache()
+ del cache[self.id_]
+
+ # object retrieval and generation
+
+ @classmethod
+ def from_table_row(cls,
+ db_conn: DatabaseConnection,
+ row: Row | list[Any]) -> Self:
+ """Make from DB row (sans relations), update DB cache with it."""
+ # Row column order is assumed to match the constructor's parameters.
+ obj = cls(*row)
+ assert obj.id_ is not None
+ for attr_name in cls.to_save_versioned():
+ attr = getattr(obj, attr_name)
+ table_name = attr.table_name
+ for row_ in db_conn.row_where(table_name, 'parent', obj.id_):
+ attr.history_from_row(row_)
+ obj.cache()
+ return obj
+
+ @classmethod
+ def by_id(cls, db_conn: DatabaseConnection, id_: int) -> Self:
+ """Retrieve by id_, on failure throw NotFoundException.
+
+ First try to get from cls.cache_, only then check DB; if found,
+ put into cache.
+ """
+ obj = None
+ # Defensive guard: annotation says int, but callers may pass None.
+ if id_ is not None:
+ if isinstance(id_, int) and id_ == 0:
+ raise BadFormatException('illegal ID of value 0')
+ obj = cls._get_cached(id_)
+ if not obj:
+ for row in db_conn.row_where(cls.table_name, 'id', id_):
+ obj = cls.from_table_row(db_conn, row)
+ break
+ if obj:
+ return obj
+ raise NotFoundException(f'found no object of ID {id_}')
+
+ @classmethod
+ def by_id_or_create(cls, db_conn: DatabaseConnection, id_: int | None
+ ) -> Self:
+ """Wrapper around .by_id, creating (not caching/saving) if no find."""
+ if not cls.can_create_by_id:
+ raise HandledException('Class cannot .by_id_or_create.')
+ if id_ is None:
+ return cls(None)
+ try:
+ return cls.by_id(db_conn, id_)
+ except NotFoundException:
+ return cls(id_)
+
+ @classmethod
+ def all(cls, db_conn: DatabaseConnection) -> list[Self]:
+ """Collect all objects of class into list.
+
+ Note that this primarily returns the contents of the cache, and only
+ _expands_ that by additional findings in the DB. This assumes the
+ cache is always instantly cleaned of any items that would be removed
+ from the DB.
+ """
+ items: dict[int, Self] = {}
+ for k, v in cls.get_cache().items():
+ items[k] = v
+ already_recorded = items.keys()
+ for id_ in db_conn.column_all(cls.table_name, 'id'):
+ if id_ not in already_recorded:
+ item = cls.by_id(db_conn, id_)
+ assert item.id_ is not None
+ items[item.id_] = item
+ return sorted(list(items.values()))
+
+ @classmethod
+ def matching(cls, db_conn: DatabaseConnection, pattern: str) -> list[Self]:
+ """Return all objects whose .to_search match pattern."""
+ items = cls.all(db_conn)
+ if pattern:
+ filtered = []
+ for item in items:
+ # to_search entries may be dotted paths, e.g. 'title.newest'.
+ for attr_name in cls.to_search:
+ toks = attr_name.split('.')
+ parent = item
+ for tok in toks:
+ attr = getattr(parent, tok)
+ parent = attr
+ if pattern in attr:
+ filtered += [item]
+ break
+ return filtered
+ return items
+
+ # database writing
+
+ def save(self, db_conn: DatabaseConnection) -> None:
+ """Write self to DB and cache and ensure .id_.
+
+ Write both to DB, and to cache. To DB, write .id_ and attributes
+ listed in cls.to_save_[simples|versioned|_relations].
+
+ Ensure self.id_ by setting it to what the DB command returns as the
+ last saved row's ID (cursor.lastrowid); where .id_ was already set,
+ the REPLACE INTO keeps that rowid.
+ """
+ values = tuple([self.id_] + [getattr(self, key)
+ for key in self.to_save_simples])
+ table_name = self.table_name
+ cursor = db_conn.exec(f'REPLACE INTO {table_name} VALUES', values)
+ self.id_ = cursor.lastrowid
+ self.cache()
+ for attr_name in self.to_save_versioned():
+ getattr(self, attr_name).save(db_conn)
+ for table, column, attr_name, key_index in self.to_save_relations:
+ assert isinstance(self.id_, int)
+ db_conn.rewrite_relations(table, column, self.id_,
+ [[i.id_] for i
+ in getattr(self, attr_name)], key_index)
+
+ def remove(self, db_conn: DatabaseConnection) -> None:
+ """Remove from DB and cache, including dependencies."""
+ # Only cached (i.e. previously saved) objects may be removed.
+ if self.id_ is None or self._get_cached(self.id_) is None:
+ raise HandledException('cannot remove unsaved item')
+ for attr_name in self.to_save_versioned():
+ getattr(self, attr_name).remove(db_conn)
+ for table, column, attr_name, _ in self.to_save_relations:
+ db_conn.delete_where(table, column, self.id_)
+ self._uncache()
+ db_conn.delete_where(self.table_name, 'id', self.id_)
+ self._disappear()
--- /dev/null
+"""Exceptions triggering different HTTP codes."""
+
+
+class HandledException(Exception):
+ """To identify Exceptions based on expected (if faulty) user behavior."""
+ # HTTP status the web layer should answer with for this exception class.
+ http_code = 500
+
+
+class BadFormatException(HandledException):
+ """To identify Exceptions on malformed inputs."""
+ # 400 Bad Request
+ http_code = 400
+
+
+class NotFoundException(HandledException):
+ """To identify Exceptions on unsuccessful queries."""
+ # 404 Not Found
+ http_code = 404
--- /dev/null
+"""Web server stuff."""
+from __future__ import annotations
+from pathlib import Path
+from inspect import signature
+from typing import Any, Callable
+from base64 import b64encode, b64decode
+from binascii import Error as binascii_Exception
+from json import dumps as json_dumps
+from plomtask.dating import (
+ days_n_from_dt_date, dt_date_from_str, date_in_n_days)
+from plomtask.days import Day
+from plomtask.exceptions import (HandledException, BadFormatException,
+ NotFoundException)
+from plomtask.db import DatabaseConnection, DatabaseFile, BaseModel
+from plomtask.processes import Process, ProcessStep, ProcessStepsNode
+from plomtask.conditions import Condition
+from plomtask.todos import Todo, TodoOrProcStepNode
+from plomtask.misc import DictableNode
+from plomlib.web import PlomHttpServer, PlomHttpHandler, PlomQueryMap
+
+TEMPLATES_DIR = Path('templates')
+
+
+class TaskServer(PlomHttpServer):
+ """Extends parent by DatabaseFile .db and .render_mode='html'."""
+
+ def __init__(self, db_file: DatabaseFile, *args, **kwargs) -> None:
+ super().__init__(TEMPLATES_DIR, *args, **kwargs)
+ self.db = db_file
+ # 'html' renders templates; any other value makes handlers emit
+ # JSON (cf. TaskHandler._send_page), used for automated testing.
+ self.render_mode = 'html'
+
+
+class InputsParser(PlomQueryMap):
+ """Wrapper for validating and retrieving dict-like HTTP inputs."""
+
+ def get_all_str(self, key: str) -> list[str]:
+ """Retrieve list of string values at key (empty if no key)."""
+ return self.all(key) or []
+
+ def get_all_int(self, key: str, fail_on_empty: bool = False) -> list[int]:
+ """Retrieve list of int values at key.
+
+ With fail_on_empty, empty strings are passed to int() and thus
+ raise BadFormatException; otherwise they are silently skipped.
+ """
+ all_str = self.get_all_str(key)
+ try:
+ return [int(s) for s in all_str if fail_on_empty or s != '']
+ except ValueError as e:
+ msg = f'cannot int a form field value for key {key} in: {all_str}'
+ raise BadFormatException(msg) from e
+
+ def get_str(self, key: str, default: str | None = None) -> str | None:
+ """Retrieve single/first string value of key, or default."""
+ first = self.first(key)
+ return default if first is None else first
+
+ def get_str_or_fail(self, key: str, default: str | None = None) -> str:
+ """Retrieve first string value of key, if none: fail or default."""
+ vals = self.get_all_str(key)
+ if not vals:
+ if default is not None:
+ return default
+ raise BadFormatException(f'no value found for key: {key}')
+ return vals[0]
+
+ def get_int_or_none(self, key: str) -> int | None:
+ """Retrieve single/first value of key as int, return None if empty."""
+ val = self.get_str_or_fail(key, '')
+ if val == '':
+ return None
+ try:
+ return int(val)
+ except (ValueError, TypeError) as e:
+ msg = f'cannot int form field value for key {key}: {val}'
+ raise BadFormatException(msg) from e
+
+ def get_bool(self, key: str) -> bool:
+ """Return if value to key truish; return False if None/no value."""
+ return self.get_str(key) in {'True', 'true', '1', 'on'}
+
+ def get_all_of_key_prefixed(self, key_prefix: str) -> dict[str, list[str]]:
+ """Retrieve dict of strings at keys starting with key_prefix.
+
+ Returned keys have key_prefix stripped off.
+ """
+ ret = {}
+ for key in self.keys_prefixed(key_prefix):
+ ret[key[len(key_prefix):]] = self.as_dict[key]
+ return ret
+
+ def get_float_or_fail(self, key: str) -> float:
+ """Retrieve float value of key from self.postvars, fail if none."""
+ val = self.get_str_or_fail(key)
+ try:
+ return float(val)
+ except ValueError as e:
+ msg = f'cannot float form field value for key {key}: {val}'
+ raise BadFormatException(msg) from e
+
+ def get_all_floats_or_nones(self, key: str) -> list[float | None]:
+ """Retrieve list of float value at key, None if empty strings."""
+ ret: list[float | None] = []
+ for val in self.get_all_str(key):
+ if '' == val:
+ ret += [None]
+ else:
+ try:
+ ret += [float(val)]
+ except ValueError as e:
+ msg = f'cannot float form field value for key {key}: {val}'
+ raise BadFormatException(msg) from e
+ return ret
+
+
+class TaskHandler(PlomHttpHandler):
+    """Handles single HTTP request."""
+    # pylint: disable=too-many-public-methods
+    server: TaskServer
+    params: InputsParser
+    postvars: InputsParser
+    mapper = InputsParser
+    _conn: DatabaseConnection
+    _site: str
+
+    def _send_page(
+            self, ctx: dict[str, Any], tmpl_name: str, code: int = 200
+    ) -> None:
+        """HTTP-send ctx as HTML or JSON, as defined by .server.render_mode.
+
+        The differentiation by .server.render_mode serves to allow easily
+        comparable JSON responses for automatic testing.
+        """
+        if 'html' == self.server.render_mode:
+            self.send_rendered(Path(f'{tmpl_name}.html'), ctx, code)
+        else:
+            self.send_http(self._ctx_to_json(ctx).encode(),
+                           [('Content-Type', 'application/json')],
+                           code)
+
+    def _ctx_to_json(self, ctx: dict[str, object]) -> str:
+        """Render ctx into JSON string.
+
+        Flattens any objects that json.dumps might not want to serialize, and
+        turns occurrences of BaseModel objects into listings of their .id_, to
+        be resolved to a full dict inside a top-level '_library' dictionary,
+        to avoid endless and circular nesting.
+        """
+
+        def flatten(node: object) -> object:
+
+            def update_library_with(item: BaseModel) -> None:
+                cls_name = item.__class__.__name__
+                if cls_name not in library:
+                    library[cls_name] = {}
+                if item.id_ not in library[cls_name]:
+                    d, refs = item.as_dict_and_refs
+                    # unsaved items (id_ None) get key -1 so the library
+                    # dict stays integer-keyed
+                    id_key = -1 if item.id_ is None else item.id_
+                    library[cls_name][id_key] = d
+                    for ref in refs:
+                        update_library_with(ref)
+
+            if isinstance(node, BaseModel):
+                update_library_with(node)
+                return node.id_
+            if isinstance(node, DictableNode):
+                d, refs = node.as_dict_and_refs
+                for ref in refs:
+                    update_library_with(ref)
+                return d
+            if isinstance(node, (list, tuple)):
+                return [flatten(item) for item in node]
+            if isinstance(node, dict):
+                d = {}
+                for k, v in node.items():
+                    d[k] = flatten(v)
+                return d
+            if isinstance(node, HandledException):
+                return str(node)
+            return node
+
+        library: dict[str, dict[int, object]] = {}
+        for k, v in ctx.items():
+            ctx[k] = flatten(v)
+        ctx['_library'] = library
+        return json_dumps(ctx)
+
+    @staticmethod
+    def _request_wrapper(http_method: str, not_found_msg: str
+                         ) -> Callable[..., Callable[[TaskHandler], None]]:
+        """Wrapper for do_GET… and do_POST… handlers, to init and clean up.
+
+        Among other things, conditionally cleans all caches, but only on POST
+        requests, as only those are expected to change the states of objects
+        that may be cached, and certainly only those are expected to write any
+        changes to the database. We want to call them as early though as
+        possible here, either exactly after the specific request handler
+        returns successfully, or right after any exception is triggered –
+        otherwise, race conditions become plausible.
+
+        Note that otherwise any POST attempt, even a failed one, may end in
+        problematic inconsistencies:
+
+        - if the POST handler experiences an Exception, changes to objects
+          won't get written to the DB, but the changed objects may remain in
+          the cache and affect other objects despite their possibly illegal
+          state
+
+        - even if an object was just saved to the DB, we cannot be sure its
+          current state is completely identical to what we'd get if loading it
+          fresh from the DB (e.g. currently Process.n_owners is only updated
+          when loaded anew via .from_table_row, nor is its state written to
+          the DB by .save; a questionable design choice, but proof that we
+          have no guarantee that objects' .save stores all their states we'd
+          prefer at their most up-to-date.
+        """
+
+        def clear_caches() -> None:
+            # empty the per-class object caches of all cached model types
+            for cls in (Day, Todo, Condition, Process, ProcessStep):
+                cls.empty_cache()
+
+        def decorator(f: Callable[..., str | None]
+                      ) -> Callable[[TaskHandler], None]:
+            def wrapper(self: TaskHandler) -> None:
+                # pylint: disable=protected-access
+                # (because pylint here fails to detect the use of wrapper as a
+                # method to self with respective access privileges)
+                try:
+                    self._conn = DatabaseConnection(self.server.db)
+                    handler_name = f'do_{http_method}_{self.pagename}'
+                    if hasattr(self, handler_name):
+                        handler = getattr(self, handler_name)
+                        redir_target = f(self, handler)
+                        if 'POST' == http_method:
+                            clear_caches()
+                        if redir_target:
+                            self.redirect(Path(redir_target))
+                    else:
+                        msg = f'{not_found_msg}: {self.pagename}'
+                        raise NotFoundException(msg)
+                except HandledException as error:
+                    if 'POST' == http_method:
+                        clear_caches()
+                    ctx = {'msg': error}
+                    self._send_page(ctx, 'msg', error.http_code)
+                finally:
+                    # DB connection is closed on both success and failure
+                    self._conn.close()
+            return wrapper
+        return decorator
+
+    @_request_wrapper('GET', 'Unknown page')
+    def do_GET(self, handler: Callable[[], str | dict[str, object]]
+               ) -> str | None:
+        """Render page with result of handler, or redirect if result is str."""
+        tmpl_name = f'{self.pagename}'
+        ctx_or_redir_target = handler()
+        if isinstance(ctx_or_redir_target, str):
+            return ctx_or_redir_target
+        self._send_page(ctx_or_redir_target, tmpl_name)
+        return None
+
+    @_request_wrapper('POST', 'Unknown POST target')
+    def do_POST(self, handler: Callable[[], str]) -> str:
+        """Handle POST with handler, prepare redirection to result."""
+        redir_target = handler()
+        self._conn.commit()
+        return redir_target
+
+    # GET handlers
+
+    @staticmethod
+    def _get_item(target_class: Any
+                  ) -> Callable[..., Callable[[TaskHandler],
+                                              dict[str, object]]]:
+        """Wrap GET handler to retrieve the target_class item of ?id=."""
+        def decorator(f: Callable[..., dict[str, object]]
+                      ) -> Callable[[TaskHandler], dict[str, object]]:
+            def wrapper(self: TaskHandler) -> dict[str, object]:
+                # pylint: disable=protected-access
+                # (because pylint here fails to detect the use of wrapper as a
+                # method to self with respective access privileges)
+                id_ = None
+                for val in self.params.get_all_int('id', fail_on_empty=True):
+                    id_ = val
+                if target_class.can_create_by_id:
+                    item = target_class.by_id_or_create(self._conn, id_)
+                else:
+                    item = target_class.by_id(self._conn, id_)
+                if 'exists' in signature(f).parameters:
+                    # NOTE(review): 'exists' is derived from the class cache,
+                    # presumably as a proxy for "already persisted" — confirm
+                    exists = id_ is not None and target_class._get_cached(id_)
+                    return f(self, item, exists)
+                return f(self, item)
+            return wrapper
+        return decorator
+
+    def do_GET_(self) -> str:
+        """Return redirect target on GET /."""
+        return '/day'
+
+    def _do_GET_calendar(self) -> dict[str, object]:
+        """Show Days from ?start= to ?end=.
+
+        Both .do_GET_calendar and .do_GET_calendar_txt refer to this to do the
+        same, the only difference being the HTML template they are rendered to,
+        which .do_GET selects from their method name.
+        """
+        start = self.params.get_str_or_fail('start', '')
+        end = self.params.get_str_or_fail('end', '')
+        # defaults: from yesterday to one year from now
+        dt_start = dt_date_from_str(start if start else date_in_n_days(-1))
+        dt_end = dt_date_from_str(end if end else date_in_n_days(366))
+        days = Day.with_filled_gaps(self._conn, dt_start, dt_end)
+        today = date_in_n_days(0)
+        return {'start': dt_start.isoformat(), 'end': dt_end.isoformat(),
+                'today': today, 'days': days}
+
+    def do_GET_calendar(self) -> dict[str, object]:
+        """Show Days from ?start= to ?end= – normal view."""
+        return self._do_GET_calendar()
+
+    def do_GET_calendar_txt(self) -> dict[str, object]:
+        """Show Days from ?start= to ?end= – minimalist view."""
+        return self._do_GET_calendar()
+
+    def do_GET_day(self) -> dict[str, object]:
+        """Show single Day of ?date=."""
+        date = self.params.get_str('date', date_in_n_days(0))
+        make_type = self.params.get_str_or_fail('make_type', 'full')
+        #
+        assert isinstance(date, str)
+        day = Day.by_id_or_create(self._conn,
+                                  days_n_from_dt_date(dt_date_from_str(date)))
+        conditions_present = []
+        enablers_for = {}
+        disablers_for = {}
+        # collect every Condition referenced by the Day's Todos, and which
+        # Processes enable/disable each of them
+        for todo in day.todos:
+            for condition in todo.conditions + todo.blockers:
+                if condition not in conditions_present:
+                    conditions_present += [condition]
+                    enablers_for[condition.id_] = [p for p in
+                                                   Process.all(self._conn)
+                                                   if condition in p.enables]
+                    disablers_for[condition.id_] = [p for p in
+                                                    Process.all(self._conn)
+                                                    if condition in p.disables]
+        seen_todos: set[int] = set()
+        top_nodes = [t.get_step_tree(seen_todos)
+                     for t in day.todos if not t.parents]
+        return {'day': day,
+                'top_nodes': top_nodes,
+                'make_type': make_type,
+                'enablers_for': enablers_for,
+                'disablers_for': disablers_for,
+                'conditions_present': conditions_present,
+                'processes': Process.all(self._conn)}
+
+    @_get_item(Todo)
+    def do_GET_todo(self, todo: Todo) -> dict[str, object]:
+        """Show single Todo of ?id=."""
+
+        def walk_process_steps(node_id: int,
+                               process_step_nodes: list[ProcessStepsNode],
+                               steps_nodes: list[TodoOrProcStepNode]) -> int:
+            # mirror the ProcessSteps tree into TodoOrProcStepNodes,
+            # assigning consecutive node IDs; returns the last ID used
+            for process_step_node in process_step_nodes:
+                node_id += 1
+                proc = Process.by_id(self._conn,
+                                     process_step_node.step.step_process_id)
+                node = TodoOrProcStepNode(node_id, None, proc, [])
+                steps_nodes += [node]
+                node_id = walk_process_steps(
+                    node_id, process_step_node.steps, node.children)
+            return node_id
+
+        def walk_todo_steps(node_id: int, todos: list[Todo],
+                            steps_nodes: list[TodoOrProcStepNode]) -> int:
+            # fill process-step nodes with matching Todos, appending any
+            # Todos without a matching process node as new nodes
+            for todo in todos:
+                matched = False
+                for match in [item for item in steps_nodes
+                              if item.process
+                              and item.process == todo.process]:
+                    match.todo = todo
+                    matched = True
+                    for child in match.children:
+                        child.fillable = True
+                    node_id = walk_todo_steps(
+                        node_id, todo.children, match.children)
+                if not matched:
+                    node_id += 1
+                    node = TodoOrProcStepNode(node_id, todo, None, [])
+                    steps_nodes += [node]
+                    node_id = walk_todo_steps(
+                        node_id, todo.children, node.children)
+            return node_id
+
+        def collect_adoptables_keys(
+                steps_nodes: list[TodoOrProcStepNode]) -> set[int]:
+            # IDs of Processes of step nodes still lacking a Todo
+            ids = set()
+            for node in steps_nodes:
+                if not node.todo:
+                    assert isinstance(node.process, Process)
+                    assert isinstance(node.process.id_, int)
+                    ids.add(node.process.id_)
+                ids = ids | collect_adoptables_keys(node.children)
+            return ids
+
+        todo_steps = [step.todo for step in todo.get_step_tree(set()).children]
+        process_tree = todo.process.get_steps(self._conn, None)
+        steps_todo_to_process: list[TodoOrProcStepNode] = []
+        last_node_id = walk_process_steps(0, process_tree,
+                                          steps_todo_to_process)
+        for steps_node in steps_todo_to_process:
+            steps_node.fillable = True
+        walk_todo_steps(last_node_id, todo_steps, steps_todo_to_process)
+        adoptables: dict[int, list[Todo]] = {}
+        any_adoptables = [Todo.by_id(self._conn, t.id_)
+                          for t in Todo.by_date(self._conn, todo.date)
+                          if t.id_ is not None
+                          and t != todo]
+        for id_ in collect_adoptables_keys(steps_todo_to_process):
+            adoptables[id_] = [t for t in any_adoptables
+                               if t.process.id_ == id_]
+        return {'todo': todo,
+                'steps_todo_to_process': steps_todo_to_process,
+                'adoption_candidates_for': adoptables,
+                'process_candidates': sorted(Process.all(self._conn)),
+                'todo_candidates': any_adoptables,
+                'condition_candidates': Condition.all(self._conn)}
+
+    def do_GET_todos(self) -> dict[str, object]:
+        """Show Todos from ?start= to ?end=, of ?process=, ?comment= pattern"""
+        sort_by = self.params.get_str_or_fail('sort_by', 'title')
+        start = self.params.get_str_or_fail('start', '')
+        end = self.params.get_str_or_fail('end', '')
+        process_id = self.params.get_int_or_none('process_id')
+        comment_pattern = self.params.get_str_or_fail('comment_pattern', '')
+        #
+        ret = Todo.by_date_range_with_limits(self._conn, (start, end))
+        todos_by_date_range, start, end = ret
+        todos = [t for t in todos_by_date_range
+                 if comment_pattern in t.comment
+                 and ((not process_id) or t.process.id_ == process_id)]
+        sort_by = Todo.sort_by(todos, sort_by)
+        return {'start': start, 'end': end, 'process_id': process_id,
+                'comment_pattern': comment_pattern, 'todos': todos,
+                'all_processes': Process.all(self._conn), 'sort_by': sort_by}
+
+    def do_GET_conditions(self) -> dict[str, object]:
+        """Show all Conditions."""
+        pattern = self.params.get_str_or_fail('pattern', '')
+        sort_by = self.params.get_str_or_fail('sort_by', 'title')
+        #
+        conditions = Condition.matching(self._conn, pattern)
+        sort_by = Condition.sort_by(conditions, sort_by)
+        return {'conditions': conditions,
+                'sort_by': sort_by,
+                'pattern': pattern}
+
+    @_get_item(Condition)
+    def do_GET_condition(self,
+                         c: Condition,
+                         exists: bool
+                         ) -> dict[str, object]:
+        """Show Condition of ?id=."""
+        ps = Process.all(self._conn)
+        return {'condition': c,
+                'is_new': not exists,
+                'enabled_processes': [p for p in ps if c in p.conditions],
+                'disabled_processes': [p for p in ps if c in p.blockers],
+                'enabling_processes': [p for p in ps if c in p.enables],
+                'disabling_processes': [p for p in ps if c in p.disables]}
+
+    @_get_item(Condition)
+    def do_GET_condition_titles(self, c: Condition) -> dict[str, object]:
+        """Show title history of Condition of ?id=."""
+        return {'condition': c}
+
+    @_get_item(Condition)
+    def do_GET_condition_descriptions(self, c: Condition) -> dict[str, object]:
+        """Show description history of Condition of ?id=."""
+        return {'condition': c}
+
+    @_get_item(Process)
+    def do_GET_process(self,
+                       process: Process,
+                       exists: bool
+                       ) -> dict[str, object]:
+        """Show Process of ?id=."""
+        owner_ids = self.params.get_all_int('step_to')
+        owned_ids = self.params.get_all_int('has_step')
+        title_64 = self.params.get_str('title_b64')
+        title_new = None
+        if title_64:
+            try:
+                title_new = b64decode(title_64.encode()).decode()
+            except binascii_Exception as exc:
+                msg = 'invalid base64 for ?title_b64='
+                raise BadFormatException(msg) from exc
+        #
+        if title_new:
+            process.title.set(title_new)
+        preset_top_step = None
+        owners = process.used_as_step_by(self._conn)
+        for step_id in owner_ids:
+            owners += [Process.by_id(self._conn, step_id)]
+        for process_id in owned_ids:
+            Process.by_id(self._conn, process_id)  # to ensure ID exists
+            preset_top_step = process_id
+        return {'process': process,
+                'is_new': not exists,
+                'preset_top_step': preset_top_step,
+                'steps': process.get_steps(self._conn),
+                'owners': owners,
+                'n_todos': len(Todo.by_process_id(self._conn, process.id_)),
+                'process_candidates': Process.all(self._conn),
+                'condition_candidates': Condition.all(self._conn)}
+
+    @_get_item(Process)
+    def do_GET_process_titles(self, p: Process) -> dict[str, object]:
+        """Show title history of Process of ?id=."""
+        return {'process': p}
+
+    @_get_item(Process)
+    def do_GET_process_descriptions(self, p: Process) -> dict[str, object]:
+        """Show description history of Process of ?id=."""
+        return {'process': p}
+
+    @_get_item(Process)
+    def do_GET_process_efforts(self, p: Process) -> dict[str, object]:
+        """Show default effort history of Process of ?id=."""
+        return {'process': p}
+
+    def do_GET_processes(self) -> dict[str, object]:
+        """Show all Processes."""
+        pattern = self.params.get_str_or_fail('pattern', '')
+        sort_by = self.params.get_str_or_fail('sort_by', 'title')
+        #
+        processes = Process.matching(self._conn, pattern)
+        sort_by = Process.sort_by(processes, sort_by)
+        return {'processes': processes, 'sort_by': sort_by, 'pattern': pattern}
+
+    # POST handlers
+
+    @staticmethod
+    def _delete_or_post(target_class: Any, redir_target: str = '/'
+                        ) -> Callable[..., Callable[[TaskHandler], str]]:
+        """Wrap POST handler: delete target_class item on 'delete' postvar,
+        else retrieve/create it by ?id= and pass it on to the handler."""
+        def decorator(f: Callable[..., str]
+                      ) -> Callable[[TaskHandler], str]:
+            def wrapper(self: TaskHandler) -> str:
+                # pylint: disable=protected-access
+                # (because pylint here fails to detect the use of wrapper as a
+                # method to self with respective access privileges)
+                id_ = self.params.get_int_or_none('id')
+                for _ in self.postvars.get_all_str('delete'):
+                    if id_ is None:
+                        msg = 'trying to delete non-saved ' +\
+                              f'{target_class.__name__}'
+                        raise NotFoundException(msg)
+                    item = target_class.by_id(self._conn, id_)
+                    item.remove(self._conn)
+                    return redir_target
+                if target_class.can_create_by_id:
+                    item = target_class.by_id_or_create(self._conn, id_)
+                else:
+                    item = target_class.by_id(self._conn, id_)
+                return f(self, item)
+            return wrapper
+        return decorator
+
+    def _change_versioned_timestamps(self, cls: Any, attr_name: str) -> str:
+        """Update history timestamps for VersionedAttribute."""
+        id_ = self.params.get_int_or_none('id')
+        item = cls.by_id(self._conn, id_)
+        attr = getattr(item, attr_name)
+        for k, vals in self.postvars.get_all_of_key_prefixed('at:').items():
+            # NOTE(review): k[19:] appears to skip a fixed-width timestamp
+            # portion of the form key — confirm against the form template
+            if k[19:] != vals[0]:
+                attr.reset_timestamp(k, f'{vals[0]}.0')
+        attr.save(self._conn)
+        return f'/{cls.name_lowercase()}_{attr_name}s?id={item.id_}'
+
+    def do_POST_day(self) -> str:
+        """Update or insert Day of date and Todos mapped to it."""
+        # pylint: disable=too-many-locals
+        date = self.params.get_str_or_fail('date')
+        day_comment = self.postvars.get_str_or_fail('day_comment')
+        make_type = self.postvars.get_str_or_fail('make_type')
+        old_todos = self.postvars.get_all_int('todo_id')
+        new_todos_by_process = self.postvars.get_all_int('new_todo')
+        comments = self.postvars.get_all_str('comment')
+        efforts = self.postvars.get_all_floats_or_nones('effort')
+        done_todos = self.postvars.get_all_int('done')
+        is_done = [t_id in done_todos for t_id in old_todos]
+        if not (len(old_todos) == len(is_done) == len(comments)
+                == len(efforts)):
+            msg = 'not equal number each of number of todo_id, comments, ' +\
+                  'and efforts inputs'
+            raise BadFormatException(msg)
+        for _ in [id_ for id_ in done_todos if id_ not in old_todos]:
+            raise BadFormatException('"done" field refers to unknown Todo')
+        #
+        day_id = days_n_from_dt_date(dt_date_from_str(date))
+        day = Day.by_id_or_create(self._conn, day_id)
+        day.comment = day_comment
+        day.save(self._conn)
+        new_todos = []
+        for process_id in sorted(new_todos_by_process):
+            process = Process.by_id(self._conn, process_id)
+            todo = Todo(None, process, False, day_id)
+            todo.save(self._conn)
+            new_todos += [todo]
+        if 'full' == make_type:
+            for todo in new_todos:
+                todo.ensure_children(self._conn)
+        for i, todo_id in enumerate(old_todos):
+            todo = Todo.by_id(self._conn, todo_id)
+            todo.is_done = is_done[i]
+            todo.comment = comments[i]
+            todo.effort = efforts[i]
+            todo.save(self._conn)
+        return f'/day?date={date}&make_type={make_type}'
+
+    @_delete_or_post(Todo, '/')
+    def do_POST_todo(self, todo: Todo) -> str:
+        """Update Todo and its children."""
+        # pylint: disable=too-many-locals
+        # pylint: disable=too-many-branches
+        # pylint: disable=too-many-statements
+        assert isinstance(todo.id_, int)
+        adoptees = [(id_, todo.id_) for id_
+                    in self.postvars.get_all_int('adopt')]
+        to_make = {'full': [(id_, todo.id_) for id_
+                            in self.postvars.get_all_int('make_full')],
+                   'empty': [(id_, todo.id_) for id_
+                             in self.postvars.get_all_int('make_empty')]}
+        step_fillers_to = self.postvars.get_all_of_key_prefixed(
+            'step_filler_to_')
+        to_update: dict[str, Any] = {
+            'comment': self.postvars.get_str_or_fail('comment', ''),
+            'is_done': self.postvars.get_bool('is_done'),
+            'calendarize': self.postvars.get_bool('calendarize')}
+        cond_rels = [self.postvars.get_all_int(name) for name in
+                     ['conditions', 'blockers', 'enables', 'disables']]
+        effort_or_not = self.postvars.get_str('effort')
+        if effort_or_not is not None:
+            if effort_or_not == '':
+                to_update['effort'] = None
+            else:
+                try:
+                    to_update['effort'] = float(effort_or_not)
+                except ValueError as e:
+                    msg = 'cannot float form field value for key: effort'
+                    raise BadFormatException(msg) from e
+        for k, fillers in step_fillers_to.items():
+            try:
+                parent_id = int(k)
+            except ValueError as e:
+                msg = f'bad step_filler_to_ key: {k}'
+                raise BadFormatException(msg) from e
+            for filler in [f for f in fillers if f != 'ignore']:
+                target_id: int
+                prefix = 'make_'
+                to_int = filler[5:] if filler.startswith(prefix) else filler
+                try:
+                    target_id = int(to_int)
+                except ValueError as e:
+                    msg = f'bad fill_for target: {filler}'
+                    raise BadFormatException(msg) from e
+                if filler.startswith(prefix):
+                    to_make['empty'] += [(target_id, parent_id)]
+                else:
+                    adoptees += [(target_id, parent_id)]
+        #
+        todo.set_condition_relations(self._conn, *cond_rels)
+        for parent in [Todo.by_id(self._conn, a[1])
+                       for a in adoptees] + [todo]:
+            for child in parent.children:
+                # NOTE(review): child (a Todo) is compared against int IDs
+                # from adoptees, so this membership test looks always-false
+                # and the child always removed — confirm intended semantics
+                if child not in [t[0] for t in adoptees
+                                 if t[0] == child.id_ and t[1] == parent.id_]:
+                    parent.remove_child(child)
+            parent.save(self._conn)
+        for child_id, parent_id in adoptees:
+            parent = Todo.by_id(self._conn, parent_id)
+            if child_id not in [c.id_ for c in parent.children]:
+                parent.add_child(Todo.by_id(self._conn, child_id))
+                parent.save(self._conn)
+        todo.update_attrs(**to_update)
+        for approach, make_data in to_make.items():
+            for process_id, parent_id in make_data:
+                parent = Todo.by_id(self._conn, parent_id)
+                process = Process.by_id(self._conn, process_id)
+                made = Todo(None, process, False, todo.day_id)
+                made.save(self._conn)
+                if 'full' == approach:
+                    made.ensure_children(self._conn)
+                parent.add_child(made)
+                parent.save(self._conn)
+        # todo.save() may destroy Todo if .effort < 0, so retrieve .id_ early
+        url = f'/todo?id={todo.id_}'
+        todo.save(self._conn)
+        return url
+
+    def do_POST_process_descriptions(self) -> str:
+        """Update history timestamps for Process.description."""
+        return self._change_versioned_timestamps(Process, 'description')
+
+    def do_POST_process_efforts(self) -> str:
+        """Update history timestamps for Process.effort."""
+        return self._change_versioned_timestamps(Process, 'effort')
+
+    def do_POST_process_titles(self) -> str:
+        """Update history timestamps for Process.title."""
+        return self._change_versioned_timestamps(Process, 'title')
+
+    @_delete_or_post(Process, '/processes')
+    def do_POST_process(self, process: Process) -> str:
+        """Update or insert Process of ?id= and fields defined in postvars."""
+        # pylint: disable=too-many-locals
+
+        def id_or_title(l_id_or_title: list[str]) -> tuple[str, list[int]]:
+            # split a mixed list into int IDs and a (last) non-int title
+            l_ids, title = [], ''
+            for id_or_title in l_id_or_title:
+                try:
+                    l_ids += [int(id_or_title)]
+                except ValueError:
+                    title = id_or_title
+            return title, l_ids
+
+        versioned = {
+            'title': self.postvars.get_str_or_fail('title'),
+            'description': self.postvars.get_str_or_fail('description'),
+            'effort': self.postvars.get_float_or_fail('effort')}
+        cond_rels = [self.postvars.get_all_int(s) for s
+                     in ['conditions', 'blockers', 'enables', 'disables']]
+        calendarize = self.postvars.get_bool('calendarize')
+        step_of = self.postvars.get_all_str('step_of')
+        suppressions = self.postvars.get_all_int('suppressed_steps')
+        kept_steps = self.postvars.get_all_int('kept_steps')
+        new_top_step_procs = self.postvars.get_all_str('new_top_step')
+        new_steps_to = {
+            int(k): [int(n) for n in v] for (k, v)
+            in self.postvars.get_all_of_key_prefixed('new_step_to_').items()}
+        new_owner_title, owners_to_set = id_or_title(step_of)
+        new_step_title, new_top_step_proc_ids = id_or_title(new_top_step_procs)
+        #
+        for k, v in versioned.items():
+            getattr(process, k).set(v)
+        process.calendarize = calendarize
+        process.save(self._conn)
+        assert isinstance(process.id_, int)
+        # set relations to Conditions and ProcessSteps / other Processes
+        process.set_condition_relations(self._conn, *cond_rels)
+        owned_steps = [ProcessStep.by_id(self._conn, step_id)
+                       for step_id in kept_steps]
+        for parent_step_id, step_process_ids in new_steps_to.items():
+            owned_steps += [ProcessStep(None, process.id_, step_process_id,
+                                        parent_step_id)
+                            for step_process_id in step_process_ids]
+        owned_steps += [ProcessStep(None, process.id_, step_process_id, None)
+                        for step_process_id in new_top_step_proc_ids]
+        process.set_step_relations(self._conn, owners_to_set, suppressions,
+                                   owned_steps)
+        # encode titles for potential newly-to-create Processes up or down
+        params = f'id={process.id_}'
+        if new_step_title:
+            title_b64_encoded = b64encode(new_step_title.encode()).decode()
+            params = f'step_to={process.id_}&title_b64={title_b64_encoded}'
+        elif new_owner_title:
+            title_b64_encoded = b64encode(new_owner_title.encode()).decode()
+            params = f'has_step={process.id_}&title_b64={title_b64_encoded}'
+        process.save(self._conn)
+        return f'/process?{params}'
+
+    def do_POST_condition_descriptions(self) -> str:
+        """Update history timestamps for Condition.description."""
+        return self._change_versioned_timestamps(Condition, 'description')
+
+    def do_POST_condition_titles(self) -> str:
+        """Update history timestamps for Condition.title."""
+        return self._change_versioned_timestamps(Condition, 'title')
+
+    @_delete_or_post(Condition, '/conditions')
+    def do_POST_condition(self, condition: Condition) -> str:
+        """Update/insert Condition of ?id= and fields defined in postvars."""
+        title = self.postvars.get_str_or_fail('title')
+        description = self.postvars.get_str_or_fail('description')
+        is_active = self.postvars.get_bool('is_active')
+        condition.is_active = is_active
+        #
+        condition.title.set(title)
+        condition.description.set(description)
+        condition.save(self._conn)
+        return f'/condition?id={condition.id_}'
--- /dev/null
+"""What doesn't fit elsewhere so far."""
+from typing import Any
+
+
+class DictableNode:
+    """Template for display chain nodes providing .as_dict_and_refs."""
+    # pylint: disable=too-few-public-methods
+    # attribute names, in the positional order expected by __init__
+    _to_dict: list[str] = []
+
+    def __init__(self, *args: Any) -> None:
+        # map positional args onto the attributes named in _to_dict
+        for i, arg in enumerate(args):
+            setattr(self, self._to_dict[i], arg)
+
+    @property
+    def as_dict_and_refs(self) -> tuple[dict[str, object], list[Any]]:
+        """Return self as json.dumps-ready dict, list of referenced objects."""
+        d = {}
+        refs = []
+        for name in self._to_dict:
+            attr = getattr(self, name)
+            # anything with an .id_ is stored as that ID and collected as ref
+            if hasattr(attr, 'id_'):
+                d[name] = attr.id_
+                continue
+            if isinstance(attr, list):
+                # list items are assumed to provide .as_dict_and_refs too
+                d[name] = []
+                for item in attr:
+                    item_d, item_refs = item.as_dict_and_refs
+                    d[name] += [item_d]
+                    for item_ref in [r for r in item_refs if r not in refs]:
+                        refs += [item_ref]
+                continue
+            d[name] = attr
+        return d, refs
--- /dev/null
+"""Collecting Processes and Process-related items."""
+from __future__ import annotations
+from typing import Set, Self, Any
+from sqlite3 import Row
+from plomtask.misc import DictableNode
+from plomtask.db import DatabaseConnection, BaseModel
+from plomtask.versioned_attributes import VersionedAttribute
+from plomtask.conditions import Condition, ConditionsRelations
+from plomtask.exceptions import (NotFoundException, BadFormatException,
+ HandledException)
+
+
+class ProcessStepsNode(DictableNode):
+    """Collects what's useful to know for ProcessSteps tree display."""
+    # pylint: disable=too-few-public-methods
+    step: ProcessStep
+    process: Process
+    is_explicit: bool
+    steps: list[ProcessStepsNode]
+    seen: bool = False
+    is_suppressed: bool = False
+    _to_dict = ['step', 'process', 'is_explicit', 'steps', 'seen',
+                'is_suppressed']
+
+
+class Process(BaseModel, ConditionsRelations):
+    """Template for, and metadata for, Todos, and their arrangements."""
+    # pylint: disable=too-many-instance-attributes
+    table_name = 'processes'
+    to_save_simples = ['calendarize']
+    to_save_relations = [('process_conditions', 'process', 'conditions', 0),
+                         ('process_blockers', 'process', 'blockers', 0),
+                         ('process_enables', 'process', 'enables', 0),
+                         ('process_disables', 'process', 'disables', 0),
+                         ('process_step_suppressions', 'process',
+                          'suppressed_steps', 0)]
+    add_to_dict = ['explicit_steps']
+    versioned_defaults = {'title': 'UNNAMED', 'description': '', 'effort': 1.0}
+    to_search = ['title.newest', 'description.newest']
+    can_create_by_id = True
+    sorters = {'steps': lambda p: len(p.explicit_steps),
+               'owners': lambda p: p.n_owners,
+               'effort': lambda p: p.effort.newest,
+               'title': lambda p: p.title.newest}
+
+    def __init__(self, id_: int | None, calendarize: bool = False) -> None:
+        super().__init__(id_)
+        ConditionsRelations.__init__(self)
+        # title/description/effort become VersionedAttribute instances
+        for name in ['title', 'description', 'effort']:
+            attr = VersionedAttribute(self, f'process_{name}s',
+                                      self.versioned_defaults[name])
+            setattr(self, name, attr)
+        self.explicit_steps: list[ProcessStep] = []
+        self.suppressed_steps: list[ProcessStep] = []
+        self.calendarize = calendarize
+        self.n_owners: int | None = None  # only set by from_table_row
+
+    @classmethod
+    def from_table_row(cls, db_conn: DatabaseConnection, row: Row | list[Any]
+                       ) -> Self:
+        """Make from DB row, with dependencies."""
+        process = super().from_table_row(db_conn, row)
+        assert process.id_ is not None
+        for name in ('conditions', 'blockers', 'enables', 'disables'):
+            table = f'process_{name}'
+            for c_id in db_conn.column_where(table, 'condition',
+                                             'process', process.id_):
+                target = getattr(process, name)
+                target += [Condition.by_id(db_conn, c_id)]
+        for row_ in db_conn.row_where('process_steps', 'owner', process.id_):
+            # NB: It's tempting to ProcessStep.from_table_row(row_) directly,
+            # but we don't want to unnecessarily invalidate cached ProcessSteps
+            # elsewhere (notably, other Processes .suppressed_steps), as a
+            # complete reload like this would do
+            step = ProcessStep.by_id(db_conn, row_[0])
+            process.explicit_steps += [step]
+        for row_ in db_conn.row_where('process_step_suppressions', 'process',
+                                      process.id_):
+            step = ProcessStep.by_id(db_conn, row_[1])
+            process.suppressed_steps += [step]
+        process.n_owners = len(process.used_as_step_by(db_conn))
+        return process
+
+    def used_as_step_by(self, db_conn: DatabaseConnection) -> list[Self]:
+        """Return Processes using self for a ProcessStep."""
+        if not self.id_:
+            return []
+        # set-collect owner IDs to avoid duplicates before resolving them
+        owner_ids = set()
+        for id_ in db_conn.column_where('process_steps', 'owner',
+                                        'step_process', self.id_):
+            owner_ids.add(id_)
+        return [self.__class__.by_id(db_conn, id_) for id_ in owner_ids]
+
+    def get_steps(self,
+                  db_conn: DatabaseConnection,
+                  external_owner: Self | None = None
+                  ) -> list[ProcessStepsNode]:
+        """Return tree of depended-on explicit and implicit ProcessSteps."""
+
+        def make_node(step: ProcessStep, suppressed: bool) -> ProcessStepsNode:
+            is_explicit = step.owner_id == top_owner.id_
+            process = self.__class__.by_id(db_conn, step.step_process_id)
+            step_steps = []
+            if not suppressed:
+                # exclude implicit siblings to explicit steps of same process
+                step_steps = [n for n in process.get_steps(db_conn, top_owner)
+                              if not [s for s in top_owner.explicit_steps
+                                      if s.parent_step_id == step.id_
+                                      and s.step_process_id == n.process.id_]]
+            return ProcessStepsNode(step, process, is_explicit, step_steps,
+                                    False, suppressed)
+
+        def walk_steps(node: ProcessStepsNode) -> None:
+            # mark repeat visits, attach explicit children, and recurse
+            node.seen = node.step.id_ in seen_step_ids
+            assert isinstance(node.step.id_, int)
+            seen_step_ids.add(node.step.id_)
+            if node.is_suppressed:
+                return
+            explicit_children = [s for s in self.explicit_steps
+                                 if s.parent_step_id == node.step.id_]
+            for child in explicit_children:
+                node.steps += [make_node(child, False)]
+            for step in node.steps:
+                walk_steps(step)
+
+        step_nodes: list[ProcessStepsNode] = []
+        seen_step_ids: Set[int] = set()
+        top_owner = external_owner or self
+        for step in [s for s in self.explicit_steps
+                     if s.parent_step_id is None]:
+            new_node = make_node(step, step in top_owner.suppressed_steps)
+            step_nodes += [new_node]
+        for step_node in step_nodes:
+            walk_steps(step_node)
+        return step_nodes
+
+    def set_step_relations(self,
+                           db_conn: DatabaseConnection,
+                           owners: list[int],
+                           suppressions: list[int],
+                           owned_steps: list[ProcessStep]
+                           ) -> None:
+        """Set step owners, suppressions, and owned steps."""
+        self._set_owners(db_conn, owners)
+        self._set_step_suppressions(db_conn, suppressions)
+        self.set_steps(db_conn, owned_steps)
+
+    def _set_step_suppressions(self,
+                               db_conn: DatabaseConnection,
+                               step_ids: list[int]
+                               ) -> None:
+        """Set self.suppressed_steps from step_ids."""
+        assert isinstance(self.id_, int)
+        db_conn.delete_where('process_step_suppressions', 'process', self.id_)
+        self.suppressed_steps = [ProcessStep.by_id(db_conn, s)
+                                 for s in step_ids]
+
+    def _set_owners(self,
+                    db_conn: DatabaseConnection,
+                    owner_ids: list[int]
+                    ) -> None:
+        """Re-set owners to those identified in owner_ids."""
+        owners_old = self.used_as_step_by(db_conn)
+        # losers: owners to drop; winners: owners to newly add
+        losers = [o for o in owners_old if o.id_ not in owner_ids]
+        owners_old_ids = [o.id_ for o in owners_old]
+        winners = [self.by_id(db_conn, id_) for id_ in owner_ids
+                   if id_ not in owners_old_ids]
+        steps_to_remove = []
+        for loser in losers:
+            steps_to_remove += [s for s in loser.explicit_steps
+                                if s.step_process_id == self.id_]
+        for step in steps_to_remove:
+            step.remove(db_conn)
+        for winner in winners:
+            assert isinstance(winner.id_, int)
+            assert isinstance(self.id_, int)
+            new_step = ProcessStep(None, winner.id_, self.id_, None)
+            new_explicit_steps = winner.explicit_steps + [new_step]
+            winner.set_steps(db_conn, new_explicit_steps)
+
+    def set_steps(self,
+                  db_conn: DatabaseConnection,
+                  steps: list[ProcessStep]
+                  ) -> None:
+        """Set self.explicit_steps in bulk.
+
+        Checks against recursion, and turns into top-level steps any of
+        unknown or non-owned parent.
+        """
+        def walk_steps(node: ProcessStep) -> None:
+            # raise if any transitive step would point back at self
+            if node.step_process_id == self.id_:
+                raise BadFormatException('bad step selection causes recursion')
+            step_process = self.by_id(db_conn, node.step_process_id)
+            for step in step_process.explicit_steps:
+                walk_steps(step)
+
+        # NB: separate the collection of steps to save/remove from the action
+        # because the latter may modify the collection / self.explicit_steps
+        to_remove = []
+        for step in [s for s in self.explicit_steps if s not in steps]:
+            to_remove += [step]
+        for step in to_remove:
+            step.remove(db_conn)
+        to_save = []
+        for step in [s for s in steps if s not in self.explicit_steps]:
+            if step.parent_step_id is not None:
+                try:
+                    parent_step = ProcessStep.by_id(db_conn,
+                                                    step.parent_step_id)
+                    if parent_step.owner_id != self.id_:
+                        step.parent_step_id = None
+                except NotFoundException:
+                    step.parent_step_id = None
+            walk_steps(step)
+            to_save += [step]
+        for step in to_save:
+            step.save(db_conn)
+
+    def save(self, db_conn: DatabaseConnection) -> None:
+        """Add (or re-write) self and connected items to DB."""
+        super().save(db_conn)
+        assert isinstance(self.id_, int)
+        db_conn.delete_where('process_steps', 'owner', self.id_)
+        # NB: we separate the collection of steps to save from step.save()
+        # because the latter may modify the collection / self.explicit_steps
+        to_save = []
+        for step in self.explicit_steps:
+            to_save += [step]
+        for step in to_save:
+            step.save(db_conn)
+
+    def remove(self, db_conn: DatabaseConnection) -> None:
+        """Remove from DB, with dependencies.
+
+        Guard against removal of Processes in use.
+        """
+        assert isinstance(self.id_, int)
+        for _ in db_conn.row_where('process_steps', 'step_process', self.id_):
+            raise HandledException('cannot remove Process in use')
+        for _ in db_conn.row_where('todos', 'process', self.id_):
+            raise HandledException('cannot remove Process in use')
+        for step in self.explicit_steps:
+            step.remove(db_conn)
+        super().remove(db_conn)
+
+
class ProcessStep(BaseModel):
    """Sub-unit of Processes."""
    table_name = 'process_steps'
    to_save_simples = ['owner_id', 'step_process_id', 'parent_step_id']

    def __init__(self, id_: int | None, owner_id: int, step_process_id: int,
                 parent_step_id: int | None) -> None:
        super().__init__(id_)
        # ID of the Process this step belongs to.
        self.owner_id = owner_id
        # ID of the Process this step demands an instance of.
        self.step_process_id = step_process_id
        # Optional parent ProcessStep within the owner's step tree.
        self.parent_step_id = parent_step_id

    def save(self, db_conn: DatabaseConnection) -> None:
        """Update into DB/cache, and owner's .explicit_steps."""
        super().save(db_conn)
        owner = Process.by_id(db_conn, self.owner_id)
        if self not in owner.explicit_steps:
            # Drop any stale step object carrying our ID before appending.
            stale = [step for step in owner.explicit_steps
                     if step.id_ == self.id_]
            for step in stale:
                step.remove(db_conn)
            owner.explicit_steps += [self]
        owner.explicit_steps.sort(key=hash)

    def remove(self, db_conn: DatabaseConnection) -> None:
        """Remove from DB, and owner's .explicit_steps."""
        owner = Process.by_id(db_conn, self.owner_id)
        owner.explicit_steps.remove(self)
        super().remove(db_conn)
--- /dev/null
+"""Actionables."""
+from __future__ import annotations
+from datetime import date as dt_date
+from typing import Any, Self, Set
+from sqlite3 import Row
+from plomtask.misc import DictableNode
+from plomtask.db import DatabaseConnection, BaseModel
+from plomtask.processes import Process, ProcessStepsNode
+from plomtask.versioned_attributes import VersionedAttribute
+from plomtask.conditions import Condition, ConditionsRelations
+from plomtask.exceptions import (NotFoundException, BadFormatException,
+ HandledException)
+from plomtask.dating import (
+ days_n_from_dt_date, dt_date_from_str, dt_date_from_days_n)
+
+
class TodoNode(DictableNode):
    """Collects what's useful to know for Todo/Condition tree display."""
    # pylint: disable=too-few-public-methods
    # The Todo at this position of the tree.
    todo: Todo
    # Whether this Todo already appeared earlier in the tree walk.
    seen: bool
    # Nodes for the Todo's child Todos.
    children: list[TodoNode]
    _to_dict = ['todo', 'seen', 'children']
+
+
class TodoOrProcStepNode(DictableNode):
    """Collect what's useful for Todo-or-ProcessStep tree display."""
    # pylint: disable=too-few-public-methods
    # Identifier of this node within the rendered tree.
    node_id: int
    # Presumably exactly one of .todo/.process is set per node — confirm
    # against the builder in plomtask.http.
    todo: Todo | None
    process: Process | None
    children: list[TodoOrProcStepNode]  # pylint: disable=undefined-variable
    # Whether the UI may fill this slot (defaults to False).
    fillable: bool = False
    _to_dict = ['node_id', 'todo', 'process', 'children', 'fillable']
+
+
class Todo(BaseModel, ConditionsRelations):
    """Individual actionable."""
    # pylint: disable=too-many-instance-attributes
    # pylint: disable=too-many-public-methods
    table_name = 'todos'
    to_save_simples = ['process_id', 'is_done', 'day_id', 'comment', 'effort',
                       'calendarize']
    to_save_relations = [('todo_conditions', 'todo', 'conditions', 0),
                         ('todo_blockers', 'todo', 'blockers', 0),
                         ('todo_enables', 'todo', 'enables', 0),
                         ('todo_disables', 'todo', 'disables', 0),
                         ('todo_children', 'parent', 'children', 0),
                         ('todo_children', 'child', 'parents', 1)]
    to_search = ['comment']
    # Day IDs touched by Todo creation/removal (consumed elsewhere).
    days_to_update: Set[int] = set()
    children: list[Todo]
    parents: list[Todo]
    # Sort key callables available to listing views.
    sorters = {'doneness': lambda t: t.is_done,
               'title': lambda t: t.title_then,
               'comment': lambda t: t.comment,
               'date': lambda t: t.day_id}

    # pylint: disable=too-many-arguments
    def __init__(self, id_: int | None,
                 process: Process,
                 is_done: bool,
                 day_id: int,
                 comment: str = '',
                 effort: None | float = None,
                 calendarize: bool = False
                 ) -> None:
        super().__init__(id_)
        ConditionsRelations.__init__(self)
        if process.id_ is None:
            raise NotFoundException('Process of Todo without ID (not saved?)')
        self.process = process
        self._is_done = is_done
        self.day_id = day_id
        self.comment = comment
        self.effort = effort
        self.children = []
        self.parents = []
        self.calendarize = calendarize
        if not self.id_:
            # A fresh (unsaved) Todo inherits its Process' current settings.
            self.calendarize = self.process.calendarize
            self.conditions = self.process.conditions[:]
            self.blockers = self.process.blockers[:]
            self.enables = self.process.enables[:]
            self.disables = self.process.disables[:]

    @property
    def date(self) -> str:
        """Return ISO formatted date matching .day_id."""
        as_dt_date = dt_date_from_days_n(self.day_id)
        return as_dt_date.isoformat()

    @classmethod
    def by_date_range_with_limits(cls,
                                  db_conn: DatabaseConnection,
                                  date_range: tuple[str, str],
                                  ) -> tuple[list[Self], str, str]:
        """Return Todos within (closed) date_range interval.

        If no range values provided, defaults them to 'yesterday' and
        'tomorrow'. Knows to properly interpret these and 'today' as value.
        """
        defaults = ('yesterday', 'tomorrow')
        limits = [dt_date_from_str(given if given else fallback)
                  for given, fallback in zip(date_range, defaults)]
        sql = f'SELECT id FROM {cls.table_name} WHERE day >= ? AND day <= ?'
        day_limits = tuple(days_n_from_dt_date(d) for d in limits)
        found = [cls.by_id(db_conn, row[0])
                 for row in db_conn.exec(sql, day_limits,
                                         build_q_marks=False)]
        return found, limits[0].isoformat(), limits[1].isoformat()

    def ensure_children(self, db_conn: DatabaseConnection) -> None:
        """Ensure Todo children (create or adopt) demanded by Process chain."""

        def satisfy_step(parent: Self, step_node: ProcessStepsNode) -> Todo:
            # Prefer adopting an existing same-day Todo of the demanded
            # Process over creating a fresh one.
            candidates = [t for t in self.by_date(db_conn, parent.date)
                          if t not in parent.children
                          and t != parent
                          and step_node.process.id_ == t.process_id]
            satisfier = candidates[0] if candidates else None
            if not satisfier:
                satisfier = self.__class__(None, step_node.process, False,
                                           parent.day_id)
                satisfier.save(db_conn)
            ordered_sub_nodes = sorted(
                step_node.steps,
                key=lambda s: s.process.id_ if s.process.id_ else 0)
            for sub_node in ordered_sub_nodes:
                if sub_node.is_suppressed:
                    continue
                n_slots = len([n for n in ordered_sub_nodes
                               if n.process == sub_node.process])
                filled_slots = len([t for t in satisfier.children
                                    if t.process.id_ == sub_node.process.id_])
                # if we did not newly create satisfier, it may already fill
                # some step dependencies, so only fill what remains open
                if n_slots - filled_slots > 0:
                    satisfier.add_child(satisfy_step(satisfier, sub_node))
            satisfier.save(db_conn)
            return satisfier

        process = Process.by_id(db_conn, self.process_id)
        for step_node in process.get_steps(db_conn):
            if step_node.is_suppressed:
                continue
            self.add_child(satisfy_step(self, step_node))
        self.save(db_conn)

    @classmethod
    def from_table_row(cls, db_conn: DatabaseConnection,
                       row: Row | list[Any]) -> Self:
        """Make from DB row, with dependencies."""
        if row[1] == 0:
            raise NotFoundException('calling Todo of unsaved Process')
        # Replace the raw Process ID with the actual Process object.
        as_list = list(row)
        as_list[1] = Process.by_id(db_conn, row[1])
        todo = super().from_table_row(db_conn, as_list)
        assert isinstance(todo.id_, int)
        for child_id in db_conn.column_where('todo_children', 'child',
                                             'parent', todo.id_):
            todo.children += [cls.by_id(db_conn, child_id)]
        for parent_id in db_conn.column_where('todo_children', 'parent',
                                              'child', todo.id_):
            todo.parents += [cls.by_id(db_conn, parent_id)]
        for name in ('conditions', 'blockers', 'enables', 'disables'):
            relations = getattr(todo, name)
            for cond_id in db_conn.column_where(f'todo_{name}', 'condition',
                                                'todo', todo.id_):
                relations += [Condition.by_id(db_conn, cond_id)]
        return todo

    @classmethod
    def by_process_id(cls, db_conn: DatabaseConnection,
                      process_id: int | None) -> list[Self]:
        """Collect all Todos of Process of process_id."""
        matches = []
        for todo in cls.all(db_conn):
            if todo.process.id_ == process_id:
                matches += [todo]
        return matches

    @classmethod
    def by_date(cls, db_conn: DatabaseConnection, date: str) -> list[Self]:
        """Collect all Todos for Day of date."""
        todos, _, _ = cls.by_date_range_with_limits(db_conn, (date, date))
        return todos

    @property
    def is_doable(self) -> bool:
        """Decide whether .is_done settable based on children, Conditions."""
        all_children_done = all(child.is_done for child in self.children)
        all_conditions_met = all(c.is_active for c in self.conditions)
        no_blocker_active = not any(c.is_active for c in self.blockers)
        return all_children_done and all_conditions_met and no_blocker_active

    @property
    def is_deletable(self) -> bool:
        """Decide whether self be deletable (not if preserve-worthy values)."""
        has_comment = bool(self.comment)
        has_kept_effort = bool(self.effort) and self.effort >= 0
        return not (has_comment or has_kept_effort)

    @property
    def performed_effort(self) -> float:
        """Return performed effort, i.e. self.effort or default if done.."""
        if self.effort is not None:
            return self.effort
        return self.effort_then if self.is_done else 0

    @property
    def process_id(self) -> int:
        """Needed for super().save to save Processes as attributes."""
        assert isinstance(self.process.id_, int)
        return self.process.id_

    @property
    def is_done(self) -> bool:
        """Wrapper around self._is_done so we can control its setter."""
        return self._is_done

    @is_done.setter
    def is_done(self, value: bool) -> None:
        if value != self.is_done and not self.is_doable:
            raise BadFormatException('cannot change doneness of undoable Todo')
        if self._is_done == value:
            return
        self._is_done = value
        if value is True:
            # Completing a Todo toggles the Conditions it enables/disables.
            for condition in self.enables:
                condition.is_active = True
            for condition in self.disables:
                condition.is_active = False

    @property
    def title(self) -> VersionedAttribute:
        """Shortcut to .process.title."""
        assert isinstance(self.process.title, VersionedAttribute)
        return self.process.title

    @property
    def title_then(self) -> str:
        """Shortcut to .process.title.at(self.date)."""
        value = self.process.title.at(self.date)
        assert isinstance(value, str)
        return value

    @property
    def effort_then(self) -> float:
        """Shortcut to .process.effort.at(self.date)"""
        value = self.process.effort.at(self.date)
        assert isinstance(value, float)
        return value

    @property
    def has_doneness_in_path(self) -> bool:
        """Check whether self is done or has any children that are."""
        if self.is_done:
            return True
        return any(child.is_done or child.has_doneness_in_path
                   for child in self.children)

    def get_step_tree(self, seen_todos: set[int]) -> TodoNode:
        """Return tree of depended-on Todos."""

        def as_node(todo: Self) -> TodoNode:
            # A Todo already registered in seen_todos is marked "seen" so
            # renderers can avoid expanding it twice.
            already_seen = todo.id_ in seen_todos
            assert isinstance(todo.id_, int)
            seen_todos.add(todo.id_)
            child_nodes = [as_node(child) for child in todo.children]
            return TodoNode(todo, already_seen, child_nodes)

        return as_node(self)

    @property
    def tree_effort(self) -> float:
        """Return sum of performed efforts of self and all descendants."""

        def sum_tree(todo: Self) -> float:
            return todo.performed_effort + sum(
                    sum_tree(child) for child in todo.children)

        return sum_tree(self)

    def add_child(self, child: Self) -> None:
        """Add child to self.children, avoid recursion, update parenthoods."""

        def ensure_not_ancestor(node: Self) -> None:
            if node.id_ == self.id_:
                raise BadFormatException('bad child choice causes recursion')
            for grandchild in node.children:
                ensure_not_ancestor(grandchild)

        if self.id_ is None:
            raise HandledException('Can only add children to saved Todos.')
        if child.id_ is None:
            raise HandledException('Can only add saved children to Todos.')
        if child in self.children:
            raise BadFormatException('cannot adopt same child twice')
        ensure_not_ancestor(child)
        self.children += [child]
        child.parents += [self]

    def remove_child(self, child: Self) -> None:
        """Remove child from self.children, update counter relations."""
        if child not in self.children:
            raise HandledException('Cannot remove un-parented child.')
        self.children.remove(child)
        child.parents.remove(self)

    def update_attrs(self, **kwargs: Any) -> None:
        """Update self's attributes listed in kwargs."""
        for key, value in kwargs.items():
            setattr(self, key, value)

    def save(self, db_conn: DatabaseConnection) -> None:
        """On save calls, also check if auto-deletion by effort < 0."""
        if self.effort and self.effort < 0 and self.is_deletable:
            # Negative effort on an otherwise value-less Todo means "delete".
            self.remove(db_conn)
            return
        if self.id_ is None:
            self.__class__.days_to_update.add(self.day_id)
        super().save(db_conn)
        for condition in self.enables + self.disables + self.conditions:
            condition.save(db_conn)

    def remove(self, db_conn: DatabaseConnection) -> None:
        """Remove from DB, including relations."""
        if not self.is_deletable:
            raise HandledException('Cannot remove non-deletable Todo.')
        self.__class__.days_to_update.add(self.day_id)
        # Detach both directions of the parent/child relation before removal;
        # iterate over copies since remove_child mutates the lists.
        for child in self.children[:]:
            self.remove_child(child)
        for parent in self.parents[:]:
            parent.remove_child(self)
        super().remove(db_conn)
--- /dev/null
+"""Attributes whose values are recorded as a timestamped history."""
+from datetime import datetime
+from typing import Any
+from sqlite3 import Row
+from time import sleep
+from plomtask.db import DatabaseConnection
+from plomtask.exceptions import (HandledException, BadFormatException,
+ NotFoundException)
+
+TIMESTAMP_FMT = '%Y-%m-%d %H:%M:%S.%f'
+
+
+class VersionedAttribute:
+ """Attributes whose values are recorded as a timestamped history."""
+
+ def __init__(self,
+ parent: Any, table_name: str, default: str | float) -> None:
+ self.parent = parent
+ self.table_name = table_name
+ self._default = default
+ self.history: dict[str, str | float] = {}
+ # NB: For tighter mypy testing, we might prefer self.history to be
+ # dict[str, float] | dict[str, str] instead, but my current coding
+ # knowledge only manages to make that work by adding much further
+ # complexity, so let's leave it at that for now …
+
+ def __hash__(self) -> int:
+ history_tuples = tuple((k, v) for k, v in self.history.items())
+ hashable = (self.parent.id_, self.table_name, self._default,
+ history_tuples)
+ return hash(hashable)
+
+ @property
+ def _newest_timestamp(self) -> str:
+ """Return most recent timestamp."""
+ return sorted(self.history.keys())[-1]
+
+ @property
+ def value_type_name(self) -> str:
+ """Return string of name of attribute value type."""
+ return type(self._default).__name__
+
+ @property
+ def newest(self) -> str | float:
+ """Return most recent value, or self._default if self.history empty."""
+ if 0 == len(self.history):
+ return self._default
+ return self.history[self._newest_timestamp]
+
+ def reset_timestamp(self, old_str: str, new_str: str) -> None:
+ """Rename self.history key (timestamp) old to new.
+
+ Chronological sequence of keys must be preserved, i.e. cannot move
+ key before earlier or after later timestamp.
+ """
+ try:
+ new = datetime.strptime(new_str, TIMESTAMP_FMT)
+ old = datetime.strptime(old_str, TIMESTAMP_FMT)
+ except ValueError as exc:
+ raise BadFormatException('Timestamp of illegal format.') from exc
+ timestamps = list(self.history.keys())
+ if old_str not in timestamps:
+ raise HandledException(f'Timestamp {old} not found in history.')
+ sorted_timestamps = sorted([datetime.strptime(t, TIMESTAMP_FMT)
+ for t in timestamps])
+ expected_position = sorted_timestamps.index(old)
+ sorted_timestamps.remove(old)
+ sorted_timestamps += [new]
+ sorted_timestamps.sort()
+ if sorted_timestamps.index(new) != expected_position:
+ raise HandledException('Timestamp not respecting chronology.')
+ value = self.history[old_str]
+ del self.history[old_str]
+ self.history[new_str] = value
+
+ def set(self, value: str | float) -> None:
+ """Add to self.history if and only if not same value as newest one.
+
+ Note that we wait one micro-second, as timestamp comparison to check
+ most recent elements only goes up to that precision.
+
+ Also note that we don't check against .newest because that may make us
+ compare value against .default even if not set. We want to be able to
+ explicitly set .default as the first element.
+ """
+ sleep(0.00001)
+ if 0 == len(self.history) \
+ or value != self.history[self._newest_timestamp]:
+ self.history[datetime.now().strftime(TIMESTAMP_FMT)] = value
+
+ def history_from_row(self, row: Row) -> None:
+ """Extend self.history from expected table row format."""
+ self.history[row[1]] = row[2]
+
+ def at(self, queried_time: str) -> str | float:
+ """Retrieve value of timestamp nearest queried_time from the past."""
+ if len(queried_time) == 10:
+ queried_time += ' 23:59:59.999'
+ sorted_timestamps = sorted(self.history.keys())
+ if 0 == len(sorted_timestamps):
+ return self._default
+ selected_timestamp = sorted_timestamps[0]
+ for timestamp in sorted_timestamps[1:]:
+ if timestamp > queried_time:
+ break
+ selected_timestamp = timestamp
+ return self.history[selected_timestamp]
+
+ def save(self, db_conn: DatabaseConnection) -> None:
+ """Save as self.history entries, but first wipe old ones."""
+ if self.parent.id_ is None:
+ raise NotFoundException('cannot save attribute to parent if no ID')
+ db_conn.rewrite_relations(self.table_name, 'parent', self.parent.id_,
+ [[item[0], item[1]]
+ for item in self.history.items()])
+
+ def remove(self, db_conn: DatabaseConnection) -> None:
+ """Remove from DB."""
+ db_conn.delete_where(self.table_name, 'parent', self.parent.id_)
--- /dev/null
+Jinja2==3.1.3
+unittest-parallel==1.6.1
--- /dev/null
+#!/usr/bin/env python3
+"""Call this to start the application."""
+
+# included libs
+from sys import exit as sys_exit
+from os import environ
+from pathlib import Path
+# might need module installation(s)
+try:
+ from plomtask.exceptions import HandledException
+ from plomtask.http import TaskHandler, TaskServer
+ from plomtask.db import DatabaseFile
+ from plomlib.db import PlomDbException
+except ModuleNotFoundError as e:
+ print('FAIL: Missing module(s), please run with "install_deps" argument.')
+ print(e)
+ sys_exit(1)
+
# Configuration: DB location comes from the environment; port is fixed.
PLOMTASK_DB_PATH = environ.get('PLOMTASK_DB_PATH')
HTTP_PORT = 8082
DB_CREATION_ASK = 'Database file not found. Create? Y/n\n'
DB_MIGRATE_ASK = 'Database file needs migration. Migrate? Y/n\n'


def yes_or_fail(question: str, fail_msg: str) -> None:
    """Ask question, raise HandledException(fail_msg) if reply not yes."""
    reply = input(question)
    if reply.lower() not in {'y', 'yes', 'yes.', 'yes!'}:
        print('Not recognizing reply as "yes".')
        raise HandledException(fail_msg)
+
+
def main() -> None:
    """Open (or create/migrate) the DB file, then serve at HTTP_PORT."""
    if not PLOMTASK_DB_PATH:
        raise HandledException('PLOMTASK_DB_PATH not set.')
    db_path = Path(PLOMTASK_DB_PATH)
    try:
        db_file = DatabaseFile(db_path)
    except PlomDbException as exc:
        if exc.name == 'no_is_file':
            yes_or_fail(DB_CREATION_ASK, 'Cannot run without DB.')
            DatabaseFile.create(db_path)
        elif exc.name == 'bad_version':
            yes_or_fail(DB_MIGRATE_ASK, 'Cannot run with unmigrated DB.')
            db_file = DatabaseFile(db_path, skip_validations=True)
            db_file.migrate(set())
        else:
            raise exc
        # NOTE(review): as in the original try/except/else flow, we stop
        # after creating or migrating the DB instead of starting the server;
        # the user must re-run. Confirm this is intended.
        return
    server = TaskServer(db_file, ('localhost', HTTP_PORT), TaskHandler)
    print(f'running at port {HTTP_PORT}')
    try:
        server.serve_forever()
    except KeyboardInterrupt:
        print('aborting due to keyboard interrupt')
    server.server_close()


if __name__ == '__main__':
    try:
        main()
    except HandledException as e:
        print(f'Aborting because: {e}')
        sys_exit(1)
--- /dev/null
+<!DOCTYPE html>
+<html>
+<meta charset="UTF-8">
+<style>
+body {
+ font-family: monospace;
+ text-align: left;
+ padding: 0;
+ background-color: white;
+}
+input[type="text"] {
+ width: 100em;
+}
+input.timestamp {
+ width: 11em;
+}
+input.date {
+ width: 6em;
+}
+input.btn-harmless {
+ color: green;
+}
+input.btn-dangerous {
+ color: red;
+}
+div.btn-to-right {
+ float: right;
+ text-align: right;
+}
+td, th, tr, table {
+ margin-top: 1em;
+ padding: 0;
+ border-collapse: collapse;
+}
+th, td {
+ padding-right: 1em;
+}
+a {
+ color: black;
+}
+table.edit_table > tbody > tr > td,
+table.edit_table > tbody > tr > th {
+ border-bottom: 0.1em solid #bbbbbb;
+ padding-top: 0.5em;
+ padding-bottom: 0.5em;
+}
+td.number, input[type="number"] {
+ text-align: right;
+}
+input[name="effort"] {
+ width: 3.5em;
+}
+textarea {
+ width: 100%;
+}
+table.alternating > tbody > tr:nth-child(odd) {
+ background-color: #dfdfdf;
+}
+div.edit_buttons {
+ margin-top: 1em;
+}
+{% block css %}
+{% endblock %}
+</style>
+<body>
+<a href="day">today</a>
+<a href="calendar">calendar</a>
+<a href="conditions">conditions</a>
+<a href="processes">processes</a>
+<a href="todos">todos</a>
+<hr>
+{% block content %}
+{% endblock %}
+</body>
+</html>
--- /dev/null
+{% macro edit_buttons() %}
+<div class="edit_buttons">
+<input class="btn-harmless" type="submit" name="update" value="update" />
+<div class="btn-to-right">
+<input class="btn-dangerous" type="submit" name="delete" value="delete" />
+</div>
+</div>
+{% endmacro %}
+
+
+
+{% macro datalist_of_titles(title, candidates, historical=false, with_comments=false) %}
+<datalist id="{{title}}">
+{% for candidate in candidates %}
+<option value="{{candidate.id_}}">
+{% if historical is true %}
+{{candidate.title_then|e}}
+{% else %}
+{{candidate.title.newest|e}}
+{% endif %}
+{% if with_comments and candidate.comment %}
+/ {{candidate.comment}}
+{% endif %}
+</option>
+{% endfor %}
+</datalist>
+{% endmacro %}
+
+
+
+{% macro simple_checkbox_table(title, items, type_name, list_name, add_string="add", historical=false) %}
+{% if items|length > 0 %}
+<table>
+{% for item in items %}
+<tr>
+<td>
+<input type="checkbox" name="{{title}}" value="{{item.id_}}" checked />
+</td>
+<td>
+<a href="{{type_name}}?id={{item.id_}}">{% if historical is true %}{{item.title_then}}{% else %}{{item.title.newest|e}}{% endif %}</a>
+</td>
+</tr>
+{% endfor %}
+</table>
+{% endif %}
+{{add_string}}: <input name="{{title}}" type="text" list="{{list_name}}" autocomplete="off" />
+{% endmacro %}
+
+
+
+{% macro history_page(item_name, item, attribute_name, attribute, as_pre=false) %}
+<h3>{{item_name}} {{attribute_name}} history</h3>
+<form action="{{item_name}}_{{attribute_name}}s?id={{item.id_}}" method="POST">
+<table>
+
+<tr>
+<th>{{item_name}}</th>
+<td><a href="{{item_name}}?id={{item.id_}}">{{item.title.newest|e}}</a></td>
+</tr>
+
+
+{% for date in attribute.history.keys() | sort(reverse=True) %}
+<tr>
+<td><input name="at:{{date}}" class="timestamp" value="{{date|truncate(19, True, '', 0)}}"></td>
+<td>{% if as_pre %}<pre>{% endif %}{{attribute.history[date]}}{% if as_pre %}</pre>{% endif %}</td>
+</tr>
+{% endfor %}
+
+</table>
+<input class="btn-harmless" type="submit" name="update" value="update" />
+</form>
+{% endmacro %}
--- /dev/null
+{% extends '_base.html' %}
+
+
+
+{% block css %}
+tr.week_row td {
+ height: 0.3em;
+ background-color: black;
+ padding: 0;
+ margin: 0;
+ border-top: 0.2em solid white;
+}
+tr.month_row td {
+ border-top: 0.2em solid white;
+ color: white;
+ background-color: #555555;
+}
+table {
+ width: 100%;
+}
+tr.day_row td {
+ background-color: #cccccc;
+ border-top: 0.2em solid white;
+}
+td.day_name {
+ padding-right: 0.5em;
+}
+td.today {
+ font-weight: bold;
+}
+span.total_effort {
+ white-space: pre;
+}
+{% endblock %}
+
+
+
+{% block content %}
+<h3>calendar</h3>
+
+<p><a href="/calendar_txt">basic view</a></p>
+
+<form action="calendar" method="GET">
+from <input name="start" class="date" value="{{start}}" />
+to <input name="end" class="date" value="{{end}}" />
+<input type="submit" value="OK" />
+</form>
+<table>
+{% for day in days %}
+
+{% if day.first_of_month %}
+<tr class="month_row">
+<td colspan=2>{{ day.month_name }}</td>
+</tr>
+{% endif %}
+
+{% if day.weekday == "Monday" %}
+<tr class="week_row">
+<td colspan=2></td>
+</tr>
+{% endif %}
+
+<tr class="day_row">
+<td class="day_name {% if day.date == today %}today{% endif %}">
+<a href="day?date={{day.date}}">{{day.weekday|truncate(2,True,'',0)}} {% if day.date == today %} {% endif %}{{day.date}}</a>
+[<span class="total_effort">{{ '{:5.1f}'.format(day.total_effort) }}</span>]
+{{day.comment|e}}</td>
+</tr>
+
+{% for todo in day.calendarized_todos %}
+<tr>
+<td>[{% if todo.is_done %}X{% else %} {% endif %}] <a href="todo?id={{todo.id_}}">{{todo.title_then|e}}</a>{% if todo.comment %} · {{todo.comment|e}}{% endif %}</td>
+</tr>
+{% endfor %}
+
+{% endfor %}
+</table>
+{% endblock %}
--- /dev/null
+{% extends '_base.html' %}
+
+{% block content %}
+<h3>calendar</h3>
+
+<p><a href="/calendar">normal view</a></p>
+
+<form action="calendar_txt" method="GET">
+from <input name="start" class="date" value="{{start}}" />
+to <input name="end" class="date" value="{{end}}" />
+<input type="submit" value="OK" />
+</form>
+<table>
+
+<pre>{% for day in days %}{% if day.weekday == "Monday" %}
+---{% endif %}{% if day.comment or day.calendarized_todos %}
+{{day.weekday|truncate(2,True,'',0)}} {{day.date}} {{day.comment|e}}{% endif %}{% if day.calendarized_todos%}{% for todo in day.calendarized_todos %}
+* {{todo.title_then|e}}{% if todo.comment %} / {{todo.comment|e}}{% endif %}{% endfor %}{% endif %}{% endfor %}
+</pre>
+{% endblock %}
--- /dev/null
+{% extends '_base.html' %}
+{% import '_macros.html' as macros %}
+
+
+
+{% block content %}
+<h3>
+{% if is_new %}
+add NEW condition
+{% else %}
+edit condition of ID {{condition.id_}}
+{% endif %}
+</h3>
+<form action="condition?id={{condition.id_ or ''}}" method="POST">
+
+<table class="edit_table">
+<tr>
+<th>title</th>
+<td><input name="title" type="text" value="{{condition.title.newest|e}}" />{% if condition.id_ %} [<a href="condition_titles?id={{condition.id_}}">history</a>]{% endif %}</td>
+</tr>
+<tr>
+<th>is active</th>
+<td><input name="is_active" type="checkbox" {% if condition.is_active %}checked{% endif %} /></td>
+</tr>
+<tr>
+<th>description</th>
+<td><textarea name="description">{{condition.description.newest|e}}</textarea>{% if condition.id_ %} [<a href="condition_descriptions?id={{condition.id_}}">history</a>]{% endif %}</td>
+</tr>
+<tr>
+<th>enables</th>
+<td>
+{% for process in enabled_processes %}
+<a href="process?id={{process.id_}}">{{process.title.newest|e}}</a><br />
+{% endfor %}
+</td>
+</tr>
+<tr>
+<th>disables</th>
+<td>
+{% for process in disabled_processes %}
+<a href="process?id={{process.id_}}">{{process.title.newest|e}}</a><br />
+{% endfor %}
+</td>
+</tr>
+<tr>
+<th>enabled by</th>
+<td>
+{% for process in enabling_processes %}
+<a href="process?id={{process.id_}}">{{process.title.newest|e}}</a><br />
+{% endfor %}
+</td>
+</tr>
+<tr>
+<th>disabled by</th>
+<td>
+{% for process in disabling_processes %}
+<a href="process?id={{process.id_}}">{{process.title.newest|e}}</a><br />
+{% endfor %}
+</td>
+</tr>
+</table>
+
+{{ macros.edit_buttons() }}
+{% endblock %}
+
--- /dev/null
+{% extends '_base.html' %}
+{% import '_macros.html' as macros %}
+
+
+
+{% block content %}
+{{ macros.history_page("condition", condition, "description", condition.description, true) }}
+{% endblock %}
--- /dev/null
+{% extends '_base.html' %}
+{% import '_macros.html' as macros %}
+
+
+
+{% block content %}
+{{ macros.history_page("condition", condition, "title", condition.title) }}
+{% endblock %}
--- /dev/null
+{% extends '_base.html' %}
+
+{% block content %}
+<h3>conditions</h3>
+
+<form action="conditions" method="GET">
+<input type="submit" value="filter" />
+<input name="pattern" type="text" value="{{pattern}}" />
+</form>
+
+<table class="alternating">
+<tr>
+<th><a href="?sort_by={% if sort_by == "is_active" %}-{% endif %}is_active">active</a></th>
+<th><a href="?sort_by={% if sort_by == "title" %}-{% endif %}title">title</a></th>
+</tr>
+{% for condition in conditions %}
+<tr>
+<td>[{% if condition.is_active %}X{% else %} {% endif %}]</td>
+<td><a href="condition?id={{condition.id_}}">{{condition.title.newest}}</a></td>
+</tr>
+{% endfor %}
+</table>
+
+<p>
+<a href="condition">add</a>
+</p>
+
+{% endblock %}
--- /dev/null
+{% extends '_base.html' %}
+{% import '_macros.html' as macros %}
+
+
+
+{% block css %}
+th {
+ border: 1px solid black;
+}
+td.cond_line {
+ padding: 0;
+ border-top: 1px solid white;
+}
+td.cond_0 {
+ background-color: #bfbfbf;
+}
+td.cond_1 {
+ background-color: #dfdfdf;
+}
+td.cond_2 {
+  background-color: #ffffff;
+}
+td.cond_shrink {
+ max-width: 0px;
+ white-space: nowrap;
+ overflow: hidden;
+ text-overflow: clip;
+}
+td.todo_line {
+ border-bottom: 1px solid #bfbfbf;
+ height: 1.7em;
+}
+tr.inactive > td.todo_line {
+ background-color: #bfbfbf;
+ border-bottom: 1px solid white;
+}
+tr.hidden_undone > td, tr.hidden_undone a {
+ color: #9f9f9f;
+}
+td.left_border {
+ border-left: 1px solid black;
+}
+td.right_border {
+ border-right: 1px solid black;
+}
+input.ablers {
+ width: 50em;
+}
+{% endblock %}
+
+
+
+{% macro show_node_undone(node, indent) %}
+{% if not node.todo.is_done %}
+<tr {% if node.seen or not node.todo.is_doable %}class="inactive"{% endif %}>
+{% if not node.seen %}
+<input type="hidden" name="todo_id" value="{{node.todo.id_}}" />
+{% endif %}
+
+{% for condition in conditions_present %}
+{% if condition in node.todo.conditions and not condition.is_active %}
+<td class="cond_line cond_{{loop.index0 % 3}}">
++>
+{% elif condition in node.todo.blockers and condition.is_active %}
+<td class="cond_line cond_{{loop.index0 % 3}}">
+->
+{% else %}
+<td class="cond_line cond_{{loop.index0 % 3}} cond_shrink">
+|
+{% endif %}
+</td>
+{% endfor %}
+
+{% if node.seen %}
+<td class="todo_line left_border"></td>
+<td class="todo_line">{% if node.todo.effort %}{{ node.todo.effort }}{% endif %}</td>
+{% else %}
+<td class="todo_line left_border"><input name="done" type="checkbox" value="{{node.todo.id_}}" {% if not node.todo.is_doable %}disabled{% endif %}/></td>
+<td class="todo_line"><input name="effort" type="number" step=0.1 placeholder={{node.todo.effort_then}} value={{node.todo.effort}} /></td>
+{% endif %}
+<td class="todo_line right_border">
+{% for i in range(indent) %} {% endfor %} +
+{% if node.seen %}({% endif %}<a href="todo?id={{node.todo.id_}}">{{node.todo.title_then|e}}</a>{% if node.seen %}){% endif %}
+</td>
+
+{% for condition in conditions_present|reverse %}
+{% if condition in node.todo.enables %}
+<td class="cond_line cond_{{(conditions_present|length - loop.index) % 3}}">
++>
+{% elif condition in node.todo.disables %}
+<td class="cond_line cond_{{(conditions_present|length - loop.index) % 3}}">
+->
+{% else %}
+<td class="cond_line cond_{{(conditions_present|length - loop.index) % 3}} cond_shrink">
+ |
+{% endif %}
+</td>
+{% endfor %}
+
+<td colspan=2>
+{% if node.seen %}
+{{node.todo.comment|e}}
+{% else %}
+<input name="comment" type="text" value="{{node.todo.comment|e}}" />
+{% endif %}
+</td>
+
+</tr>
+{% endif %}
+
+{% if not node.seen %}
+{% for child in node.children %}
+{{ show_node_undone(child, indent+1) }}
+{% endfor %}
+{% endif %}
+
+{% endmacro %}
+
+
+
+{% macro show_node_done(node, indent, path) %}
+{% if node.todo.has_doneness_in_path %}
+<tr{% if not node.todo.is_done %} class="hidden_undone"{% endif %}>
+<td class="number">{{ '{:4.1f}'.format(node.todo.performed_effort) }}</td>
+<td class="number">{{ '{:4.1f}'.format(node.todo.tree_effort) }}</td>
+<td>
+{% for i in range(indent) %} {% endfor %} +
+{% if not node.todo.is_done %}({% endif %}{% if node.seen %}[{% endif %}<a href="todo?id={{node.todo.id_}}">{{node.todo.title_then|e}}</a>{% if node.seen %}]{% endif %}{% if not node.todo.is_done %}){% endif %}{% if node.todo.comment %} · {{node.todo.comment|e}}{% endif %}</td>
+</tr>
+{% if not node.seen %}
+{% for child in node.children %}
+{{ show_node_done(child, indent+1) }}
+{% endfor %}
+{% endif %}
+{% endif %}
+{% endmacro %}
+
+
+
+{% block content %}
+<h3>{{day.date}} / {{day.weekday}} ({{day.total_effort|round(1)}})</h3>
+<p>
+<a href="day?date={{day.prev_date}}">prev</a> | <a href="day?date={{day.next_date}}">next</a>
+</p>
+<form action="day?date={{day.date}}" method="POST">
+
+<p>
+comment:
+<input name="day_comment" type="text" value="{{day.comment|e}}" />
+<input type="submit" value="OK" />
+</p>
+
+<h4>to do</h4>
+
+<p>
+add: <input type="text" name="new_todo" list="processes">
+</p>
+<p>
+make new todos
+<select name="make_type">
+<option value="full">with</option>
+<option value="empty"{% if make_type == "empty" %}selected {% endif %}>without</option>
+</select>
+descendants (i.e. adopt where possible, otherwise create anew)
+</p>
+
+<table>
+
+<tr>
+<th colspan={{ conditions_present|length + 3 + conditions_present|length }}>conditions</th>
+<th>add enabler</th>
+<th>add disabler</th>
+</tr>
+
+{% for condition in conditions_present %}
+{% set outer_loop = loop %}
+<tr>
+
+{% for _ in conditions_present %}
+{% if outer_loop.index > loop.index %}
+<td class="cond_line cond_{{loop.index0 % 3}} cond_shrink">|
+{% elif outer_loop.index < loop.index %}
+<td class="cond_line cond_{{outer_loop.index0 % 3}}">
+{% else %}
+<td class="cond_line cond_{{outer_loop.index0 % 3}} cond_shrink">/
+{% endif %}
+</td>
+{% endfor %}
+
+<td class="cond_line cond_{{loop.index0 % 3}}"><input type="checkbox" disabled{% if condition.is_active %} checked{% endif %}></td>
+<td colspan=2 class="cond_line cond_{{loop.index0 % 3}}"><a href="condition?id={{condition.id_}}">{{condition.title.at(day.date)|e}}</a></td>
+
+{% for _ in conditions_present %}
+{% if outer_loop.index0 + loop.index < conditions_present|length %}
+<td class="cond_line cond_{{outer_loop.index0 % 3}}">
+{% elif outer_loop.index0 + loop.index > conditions_present|length %}
+<td class="cond_line cond_{{(conditions_present|length - loop.index) % 3}} cond_shrink"> |
+{% else %}
+<td class="cond_line cond_{{outer_loop.index0 % 3}} cond_shrink"> \
+{% endif %}
+{% endfor %}
+
+{% set list_name = "todos_for_%s"|format(condition.id_) %}
+<td><input class="ablers" type="text" name="new_todo" list="{{list_name}}" autocomplete="off" /></td>
+{{ macros.datalist_of_titles(list_name, enablers_for[condition.id_]) }}
+
+{% set list_name = "todos_against_%s"|format(condition.id_) %}
+<td><input class="ablers" type="text" name="new_todo" list="{{list_name}}" autocomplete="off" /></td>
+{{ macros.datalist_of_titles(list_name, disablers_for[condition.id_]) }}
+
+</tr>
+{% endfor %}
+
+<tr>
+{% for condition in conditions_present %}
+<td class="cond_line cond_{{loop.index0 % 3}} cond_shrink">|</td>
+{% endfor %}
+<th colspan=3>doables</th>
+{% for condition in conditions_present %}
+<td class="cond_line cond_{{(conditions_present|length - loop.index) % 3}} cond_shrink"> |</td>
+{% endfor %}
+<th colspan=2>comments</th>
+</tr>
+{% for node in top_nodes %}
+{{ show_node_undone(node, 0) }}
+{% endfor %}
+
+</table>
+
+<h4>done</h4>
+
+<table class="alternating">
+<tr>
+<th colspan=2>effort</th><th rowspan=2>action · comment</th>
+</tr>
+<tr>
+<th>self</th><th>tree</th>
+</tr>
+{% for node in top_nodes %}
+{{ show_node_done(node, 0, []) }}
+{% endfor %}
+</table>
+
+</form>
+
+{{ macros.datalist_of_titles("processes", processes) }}
+{% endblock %}
--- /dev/null
+{% extends '_base.html' %}
+
+
+
+{% block content %}
+<p>{{msg}}</p>
+{% endblock %}
+
--- /dev/null
+{% extends '_base.html' %}
+{% import '_macros.html' as macros %}
+
+
+
+{% block css %}
+details > summary::after {
+ content: '[+]';
+}
+details summary {
+ list-style: none;
+}
+details[open] > summary::after {
+ content: '[-]';
+}
+{% endblock %}
+
+
+
+{% macro step_with_steps(step_node, indent) %}
+<tr>
+<td>
+<input type="hidden" name="steps" value="{{step_node.step.id_}}" />
+{% if step_node.is_explicit %}
+<input type="checkbox" name="kept_steps" value="{{step_node.step.id_}}" checked />
+{% endif %}
+</td>
+
+{% if step_node.is_explicit and not step_node.seen %}
+<td colspan=2>
+<details>
+<summary>
+{% else %}
+<td>
+{% endif %}
+
+{% for i in range(indent) %}+{%endfor %}
+{% if step_node.is_suppressed %}<del>{% endif %}
+{% if step_node.seen %}
+<a href="process?id={{step_node.process.id_}}">({{step_node.process.title.newest|e}})</a>
+{% else %}
+<a href="process?id={{step_node.process.id_}}">{{step_node.process.title.newest|e}}</a>
+{% endif %}
+{% if step_node.is_suppressed %}</del>{% endif %}
+
+
+{% if step_node.is_explicit and not step_node.seen %}
+</summary>
+<div>add sub-step: <input name="new_step_to_{{step_node.step.id_}}" list="process_candidates" autocomplete="off" size="100" /></div>
+</details>
+{% endif %}
+
+</td>
+{% if (not step_node.is_explicit) and (not step_node.seen) %}
+<td>
+<input type="checkbox" name="suppressed_steps" value="{{step_node.step.id_}}" {% if step_node.is_suppressed %}checked{% endif %}> suppress
+</td>
+{% endif %}
+</tr>
+{% if step_node.is_explicit or not step_node.seen %}
+{% for substep in step_node.steps %}
+{{ step_with_steps(substep, indent+1) }}
+{% endfor %}
+{% endif %}
+{% endmacro %}
+
+
+
+{% block content %}
+<h3>
+{% if is_new %}
+add NEW process
+{% else %}
+edit process of ID {{process.id_}}
+{% endif %}
+</h3>
+<form action="process?id={{process.id_ or ''}}" method="POST">
+
+<table class="edit_table">
+<tr>
+<th>title</th>
+<td><input name="title" type="text" value="{{process.title.newest|e}}" />{% if process.id_ %} [<a href="process_titles?id={{process.id_}}">history</a>]{% endif %}</td>
+</tr>
+<tr>
+<th>effort</th>
+<td><input type="number" name="effort" step=0.1 value={{process.effort.newest}} />{% if process.id_ %} [<a href="process_efforts?id={{process.id_}}">history</a>]{% endif %}</td>
+</tr>
+<tr>
+<th>description</th>
+<td><textarea name="description">{{process.description.newest|e}}</textarea><br />{% if process.id_ %} [<a href="process_descriptions?id={{process.id_}}">history</a>]{% endif %}</td>
+</tr>
+<tr>
+<th>calendarize</th>
+<td><input type="checkbox" name="calendarize" {% if process.calendarize %}checked {% endif %}/></td>
+</tr>
+<tr>
+<th>conditions</th>
+<td>{{ macros.simple_checkbox_table("conditions", process.conditions, "condition", "condition_candidates") }}</td>
+</tr>
+<tr>
+<th>blockers</th>
+<td>{{ macros.simple_checkbox_table("blockers", process.blockers, "condition", "condition_candidates") }}</td>
+</tr>
+<tr>
+<th>enables</th>
+<td>{{ macros.simple_checkbox_table("enables", process.enables, "condition", "condition_candidates") }}</td>
+</tr>
+<tr>
+<th>disables</th>
+<td>{{ macros.simple_checkbox_table("disables", process.disables, "condition", "condition_candidates") }}</td>
+</tr>
+<tr>
+<th>steps</th>
+<td>
+{% if steps %}
+<table>
+{% for step_node in steps %}
+{{ step_with_steps(step_node, 0) }}
+{% endfor %}
+</table>
+{% endif %}
+add: <input type="text" name="new_top_step" list="process_candidates" autocomplete="off" value="{{preset_top_step or ''}}" />
+</td>
+</tr>
+<tr>
+<th>step of</th>
+<td>{{ macros.simple_checkbox_table("step_of", owners, "process", "process_candidates") }}</td>
+</tr>
+<tr>
+<th>todos</th>
+<td>
+<a href="todos?process_id={{process.id_}}">{{n_todos}}</a><br />
+</td>
+</tr>
+</table>
+
+{{ macros.edit_buttons() }}
+</form>
+{{ macros.datalist_of_titles("condition_candidates", condition_candidates) }}
+{{ macros.datalist_of_titles("process_candidates", process_candidates) }}
+{% endblock %}
--- /dev/null
+{% extends '_base.html' %}
+{% import '_macros.html' as macros %}
+
+
+
+{% block content %}
+{{ macros.history_page("process", process, "description", process.description, as_pre=true) }}
+{% endblock %}
+
--- /dev/null
+{% extends '_base.html' %}
+{% import '_macros.html' as macros %}
+
+
+
+{% block content %}
+{{ macros.history_page("process", process, "effort", process.effort) }}
+{% endblock %}
--- /dev/null
+{% extends '_base.html' %}
+{% import '_macros.html' as macros %}
+
+
+
+{% block content %}
+{{ macros.history_page("process", process, "title", process.title) }}
+{% endblock %}
--- /dev/null
+{% extends '_base.html' %}
+
+{% block content %}
+<h3>processes</h3>
+
+<form action="processes" method="GET">
+<input type="submit" value="filter" />
+<input name="pattern" type="text" value="{{pattern}}" />
+</form>
+
+<table class="alternating">
+<tr>
+<th><a href="?sort_by={% if sort_by == "steps" %}-{% endif %}steps">steps</a></th>
+<th><a href="?sort_by={% if sort_by == "owners" %}-{% endif %}owners">owners</a></th>
+<th><a href="?sort_by={% if sort_by == "effort" %}-{% endif %}effort">effort</a></th>
+<th><a href="?sort_by={% if sort_by == "title" %}-{% endif %}title">title</a></th>
+</tr>
+{% for process in processes %}
+<tr>
+<td class="number">{{ process.explicit_steps|count }}</td>
+<td class="number">{{ process.n_owners }}</td>
+<td class="number">{{ process.effort.newest }}</td>
+<td><a href="process?id={{process.id_}}">{{process.title.newest}}</a></td>
+</tr>
+{% endfor %}
+</table>
+
+<p>
+<a href="process">add</a>
+</p>
+{% endblock %}
--- /dev/null
+{% extends '_base.html' %}
+{% import '_macros.html' as macros %}
+
+
+
+{% block css %}
+select{ font-size: 0.5em; margin: 0; padding: 0; }
+{% endblock %}
+
+
+
+{% macro draw_tree_row(item, parent_todo, indent=0) %}
+<tr>
+<td>
+{% if item.todo %}
+{% if not item.process %}+{% else %} {% endif %}<input type="checkbox" name="adopt" value="{{item.todo.id_}}" checked {% if indent > 0 %}disabled{% endif %}/>
+{% endif %}
+</td>
+<td>
+{% for i in range(indent-1) %} {%endfor %}{% if indent > 0 %}·{% endif %}
+{% if item.todo %}
+<a href="todo?id={{item.todo.id_}}">{{item.todo.title_then|e}}</a>
+{% else %}
+{{item.process.title.newest|e}}
+{% if parent_todo %}
+· fill: <select name="step_filler_to_{{parent_todo.id_}}">
+<option value="ignore">--</option>
+<option value="make_{{item.process.id_}}">make empty</option>
+{% for adoptable in adoption_candidates_for[item.process.id_] %}
+<option value="{{adoptable.id_}}">adopt #{{adoptable.id_}}{% if adoptable.comment %} / {{adoptable.comment}}{% endif %}</option>
+{% endfor %}
+</select>
+{% endif %}
+
+{% endif %}
+</td>
+</tr>
+{% for child in item.children %}
+{{ draw_tree_row(child, item.todo, indent+1) }}
+{% endfor %}
+{% endmacro %}
+
+
+
+{% block content %}
+<h3>Todo: {{todo.title_then|e}}</h3>
+<form action="todo?id={{todo.id_}}" method="POST">
+
+<table class="edit_table">
+<tr>
+<th>day</th>
+<td><a href="day?date={{todo.date}}">{{todo.date}}</a></td>
+</tr>
+<tr>
+<th>process</th>
+<td><a href="process?id={{todo.process.id_}}">{{todo.process.title.newest|e}}</a></td>
+</tr>
+<tr>
+<th>done</th>
+<td><input type="checkbox" name="is_done" {% if todo.is_done %}checked {% endif %} {% if not todo.is_doable %}disabled {% endif %}/>
+{% if not todo.is_doable and todo.is_done %}<input type="hidden" name="is_done" value="1" />{% endif %}
+</td>
+</tr>
+<tr>
+<th>effort</th>
+<td><input type="number" name="effort" step=0.1 placeholder={{todo.effort_then}} value={{todo.effort}} /></td>
+</tr>
+<tr>
+<th>comment</th>
+<td><input name="comment" type="text" value="{{todo.comment|e}}"/></td>
+</tr>
+<tr>
+<th>calendarize</th>
+<td><input type="checkbox" name="calendarize" {% if todo.calendarize %}checked {% endif %}/></td>
+</tr>
+<tr>
+<th>conditions</th>
+<td>{{ macros.simple_checkbox_table("conditions", todo.conditions, "condition", "condition_candidates") }}</td>
+</tr>
+<tr>
+<th>blockers</th>
+<td>{{ macros.simple_checkbox_table("blockers", todo.blockers, "condition", "condition_candidates") }}</td>
+</tr>
+<tr>
+<th>enables</th>
+<td>{{ macros.simple_checkbox_table("enables", todo.enables, "condition", "condition_candidates") }}</td>
+</tr>
+<tr>
+<th>disables</th>
+<td>{{ macros.simple_checkbox_table("disables", todo.disables, "condition", "condition_candidates") }}</td>
+</tr>
+<tr>
+<th>parents</th>
+<td>
+{% for parent in todo.parents %}
+<a href="todo?id={{parent.id_}}">{{parent.title_then|e}}</a><br />
+{% endfor %}
+</td>
+</tr>
+<tr>
+<th>descendants</th>
+<td>
+{% if steps_todo_to_process|length > 0 %}
+<table>
+{% for step in steps_todo_to_process %}
+{{ draw_tree_row(step, todo) }}
+{% endfor %}
+</table>
+{% endif %}
+adopt: <input type="text" name="adopt" list="todo_candidates" autocomplete="off" /><br />
+make empty: <input type="text" name="make_empty" list="process_candidates" autocomplete="off" /><br />
+make full: <input type="text" name="make_full" list="process_candidates" autocomplete="off" />
+</td>
+</tr>
+</table>
+
+{{ macros.edit_buttons() }}
+</form>
+{{ macros.datalist_of_titles("condition_candidates", condition_candidates) }}
+{{ macros.datalist_of_titles("process_candidates", process_candidates) }}
+{{ macros.datalist_of_titles("todo_candidates", todo_candidates, historical=true, with_comments=true) }}
+{% endblock %}
--- /dev/null
+{% extends '_base.html' %}
+{% import '_macros.html' as macros %}
+
+
+
+{% block content %}
+<h3>todos</h3>
+
+<form action="todos" method="GET">
+from <input name="start" class="date" value="{{start}}" />
+to <input name="end" class="date" value="{{end}}" /><br />
+process <input name="process_id" type="text" value="{{process_id or ''}}" list="processes" /><br />
+in comment <input name="comment_pattern" type="text" value="{{comment_pattern}}" /><br />
+<input type="submit" value="filter" />
+</form>
+
+<table class="alternating">
+<tr>
+<th><a href="?sort_by={% if sort_by == "doneness" %}-{% endif %}doneness">done</a></th>
+<th><a href="?sort_by={% if sort_by == "date" %}-{% endif %}date">date</a></th>
+<th><a href="?sort_by={% if sort_by == "title" %}-{% endif %}title">title</a></th>
+<th><a href="?sort_by={% if sort_by == "comment" %}-{% endif %}comment">comment</a></th>
+</tr>
+{% for todo in todos %}
+<tr>
+<td>[{% if todo.is_done %}x{% else %} {% endif %}]</td>
+<td><a href="day?date={{todo.date}}">{{todo.date}}</a></td>
+<td><a href="todo?id={{todo.id_}}">{{todo.title_then}}</a></td>
+<td>{{todo.comment}}</td>
+</tr>
+{% endfor %}
+</table>
+{{ macros.datalist_of_titles("processes", all_processes) }}
+{% endblock %}
+
--- /dev/null
+"""Test Conditions module."""
+from typing import Any
+from tests.utils import (TestCaseSansDB, TestCaseWithDB, TestCaseWithServer,
+ Expected)
+from plomtask.conditions import Condition
+
+
+class TestsSansDB(TestCaseSansDB):
+ """Tests requiring no DB setup."""
+ checked_class = Condition
+
+
+class TestsWithDB(TestCaseWithDB):
+ """Tests requiring DB, but not server setup."""
+ checked_class = Condition
+ default_init_kwargs = {'is_active': 0}
+
+
+class ExpectedGetConditions(Expected):
+ """Builder of expectations for GET /conditions."""
+ _default_dict = {'sort_by': 'title', 'pattern': ''}
+
+ def recalc(self) -> None:
+ """Update internal dictionary by subclass-specific rules."""
+ super().recalc()
+ self._fields['conditions'] = self.as_ids(self.lib_all('Condition'))
+
+
+class ExpectedGetCondition(Expected):
+ """Builder of expectations for GET /condition."""
+ _default_dict = {'is_new': False}
+ _on_empty_make_temp = ('Condition', 'cond_as_dict')
+
+ def __init__(self, id_: int | None, *args: Any, **kwargs: Any) -> None:
+ self._fields = {'condition': id_}
+ super().__init__(*args, **kwargs)
+
+ def recalc(self) -> None:
+ """Update internal dictionary by subclass-specific rules."""
+ super().recalc()
+ for p_field, c_field in [('conditions', 'enabled_processes'),
+ ('disables', 'disabling_processes'),
+ ('blockers', 'disabled_processes'),
+ ('enables', 'enabling_processes')]:
+ self._fields[c_field] = self.as_ids([
+ p for p in self.lib_all('Process')
+ if self._fields['condition'] in p[p_field]])
+
+
+class TestsWithServer(TestCaseWithServer):
+ """Module tests against our HTTP server/handler (and database)."""
+ checked_class = Condition
+
+ def test_fail_POST_condition(self) -> None:
+ """Test malformed/illegal POST /condition requests."""
+ # check incomplete POST payloads
+ valid_payload = {'title': '', 'description': ''}
+ self.check_minimal_inputs('/condition', valid_payload)
+ # check valid POST payload on bad paths
+ self.check_post(valid_payload, '/condition?id=foo', 400)
+ # check cannot delete depended-upon Condition
+ self.post_exp_cond([], {})
+ for key in ('conditions', 'blockers', 'enables', 'disables'):
+ self.post_exp_process([], {key: [1]}, 1)
+ self.check_post({'delete': ''}, '/condition?id=1', 500)
+ self.post_exp_process([], {}, 1)
+ self.post_exp_day([], {'new_todo': '1'})
+ for key in ('conditions', 'blockers', 'enables', 'disables'):
+ self.post_exp_todo([], {key: [1]}, 1)
+ self.check_post({'delete': ''}, '/condition?id=1', 500)
+
+ def test_POST_condition(self) -> None:
+ """Test (valid) POST /condition and its effect on GET /condition[s]."""
+ url_single, url_all = '/condition?id=1', '/conditions'
+ exp_single, exp_all = ExpectedGetCondition(1), ExpectedGetConditions()
+ all_exps = [exp_single, exp_all]
+ # test valid POST's effect on single /condition and full /conditions
+ self.post_exp_cond(all_exps, {}, post_to_id=False)
+ self.check_json_get(url_single, exp_single)
+ self.check_json_get(url_all, exp_all)
+ # test (no) effect of invalid POST to existing Condition on /condition
+ self.check_post({}, url_single, 400)
+ self.check_json_get(url_single, exp_single)
+ # test effect of POST changing title, description, and activeness
+ self.post_exp_cond(all_exps, {'title': 'bar', 'description': 'oof',
+ 'is_active': 1})
+ self.check_json_get(url_single, exp_single)
+ # test POST sans 'is_active' setting it negative
+ self.post_exp_cond(all_exps, {})
+ self.check_json_get(url_single, exp_single)
+ # test deletion POST's effect, both to return id=1 into empty single,
+ # full /conditions into empty list
+ self.check_json_get(url_single, exp_single)
+ self.post_exp_cond(all_exps, {'delete': ''}, redir_to_id=False)
+ exp_single.set('is_new', True)
+ self.check_json_get(url_single, exp_single)
+ self.check_json_get(url_all, exp_all)
+
+ def test_GET_condition(self) -> None:
+ """More GET /condition testing, especially for Process relations."""
+ # check expected default status codes
+ self.check_get_defaults('/condition')
+ # check 'is_new' set if id= absent or pointing to not-yet-existing ID
+ exp = ExpectedGetCondition(None)
+ exp.set('is_new', True)
+ self.check_json_get('/condition', exp)
+ exp = ExpectedGetCondition(1)
+ exp.set('is_new', True)
+ self.check_json_get('/condition?id=1', exp)
+ # make Condition and two Processes that among them establish all
+ # possible ConditionsRelations to it, check /condition displays all
+ exp = ExpectedGetCondition(1)
+ self.post_exp_cond([exp], {}, post_to_id=False)
+ for i, p in enumerate([('conditions', 'disables'),
+ ('enables', 'blockers')]):
+ self.post_exp_process([exp], {k: [1] for k in p}, i+1)
+ self.check_json_get('/condition?id=1', exp)
+
+ def test_GET_conditions(self) -> None:
+ """Test GET /conditions."""
+ # test empty result on empty DB, default-settings on empty params
+ exp = ExpectedGetConditions()
+ self.check_json_get('/conditions', exp)
+ # test 'sort_by' default to 'title' (even if set to something else, as
+ # long as without handler) and 'pattern' get preserved
+ exp.set('pattern', 'bar')
+ self.check_json_get('/conditions?sort_by=foo&pattern=bar&foo=x', exp)
+ exp.set('pattern', '')
+ # test non-empty result, automatic (positive) sorting by title
+ post_cond1 = {'is_active': 0, 'title': 'foo', 'description': 'oof'}
+ post_cond2 = {'is_active': 0, 'title': 'bar', 'description': 'rab'}
+ post_cond3 = {'is_active': 1, 'title': 'baz', 'description': 'zab'}
+ for i, post in enumerate([post_cond1, post_cond2, post_cond3]):
+ self.post_exp_cond([exp], post, i+1, post_to_id=False)
+ self.check_filter(exp, 'conditions', 'sort_by', 'title', [2, 3, 1])
+ # test other sortings
+ self.check_filter(exp, 'conditions', 'sort_by', '-title', [1, 3, 2])
+ self.check_filter(exp, 'conditions', 'sort_by', 'is_active', [1, 2, 3])
+ self.check_filter(exp, 'conditions', 'sort_by', '-is_active',
+ [3, 2, 1])
+ exp.set('sort_by', 'title')
+ # test pattern matching on title
+ exp.lib_del('Condition', 1)
+ self.check_filter(exp, 'conditions', 'pattern', 'ba', [2, 3])
+ # test pattern matching on description
+ exp.lib_wipe('Condition')
+ exp.set_cond_from_post(1, post_cond1)
+ self.check_filter(exp, 'conditions', 'pattern', 'of', [1])
--- /dev/null
+"""Test Days module."""
+from datetime import date as dt_date, datetime, timedelta
+from typing import Any
+from tests.utils import (TestCaseSansDB, TestCaseWithDB, TestCaseWithServer,
+ Expected, date_and_day_id, dt_date_from_day_id)
+from plomtask.dating import date_in_n_days as tested_date_in_n_days
+from plomtask.days import Day
+
+# Simply the ISO format for dates as used in plomtask.dating, but for testing
+# purposes we state our expectations here independently and explicitly
+TESTING_DATE_FORMAT = '%Y-%m-%d'
+
+
+def _testing_date_in_n_days(n: int) -> str:
+ """Return in ISO format / TEST_DATE_FORMAT date from today + n days.
+
+ As with TESTING_DATE_FORMAT, we assume this equal the original's code
+ at plomtask.dating.date_in_n_days, but want to state our expectations
+ explicitly to rule out importing issues from the original.
+ """
+ date = dt_date.today() + timedelta(days=n)
+ return date.strftime(TESTING_DATE_FORMAT)
+
+
+def _days_n_for_date(date: str) -> int:
+ return (dt_date.fromisoformat(date) - dt_date(2000, 1, 1)).days
+
+
+class TestsSansDB(TestCaseSansDB):
+ """Days module tests not requiring DB setup."""
+ checked_class = Day
+
+ def test_date_in_n_days(self) -> None:
+ """Test dating.date_in_n_days"""
+ for n in [-100, -2, -1, 0, 1, 2, 1000]:
+ date = datetime.now() + timedelta(days=n)
+ self.assertEqual(tested_date_in_n_days(n),
+ date.strftime(TESTING_DATE_FORMAT))
+
+ def test_Day_date_weekday_neighbor_dates(self) -> None:
+ """Test Day's date parsing and neighbourhood resolution."""
+ self.assertEqual(dt_date(2000, 1, 2).isoformat(), Day(1).date)
+ self.assertEqual(dt_date(2001, 1, 2).isoformat(), Day(367).date)
+ self.assertEqual('Sunday', Day(1).weekday)
+ self.assertEqual('March', Day(75).month_name)
+ self.assertEqual('2000-12-31', Day(366).prev_date)
+ self.assertEqual('2001-03-01', Day(424).next_date)
+
+
+class TestsWithDB(TestCaseWithDB):
+ """Tests requiring DB, but not server setup."""
+ checked_class = Day
+
+ def test_Day_with_filled_gaps(self) -> None:
+ """Test .with_filled_gaps."""
+ day_ids = [n + 1 for n in range(9)]
+ dt_dates = [dt_date_from_day_id(id_) for id_ in day_ids]
+
+ def expect_within_full_range_as_commented(
+ range_indexes: tuple[int, int],
+ indexes_to_provide: list[int]
+ ) -> None:
+ start_i, end_i = range_indexes
+ days_expected = [Day(n) for n in day_ids]
+ to_remove = []
+ for idx in indexes_to_provide:
+ days_expected[idx] = Day(day_ids[idx], '#')
+ days_expected[idx].save(self.db_conn)
+ to_remove += [days_expected[idx]]
+ days_expected = days_expected[start_i:end_i+1]
+ days_result = Day.with_filled_gaps(
+ self.db_conn, dt_dates[start_i], dt_dates[end_i])
+ self.assertEqual(days_result, days_expected)
+ for day in to_remove:
+ day.remove(self.db_conn)
+
+ # check provided Days recognizable in (full-range) interval
+ expect_within_full_range_as_commented((0, 8), [0, 4, 8])
+ # check limited range, but limiting Days provided
+ expect_within_full_range_as_commented((2, 6), [2, 5, 6])
+ # check Days within range but beyond provided Days also filled in
+ expect_within_full_range_as_commented((1, 7), [2, 5])
+ # check provided Days beyond range ignored
+ expect_within_full_range_as_commented((3, 5), [1, 2, 4, 6, 7])
+ # check inversion of start_date and end_date returns empty list
+ expect_within_full_range_as_commented((5, 3), [2, 4, 6])
+ # check empty provision still creates filler elements in interval
+ expect_within_full_range_as_commented((3, 5), [])
+ # check single-element selection creating only filler beyond provided
+ expect_within_full_range_as_commented((1, 1), [2, 4, 6])
+ # check (un-saved) filler Days don't show up in cache or DB
+ day = Day(day_ids[3])
+ day.save(self.db_conn)
+ Day.with_filled_gaps(self.db_conn, dt_dates[0], dt_dates[-1])
+ self.check_identity_with_cache_and_db([day])
+
+
+class ExpectedGetCalendar(Expected):
+ """Builder of expectations for GET /calendar."""
+
+ def __init__(self, start: int, end: int, *args: Any, **kwargs: Any
+ ) -> None:
+ today_dt = dt_date.today()
+ today_iso = today_dt.isoformat()
+ self._fields = {
+ 'start': (today_dt + timedelta(days=start)).isoformat(),
+ 'end': (today_dt + timedelta(days=end)).isoformat(),
+ 'today': today_iso}
+ self._fields['days'] = [
+ _days_n_for_date(today_iso) + i for i in range(start, end+1)]
+ super().__init__(*args, **kwargs)
+ for day_id in self._fields['days']:
+ self.lib_set('Day', [self.day_as_dict(day_id)])
+
+
+class ExpectedGetDay(Expected):
+ """Builder of expectations for GET /day."""
+ _default_dict = {'make_type': 'full'}
+ _on_empty_make_temp = ('Day', 'day_as_dict')
+
+ def __init__(self, day_id: int, *args: Any, **kwargs: Any) -> None:
+ self._fields = {'day': day_id}
+ super().__init__(*args, **kwargs)
+
+ def recalc(self) -> None:
+ super().recalc()
+ todos = [t for t in self.lib_all('Todo')
+ if t['day_id'] == self._fields['day']]
+ self.lib_get('Day', self._fields['day'])['todos'] = self.as_ids(todos)
+ self._fields['top_nodes'] = [
+ {'children': [], 'seen': 0, 'todo': todo['id']}
+ for todo in todos]
+ for todo in todos:
+ proc = self.lib_get('Process', todo['process_id'])
+ for title in ['conditions', 'enables', 'blockers', 'disables']:
+ todo[title] = proc[title]
+ conds_present = set()
+ for todo in todos:
+ for title in ['conditions', 'enables', 'blockers', 'disables']:
+ for cond_id in todo[title]:
+ conds_present.add(cond_id)
+ self._fields['conditions_present'] = list(conds_present)
+ for prefix in ['en', 'dis']:
+ blers = {}
+ for cond_id in conds_present:
+ blers[cond_id] = self.as_ids(
+ [t for t in todos if cond_id in t[f'{prefix}ables']])
+ self._fields[f'{prefix}ablers_for'] = blers
+ self._fields['processes'] = self.as_ids(self.lib_all('Process'))
+
+
+class TestsWithServer(TestCaseWithServer):
+ """Tests against our HTTP server/handler (and database)."""
+ checked_class = Day
+
+ def test_basic_GET_day(self) -> None:
+ """Test basic (no Processes/Conditions/Todos) GET /day basics."""
+ # check illegal date parameters
+ self.check_get_defaults('/day', '2024-01-01', 'date')
+ self.check_get('/day?date=2024-02-30', 400)
+ # check undefined day
+ today_iso = dt_date.today().isoformat()
+ exp = ExpectedGetDay(_days_n_for_date(today_iso))
+ self.check_json_get('/day', exp)
+ # check defined day with make_type parameter
+ date, day_id = date_and_day_id(1)
+ exp = ExpectedGetDay(day_id)
+ exp.set('make_type', 'bar')
+ self.check_json_get(f'/day?date={date}&make_type=bar', exp)
+ # check parsing of 'yesterday', 'today', 'tomorrow'
+ for name, dist in [('yesterday', -1), ('today', 0), ('tomorrow', +1)]:
+ exp = ExpectedGetDay(_days_n_for_date(today_iso) + dist)
+ self.check_json_get(f'/day?date={name}', exp)
+
+ def test_fail_POST_day(self) -> None:
+ """Test malformed/illegal POST /day requests."""
+ # check payloads lacking minimum expecteds
+ url = '/day?date=2024-01-01'
+ minimal_post = {'make_type': '', 'day_comment': ''}
+ self.check_minimal_inputs(url, minimal_post)
+ # to next check illegal new_todo values, we need an actual Process
+ self.post_exp_process([], {}, 1)
+ # check illegal new_todo values
+ self.check_post(minimal_post | {'new_todo': ['foo']}, url, 400)
+ self.check_post(minimal_post | {'new_todo': [1, 2]}, url, 404)
+ # to next check illegal old_todo inputs, we need to first post Todo
+ self.check_post(minimal_post | {'new_todo': [1]}, url, 302,
+ '/day?date=2024-01-01&make_type=')
+ # check illegal old_todo inputs (equal list lengths though)
+ post = minimal_post | {'comment': ['foo'], 'effort': [3.3],
+ 'done': [], 'todo_id': [1]}
+ self.check_post(post, url, 302, '/day?date=2024-01-01&make_type=')
+ post['todo_id'] = [2] # reference to non-existant Process
+ self.check_post(post, url, 404)
+ post['todo_id'] = ['a']
+ self.check_post(post, url, 400)
+ post['todo_id'] = [1]
+ post['done'] = ['foo']
+ self.check_post(post, url, 400)
+ post['done'] = [2] # reference to non-posted todo_id
+ self.check_post(post, url, 400)
+ post['done'] = []
+ post['effort'] = ['foo']
+ self.check_post(post, url, 400)
+ post['effort'] = [None]
+ self.check_post(post, url, 400)
+ post['effort'] = [3.3]
+ # check illegal old_todo inputs: unequal list lengths
+ post['comment'] = []
+ self.check_post(post, url, 400)
+ post['comment'] = ['foo', 'foo']
+ self.check_post(post, url, 400)
+ post['comment'] = ['foo']
+ post['effort'] = []
+ self.check_post(post, url, 400)
+ post['effort'] = [3.3, 3.3]
+ self.check_post(post, url, 400)
+ post['effort'] = [3.3]
+ post['todo_id'] = [1, 1]
+ self.check_post(post, url, 400)
+ post['todo_id'] = [1]
+ # # check valid POST payload on bad paths
+ self.check_post(post, '/day', 400)
+ self.check_post(post, '/day?date=', 400)
+ self.check_post(post, '/day?date=foo', 400)
+
+ def test_basic_POST_day(self) -> None:
+ """Test basic (no Processes/Conditions/Todos) POST /day.
+
+ Check POST requests properly parse 'today', 'tomorrow', 'yesterday',
+ and actual date strings; store 'day_comment'; preserve 'make_type'
+ setting in redirect even if nonsensical; and allow '' as 'new_todo'.
+ """
+ for name, dist, test_str in [('2024-01-01', None, 'a'),
+ ('today', 0, 'b'),
+ ('yesterday', -1, 'c'),
+ ('tomorrow', +1, 'd')]:
+ date = name if dist is None else _testing_date_in_n_days(dist)
+ post = {'day_comment': test_str, 'make_type': f'x:{test_str}',
+ 'new_todo': ['', '']}
+ post_url = f'/day?date={name}'
+ redir_url = f'{post_url}&make_type={post["make_type"]}'
+ self.check_post(post, post_url, 302, redir_url)
+ day_id = _days_n_for_date(date)
+ exp = ExpectedGetDay(day_id)
+ exp.set_day_from_post(day_id, post)
+ self.check_json_get(post_url, exp)
+
+ def test_GET_day_with_processes_and_todos(self) -> None:
+ """Test GET /day displaying Processes and Todos (no trees)."""
+ date, day_id = date_and_day_id(1)
+ exp = ExpectedGetDay(day_id)
+ # check Processes get displayed in ['processes'] and ['_library'],
+ # even without any Todos referencing them
+ proc_posts = [{'title': 'foo', 'description': 'oof', 'effort': 1.1},
+ {'title': 'bar', 'description': 'rab', 'effort': 0.9}]
+ for i, proc_post in enumerate(proc_posts):
+ self.post_exp_process([exp], proc_post, i+1)
+ self.check_json_get(f'/day?date={date}', exp)
+ # post Todos of either Process and check their display
+ self.post_exp_day([exp], {'new_todo': [1, 2]})
+ self.check_json_get(f'/day?date={date}', exp)
+ # test malformed Todo manipulation posts
+ post_day = {'day_comment': '', 'make_type': '', 'comment': [''],
+ 'new_todo': [], 'done': [1], 'effort': [2.3]}
+ self.check_post(post_day, f'/day?date={date}', 400) # no todo_id
+ post_day['todo_id'] = [2] # not identifying Todo refered by done
+ self.check_post(post_day, f'/day?date={date}', 400)
+ post_day['todo_id'] = [1, 2] # imply range beyond that of effort etc.
+ self.check_post(post_day, f'/day?date={date}', 400)
+ post_day['comment'] = ['FOO', '']
+ self.check_post(post_day, f'/day?date={date}', 400)
+ post_day['effort'] = [2.3, '']
+ post_day['comment'] = ['']
+ self.check_post(post_day, f'/day?date={date}', 400)
+ # add a comment to one Todo and set the other's doneness and effort
+ post_day['comment'] = ['FOO', '']
+ self.post_exp_day([exp], post_day)
+ self.check_json_get(f'/day?date={date}', exp)
+ # invert effort and comment between both Todos
+ # (cannot invert doneness, /day only collects positive setting)
+ post_day['comment'] = ['', 'FOO']
+ post_day['effort'] = ['', 2.3]
+ self.post_exp_day([exp], post_day)
+ self.check_json_get(f'/day?date={date}', exp)
+
+ def test_POST_day_todo_make_types(self) -> None:
+ """Test behavior of POST /todo on 'make_type'='full' and 'empty'."""
+ date, day_id = date_and_day_id(1)
+ exp = ExpectedGetDay(day_id)
+ # create two Processes, with second one step of first one
+ self.post_exp_process([exp], {}, 2)
+ self.post_exp_process([exp], {'new_top_step': 2}, 1)
+ exp.lib_set('ProcessStep', [
+ exp.procstep_as_dict(1, owner_id=1, step_process_id=2)])
+ self.check_json_get(f'/day?date={date}', exp)
+ # post Todo of adopting Process, with make_type=full
+ self.post_exp_day([exp], {'make_type': 'full', 'new_todo': [1]})
+ exp.lib_get('Todo', 1)['children'] = [2]
+ exp.lib_set('Todo', [exp.todo_as_dict(2, 2)])
+ top_nodes = [{'todo': 1,
+ 'seen': 0,
+ 'children': [{'todo': 2,
+ 'seen': 0,
+ 'children': []}]}]
+ exp.force('top_nodes', top_nodes)
+ self.check_json_get(f'/day?date={date}', exp)
+ # post another Todo of adopting Process, expect to adopt existing
+ self.post_exp_day([exp], {'make_type': 'full', 'new_todo': [1]})
+ exp.lib_set('Todo', [exp.todo_as_dict(3, 1, children=[2])])
+ # Todo 2 now appears under both parents; its second occurrence in
+ # the tree is flagged 'seen'
+ top_nodes += [{'todo': 3,
+ 'seen': 0,
+ 'children': [{'todo': 2,
+ 'seen': 1,
+ 'children': []}]}]
+ exp.force('top_nodes', top_nodes)
+ self.check_json_get(f'/day?date={date}', exp)
+ # post another Todo of adopting Process, no adopt with make_type=empty
+ self.post_exp_day([exp], {'make_type': 'empty', 'new_todo': [1]})
+ exp.lib_set('Todo', [exp.todo_as_dict(4, 1)])
+ top_nodes += [{'todo': 4,
+ 'seen': 0,
+ 'children': []}]
+ exp.force('top_nodes', top_nodes)
+ self.check_json_get(f'/day?date={date}', exp)
+
+ def test_POST_day_new_todo_order_commutative(self) -> None:
+ """Check that order of 'new_todo' values in POST /day don't matter."""
+ date, day_id = date_and_day_id(1)
+ exp = ExpectedGetDay(day_id)
+ self.post_exp_process([exp], {}, 2)
+ self.post_exp_process([exp], {'new_top_step': 2}, 1)
+ exp.lib_set('ProcessStep', [
+ exp.procstep_as_dict(1, owner_id=1, step_process_id=2)])
+ # make-full-day-post batch of Todos of both Processes in one order …,
+ self.post_exp_day([exp], {'make_type': 'full', 'new_todo': [1, 2]})
+ top_nodes: list[dict[str, Any]] = [{'todo': 1,
+ 'seen': 0,
+ 'children': [{'todo': 2,
+ 'seen': 0,
+ 'children': []}]}]
+ exp.force('top_nodes', top_nodes)
+ exp.lib_get('Todo', 1)['children'] = [2]
+ self.check_json_get(f'/day?date={date}', exp)
+ # … and then in the other, expecting same node tree / relations
+ exp.lib_del('Day', day_id)
+ date, day_id = date_and_day_id(2)
+ exp.set('day', day_id)
+ day_post = {'make_type': 'full', 'new_todo': [2, 1]}
+ self.post_exp_day([exp], day_post, day_id)
+ # Todos 1 and 2 belong to the first day, which we just dropped from
+ # expectations; the second day's Todos get IDs 3 and 4
+ exp.lib_del('Todo', 1)
+ exp.lib_del('Todo', 2)
+ top_nodes[0]['todo'] = 3 # was: 1
+ top_nodes[0]['children'][0]['todo'] = 4 # was: 2
+ exp.lib_get('Todo', 3)['children'] = [4]
+ self.check_json_get(f'/day?date={date}', exp)
+
+ def test_POST_day_todo_deletion_by_negative_effort(self) -> None:
+ """Test POST /day removal of Todos by setting negative effort."""
+ date, day_id = date_and_day_id(1)
+ exp = ExpectedGetDay(day_id)
+ self.post_exp_process([exp], {}, 1)
+ self.post_exp_day([exp], {'new_todo': [1]})
+ # check cannot remove Todo if commented
+ # (negative effort serves as the deletion request)
+ self.post_exp_day([exp],
+ {'todo_id': [1], 'comment': ['foo'], 'effort': [-1]})
+ self.check_json_get(f'/day?date={date}', exp)
+ # check *can* remove Todo while getting done
+ self.post_exp_day([exp],
+ {'todo_id': [1], 'comment': [''], 'effort': [-1],
+ 'done': [1]})
+ exp.lib_del('Todo', 1)
+ self.check_json_get(f'/day?date={date}', exp)
+
+ def test_GET_day_with_conditions(self) -> None:
+ """Test GET /day displaying Conditions and their relations."""
+ date, day_id = date_and_day_id(1)
+ exp = ExpectedGetDay(day_id)
+ # check non-referenced Conditions not shown
+ cond_posts = [{'is_active': 0, 'title': 'A', 'description': 'a'},
+ {'is_active': 1, 'title': 'B', 'description': 'b'}]
+ for i, cond_post in enumerate(cond_posts):
+ self.check_post(cond_post, f'/condition?id={i+1}')
+ self.check_json_get(f'/day?date={date}', exp)
+ # add Processes with Conditions, check Conditions now shown;
+ # each Process conditions/disables one Condition and
+ # blocks/enables the other, in mirrored pairings
+ for i, (c1, c2) in enumerate([(1, 2), (2, 1)]):
+ post = {'conditions': [c1], 'disables': [c1],
+ 'blockers': [c2], 'enables': [c2]}
+ self.post_exp_process([exp], post, i+1)
+ for i, cond_post in enumerate(cond_posts):
+ exp.set_cond_from_post(i+1, cond_post)
+ self.check_json_get(f'/day?date={date}', exp)
+ # add Todos in relation to Conditions, check consequence relations
+ self.post_exp_day([exp], {'new_todo': [1, 2]})
+ self.check_json_get(f'/day?date={date}', exp)
+
+ def test_GET_calendar(self) -> None:
+ """Test GET /calendar responses based on various inputs, DB states."""
+ # check illegal date range delimiters
+ self.check_get('/calendar?start=foo', 400)
+ self.check_get('/calendar?end=foo', 400)
+ # check default range for expected selection/order without saved days
+ # (ExpectedGetCalendar args appear to be day offsets from today,
+ # mirroring _testing_date_in_n_days below — TODO confirm)
+ exp = ExpectedGetCalendar(-1, 366)
+ self.check_json_get('/calendar', exp)
+ self.check_json_get('/calendar?start=&end=', exp)
+ # check with named days as delimiters
+ exp = ExpectedGetCalendar(-1, +1)
+ self.check_json_get('/calendar?start=yesterday&end=tomorrow', exp)
+ # check zero-element range
+ exp = ExpectedGetCalendar(+1, 0)
+ self.check_json_get('/calendar?start=tomorrow&end=today', exp)
+ # check saved day shows up in results, proven by its comment
+ start_date = _testing_date_in_n_days(-5)
+ date = _testing_date_in_n_days(-2)
+ end_date = _testing_date_in_n_days(+5)
+ exp = ExpectedGetCalendar(-5, +5)
+ self.post_exp_day([exp],
+ {'day_comment': 'foo'}, _days_n_for_date(date))
+ url = f'/calendar?start={start_date}&end={end_date}'
+ self.check_json_get(url, exp)
--- /dev/null
+"""Miscellaneous tests."""
+from typing import Callable
+from unittest import TestCase
+from tests.utils import TestCaseWithServer
+from plomtask.http import InputsParser
+from plomtask.exceptions import BadFormatException
+
+
+class TestsSansServer(TestCase):
+ """Tests that do not require DB setup or a server."""
+
+ def _test_parser(self,
+ method: Callable,
+ serialized: str,
+ expected: object,
+ method_args: list[object],
+ fails: bool = False
+ ) -> None:
+ """Run method on InputsParser of serialized; check result or raise."""
+ # pylint: disable=too-many-arguments
+ parser = InputsParser(serialized)
+ if fails:
+ with self.assertRaises(BadFormatException):
+ method(parser, *method_args)
+ else:
+ self.assertEqual(expected, method(parser, *method_args))
+
+ def test_InputsParser_get_str_or_fail(self) -> None:
+ """Test InputsParser.get_str_or_fail."""
+ m = InputsParser.get_str_or_fail
+ # (the expected-value argument is ignored when fails=True)
+ self._test_parser(m, '', 0, ['foo'], fails=True)
+ self._test_parser(m, '', 'bar', ['foo', 'bar'])
+ self._test_parser(m, 'foo=', '', ['foo'])
+ self._test_parser(m, 'foo=', '', ['foo', 'bar'])
+ self._test_parser(m, 'foo=baz', 'baz', ['foo', 'bar'])
+ self._test_parser(m, 'foo=baz&foo=quux', 'baz', ['foo', 'bar'])
+ self._test_parser(m, 'foo=baz,quux', 'baz,quux', ['foo', 'bar'])
+
+ def test_InputsParser_get_str(self) -> None:
+ """Test InputsParser.get_str."""
+ m = InputsParser.get_str
+ # unlike get_str_or_fail, a missing key without default yields None
+ self._test_parser(m, '', None, ['foo'])
+ self._test_parser(m, '', 'bar', ['foo', 'bar'])
+ self._test_parser(m, 'foo=', '', ['foo'])
+ self._test_parser(m, 'foo=', '', ['foo', 'bar'])
+ self._test_parser(m, 'foo=baz', 'baz', ['foo', 'bar'])
+ self._test_parser(m, 'foo=baz&foo=quux', 'baz', ['foo', 'bar'])
+ self._test_parser(m, 'foo=baz,quux', 'baz,quux', ['foo', 'bar'])
+
+ def test_InputsParser_get_all_of_key_prefixed(self) -> None:
+ """Test InputsParser.get_all_of_key_prefixed."""
+ m = InputsParser.get_all_of_key_prefixed
+ # keys matching the prefix are returned with the prefix stripped
+ self._test_parser(m, '', {}, [''])
+ self._test_parser(m, '', {}, ['foo'])
+ self._test_parser(m, 'foo=bar', {'foo': ['bar']}, [''])
+ self._test_parser(m, 'x=y&x=z', {'': ['y', 'z']}, ['x'])
+ self._test_parser(m, 'xx=y&xx=Z', {'x': ['y', 'Z']}, ['x'])
+ self._test_parser(m, 'xx=y', {}, ['xxx'])
+ self._test_parser(m, 'xxx=x&xxy=y&xyy=z', {'x': ['x'], 'y': ['y']},
+ ['xx'])
+
+ def test_InputsParser_get_int_or_none(self) -> None:
+ """Test InputsParser.get_int_or_none."""
+ m = InputsParser.get_int_or_none
+ self._test_parser(m, '', None, ['foo'])
+ self._test_parser(m, 'foo=', None, ['foo'])
+ self._test_parser(m, 'foo=0', 0, ['foo'])
+ # non-integer values raise instead of coercing
+ self._test_parser(m, 'foo=None', 0, ['foo'], fails=True)
+ self._test_parser(m, 'foo=0.1', 0, ['foo'], fails=True)
+ self._test_parser(m, 'foo=23', 23, ['foo'])
+
+ def test_InputsParser_get_float_or_fail(self) -> None:
+ """Test InputsParser.get_float_or_fail."""
+ m = InputsParser.get_float_or_fail
+ self._test_parser(m, '', 0, ['foo'], fails=True)
+ self._test_parser(m, 'foo=', 0, ['foo'], fails=True)
+ self._test_parser(m, 'foo=bar', 0, ['foo'], fails=True)
+ self._test_parser(m, 'foo=0', 0, ['foo'])
+ self._test_parser(m, 'foo=0.1', 0.1, ['foo'])
+ # on repeated keys, the first value wins
+ self._test_parser(m, 'foo=1.23&foo=456', 1.23, ['foo'])
+
+ def test_InputsParser_get_bool(self) -> None:
+ """Test InputsParser.get_bool."""
+ m = InputsParser.get_bool
+ # only 'true', 'True', '1', 'on' count as true; everything else
+ # (including missing key and 'False') is false
+ self._test_parser(m, '', 0, ['foo'])
+ self._test_parser(m, 'val=foo', 0, ['foo'])
+ self._test_parser(m, 'val=True', 0, ['foo'])
+ self._test_parser(m, 'foo=', 0, ['foo'])
+ self._test_parser(m, 'foo=None', 0, ['foo'])
+ self._test_parser(m, 'foo=0', 0, ['foo'])
+ self._test_parser(m, 'foo=bar', 0, ['foo'])
+ self._test_parser(m, 'foo=bar&foo=baz', 0, ['foo'])
+ self._test_parser(m, 'foo=False', 0, ['foo'])
+ self._test_parser(m, 'foo=true', 1, ['foo'])
+ self._test_parser(m, 'foo=True', 1, ['foo'])
+ self._test_parser(m, 'foo=1', 1, ['foo'])
+ self._test_parser(m, 'foo=on', 1, ['foo'])
+
+ def test_InputsParser_get_all_str(self) -> None:
+ """Test InputsParser.get_all_str."""
+ m = InputsParser.get_all_str
+ self._test_parser(m, '', [], ['foo'])
+ # empty assignment still yields one (empty) string element
+ self._test_parser(m, 'foo=', [''], ['foo'])
+ self._test_parser(m, 'foo=bar', ['bar'], ['foo'])
+ self._test_parser(m, 'foo=bar&foo=baz', ['bar', 'baz'], ['foo'])
+
+ def test_InputsParser_get_all_int(self) -> None:
+ """Test InputsParser.get_all_int."""
+ m = InputsParser.get_all_int
+ self._test_parser(m, '', [], ['foo'])
+ self._test_parser(m, 'foo=', [], ['foo'])
+ # second method arg presumably forbids empty values — TODO confirm
+ self._test_parser(m, 'foo=', 0, ['foo', True], fails=True)
+ self._test_parser(m, 'foo=0', [0], ['foo'])
+ self._test_parser(m, 'foo=0&foo=17', [0, 17], ['foo'])
+ self._test_parser(m, 'foo=0.1&foo=17', 0, ['foo'], fails=True)
+ self._test_parser(m, 'foo=None&foo=17', 0, ['foo'], fails=True)
+
+
+class TestsWithServer(TestCaseWithServer):
+ """Tests against our HTTP server/handler (and database)."""
+
+ def test_do_GET(self) -> None:
+ """Test GET / redirect, and unknown targets failing."""
+ # raw request so we can then inspect the redirect target ourselves
+ self.conn.request('GET', '/')
+ self.check_redirect('/day')
+ self.check_get('/foo', 404)
+
+ def test_do_POST(self) -> None:
+ """Test POST to / and other unknown targets failing."""
+ self.check_post({}, '/', 404)
+ self.check_post({}, '/foo', 404)
--- /dev/null
+"""Test Processes module."""
+from typing import Any
+from tests.utils import (TestCaseSansDB, TestCaseWithDB, TestCaseWithServer,
+ Expected)
+from plomtask.processes import Process, ProcessStep
+from plomtask.exceptions import NotFoundException
+
+
+class TestsSansDB(TestCaseSansDB):
+ """Module tests not requiring DB setup."""
+ # checked_class parametrizes the tests inherited from TestCaseSansDB
+ checked_class = Process
+
+
+class TestsSansDBProcessStep(TestCaseSansDB):
+ """Module tests not requiring DB setup."""
+ checked_class = ProcessStep
+ # kwargs the inherited tests need to instantiate checked_class
+ default_init_kwargs = {'owner_id': 2, 'step_process_id': 3,
+ 'parent_step_id': 4}
+
+
+class TestsWithDB(TestCaseWithDB):
+ """Module tests requiring DB setup."""
+ checked_class = Process
+
+ def test_remove(self) -> None:
+ """Test removal of Processes and ProcessSteps."""
+ # run inherited generic removal checks first, then step-specifics
+ super().test_remove()
+ p1, p2, p3 = Process(None), Process(None), Process(None)
+ for p in [p1, p2, p3]:
+ p.save(self.db_conn)
+ assert isinstance(p1.id_, int)
+ assert isinstance(p2.id_, int)
+ assert isinstance(p3.id_, int)
+ step = ProcessStep(None, p2.id_, p1.id_, None)
+ p2.set_steps(self.db_conn, [step])
+ step_id = step.id_
+ p2.set_steps(self.db_conn, [])
+ with self.assertRaises(NotFoundException):
+ # check unset ProcessSteps actually cannot be found anymore
+ assert step_id is not None
+ ProcessStep.by_id(self.db_conn, step_id)
+ p1.remove(self.db_conn)
+ step = ProcessStep(None, p2.id_, p3.id_, None)
+ p2.set_steps(self.db_conn, [step])
+ step_id = step.id_
+ # check _can_ remove Process pointed to by ProcessStep.owner_id, and …
+ p2.remove(self.db_conn)
+ with self.assertRaises(NotFoundException):
+ # … being dis-owned eliminates ProcessStep
+ assert step_id is not None
+ ProcessStep.by_id(self.db_conn, step_id)
+
+
+class TestsWithDBForProcessStep(TestCaseWithDB):
+ """Module tests requiring DB setup."""
+ checked_class = ProcessStep
+ default_init_kwargs = {'owner_id': 1, 'step_process_id': 2,
+ 'parent_step_id': 3}
+
+ def setUp(self) -> None:
+ """Provide the owning Process referenced by default_init_kwargs."""
+ super().setUp()
+ self.p1 = Process(1)
+ self.p1.save(self.db_conn)
+
+ def test_remove(self) -> None:
+ """Test .remove and unsetting of owner's .explicit_steps entry."""
+ p2 = Process(2)
+ p2.save(self.db_conn)
+ assert isinstance(self.p1.id_, int)
+ assert isinstance(p2.id_, int)
+ step = ProcessStep(None, self.p1.id_, p2.id_, None)
+ self.p1.set_steps(self.db_conn, [step])
+ step.remove(self.db_conn)
+ self.assertEqual(self.p1.explicit_steps, [])
+ self.check_identity_with_cache_and_db([])
+
+
+class ExpectedGetProcess(Expected):
+ """Builder of expectations for GET /processes."""
+ # default top-level fields of a GET /process reply unless overridden
+ _default_dict = {'is_new': False, 'preset_top_step': None, 'n_todos': 0}
+ _on_empty_make_temp = ('Process', 'proc_as_dict')
+
+ def __init__(self,
+ proc_id: int,
+ *args: Any, **kwargs: Any) -> None:
+ self._fields = {'process': proc_id, 'steps': []}
+ super().__init__(*args, **kwargs)
+
+ @staticmethod
+ def stepnode_as_dict(step_id: int,
+ proc_id: int,
+ seen: bool = False,
+ steps: None | list[dict[str, object]] = None,
+ is_explicit: bool = True,
+ is_suppressed: bool = False) -> dict[str, object]:
+ # pylint: disable=too-many-arguments
+ """Return JSON of ProcessStepNode to expect."""
+ return {'step': step_id,
+ 'process': proc_id,
+ 'seen': seen,
+ 'steps': steps if steps else [],
+ 'is_explicit': is_explicit,
+ 'is_suppressed': is_suppressed}
+
+ def recalc(self) -> None:
+ """Update internal dictionary by subclass-specific rules."""
+ super().recalc()
+ self._fields['process_candidates'] = self.as_ids(
+ self.lib_all('Process'))
+ self._fields['condition_candidates'] = self.as_ids(
+ self.lib_all('Condition'))
+ # 'owners' collects IDs of ProcessStep owners whose step points at
+ # the Process displayed here
+ self._fields['owners'] = [
+ s['owner_id'] for s in self.lib_all('ProcessStep')
+ if s['step_process_id'] == self._fields['process']]
+
+
+class ExpectedGetProcesses(Expected):
+ """Builder of expectations for GET /processes."""
+ _default_dict = {'sort_by': 'title', 'pattern': ''}
+
+ def recalc(self) -> None:
+ """Update internal dictionary by subclass-specific rules."""
+ super().recalc()
+ # 'processes' is simply the IDs of all Processes in the library
+ self._fields['processes'] = self.as_ids(self.lib_all('Process'))
+
+
+class TestsWithServer(TestCaseWithServer):
+ """Module tests against our HTTP server/handler (and database)."""
+ checked_class = Process
+
+ def test_fail_POST_process(self) -> None:
+ """Test failing POST /process variants."""
+ valid_post = {'title': '', 'description': '', 'effort': 1.0}
+ # check payloads lacking minimum expecteds
+ self.check_minimal_inputs('/process', valid_post)
+ # check payloads of bad data types
+ self.check_post(valid_post | {'effort': ''}, '/process', 400)
+ # check references to non-existent items
+ self.check_post(valid_post | {'conditions': [1]}, '/process', 404)
+ self.check_post(valid_post | {'disables': [1]}, '/process', 404)
+ self.check_post(valid_post | {'blockers': [1]}, '/process', 404)
+ self.check_post(valid_post | {'enables': [1]}, '/process', 404)
+ self.check_post(valid_post | {'new_top_step': 2}, '/process', 404)
+ # check deletion of non-existent
+ self.check_post({'delete': ''}, '/process?id=1', 404)
+
+ def test_basic_POST_process(self) -> None:
+ """Test basic GET/POST /process operations."""
+ # check on un-saved
+ exp = ExpectedGetProcess(1)
+ exp.force('process_candidates', [])
+ exp.set('is_new', True)
+ self.check_json_get('/process?id=1', exp)
+ # check on minimal payload post
+ exp = ExpectedGetProcess(1)
+ self.post_exp_process([exp], {}, 1)
+ self.check_json_get('/process?id=1', exp)
+ # check boolean 'calendarize'
+ self.post_exp_process([exp], {'calendarize': True}, 1)
+ self.check_json_get('/process?id=1', exp)
+ self.post_exp_process([exp], {}, 1)
+ self.check_json_get('/process?id=1', exp)
+ # check conditions posting
+ for i in range(3):
+ self.post_exp_cond([exp], {}, i+1)
+ p = {'conditions': [1, 2], 'disables': [1],
+ 'blockers': [3], 'enables': [2, 3]}
+ self.post_exp_process([exp], p, 1)
+ self.check_json_get('/process?id=1', exp)
+ # check n_todos field
+ self.post_exp_day([], {'new_todo': ['1']}, 1)
+ self.post_exp_day([], {'new_todo': ['1']}, 2)
+ exp.set('n_todos', 2)
+ self.check_json_get('/process?id=1', exp)
+ # check cannot delete if Todos to Process
+ self.check_post({'delete': ''}, '/process?id=1', 500)
+ # check cannot delete if some ProcessStep's .step_process_id
+ self.post_exp_process([exp], {}, 2)
+ self.post_exp_process([exp], {'new_top_step': 2}, 3)
+ self.check_post({'delete': ''}, '/process?id=2', 500)
+ # check successful deletion
+ self.post_exp_process([exp], {}, 4)
+ self.check_post({'delete': ''}, '/process?id=4', 302, '/processes')
+ # fresh expectations builder for the now-deleted Process ID
+ exp = ExpectedGetProcess(4)
+ exp.set('is_new', True)
+ for i in range(3):
+ self.post_exp_cond([exp], {}, i+1)
+ self.post_exp_process([exp], {}, i+1)
+ exp.force('process_candidates', [1, 2, 3])
+ self.check_json_get('/process?id=4', exp)
+
+ def test_POST_process_steps(self) -> None:
+ """Test behavior of ProcessStep posting."""
+ # pylint: disable=too-many-statements
+ url = '/process?id=1'
+ exp = ExpectedGetProcess(1)
+ self.post_exp_process([exp], {}, 1)
+ # post first (top-level) step of proc2 to proc1 by 'step_of' in 2
+ self.post_exp_process([exp], {'step_of': 1}, 2)
+ exp.lib_set('ProcessStep',
+ [exp.procstep_as_dict(1, owner_id=1, step_process_id=2)])
+ exp.set('steps', [
+ exp.stepnode_as_dict(
+ step_id=1,
+ proc_id=2)])
+ self.check_json_get(url, exp)
+ # post empty/absent steps list to process, expect clean slate, and old
+ # step to completely disappear
+ self.post_exp_process([exp], {}, 1)
+ exp.lib_wipe('ProcessStep')
+ exp.set('steps', [])
+ self.check_json_get(url, exp)
+ # post anew (as only step yet) step of proc2 to proc1 by 'new_top_step'
+ self.post_exp_process([exp], {'new_top_step': 2}, 1)
+ exp.lib_set('ProcessStep',
+ [exp.procstep_as_dict(1, owner_id=1, step_process_id=2)])
+ self.post_exp_process([exp], {'kept_steps': [1]}, 1)
+ step_nodes = [exp.stepnode_as_dict(step_id=1, proc_id=2)]
+ exp.set('steps', step_nodes)
+ self.check_json_get(url, exp)
+ # fail on zero-step recursion
+ p_min = {'title': '', 'description': '', 'effort': 0}
+ self.check_post(p_min | {'new_top_step': 1}, url, 400)
+ self.check_post(p_min | {'step_of': 1}, url, 400)
+ # post sibling steps
+ self.post_exp_process([exp], {}, 3)
+ self.post_exp_process([exp], {'kept_steps': [1], 'new_top_step': 3}, 1)
+ exp.lib_set('ProcessStep',
+ [exp.procstep_as_dict(2, owner_id=1, step_process_id=3)])
+ step_nodes += [exp.stepnode_as_dict(step_id=2, proc_id=3)]
+ self.check_json_get(url, exp)
+ # post implicit sub-step via post to proc2
+ self.post_exp_process([exp], {}, 4)
+ self.post_exp_process([exp], {'step_of': [1], 'new_top_step': 4}, 2)
+ exp.lib_set('ProcessStep',
+ [exp.procstep_as_dict(3, owner_id=2, step_process_id=4)])
+ step_nodes[0]['steps'] = [
+ exp.stepnode_as_dict(step_id=3, proc_id=4, is_explicit=False)]
+ self.check_json_get(url, exp)
+ # post explicit sub-step via post to proc1
+ p = {'kept_steps': [1, 2], 'new_step_to_2': 4}
+ self.post_exp_process([exp], p, 1)
+ exp.lib_set('ProcessStep', [exp.procstep_as_dict(
+ 4, owner_id=1, step_process_id=4, parent_step_id=2)])
+ step_nodes[1]['steps'] = [
+ exp.stepnode_as_dict(step_id=4, proc_id=4)]
+ self.check_json_get(url, exp)
+ # to ensure suppressed step nodes are hidden, add new step to proc4,
+ # implicitly adding it as sub-step to the proc4 steps in proc1, but
+ # suppress one of the proc4 occurrences there, marking its
+ # .is_suppressed *and* hiding the new step below it
+ p = {'kept_steps': [1, 2, 4], 'suppressed_steps': [3]}
+ self.post_exp_process([exp], {'step_of': [4]}, 5)
+ self.post_exp_process([exp], p, 1)
+ exp.lib_set('ProcessStep',
+ [exp.procstep_as_dict(5, owner_id=4, step_process_id=5)])
+ assert isinstance(step_nodes[0]['steps'], list)
+ assert isinstance(step_nodes[1]['steps'], list)
+ step_nodes[0]['steps'][0]['is_suppressed'] = True
+ step_nodes[1]['steps'][0]['steps'] = [
+ exp.stepnode_as_dict(step_id=5, proc_id=5, is_explicit=False)]
+ self.check_json_get(url, exp)
+ # ensure implicit steps' non-top explicit sub-steps are shown
+ self.post_exp_process([exp], {}, 6)
+ self.post_exp_process([exp], {'kept_steps': [5], 'step_of': [1, 2],
+ 'new_step_to_5': 6}, 4)
+ exp.lib_set('ProcessStep', [exp.procstep_as_dict(
+ 6, owner_id=4, parent_step_id=5, step_process_id=6)])
+ step_nodes[1]['steps'][0]['steps'][0]['steps'] = [
+ exp.stepnode_as_dict(step_id=6, proc_id=6, is_explicit=False)]
+ self.check_json_get(url, exp)
+ # try to post sub-step to non-existing sub-step, expect it to become
+ # top-level step instead
+ p['new_step_to_9'] = 5
+ self.post_exp_process([exp], p, 1)
+ exp.lib_set('ProcessStep',
+ [exp.procstep_as_dict(7, owner_id=1, step_process_id=5)])
+ step_nodes += [
+ exp.stepnode_as_dict(step_id=7, proc_id=5)]
+ self.check_json_get(url, exp)
+ del p['new_step_to_9']
+ assert isinstance(p['kept_steps'], list)
+ p['kept_steps'] += [7]
+ # try to post sub-step to implicit sub-step, expect same result
+ p['new_step_to_5'] = 5
+ self.post_exp_process([exp], p, 1)
+ exp.lib_set('ProcessStep',
+ [exp.procstep_as_dict(8, owner_id=1, step_process_id=5)])
+ step_nodes += [
+ exp.stepnode_as_dict(step_id=8, proc_id=5)]
+ self.check_json_get(url, exp)
+ del p['new_step_to_5']
+ p['kept_steps'] += [8]
+ # post sub-step to explicit sub-step with implicit sub-step of same
+ # step process ID, expect it to eliminate/replace implicit sub-step
+ p['new_step_to_4'] = 5
+ self.post_exp_process([exp], p, 1)
+ step_nodes[1]['steps'][0]['steps'][0] = exp.stepnode_as_dict(
+ step_id=9, proc_id=5)
+ exp.lib_set('ProcessStep', [exp.procstep_as_dict(
+ 9, owner_id=1, parent_step_id=4, step_process_id=5)])
+ self.check_json_get(url, exp)
+ del p['new_step_to_4']
+ p['kept_steps'] += [9]
+ # fail on single-step recursion via top step
+ self.post_exp_process([exp], {}, 7)
+ self.post_exp_process([exp], {'new_top_step': 1}, 7)
+ exp.lib_set('ProcessStep', [exp.procstep_as_dict(
+ 10, owner_id=7, step_process_id=1)])
+ p['step_of'] = [7]
+ self.check_post(p_min | p | {'new_top_step': 7}, url, 400)
+ # fail on double-step recursion via top step
+ self.post_exp_process([exp], {}, 8)
+ self.post_exp_process([exp], {'new_top_step': 7}, 8)
+ exp.lib_set('ProcessStep', [exp.procstep_as_dict(
+ 11, owner_id=8, step_process_id=7)])
+ self.check_post(p_min | p | {'new_top_step': 8}, url, 400)
+ # fail on single- and double-step recursion via explicit sub-step
+ self.check_post(p_min | p | {'new_step_to_8': 7}, url, 400)
+ self.check_post(p_min | p | {'new_step_to_8': 8}, url, 400)
+
+ def test_fail_GET_process(self) -> None:
+ """Test invalid GET /process params."""
+ # check for invalid IDs
+ self.check_get_defaults('/process')
+ # check we catch invalid base64
+ # (title_b64 presumably carries a base64-encoded title — TODO confirm)
+ self.check_get('/process?title_b64=foo', 400)
+ # check failure on references to unknown processes; we create Process
+ # of ID=1 here so we know the 404 comes from step_to=2 etc. (that tie
+ # the Process displayed by /process to others), not from not finding
+ # the main Process itself
+ self.post_exp_process([], {}, 1)
+ self.check_get('/process?id=1&step_to=2', 404)
+ self.check_get('/process?id=1&has_step=2', 404)
+
+ def test_GET_processes(self) -> None:
+ """Test GET /processes."""
+ # pylint: disable=too-many-statements
+ # test empty result on empty DB, default-settings on empty params
+ exp = ExpectedGetProcesses()
+ self.check_json_get('/processes', exp)
+ # test on meaningless non-empty params (incl. entirely un-used key),
+ # that 'sort_by' default to 'title' (even if set to something else, as
+ # long as without handler) and 'pattern' get preserved
+ exp.set('pattern', 'bar')
+ url = '/processes?sort_by=foo&pattern=bar&foo=x'
+ self.check_json_get(url, exp)
+ # test non-empty result, automatic (positive) sorting by title
+ # (tuple fields: title, description, effort, top-step process IDs)
+ for i, t in enumerate([('foo', 'oof', 1.0, []),
+ ('bar', 'rab', 1.1, [1]),
+ ('baz', 'zab', 0.9, [1, 2])]):
+ payload = {'title': t[0], 'description': t[1], 'effort': t[2],
+ 'new_top_step': t[3]}
+ self.post_exp_process([exp], payload, i+1)
+ exp.lib_set('ProcessStep',
+ [exp.procstep_as_dict(1, owner_id=2, step_process_id=1),
+ exp.procstep_as_dict(2, owner_id=3, step_process_id=1),
+ exp.procstep_as_dict(3, owner_id=3, step_process_id=2)])
+ exp.set('pattern', '')
+ self.check_filter(exp, 'processes', 'sort_by', 'title', [2, 3, 1])
+ # test other sortings
+ self.check_filter(exp, 'processes', 'sort_by', '-title', [1, 3, 2])
+ self.check_filter(exp, 'processes', 'sort_by', 'effort', [3, 1, 2])
+ self.check_filter(exp, 'processes', 'sort_by', '-effort', [2, 1, 3])
+ self.check_filter(exp, 'processes', 'sort_by', 'steps', [1, 2, 3])
+ self.check_filter(exp, 'processes', 'sort_by', '-steps', [3, 2, 1])
+ self.check_filter(exp, 'processes', 'sort_by', 'owners', [3, 2, 1])
+ self.check_filter(exp, 'processes', 'sort_by', '-owners', [1, 2, 3])
+ # test pattern matching on title
+ exp.set('sort_by', 'title')
+ exp.lib_del('Process', 1)
+ self.check_filter(exp, 'processes', 'pattern', 'ba', [2, 3])
+ # test pattern matching on description
+ exp.lib_wipe('Process')
+ exp.lib_wipe('ProcessStep')
+ self.post_exp_process([exp], {'description': 'oof', 'effort': 1.0}, 1)
+ self.check_filter(exp, 'processes', 'pattern', 'of', [1])
--- /dev/null
+"""Test Todos module."""
+from typing import Any
+from datetime import date as dt_date, timedelta
+from tests.utils import (TestCaseSansDB, TestCaseWithDB, TestCaseWithServer,
+ Expected, date_and_day_id)
+from plomtask.todos import Todo
+from plomtask.processes import Process
+from plomtask.exceptions import BadFormatException, HandledException
+
+
+class TestsWithDB(TestCaseWithDB, TestCaseSansDB):
+ """Tests requiring DB, but not server setup.
+
+ NB: We subclass TestCaseSansDB too, to run any tests there that due to any
+ Todo requiring a _saved_ Process wouldn't run without a DB.
+ """
+ checked_class = Todo
+ default_init_kwargs = {'process': None, 'is_done': False, 'day_id': 1}
+
+ def setUp(self) -> None:
+ """Provide a saved Process for Todos to reference."""
+ super().setUp()
+ self.proc = Process(None)
+ self.proc.save(self.db_conn)
+ self.default_init_kwargs['process'] = self.proc
+
+ def test_Todo_by_date(self) -> None:
+ """Test findability of Todos by date."""
+ date_1, day_id_1 = date_and_day_id(1)
+ date_2, _ = date_and_day_id(2)
+ t1 = Todo(None, self.proc, False, day_id_1)
+ t1.save(self.db_conn)
+ t2 = Todo(None, self.proc, False, day_id_1)
+ t2.save(self.db_conn)
+ self.assertEqual(Todo.by_date(self.db_conn, date_1), [t1, t2])
+ self.assertEqual(Todo.by_date(self.db_conn, date_2), [])
+ # malformed date strings are rejected rather than yielding []
+ with self.assertRaises(BadFormatException):
+ self.assertEqual(Todo.by_date(self.db_conn, 'foo'), [])
+
+ def test_Todo_by_date_range_with_limits(self) -> None:
+ """Test .by_date_range_with_limits."""
+ # pylint: disable=too-many-locals
+ f = Todo.by_date_range_with_limits
+ # check illegal ranges
+ legal_range = ('yesterday', 'tomorrow')
+ for i in [0, 1]:
+ for bad_date in ['foo', '2024-02-30', '2024-01-01 12:00:00']:
+ date_range_l = list(legal_range[:])
+ date_range_l[i] = bad_date
+ with self.assertRaises(HandledException):
+ f(self.db_conn, (date_range_l[0], date_range_l[1]))
+ # check empty, translation of 'yesterday' and 'tomorrow'
+ items, start, end = f(self.db_conn, legal_range)
+ self.assertEqual(items, [])
+ dt_today = dt_date.today()
+ dt_yesterday = dt_today + timedelta(days=-1)
+ dt_tomorrow = dt_today + timedelta(days=+1)
+ self.assertEqual(start, dt_yesterday.isoformat())
+ self.assertEqual(end, dt_tomorrow.isoformat())
+ # prepare dated items for non-empty results
+ kwargs = self.default_init_kwargs.copy()
+ todos = []
+ dates_and_day_ids = [date_and_day_id(i) for i in range(5)]
+ for day_id in [t[1] for t in dates_and_day_ids[1:-1]]:
+ kwargs['day_id'] = day_id
+ todos += [Todo(None, **kwargs)]
+ # check ranges still empty before saving
+ date_range = (dates_and_day_ids[1][0], dates_and_day_ids[-2][0])
+ self.assertEqual(f(self.db_conn, date_range)[0], [])
+ # check all objs displayed within interval
+ for todo in todos:
+ todo.save(self.db_conn)
+ self.assertEqual(f(self.db_conn, date_range)[0], todos)
+ # check that only displayed what exists within interval
+ date_range = (dates_and_day_ids[1][0], dates_and_day_ids[-3][0])
+ expected = [todos[0], todos[1]]
+ self.assertEqual(f(self.db_conn, date_range)[0], expected)
+ date_range = (dates_and_day_ids[-2][0], dates_and_day_ids[-1][0])
+ expected = [todos[2]]
+ self.assertEqual(f(self.db_conn, date_range)[0], expected)
+ # check that inverted interval displays nothing
+ date_range = (dates_and_day_ids[-1][0], dates_and_day_ids[0][0])
+ self.assertEqual(f(self.db_conn, date_range)[0], [])
+ # check that "today" is interpreted, and single-element interval
+ # (day IDs presumably count days since 2000-01-01, matching this
+ # arithmetic — TODO confirm against Day model)
+ kwargs['day_id'] = (dt_today - dt_date(2000, 1, 1)).days
+ todo_today = Todo(None, **kwargs)
+ todo_today.save(self.db_conn)
+ date_range = ('today', 'today')
+ items, start, end = f(self.db_conn, date_range)
+ self.assertEqual(start, dt_today.isoformat())
+ self.assertEqual(start, end)
+ self.assertEqual(items, [todo_today])
+
+ def test_Todo_children(self) -> None:
+ """Test Todo.children relations."""
+ todo_1 = Todo(None, self.proc, False, 1)
+ todo_2 = Todo(None, self.proc, False, 1)
+ todo_2.save(self.db_conn)
+ # check un-saved Todo cannot parent
+ with self.assertRaises(HandledException):
+ todo_1.add_child(todo_2)
+ todo_1.save(self.db_conn)
+ todo_3 = Todo(None, self.proc, False, 1)
+ # check un-saved Todo cannot be parented
+ with self.assertRaises(HandledException):
+ todo_1.add_child(todo_3)
+
+
+class ExpectedGetTodo(Expected):
+ """Builder of expectations for GET /todo."""
+
+ def __init__(self,
+ todo_id: int,
+ *args: Any, **kwargs: Any) -> None:
+ self._fields = {'todo': todo_id,
+ 'steps_todo_to_process': []}
+ super().__init__(*args, **kwargs)
+
+ def recalc(self) -> None:
+ """Update internal dictionary by subclass-specific rules."""
+
+ def walk_steps(step: dict[str, Any]) -> None:
+ # unfilled step nodes (no Todo yet) get, per their Process,
+ # the candidate Todos available for adoption
+ if not step['todo']:
+ proc_id = step['process']
+ cands = self.as_ids(
+ [t for t in todos if proc_id == t['process_id']
+ and t['id'] in self._fields['todo_candidates']])
+ self._fields['adoption_candidates_for'][str(proc_id)] = cands
+ for child in step['children']:
+ walk_steps(child)
+
+ super().recalc()
+ # GET /todo presumably does not include Days in its library —
+ # TODO confirm
+ self.lib_wipe('Day')
+ todos = self.lib_all('Todo')
+ procs = self.lib_all('Process')
+ conds = self.lib_all('Condition')
+ self._fields['todo_candidates'] = self.as_ids(
+ [t for t in todos if t['id'] != self._fields['todo']])
+ self._fields['process_candidates'] = self.as_ids(procs)
+ self._fields['condition_candidates'] = self.as_ids(conds)
+ self._fields['adoption_candidates_for'] = {}
+ for step in self._fields['steps_todo_to_process']:
+ walk_steps(step)
+
+ @staticmethod
+ def step_as_dict(node_id: int,
+ process: int | None = None,
+ todo: int | None = None,
+ fillable: bool = False,
+ children: None | list[dict[str, object]] = None
+ ) -> dict[str, object]:
+ """Return JSON of TodoOrProcStepsNode to expect."""
+ return {'node_id': node_id,
+ 'children': children if children is not None else [],
+ 'process': process,
+ 'fillable': fillable,
+ 'todo': todo}
+
+
+class TestsWithServer(TestCaseWithServer):
+ """Tests against our HTTP server/handler (and database)."""
+ checked_class = Todo
+
+ def test_basic_fail_POST_todo(self) -> None:
+ """Test basic malformed/illegal POST /todo requests."""
+ self.post_exp_process([], {}, 1)
+ # test we cannot just POST into non-existing Todo
+ self.check_post({}, '/todo', 404)
+ self.check_post({}, '/todo?id=FOO', 400)
+ self.check_post({}, '/todo?id=0', 400)
+ self.check_post({}, '/todo?id=1', 404)
+ # test malformed values on existing Todo
+ self.post_exp_day([], {'new_todo': [1]})
+ for name in ['adopt', 'effort', 'make_full', 'make_empty',
+ 'conditions', 'disables', 'blockers', 'enables']:
+ self.check_post({name: 'x'}, '/todo?id=1', 400, '/todo')
+ for prefix in ['make_', '']:
+ for suffix in ['', 'x', '1.1']:
+ self.check_post({'step_filler_to_1': [f'{prefix}{suffix}']},
+ '/todo?id=1', 400, '/todo')
+ for suffix in ['', 'x', '1.1']:
+ self.check_post({'step_filler_to_{suffix}': ['1']},
+ '/todo?id=1', 400, '/todo')
+
+    def test_basic_POST_todo(self) -> None:
+        """Test basic POST /todo manipulations."""
+        exp = ExpectedGetTodo(1)
+        self.post_exp_process([exp], {'calendarize': 0}, 1)
+        self.post_exp_day([exp], {'new_todo': [1]})
+        # test posting naked entity at first changes nothing
+        self.check_json_get('/todo?id=1', exp)
+        self.check_post({}, '/todo?id=1')
+        self.check_json_get('/todo?id=1', exp)
+        # test posting doneness, comment, calendarization, effort
+        todo_post = {'is_done': 1, 'calendarize': 1,
+                     'comment': 'foo', 'effort': 2.3}
+        self.post_exp_todo([exp], todo_post, 1)
+        self.check_json_get('/todo?id=1', exp)
+        # test implicitly un-setting comment/calendarize/is_done by empty post
+        self.post_exp_todo([exp], {}, 1)
+        self.check_json_get('/todo?id=1', exp)
+        # test effort post can be explicitly unset by "effort":"" post
+        self.check_post({'effort': ''}, '/todo?id=1')
+        exp.lib_get('Todo', 1)['effort'] = None
+        self.check_json_get('/todo?id=1', exp)
+        # test Condition posts show up in the GET /todo response's library
+        c1_post = {'title': 'foo', 'description': 'oof', 'is_active': 0}
+        c2_post = {'title': 'bar', 'description': 'rab', 'is_active': 1}
+        self.post_exp_cond([exp], c1_post, 1)
+        self.post_exp_cond([exp], c2_post, 2)
+        self.check_json_get('/todo?id=1', exp)
+        # test setting Condition relations of the Todo itself
+        todo_post = {'conditions': [1], 'disables': [1],
+                     'blockers': [2], 'enables': [2]}
+        self.post_exp_todo([exp], todo_post, 1)
+        self.check_json_get('/todo?id=1', exp)
+
+    def test_POST_todo_deletion(self) -> None:
+        """Test deletions via POST /todo."""
+        exp = ExpectedGetTodo(1)
+        self.post_exp_process([exp], {}, 1)
+        # test failure of deletion on non-existing Todo
+        self.check_post({'delete': ''}, '/todo?id=2', 404, '/')
+        # test deletion of existing Todo
+        self.post_exp_day([exp], {'new_todo': [1]})
+        self.check_post({'delete': ''}, '/todo?id=1', 302, '/')
+        self.check_get('/todo?id=1', 404)
+        exp.lib_del('Todo', 1)
+        # test deletion of adopted Todo
+        self.post_exp_day([exp], {'new_todo': [1]})
+        self.post_exp_day([exp], {'new_todo': [1]})
+        self.check_post({'adopt': 2}, '/todo?id=1')
+        self.check_post({'delete': ''}, '/todo?id=2', 302, '/')
+        exp.lib_del('Todo', 2)
+        self.check_get('/todo?id=2', 404)
+        self.check_json_get('/todo?id=1', exp)
+        # test deletion of adopting Todo
+        self.post_exp_day([exp], {'new_todo': [1]})
+        self.check_post({'adopt': 2}, '/todo?id=1')
+        self.check_post({'delete': ''}, '/todo?id=1', 302, '/')
+        exp.set('todo', 2)
+        exp.lib_del('Todo', 1)
+        self.check_json_get('/todo?id=2', exp)
+        # test cannot delete Todo with comment or effort
+        self.check_post({'comment': 'foo'}, '/todo?id=2')
+        self.check_post({'delete': ''}, '/todo?id=2', 500, '/')
+        self.check_post({'effort': 5}, '/todo?id=2')
+        self.check_post({'delete': ''}, '/todo?id=2', 500, '/')
+        # test deletion via effort < 0, but only if deletable
+        # (first post leaves a comment, so the Todo survives; the second,
+        # comment-less post lets the negative effort delete it)
+        self.check_post({'effort': -1, 'comment': 'foo'}, '/todo?id=2')
+        self.check_post({}, '/todo?id=2')
+        self.check_get('/todo?id=2', 404)
+
+    def test_POST_todo_adoption(self) -> None:
+        """Test adoption via POST /todo with "adopt"."""
+        # post two Todos to Day, have first adopt second
+        exp = ExpectedGetTodo(1)
+        self.post_exp_process([exp], {}, 1)
+        self.post_exp_day([exp], {'new_todo': [1]})
+        self.post_exp_day([exp], {'new_todo': [1]})
+        self.post_exp_todo([exp], {'adopt': 2}, 1)
+        exp.set('steps_todo_to_process', [
+            exp.step_as_dict(node_id=1, process=None, todo=2)])
+        self.check_json_get('/todo?id=1', exp)
+        # test Todo un-adopting by just not sending an adopt
+        self.post_exp_todo([exp], {}, 1)
+        exp.set('steps_todo_to_process', [])
+        self.check_json_get('/todo?id=1', exp)
+        # test fail on trying to adopt non-existing Todo
+        self.check_post({'adopt': 3}, '/todo?id=1', 404)
+        # test cannot self-adopt
+        self.check_post({'adopt': 1}, '/todo?id=1', 400)
+        # test cannot do 1-step circular adoption
+        self.post_exp_todo([exp], {'adopt': 1}, 2)
+        self.check_post({'adopt': 2}, '/todo?id=1', 400)
+        # test cannot do 2-step circular adoption
+        self.post_exp_day([exp], {'new_todo': [1]})
+        self.post_exp_todo([exp], {'adopt': 2}, 3)
+        self.check_post({'adopt': 3}, '/todo?id=1', 400)
+        # test can adopt Todo into ProcessStep chain via its Process (with key
+        # 'step_filler' equivalent to single-element 'adopt' if intable)
+        self.post_exp_process([exp], {}, 2)
+        self.post_exp_process([exp], {}, 3)
+        self.post_exp_process([exp], {'new_top_step': [2, 3]}, 1)
+        exp.lib_set('ProcessStep',
+                    [exp.procstep_as_dict(1, owner_id=1, step_process_id=2),
+                     exp.procstep_as_dict(2, owner_id=1, step_process_id=3)])
+        slots = [
+            exp.step_as_dict(node_id=1, process=2, todo=None, fillable=True),
+            exp.step_as_dict(node_id=2, process=3, todo=None, fillable=True)]
+        exp.set('steps_todo_to_process', slots)
+        self.post_exp_day([exp], {'new_todo': [2]})
+        self.post_exp_day([exp], {'new_todo': [3]})
+        self.check_json_get('/todo?id=1', exp)
+        # Todo 4 (of Process 2) fills slot 1 via 'adopt', Todo 5 (of Process
+        # 3) fills slot 2 via 'step_filler_to_1'
+        self.post_exp_todo([exp], {'step_filler_to_1': 5, 'adopt': [4]}, 1)
+        exp.lib_get('Todo', 1)['children'] += [5]
+        slots[0]['todo'] = 4
+        slots[1]['todo'] = 5
+        self.check_json_get('/todo?id=1', exp)
+        # test 'ignore' values for 'step_filler' are ignored, and intable
+        # 'step_filler' values are interchangeable with those of 'adopt'
+        todo_post = {'adopt': 5, 'step_filler_to_1': ['ignore', 4]}
+        self.check_post(todo_post, '/todo?id=1')
+        self.check_json_get('/todo?id=1', exp)
+        # test cannot adopt into non-top-level elements of chain, instead
+        # creating new top-level steps when adopting of respective Process
+        self.post_exp_process([exp], {}, 4)
+        self.post_exp_process([exp], {'new_top_step': 4, 'step_of': [1]}, 3)
+        exp.lib_set('ProcessStep',
+                    [exp.procstep_as_dict(3, owner_id=3, step_process_id=4)])
+        slots[1]['children'] = [exp.step_as_dict(
+            node_id=3, process=4, todo=None, fillable=True)]
+        self.post_exp_day([exp], {'new_todo': [4]})
+        self.post_exp_todo([exp], {'adopt': [4, 5, 6]}, 1)
+        # Todo 6 lands in a fresh top-level slot rather than the nested one
+        slots += [exp.step_as_dict(
+            node_id=4, process=None, todo=6, fillable=False)]
+        self.check_json_get('/todo?id=1', exp)
+
+    def test_POST_todo_make_empty(self) -> None:
+        """Test creation via POST /todo "step_filler_to"/"make"."""
+        # create chain of Processes
+        exp = ExpectedGetTodo(1)
+        self.post_exp_process([exp], {}, 1)
+        for i in range(1, 4):
+            self.post_exp_process([exp], {'new_top_step': i}, i+1)
+        exp.lib_set('ProcessStep',
+                    [exp.procstep_as_dict(1, owner_id=2, step_process_id=1),
+                     exp.procstep_as_dict(2, owner_id=3, step_process_id=2),
+                     exp.procstep_as_dict(3, owner_id=4, step_process_id=3)])
+        # post (childless) Todo of chain end, then make empty on next in line
+        self.post_exp_day([exp], {'new_todo': [4]})
+        slots = [exp.step_as_dict(
+            node_id=1, process=3, todo=None, fillable=True,
+            children=[exp.step_as_dict(
+                node_id=2, process=2, todo=None, fillable=False,
+                children=[exp.step_as_dict(
+                    node_id=3, process=1, todo=None, fillable=False)])])]
+        exp.set('steps_todo_to_process', slots)
+        self.check_json_get('/todo?id=1', exp)
+        # 'make_3' creates a new empty Todo of Process 3 filling the slot …
+        self.check_post({'step_filler_to_1': 'make_3'}, '/todo?id=1')
+        exp.set_todo_from_post(2, {'process_id': 3})
+        exp.set_todo_from_post(1, {'process_id': 4, 'children': [2]})
+        slots[0]['todo'] = 2
+        assert isinstance(slots[0]['children'], list)
+        # … which in turn makes the next chain level fillable
+        slots[0]['children'][0]['fillable'] = True
+        self.check_json_get('/todo?id=1', exp)
+        # make new top-level Todo without chain implied by its Process
+        self.check_post({'make_empty': 2, 'adopt': [2]}, '/todo?id=1')
+        exp.set_todo_from_post(3, {'process_id': 2})
+        exp.set_todo_from_post(1, {'process_id': 4, 'children': [2, 3]})
+        slots += [exp.step_as_dict(
+            node_id=4, process=None, todo=3, fillable=False)]
+        self.check_json_get('/todo?id=1', exp)
+        # fail on trying to call make_empty on non-existing Process
+        self.check_post({'make_full': 5}, '/todo?id=1', 404)
+
+    def test_GET_todo(self) -> None:
+        """Test GET /todo response codes."""
+        # test malformed or illegal parameter values
+        self.check_get_defaults('/todo')
+        # test all existing Processes are shown as available
+        exp = ExpectedGetTodo(1)
+        self.post_exp_process([exp], {}, 1)
+        self.post_exp_day([exp], {'new_todo': [1]})
+        self.post_exp_process([exp], {}, 2)
+        self.check_json_get('/todo?id=1', exp)
+        # test chain of Processes shown as potential step nodes
+        self.post_exp_process([exp], {}, 3)
+        self.post_exp_process([exp], {}, 4)
+        self.post_exp_process([exp], {'new_top_step': 2}, 1)
+        self.post_exp_process([exp], {'new_top_step': 3, 'step_of': [1]}, 2)
+        self.post_exp_process([exp], {'new_top_step': 4, 'step_of': [2]}, 3)
+        exp.lib_set('ProcessStep', [
+            exp.procstep_as_dict(1, owner_id=1, step_process_id=2),
+            exp.procstep_as_dict(2, owner_id=2, step_process_id=3),
+            exp.procstep_as_dict(3, owner_id=3, step_process_id=4)])
+        # only the top-level node of the chain is fillable
+        slots = [exp.step_as_dict(
+            node_id=1, process=2, todo=None, fillable=True,
+            children=[exp.step_as_dict(
+                node_id=2, process=3, todo=None, fillable=False,
+                children=[exp.step_as_dict(
+                    node_id=3, process=4, todo=None, fillable=False)])])]
+        exp.set('steps_todo_to_process', slots)
+        self.check_json_get('/todo?id=1', exp)
+        # test display of parallel chains
+        proc_steps_post = {'new_top_step': 4, 'kept_steps': [1, 3]}
+        self.post_exp_process([], proc_steps_post, 1)
+        exp.lib_set('ProcessStep', [
+            exp.procstep_as_dict(4, owner_id=1, step_process_id=4)])
+        slots += [exp.step_as_dict(
+            node_id=4, process=4, todo=None, fillable=True)]
+        self.check_json_get('/todo?id=1', exp)
+
+    def test_POST_todo_doneness_relations(self) -> None:
+        """Test Todo.is_done Condition, adoption relations for /todo POSTs."""
+        self.post_exp_process([], {}, 1)
+        # test Todo with adoptee can only be set done if adoptee is done too
+        self.post_exp_day([], {'new_todo': [1]})
+        self.post_exp_day([], {'new_todo': [1]})
+        self.check_post({'adopt': 2, 'is_done': 1}, '/todo?id=1', 400)
+        self.check_post({'is_done': 1}, '/todo?id=2')
+        self.check_post({'adopt': 2, 'is_done': 1}, '/todo?id=1', 302)
+        # test Todo cannot be set undone with adopted Todo not done yet
+        self.check_post({'is_done': 0}, '/todo?id=2')
+        self.check_post({'adopt': 2, 'is_done': 0}, '/todo?id=1', 400)
+        # test unadoption relieves block
+        # (no 'adopt' sent, so the parent-child relation is dropped)
+        self.check_post({'is_done': 0}, '/todo?id=1', 302)
+        # test Condition being set or unset can block doneness setting
+        c1_post = {'title': '', 'description': '', 'is_active': 0}
+        c2_post = {'title': '', 'description': '', 'is_active': 1}
+        self.check_post(c1_post, '/condition', redir='/condition?id=1')
+        self.check_post(c2_post, '/condition', redir='/condition?id=2')
+        self.check_post({'conditions': [1], 'is_done': 1}, '/todo?id=1', 400)
+        self.check_post({'is_done': 1}, '/todo?id=1', 302)
+        self.check_post({'is_done': 0}, '/todo?id=1', 302)
+        self.check_post({'blockers': [2], 'is_done': 1}, '/todo?id=1', 400)
+        self.check_post({'is_done': 1}, '/todo?id=1', 302)
+        # test setting Todo doneness can set/un-set Conditions, but only on
+        # doneness change, not by mere passive state
+        self.check_post({'is_done': 0}, '/todo?id=2', 302)
+        self.check_post({'enables': [1], 'is_done': 1}, '/todo?id=1')
+        self.check_post({'conditions': [1], 'is_done': 1}, '/todo?id=2', 400)
+        self.check_post({'enables': [1], 'is_done': 0}, '/todo?id=1')
+        self.check_post({'enables': [1], 'is_done': 1}, '/todo?id=1')
+        self.check_post({'conditions': [1], 'is_done': 1}, '/todo?id=2')
+        self.check_post({'blockers': [1], 'is_done': 0}, '/todo?id=2', 400)
+        self.check_post({'disables': [1], 'is_done': 1}, '/todo?id=1')
+        self.check_post({'blockers': [1], 'is_done': 0}, '/todo?id=2', 400)
+        self.check_post({'disables': [1]}, '/todo?id=1')
+        self.check_post({'disables': [1], 'is_done': 1}, '/todo?id=1')
+        self.check_post({'blockers': [1]}, '/todo?id=2')
--- /dev/null
+"""Shared test utilities."""
+# pylint: disable=too-many-lines
+from __future__ import annotations
+from datetime import datetime, date as dt_date, timedelta
+from unittest import TestCase
+from typing import Mapping, Any, Callable
+from threading import Thread
+from pathlib import Path
+from http.client import HTTPConnection
+from time import sleep
+from json import loads as json_loads, dumps as json_dumps
+from urllib.parse import urlencode
+from uuid import uuid4
+from os import remove as remove_file
+from pprint import pprint
+from plomtask.db import DatabaseFile, DatabaseConnection
+from plomtask.http import TaskHandler, TaskServer
+from plomtask.processes import Process, ProcessStep
+from plomtask.conditions import Condition
+from plomtask.days import Day
+from plomtask.todos import Todo
+from plomtask.versioned_attributes import VersionedAttribute, TIMESTAMP_FMT
+from plomtask.exceptions import NotFoundException, HandledException
+
+
+# sample values per VersionedAttribute value type, used by the
+# _run_on_versioned_attributes test driver
+_VERSIONED_VALS: dict[str,
+                      list[str] | list[float]] = {'str': ['A', 'B'],
+                                                  'float': [0.3, 1.1]}
+# inputs the server treats as boolean true
+# NOTE(review): unused in this chunk – presumably consumed further below
+_VALID_TRUES = {True, 'True', 'true', '1', 'on'}
+
+
+def dt_date_from_day_id(day_id: int) -> dt_date:
+    """Return datetime.date of adding day_id days to 2000-01-01."""
+    return dt_date(2000, 1, 1) + timedelta(days=day_id)
+
+
+def date_and_day_id(day_id: int) -> tuple[str, int]:
+    """Interpret day_id as n of days since millennium, return (date, day_id)."""
+    return dt_date_from_day_id(day_id).isoformat(), day_id
+
+
+class TestCaseAugmented(TestCase):
+    """Tester core providing helpful basic internal decorators and methods."""
+    # class under test; set by concrete subclasses
+    checked_class: Any
+    # kwargs needed to instantiate checked_class beyond its ID
+    default_init_kwargs: dict[str, Any] = {}
+
+    @staticmethod
+    def _run_on_versioned_attributes(f: Callable[..., None]
+                                     ) -> Callable[..., None]:
+        # decorator: run f once per VersionedAttribute of checked_class,
+        # passing a fresh owner, the attribute, its default, and sample values
+        def wrapper(self: TestCase) -> None:
+            assert isinstance(self, TestCaseAugmented)
+            for attr_name in self.checked_class.to_save_versioned():
+                default = self.checked_class.versioned_defaults[attr_name]
+                owner = self.checked_class(None, **self.default_init_kwargs)
+                attr = getattr(owner, attr_name)
+                to_set = _VERSIONED_VALS[attr.value_type_name]
+                f(self, owner, attr_name, attr, default, to_set)
+        return wrapper
+
+    @classmethod
+    def _run_if_sans_db(cls, f: Callable[..., None]) -> Callable[..., None]:
+        # decorator: silently skip f unless called on a TestCaseSansDB
+        # subclass (so shared tests don't re-run in DB/server contexts)
+        def wrapper(self: TestCaseSansDB) -> None:
+            if issubclass(cls, TestCaseSansDB):
+                f(self)
+        return wrapper
+
+    @classmethod
+    def _run_if_with_db_but_not_server(cls,
+                                       f: Callable[..., None]
+                                       ) -> Callable[..., None]:
+        # decorator: run f only for DB-backed test cases that are not
+        # server test cases
+        def wrapper(self: TestCaseWithDB) -> None:
+            if issubclass(cls, TestCaseWithDB) and\
+                    not issubclass(cls, TestCaseWithServer):
+                f(self)
+        return wrapper
+
+    @classmethod
+    def _make_from_defaults(cls, id_: int | None) -> Any:
+        # instantiate checked_class with id_ and the declared default kwargs
+        return cls.checked_class(id_, **cls.default_init_kwargs)
+
+
+class TestCaseSansDB(TestCaseAugmented):
+    """Tests requiring no DB setup."""
+    _legal_ids: list[int] = [1, 5]
+    _illegal_ids: list[int] = [0]
+
+    @TestCaseAugmented._run_if_sans_db
+    def test_id_validation(self) -> None:
+        """Test .id_ validation/setting."""
+        for id_ in self._illegal_ids:
+            with self.assertRaises(HandledException):
+                self._make_from_defaults(id_)
+        for id_ in self._legal_ids:
+            obj = self._make_from_defaults(id_)
+            self.assertEqual(obj.id_, id_)
+
+    @TestCaseAugmented._run_if_sans_db
+    @TestCaseAugmented._run_on_versioned_attributes
+    def test_versioned_set(self,
+                           _: Any,
+                           __: str,
+                           attr: VersionedAttribute,
+                           default: str | float,
+                           to_set: list[str] | list[float]
+                           ) -> None:
+        """Test VersionedAttribute.set() behaves as expected."""
+        attr.set(default)
+        self.assertEqual(list(attr.history.values()), [default])
+        # check same value does not get set twice in a row,
+        # and that not even its timestamp get updated
+        timestamp = list(attr.history.keys())[0]
+        attr.set(default)
+        self.assertEqual(list(attr.history.values()), [default])
+        self.assertEqual(list(attr.history.keys())[0], timestamp)
+        # check that different value _will_ be set/added
+        attr.set(to_set[0])
+        timesorted_vals = [attr.history[t] for
+                           t in sorted(attr.history.keys())]
+        expected = [default, to_set[0]]
+        self.assertEqual(timesorted_vals, expected)
+        # check that a previously used value can be set if not most recent
+        attr.set(default)
+        timesorted_vals = [attr.history[t] for
+                           t in sorted(attr.history.keys())]
+        expected = [default, to_set[0], default]
+        self.assertEqual(timesorted_vals, expected)
+        # again check for same value not being set twice in a row, even for
+        # later items
+        attr.set(to_set[1])
+        timesorted_vals = [attr.history[t] for
+                           t in sorted(attr.history.keys())]
+        expected = [default, to_set[0], default, to_set[1]]
+        self.assertEqual(timesorted_vals, expected)
+        attr.set(to_set[1])
+        self.assertEqual(timesorted_vals, expected)
+
+    @TestCaseAugmented._run_if_sans_db
+    @TestCaseAugmented._run_on_versioned_attributes
+    def test_versioned_newest(self,
+                              _: Any,
+                              __: str,
+                              attr: VersionedAttribute,
+                              default: str | float,
+                              to_set: list[str] | list[float]
+                              ) -> None:
+        """Test VersionedAttribute.newest."""
+        # check .newest on empty history returns .default
+        self.assertEqual(attr.newest, default)
+        # check newest element always returned
+        for v in [to_set[0], to_set[1]]:
+            attr.set(v)
+            self.assertEqual(attr.newest, v)
+        # check newest element returned even if also early value
+        attr.set(default)
+        self.assertEqual(attr.newest, default)
+
+    @TestCaseAugmented._run_if_sans_db
+    @TestCaseAugmented._run_on_versioned_attributes
+    def test_versioned_at(self,
+                          _: Any,
+                          __: str,
+                          attr: VersionedAttribute,
+                          default: str | float,
+                          to_set: list[str] | list[float]
+                          ) -> None:
+        """Test .at() returns values nearest to queried time, or default."""
+        # check .at() return default on empty history
+        timestamp_a = datetime.now().strftime(TIMESTAMP_FMT)
+        self.assertEqual(attr.at(timestamp_a), default)
+        # check value exactly at timestamp returned
+        attr.set(to_set[0])
+        timestamp_b = list(attr.history.keys())[0]
+        self.assertEqual(attr.at(timestamp_b), to_set[0])
+        # check earliest value returned if exists, rather than default
+        self.assertEqual(attr.at(timestamp_a), to_set[0])
+        # check reverts to previous value for timestamps not indexed
+        # (sleeps ensure distinct timestamps at TIMESTAMP_FMT resolution)
+        sleep(0.00001)
+        timestamp_between = datetime.now().strftime(TIMESTAMP_FMT)
+        sleep(0.00001)
+        attr.set(to_set[1])
+        timestamp_c = sorted(attr.history.keys())[-1]
+        self.assertEqual(attr.at(timestamp_c), to_set[1])
+        self.assertEqual(attr.at(timestamp_between), to_set[0])
+        sleep(0.00001)
+        timestamp_after_c = datetime.now().strftime(TIMESTAMP_FMT)
+        self.assertEqual(attr.at(timestamp_after_c), to_set[1])
+
+
+class TestCaseWithDB(TestCaseAugmented):
+    """Module tests requiring DB setup (but no server)."""
+    _default_ids: tuple[int, int, int] = (1, 2, 3)
+
+    def setUp(self) -> None:
+        # wipe all class caches so tests don't see each other's objects
+        Condition.empty_cache()
+        Day.empty_cache()
+        Process.empty_cache()
+        ProcessStep.empty_cache()
+        Todo.empty_cache()
+        # uuid4 in the filename isolates parallel/repeated test runs
+        db_path = Path(f'test_db:{uuid4()}')
+        DatabaseFile.create(db_path)
+        self.db_file = DatabaseFile(db_path)
+        self.db_conn = DatabaseConnection(self.db_file)
+
+    def tearDown(self) -> None:
+        self.db_conn.close()
+        remove_file(self.db_file.path)
+
+    def _load_from_db(self, id_: int) -> list[object]:
+        # return all checked_class instances of id_ found in the DB
+        db_found: list[object] = []
+        for row in self.db_conn.row_where(self.checked_class.table_name,
+                                          'id', id_):
+            db_found += [self.checked_class.from_table_row(self.db_conn,
+                                                           row)]
+        return db_found
+
+ def _change_obj(self, obj: object) -> str:
+ attr_name: str = self.checked_class.to_save_simples[-1]
+ attr = getattr(obj, attr_name)
+ new_attr: str | int | float | bool
+ if isinstance(attr, (int, float)):
+ new_attr = attr + 1
+ elif isinstance(attr, str):
+ new_attr = attr + '_'
+ elif isinstance(attr, bool):
+ new_attr = not attr
+ setattr(obj, attr_name, new_attr)
+ return attr_name
+
+    def check_identity_with_cache_and_db(self, content: list[Any]) -> None:
+        """Test both cache and DB equal content."""
+        expected_cache = {}
+        for item in content:
+            expected_cache[item.id_] = item
+        self.assertEqual(self.checked_class.get_cache(), expected_cache)
+        # compare hashes rather than objects, since loading from the DB
+        # replaces the in-memory instances
+        hashes_content = [hash(x) for x in content]
+        db_found: list[Any] = []
+        for item in content:
+            db_found += self._load_from_db(item.id_)
+        hashes_db_found = [hash(x) for x in db_found]
+        self.assertEqual(sorted(hashes_content), sorted(hashes_db_found))
+
+    @TestCaseAugmented._run_if_with_db_but_not_server
+    @TestCaseAugmented._run_on_versioned_attributes
+    def test_saving_versioned_attributes(self,
+                                         owner: Any,
+                                         attr_name: str,
+                                         attr: VersionedAttribute,
+                                         _: str | float,
+                                         to_set: list[str] | list[float]
+                                         ) -> None:
+        """Test storage and initialization of versioned attributes."""
+
+        def retrieve_attr_vals(attr: VersionedAttribute) -> list[object]:
+            # collect raw attribute values stored for owner in attr's table
+            attr_vals_saved: list[object] = []
+            for row in self.db_conn.row_where(attr.table_name, 'parent',
+                                              owner.id_):
+                attr_vals_saved += [row[2]]
+            return attr_vals_saved
+
+        attr.set(to_set[0])
+        # check that without attr.save() no rows in DB
+        rows = self.db_conn.row_where(attr.table_name, 'parent', owner.id_)
+        self.assertEqual([], rows)
+        # fail saving attributes on non-saved owner
+        with self.assertRaises(NotFoundException):
+            attr.save(self.db_conn)
+        # check owner.save() created entries as expected in attr table
+        owner.save(self.db_conn)
+        attr_vals_saved = retrieve_attr_vals(attr)
+        self.assertEqual([to_set[0]], attr_vals_saved)
+        # check changing attr val without save affects owner in memory …
+        attr.set(to_set[1])
+        cmp_attr = getattr(owner, attr_name)
+        self.assertEqual(to_set, list(cmp_attr.history.values()))
+        self.assertEqual(cmp_attr.history, attr.history)
+        # … but does not yet affect DB
+        attr_vals_saved = retrieve_attr_vals(attr)
+        self.assertEqual([to_set[0]], attr_vals_saved)
+        # check individual attr.save also stores new val to DB
+        attr.save(self.db_conn)
+        attr_vals_saved = retrieve_attr_vals(attr)
+        self.assertEqual(to_set, attr_vals_saved)
+
+    @TestCaseAugmented._run_if_with_db_but_not_server
+    def test_saving_and_caching(self) -> None:
+        """Test effects of .cache() and .save()."""
+        id1 = self._default_ids[0]
+        # check failure to cache without ID (if None-ID input possible)
+        obj0 = self._make_from_defaults(None)
+        with self.assertRaises(HandledException):
+            obj0.cache()
+        # check mere object init itself doesn't even store in cache
+        obj1 = self._make_from_defaults(id1)
+        self.assertEqual(self.checked_class.get_cache(), {})
+        # check .cache() fills cache, but not DB
+        obj1.cache()
+        self.assertEqual(self.checked_class.get_cache(), {id1: obj1})
+        found_in_db = self._load_from_db(id1)
+        self.assertEqual(found_in_db, [])
+        # check .save() sets ID, updates cache, and fills DB
+        # (expect ID to be set to id1, despite obj1 already having that as ID:
+        # it's generated by cursor.lastrowid on the DB table, and with obj1
+        # not written there, obj2 should get it first!)
+        obj2 = self._make_from_defaults(None)
+        obj2.save(self.db_conn)
+        self.assertEqual(self.checked_class.get_cache(), {id1: obj2})
+        # NB: we'll only compare hashes because obj2 itself disappears on
+        # .from_table_row-triggered database reload
+        obj2_hash = hash(obj2)
+        found_in_db += self._load_from_db(id1)
+        self.assertEqual([hash(o) for o in found_in_db], [obj2_hash])
+        # check we cannot overwrite obj2 with obj1 despite its same ID,
+        # since it has disappeared now
+        with self.assertRaises(HandledException):
+            obj1.save(self.db_conn)
+
+    @TestCaseAugmented._run_if_with_db_but_not_server
+    def test_by_id(self) -> None:
+        """Test .by_id()."""
+        id1, id2, _ = self._default_ids
+        # check failure if not yet saved
+        obj1 = self._make_from_defaults(id1)
+        with self.assertRaises(NotFoundException):
+            self.checked_class.by_id(self.db_conn, id1)
+        # check identity of cached and retrieved
+        obj1.cache()
+        self.assertEqual(obj1, self.checked_class.by_id(self.db_conn, id1))
+        # check identity of saved and retrieved
+        obj2 = self._make_from_defaults(id2)
+        obj2.save(self.db_conn)
+        self.assertEqual(obj2, self.checked_class.by_id(self.db_conn, id2))
+
+    @TestCaseAugmented._run_if_with_db_but_not_server
+    def test_by_id_or_create(self) -> None:
+        """Test .by_id_or_create."""
+        # check .by_id_or_create fails if wrong class
+        if not self.checked_class.can_create_by_id:
+            with self.assertRaises(HandledException):
+                self.checked_class.by_id_or_create(self.db_conn, None)
+            return
+        # check ID input of None creates, on saving, ID=1,2,…
+        for n in range(2):
+            item = self.checked_class.by_id_or_create(self.db_conn, None)
+            self.assertEqual(item.id_, None)
+            item.save(self.db_conn)
+            self.assertEqual(item.id_, n+1)
+        # check .by_id_or_create acts like normal instantiation (sans saving)
+        id_ = self._default_ids[2]
+        item = self.checked_class.by_id_or_create(self.db_conn, id_)
+        self.assertEqual(item.id_, id_)
+        with self.assertRaises(NotFoundException):
+            self.checked_class.by_id(self.db_conn, item.id_)
+        self.assertEqual(self.checked_class(item.id_), item)
+
+    @TestCaseAugmented._run_if_with_db_but_not_server
+    def test_from_table_row(self) -> None:
+        """Test .from_table_row() properly reads in class directly from DB."""
+        obj = self._make_from_defaults(self._default_ids[0])
+        obj.save(self.db_conn)
+        for row in self.db_conn.row_where(self.checked_class.table_name,
+                                          'id', obj.id_):
+            # check .from_table_row reproduces state saved, no matter if obj
+            # later changed (with caching even)
+            # NB: we'll only compare hashes because obj itself disappears on
+            # .from_table_row-triggered database reload
+            hash_original = hash(obj)
+            attr_name = self._change_obj(obj)
+            obj.cache()
+            to_cmp = getattr(obj, attr_name)
+            retrieved = self.checked_class.from_table_row(self.db_conn, row)
+            self.assertNotEqual(to_cmp, getattr(retrieved, attr_name))
+            self.assertEqual(hash_original, hash(retrieved))
+            # check cache contains what .from_table_row just produced
+            self.assertEqual({retrieved.id_: retrieved},
+                             self.checked_class.get_cache())
+
+    @TestCaseAugmented._run_if_with_db_but_not_server
+    @TestCaseAugmented._run_on_versioned_attributes
+    def test_versioned_history_from_row(self,
+                                        owner: Any,
+                                        _: str,
+                                        attr: VersionedAttribute,
+                                        default: str | float,
+                                        to_set: list[str] | list[float]
+                                        ) -> None:
+        """Test VersionedAttribute.history_from_row() knows its DB rows."""
+        attr.set(to_set[0])
+        attr.set(to_set[1])
+        owner.save(self.db_conn)
+        # make empty VersionedAttribute, fill from rows, compare to owner's
+        # NOTE(review): the inner loop shadows the outer `row` variable;
+        # harmless here, but worth renaming if this is ever touched
+        for row in self.db_conn.row_where(owner.table_name, 'id', owner.id_):
+            loaded_attr = VersionedAttribute(owner, attr.table_name, default)
+            for row in self.db_conn.row_where(attr.table_name, 'parent',
+                                              owner.id_):
+                loaded_attr.history_from_row(row)
+            self.assertEqual(len(attr.history.keys()),
+                             len(loaded_attr.history.keys()))
+            for timestamp, value in attr.history.items():
+                self.assertEqual(value, loaded_attr.history[timestamp])
+
+    @TestCaseAugmented._run_if_with_db_but_not_server
+    def test_all(self) -> None:
+        """Test .all() and its relation to cache and savings."""
+        id1, id2, id3 = self._default_ids
+        item1 = self._make_from_defaults(id1)
+        item2 = self._make_from_defaults(id2)
+        item3 = self._make_from_defaults(id3)
+        # check .all() returns empty list on un-cached items
+        self.assertEqual(self.checked_class.all(self.db_conn), [])
+        # check that all() shows only cached/saved items
+        item1.cache()
+        item3.save(self.db_conn)
+        self.assertEqual(sorted(self.checked_class.all(self.db_conn)),
+                         sorted([item1, item3]))
+        item2.save(self.db_conn)
+        self.assertEqual(sorted(self.checked_class.all(self.db_conn)),
+                         sorted([item1, item2, item3]))
+
+    @TestCaseAugmented._run_if_with_db_but_not_server
+    def test_singularity(self) -> None:
+        """Test pointers made for single object keep pointing to it."""
+        id1 = self._default_ids[0]
+        obj = self._make_from_defaults(id1)
+        obj.save(self.db_conn)
+        # change object, expect retrieved through .by_id to carry change
+        attr_name = self._change_obj(obj)
+        new_attr = getattr(obj, attr_name)
+        retrieved = self.checked_class.by_id(self.db_conn, id1)
+        self.assertEqual(new_attr, getattr(retrieved, attr_name))
+
+    @TestCaseAugmented._run_if_with_db_but_not_server
+    @TestCaseAugmented._run_on_versioned_attributes
+    def test_versioned_singularity(self,
+                                   owner: Any,
+                                   attr_name: str,
+                                   attr: VersionedAttribute,
+                                   _: str | float,
+                                   to_set: list[str] | list[float]
+                                   ) -> None:
+        """Test singularity of VersionedAttributes on saving."""
+        owner.save(self.db_conn)
+        # change obj, expect retrieved through .by_id to carry change
+        attr.set(to_set[0])
+        retrieved = self.checked_class.by_id(self.db_conn, owner.id_)
+        attr_retrieved = getattr(retrieved, attr_name)
+        self.assertEqual(attr.history, attr_retrieved.history)
+
+    @TestCaseAugmented._run_if_with_db_but_not_server
+    def test_remove(self) -> None:
+        """Test .remove() effects on DB and cache."""
+        obj = self._make_from_defaults(self._default_ids[0])
+        # check removal only works after saving
+        with self.assertRaises(HandledException):
+            obj.remove(self.db_conn)
+        obj.save(self.db_conn)
+        obj.remove(self.db_conn)
+        # check access to obj fails after removal
+        with self.assertRaises(HandledException):
+            print(obj.id_)
+        # check DB and cache now empty
+        self.check_identity_with_cache_and_db([])
+
+
+class Expected:
+    """Builder of (JSON-like) dict to compare against responses of test server.
+
+    Collects all items and relations we expect expressed in the server's JSON
+    responses and puts them into the proper json.dumps-friendly dict structure,
+    accessible via .as_dict, to compare them in TestsWithServer.check_json_get.
+
+    On its own provides for .as_dict output only {"_library": …}, initialized
+    from .__init__ and to be directly manipulated via the .lib* methods.
+    Further structures of the expected response may be added and kept
+    up-to-date by subclassing .__init__, .recalc, and .d.
+
+    NB: Lots of expectations towards server behavior will be made explicit here
+    (or in the subclasses) rather than in the actual TestCase methods' code.
+    """
+    # static baseline of top-level fields, set by subclasses
+    _default_dict: dict[str, Any]
+    # top-level fields whose values must be emitted verbatim (unsorted)
+    _forced: dict[str, Any]
+    # current top-level fields of the expected response
+    _fields: dict[str, Any]
+    # (category, dicter-method-name) for temporarily materializing a
+    # missing library item in .as_dict
+    _on_empty_make_temp: tuple[str, str]
+
+    def __init__(self) -> None:
+        # subclasses may pre-populate these before calling super().__init__
+        for name in ['_default_dict', '_fields', '_forced']:
+            if not hasattr(self, name):
+                setattr(self, name, {})
+        self._lib: dict[str, dict[int, dict[str, Any]]] = {}
+        for k, v in self._default_dict.items():
+            if k not in self._fields:
+                self._fields[k] = v
+
+ def recalc(self) -> None:
+ """Update internal dictionary by subclass-specific rules."""
+ todos = self.lib_all('Todo')
+ for todo in todos:
+ todo['parents'] = []
+ for todo in todos:
+ for child_id in todo['children']:
+ self.lib_get('Todo', child_id)['parents'] += [todo['id']]
+ todo['children'].sort()
+ procsteps = self.lib_all('ProcessStep')
+ procs = self.lib_all('Process')
+ for proc in procs:
+ proc['explicit_steps'] = [s['id'] for s in procsteps
+ if s['owner_id'] == proc['id']]
+
+    @property
+    def as_dict(self) -> dict[str, Any]:
+        """Return dict to compare against test server JSON responses."""
+        make_temp = False
+        if hasattr(self, '_on_empty_make_temp'):
+            # If the expected item is absent from the library, fabricate a
+            # temporary one via the named dicter method so the response
+            # shape still matches.
+            category, dicter = getattr(self, '_on_empty_make_temp')
+            id_ = self._fields[category.lower()]
+            make_temp = not bool(self.lib_get(category, id_))
+            if make_temp:
+                self.lib_set(category, [getattr(self, dicter)(id_)])
+        self.recalc()
+        d = {'_library': self._lib}
+        for k, v in self._fields.items():
+            # we expect everything sortable to be sorted
+            if isinstance(v, list) and k not in self._forced:
+                # NB: if we don't test for v being list, sorted() on an empty
+                # dict may return an empty list
+                try:
+                    v = sorted(v)
+                except TypeError:
+                    pass
+            d[k] = v
+        for k, v in self._forced.items():
+            d[k] = v
+        if make_temp:
+            # Deep-copy d via a JSON round-trip BEFORE deleting the
+            # temporary item, so the returned dict keeps it while the
+            # library stays clean.
+            json = json_dumps(d)
+            id_ = id_ if id_ is not None else -1
+            self.lib_del(category, id_)
+            d = json_loads(json)
+        return d
+
+    def lib_get(self, category: str, id_: int) -> dict[str, Any]:
+        """From library, return item of category and id_, or empty dict."""
+        # NB: returns the live dict, so callers may mutate library items.
+        if category in self._lib and id_ in self._lib[category]:
+            return self._lib[category][id_]
+        return {}
+
+    def lib_all(self, category: str) -> list[dict[str, Any]]:
+        """From library, return items of category, or [] if none."""
+        # a fresh list shields the category mapping from caller changes
+        # (the item dicts themselves remain shared)
+        if category in self._lib:
+            return list(self._lib[category].values())
+        return []
+
+    def lib_set(self, category: str, items: list[dict[str, object]]) -> None:
+        """Update library for category with items."""
+        if category not in self._lib:
+            self._lib[category] = {}
+        for item in items:
+            # items without an ID yet are filed under the sentinel -1
+            id_ = item['id'] if item['id'] is not None else -1
+            assert isinstance(id_, int)
+            self._lib[category][id_] = item
+
+    def lib_del(self, category: str, id_: int) -> None:
+        """Remove category element of id_ from library."""
+        del self._lib[category][id_]
+        # drop the category entirely once empty, so .as_dict omits the key
+        if 0 == len(self._lib[category]):
+            del self._lib[category]
+
+    def lib_wipe(self, category: str) -> None:
+        """Remove category from library."""
+        # no-op if category absent
+        if category in self._lib:
+            del self._lib[category]
+
+    def set(self, field_name: str, value: object) -> None:
+        """Set top-level .as_dict field."""
+        # NB: list values set here are still sorted by .as_dict unless
+        # the same field is also .force()d
+        self._fields[field_name] = value
+
+    def force(self, field_name: str, value: object) -> None:
+        """Set ._forced field to ensure value in .as_dict."""
+        # forced values override _fields and bypass .as_dict's sorting
+        self._forced[field_name] = value
+
+    @staticmethod
+    def as_ids(items: list[dict[str, Any]]) -> list[int]:
+        """Return list of only 'id' fields of items."""
+        # preserves the order of items
+        return [item['id'] for item in items]
+
+    @staticmethod
+    def day_as_dict(id_: int, comment: str = '') -> dict[str, object]:
+        """Return JSON of Day to expect."""
+        # NOTE(review): 'todos' stays [] here — presumably filled in by
+        # subclasses' .recalc; confirm against them
+        return {'id': id_, 'comment': comment, 'todos': []}
+
+    def set_day_from_post(self, id_: int, d: dict[str, Any]) -> None:
+        """Set Day of id_ in library based on POST dict d."""
+        day = self.day_as_dict(id_)
+        for k, v in d.items():
+            if 'day_comment' == k:
+                day['comment'] = v
+            elif 'new_todo' == k:
+                # mimic server-side ID assignment: next free Todo ID
+                next_id = 1
+                for todo in self.lib_all('Todo'):
+                    if next_id <= todo['id']:
+                        next_id = todo['id'] + 1
+                # empty entries in v are skipped; new Todos are created in
+                # ascending process-ID order
+                for proc_id in sorted([id_ for id_ in v if id_]):
+                    todo = self.todo_as_dict(next_id, proc_id, id_)
+                    self.lib_set('Todo', [todo])
+                    next_id += 1
+            elif 'done' == k:
+                for todo_id in v:
+                    self.lib_get('Todo', todo_id)['is_done'] = True
+            elif 'todo_id' == k:
+                # 'comment' and 'effort' lists run parallel to 'todo_id'
+                for i, todo_id in enumerate(v):
+                    t = self.lib_get('Todo', todo_id)
+                    if 'comment' in d:
+                        t['comment'] = d['comment'][i]
+                    if 'effort' in d:
+                        # falsy posted effort (e.g. '') means "unset"
+                        effort = d['effort'][i] if d['effort'][i] else None
+                        t['effort'] = effort
+        self.lib_set('Day', [day])
+
+    @staticmethod
+    def cond_as_dict(id_: int = 1,
+                     is_active: bool = False,
+                     title: None | str = None,
+                     description: None | str = None,
+                     ) -> dict[str, object]:
+        """Return JSON of Condition to expect."""
+        # versioned histories are keyed by stringified counters ("0", "1",
+        # …), matching check_json_get's history-key rewriting
+        versioned: dict[str, dict[str, object]]
+        versioned = {'title': {}, 'description': {}}
+        if title is not None:
+            versioned['title']['0'] = title
+        if description is not None:
+            versioned['description']['0'] = description
+        return {'id': id_, 'is_active': is_active, '_versioned': versioned}
+
+    def set_cond_from_post(self, id_: int, d: dict[str, Any]) -> None:
+        """Set Condition of id_ in library based on POST dict d."""
+        if 'delete' in d:
+            self.lib_del('Condition', id_)
+            return
+        cond = self.lib_get('Condition', id_)
+        if cond:
+            # checkbox semantics: absent or non-true value means False
+            cond['is_active'] = 'is_active' in d and\
+                    d['is_active'] in _VALID_TRUES
+            for category in ['title', 'description']:
+                # append a new history version only on actual change
+                history = cond['_versioned'][category]
+                if len(history) > 0:
+                    last_i = sorted([int(k) for k in history.keys()])[-1]
+                    if d[category] != history[str(last_i)]:
+                        history[str(last_i + 1)] = d[category]
+                else:
+                    history['0'] = d[category]
+        else:
+            # no such Condition yet: create it fresh from the POST data
+            cond = self.cond_as_dict(id_, **d)
+        self.lib_set('Condition', [cond])
+
+    @staticmethod
+    def todo_as_dict(id_: int = 1,
+                     process_id: int = 1,
+                     day_id: int = 1,
+                     conditions: None | list[int] = None,
+                     disables: None | list[int] = None,
+                     blockers: None | list[int] = None,
+                     enables: None | list[int] = None,
+                     calendarize: bool = False,
+                     comment: str = '',
+                     is_done: bool = False,
+                     effort: float | None = None,
+                     children: list[int] | None = None,
+                     parents: list[int] | None = None,
+                     ) -> dict[str, object]:
+        """Return JSON of Todo to expect."""
+        # pylint: disable=too-many-arguments
+        # None defaults become fresh [] per call (avoids shared mutables)
+        d = {'id': id_,
+             'day_id': day_id,
+             'process_id': process_id,
+             'is_done': is_done,
+             'calendarize': calendarize,
+             'comment': comment,
+             'children': children if children else [],
+             'parents': parents if parents else [],
+             'effort': effort,
+             'conditions': conditions if conditions else [],
+             'disables': disables if disables else [],
+             'blockers': blockers if blockers else [],
+             'enables': enables if enables else []}
+        return d
+
+    def set_todo_from_post(self, id_: int, d: dict[str, Any]) -> None:
+        """Set Todo of id_ in library based on POST dict d."""
+        # defaults for fields a POST may omit (checkbox/empty semantics)
+        corrected_kwargs: dict[str, Any] = {
+            'children': [], 'is_done': 0, 'calendarize': 0, 'comment': ''}
+        for k, v in d.items():
+            if k.startswith('step_filler_to_'):
+                # UI helper fields, not Todo state
+                continue
+            if 'adopt' == k:
+                # 'adopt' may arrive as single value or list of child IDs
+                new_children = v if isinstance(v, list) else [v]
+                corrected_kwargs['children'] += new_children
+                continue
+            if k in {'is_done', 'calendarize'} and v in _VALID_TRUES:
+                v = True
+            corrected_kwargs[k] = v
+        todo = self.lib_get('Todo', id_)
+        if todo:
+            # update existing Todo in place
+            for k, v in corrected_kwargs.items():
+                todo[k] = v
+        else:
+            todo = self.todo_as_dict(id_, **corrected_kwargs)
+        self.lib_set('Todo', [todo])
+
+    @staticmethod
+    def procstep_as_dict(id_: int,
+                         owner_id: int,
+                         step_process_id: int,
+                         parent_step_id: int | None = None
+                         ) -> dict[str, object]:
+        """Return JSON of ProcessStep to expect."""
+        # NOTE(review): parent_step_id None presumably marks a top-level
+        # step — confirm against server model
+        return {'id': id_,
+                'owner_id': owner_id,
+                'step_process_id': step_process_id,
+                'parent_step_id': parent_step_id}
+
+    @staticmethod
+    def proc_as_dict(id_: int = 1,
+                     title: None | str = None,
+                     description: None | str = None,
+                     effort: None | float = None,
+                     conditions: None | list[int] = None,
+                     disables: None | list[int] = None,
+                     blockers: None | list[int] = None,
+                     enables: None | list[int] = None,
+                     explicit_steps: None | list[int] = None,
+                     suppressed_steps: None | list[int] = None
+                     ) -> dict[str, object]:
+        """Return JSON of Process to expect."""
+        # pylint: disable=too-many-arguments
+        # versioned histories keyed by stringified counters, as in
+        # cond_as_dict; None fields leave their history empty
+        versioned: dict[str, dict[str, object]]
+        versioned = {'title': {}, 'description': {}, 'effort': {}}
+        if title is not None:
+            versioned['title']['0'] = title
+        if description is not None:
+            versioned['description']['0'] = description
+        if effort is not None:
+            versioned['effort']['0'] = effort
+        d = {'id': id_,
+             'calendarize': False,
+             'suppressed_steps': suppressed_steps if suppressed_steps else [],
+             'explicit_steps': explicit_steps if explicit_steps else [],
+             '_versioned': versioned,
+             'conditions': conditions if conditions else [],
+             'disables': disables if disables else [],
+             'enables': enables if enables else [],
+             'blockers': blockers if blockers else []}
+        return d
+
+    def set_proc_from_post(self, id_: int, d: dict[str, Any]) -> None:
+        """Set Process of id_ in library based on POST dict d."""
+        proc = self.lib_get('Process', id_)
+        if proc:
+            # versioned fields: append a new history entry only on change
+            for category in ['title', 'description', 'effort']:
+                history = proc['_versioned'][category]
+                if len(history) > 0:
+                    last_i = sorted([int(k) for k in history.keys()])[-1]
+                    if d[category] != history[str(last_i)]:
+                        history[str(last_i + 1)] = d[category]
+                else:
+                    history['0'] = d[category]
+        else:
+            proc = self.proc_as_dict(id_,
+                                     d['title'], d['description'], d['effort'])
+        # keys handled above, or belonging to step-editing UI machinery
+        ignore = {'title', 'description', 'effort', 'new_top_step', 'step_of',
+                  'kept_steps'}
+        # checkbox semantics: absent means False
+        proc['calendarize'] = False
+        for k, v in d.items():
+            if k in ignore\
+                    or k.startswith('step_') or k.startswith('new_step_to'):
+                continue
+            if k in {'calendarize'} and v in _VALID_TRUES:
+                v = True
+            elif k in {'suppressed_steps', 'explicit_steps', 'conditions',
+                       'disables', 'enables', 'blockers'}:
+                # urlencoded single values arrive unlisted; normalize
+                if not isinstance(v, list):
+                    v = [v]
+            proc[k] = v
+        self.lib_set('Process', [proc])
+
+
+class TestCaseWithServer(TestCaseWithDB):
+    """Module tests against our HTTP server/handler (and database)."""
+
+    def setUp(self) -> None:
+        super().setUp()
+        # port 0: let the OS pick a free port, avoiding collisions
+        self.httpd = TaskServer(self.db_file, ('localhost', 0), TaskHandler)
+        self.server_thread = Thread(target=self.httpd.serve_forever)
+        self.server_thread.daemon = True
+        self.server_thread.start()
+        self.conn = HTTPConnection(str(self.httpd.server_address[0]),
+                                   self.httpd.server_address[1])
+        # request JSON rather than rendered HTML from the server
+        self.httpd.render_mode = 'json'
+
+    def tearDown(self) -> None:
+        # stop serve_forever loop, release the socket, then join thread
+        self.httpd.shutdown()
+        self.httpd.server_close()
+        self.server_thread.join()
+        super().tearDown()
+
+    def post_exp_cond(self,
+                      exps: list[Expected],
+                      payload: dict[str, object],
+                      id_: int = 1,
+                      post_to_id: bool = True,
+                      redir_to_id: bool = True
+                      ) -> None:
+        """POST /condition(s), appropriately update Expecteds."""
+        # pylint: disable=too-many-arguments
+        target = f'/condition?id={id_}' if post_to_id else '/condition'
+        redir = f'/condition?id={id_}' if redir_to_id else '/conditions'
+        # server requires title/description; supply minimal defaults
+        if 'title' not in payload:
+            payload['title'] = 'foo'
+        if 'description' not in payload:
+            payload['description'] = 'foo'
+        self.check_post(payload, target, redir=redir)
+        for exp in exps:
+            exp.set_cond_from_post(id_, payload)
+
+    def post_exp_day(self,
+                     exps: list[Expected],
+                     payload: dict[str, Any],
+                     day_id: int = 1
+                     ) -> None:
+        """POST /day, appropriately update Expecteds."""
+        if 'make_type' not in payload:
+            payload['make_type'] = 'empty'
+        if 'day_comment' not in payload:
+            payload['day_comment'] = ''
+        date = dt_date_from_day_id(day_id).isoformat()
+        target = f'/day?date={date}'
+        # server redirects back to the day view, carrying make_type along
+        redir_to = f'{target}&make_type={payload["make_type"]}'
+        self.check_post(payload, target, 302, redir_to)
+        for exp in exps:
+            exp.set_day_from_post(day_id, payload)
+
+    def post_exp_process(self,
+                         exps: list[Expected],
+                         payload: dict[str, Any],
+                         id_: int,
+                         ) -> dict[str, object]:
+        """POST /process, appropriately update Expecteds."""
+        if 'title' not in payload:
+            payload['title'] = 'foo'
+        if 'description' not in payload:
+            payload['description'] = 'foo'
+        if 'effort' not in payload:
+            payload['effort'] = 1.1
+        self.check_post(payload, f'/process?id={id_}',
+                        redir=f'/process?id={id_}')
+        for exp in exps:
+            exp.set_proc_from_post(id_, payload)
+        # return the (possibly default-augmented) payload for reuse
+        return payload
+
+    def post_exp_todo(self,
+                      exps: list[Expected],
+                      payload: dict[str, Any],
+                      id_: int,
+                      ) -> None:
+        """POST /todo, appropriately update Expecteds."""
+        self.check_post(payload, f'/todo?id={id_}')
+        for exp in exps:
+            exp.set_todo_from_post(id_, payload)
+
+    def check_filter(self, exp: Expected, category: str, key: str,
+                     val: str, list_ids: list[int]) -> None:
+        """Check GET /{category}?{key}={val} sorts to list_ids."""
+        # pylint: disable=too-many-arguments
+        exp.set(key, val)
+        # force bypasses Expected's sorting, so ordering is checked too
+        exp.force(category, list_ids)
+        self.check_json_get(f'/{category}?{key}={val}', exp)
+
+    def check_redirect(self, target: str) -> None:
+        """Check that self.conn answers with a 302 redirect to target."""
+        response = self.conn.getresponse()
+        self.assertEqual(response.status, 302)
+        self.assertEqual(response.getheader('Location'), target)
+
+    def check_get(self, target: str, expected_code: int) -> None:
+        """Check that a GET to target yields expected_code."""
+        self.conn.request('GET', target)
+        self.assertEqual(self.conn.getresponse().status, expected_code)
+
+    def check_minimal_inputs(self,
+                             url: str,
+                             minimal_inputs: dict[str, Any]
+                             ) -> None:
+        """Check that url 400's unless all of minimal_inputs provided."""
+        # leave out each input in turn; every omission must cause a 400
+        for to_hide in minimal_inputs.keys():
+            to_post = {k: v for k, v in minimal_inputs.items() if k != to_hide}
+            self.check_post(to_post, url, 400)
+
+    def check_post(self, data: Mapping[str, object], target: str,
+                   expected_code: int = 302, redir: str = '') -> None:
+        """Check that POST of data to target yields expected_code."""
+        encoded_form_data = urlencode(data, doseq=True).encode('utf-8')
+        headers = {'Content-Type': 'application/x-www-form-urlencoded',
+                   'Content-Length': str(len(encoded_form_data))}
+        self.conn.request('POST', target,
+                          body=encoded_form_data, headers=headers)
+        if 302 == expected_code:
+            # default redirect target is the POST target itself
+            redir = target if redir == '' else redir
+            self.check_redirect(redir)
+        else:
+            self.assertEqual(self.conn.getresponse().status, expected_code)
+
+    def check_get_defaults(self,
+                           path: str,
+                           default_id: str = '1',
+                           id_name: str = 'id'
+                           ) -> None:
+        """Some standard model paths to test."""
+        nonexist_status = 200 if self.checked_class.can_create_by_id else 404
+        self.check_get(path, nonexist_status)
+        self.check_get(f'{path}?{id_name}=', 400)
+        self.check_get(f'{path}?{id_name}=foo', 400)
+        # NOTE(review): leading '/' differs from the sibling calls (yields
+        # '//…' if path already starts with '/') — confirm intended
+        self.check_get(f'/{path}?{id_name}=0', 400)
+        self.check_get(f'{path}?{id_name}={default_id}', nonexist_status)
+
+    def check_json_get(self, path: str, expected: Expected) -> None:
+        """Compare JSON on GET path with expected.
+
+        To simplify comparison of VersionedAttribute histories, transforms
+        timestamp keys of VersionedAttribute history keys into (strings of)
+        integers counting chronologically forward from 0.
+        """
+
+        def rewrite_history_keys_in(item: Any) -> Any:
+            # recursively renumber _versioned histories to "0", "1", …
+            if isinstance(item, dict):
+                if '_versioned' in item.keys():
+                    for category in item['_versioned']:
+                        vals = item['_versioned'][category].values()
+                        history = {}
+                        for i, val in enumerate(vals):
+                            history[str(i)] = val
+                        item['_versioned'][category] = history
+                for category in list(item.keys()):
+                    rewrite_history_keys_in(item[category])
+            elif isinstance(item, list):
+                item[:] = [rewrite_history_keys_in(i) for i in item]
+            return item
+
+        def walk_diffs(path: str, cmp1: object, cmp2: object) -> None:
+            # debugging aid: print a readable diff tree on mismatch
+            # pylint: disable=too-many-branches
+            def warn(intro: str, val: object) -> None:
+                if isinstance(val, (str, int, float)):
+                    print(intro, val)
+                else:
+                    print(intro)
+                    pprint(val)
+            if cmp1 != cmp2:
+                if isinstance(cmp1, dict) and isinstance(cmp2, dict):
+                    for k, v in cmp1.items():
+                        if k not in cmp2:
+                            warn(f'DIFF {path}: retrieved lacks {k}', v)
+                        elif v != cmp2[k]:
+                            walk_diffs(f'{path}:{k}', v, cmp2[k])
+                    for k in [k for k in cmp2.keys() if k not in cmp1]:
+                        warn(f'DIFF {path}: expected lacks retrieved\'s {k}',
+                             cmp2[k])
+                elif isinstance(cmp1, list) and isinstance(cmp2, list):
+                    for i, v1 in enumerate(cmp1):
+                        if i >= len(cmp2):
+                            warn(f'DIFF {path}[{i}] retrieved misses:', v1)
+                        elif v1 != cmp2[i]:
+                            walk_diffs(f'{path}[{i}]', v1, cmp2[i])
+                    if len(cmp2) > len(cmp1):
+                        for i, v2 in enumerate(cmp2[len(cmp1):]):
+                            warn(f'DIFF {path}[{len(cmp1)+i}] misses:', v2)
+                else:
+                    warn(f'DIFF {path} – for expected:', cmp1)
+                    warn('… and for retrieved:', cmp2)
+
+        self.conn.request('GET', path)
+        response = self.conn.getresponse()
+        self.assertEqual(response.status, 200)
+        retrieved = json_loads(response.read().decode())
+        rewrite_history_keys_in(retrieved)
+        # to convert ._lib int keys to str
+        cmp = json_loads(json_dumps(expected.as_dict))
+        try:
+            self.assertEqual(cmp, retrieved)
+        except AssertionError as e:
+            print('EXPECTED:')
+            pprint(cmp)
+            print('RETRIEVED:')
+            pprint(retrieved)
+            walk_diffs('', cmp, retrieved)
+            raise e
--- /dev/null
+#!/usr/bin/sh
+set -e
+
+PATH_APP_SHARE=~/.local/share/taskplom
+PATH_VENV="${PATH_APP_SHARE}/venv"
+
+python3 -m venv "${PATH_VENV}"
+. "${PATH_VENV}/bin/activate"
+
+if [ "$1" = "install_deps" ]; then
+ echo "Checking dependencies."
+ pip3 install -r "${PATH_APP_SHARE}/requirements.txt"
+ exit 0
+fi
+
+export PYTHONPATH="${PATH_APP_SHARE}:${PYTHONPATH}"
+cd "${PATH_APP_SHARE}"
+./run.py $@
+++ /dev/null
-<!DOCTYPE html>
-<html>
-<meta charset="UTF-8">
-<style>
-body {
- font-family: monospace;
- text-align: left;
- padding: 0;
- background-color: white;
-}
-input[type="text"] {
- width: 100em;
-}
-input.timestamp {
- width: 11em;
-}
-input.date {
- width: 6em;
-}
-input.btn-harmless {
- color: green;
-}
-input.btn-dangerous {
- color: red;
-}
-div.btn-to-right {
- float: right;
- text-align: right;
-}
-td, th, tr, table {
- margin-top: 1em;
- padding: 0;
- border-collapse: collapse;
-}
-th, td {
- padding-right: 1em;
-}
-a {
- color: black;
-}
-table.edit_table > tbody > tr > td,
-table.edit_table > tbody > tr > th {
- border-bottom: 0.1em solid #bbbbbb;
- padding-top: 0.5em;
- padding-bottom: 0.5em;
-}
-td.number, input[type="number"] {
- text-align: right;
-}
-input[name="effort"] {
- width: 3.5em;
-}
-textarea {
- width: 100%;
-}
-table.alternating > tbody > tr:nth-child(odd) {
- background-color: #dfdfdf;
-}
-div.edit_buttons {
- margin-top: 1em;
-}
-{% block css %}
-{% endblock %}
-</style>
-<body>
-<a href="day">today</a>
-<a href="calendar">calendar</a>
-<a href="conditions">conditions</a>
-<a href="processes">processes</a>
-<a href="todos">todos</a>
-<hr>
-{% block content %}
-{% endblock %}
-</body>
-</html>
+++ /dev/null
-{% macro edit_buttons() %}
-<div class="edit_buttons">
-<input class="btn-harmless" type="submit" name="update" value="update" />
-<div class="btn-to-right">
-<input class="btn-dangerous" type="submit" name="delete" value="delete" />
-</div>
-</div>
-{% endmacro %}
-
-
-
-{% macro datalist_of_titles(title, candidates, historical=false, with_comments=false) %}
-<datalist id="{{title}}">
-{% for candidate in candidates %}
-<option value="{{candidate.id_}}">
-{% if historical is true %}
-{{candidate.title_then|e}}
-{% else %}
-{{candidate.title.newest|e}}
-{% endif %}
-{% if with_comments and candidate.comment %}
-/ {{candidate.comment}}
-{% endif %}
-</option>
-{% endfor %}
-</datalist>
-{% endmacro %}
-
-
-
-{% macro simple_checkbox_table(title, items, type_name, list_name, add_string="add", historical=false) %}
-{% if items|length > 0 %}
-<table>
-{% for item in items %}
-<tr>
-<td>
-<input type="checkbox" name="{{title}}" value="{{item.id_}}" checked />
-</td>
-<td>
-<a href="{{type_name}}?id={{item.id_}}">{% if historical is true %}{{item.title_then}}{% else %}{{item.title.newest|e}}{% endif %}</a>
-</td>
-</tr>
-{% endfor %}
-</table>
-{% endif %}
-{{add_string}}: <input name="{{title}}" type="text" list="{{list_name}}" autocomplete="off" />
-{% endmacro %}
-
-
-
-{% macro history_page(item_name, item, attribute_name, attribute, as_pre=false) %}
-<h3>{{item_name}} {{attribute_name}} history</h3>
-<form action="{{item_name}}_{{attribute_name}}s?id={{item.id_}}" method="POST">
-<table>
-
-<tr>
-<th>{{item_name}}</th>
-<td><a href="{{item_name}}?id={{item.id_}}">{{item.title.newest|e}}</a></td>
-</tr>
-
-
-{% for date in attribute.history.keys() | sort(reverse=True) %}
-<tr>
-<td><input name="at:{{date}}" class="timestamp" value="{{date|truncate(19, True, '', 0)}}"></td>
-<td>{% if as_pre %}<pre>{% endif %}{{attribute.history[date]}}{% if as_pre %}</pre>{% endif %}</td>
-</tr>
-{% endfor %}
-
-</table>
-<input class="btn-harmless" type="submit" name="update" value="update" />
-</form>
-{% endmacro %}
+++ /dev/null
-{% extends '_base.html' %}
-
-
-
-{% block css %}
-tr.week_row td {
- height: 0.3em;
- background-color: black;
- padding: 0;
- margin: 0;
- border-top: 0.2em solid white;
-}
-tr.month_row td {
- border-top: 0.2em solid white;
- color: white;
- background-color: #555555;
-}
-table {
- width: 100%;
-}
-tr.day_row td {
- background-color: #cccccc;
- border-top: 0.2em solid white;
-}
-td.day_name {
- padding-right: 0.5em;
-}
-td.today {
- font-weight: bold;
-}
-span.total_effort {
- white-space: pre;
-}
-{% endblock %}
-
-
-
-{% block content %}
-<h3>calendar</h3>
-
-<p><a href="/calendar_txt">basic view</a></p>
-
-<form action="calendar" method="GET">
-from <input name="start" class="date" value="{{start}}" />
-to <input name="end" class="date" value="{{end}}" />
-<input type="submit" value="OK" />
-</form>
-<table>
-{% for day in days %}
-
-{% if day.first_of_month %}
-<tr class="month_row">
-<td colspan=2>{{ day.month_name }}</td>
-</tr>
-{% endif %}
-
-{% if day.weekday == "Monday" %}
-<tr class="week_row">
-<td colspan=2></td>
-</tr>
-{% endif %}
-
-<tr class="day_row">
-<td class="day_name {% if day.date == today %}today{% endif %}">
-<a href="day?date={{day.date}}">{{day.weekday|truncate(2,True,'',0)}} {% if day.date == today %} {% endif %}{{day.date}}</a>
-[<span class="total_effort">{{ '{:5.1f}'.format(day.total_effort) }}</span>]
-{{day.comment|e}}</td>
-</tr>
-
-{% for todo in day.calendarized_todos %}
-<tr>
-<td>[{% if todo.is_done %}X{% else %} {% endif %}] <a href="todo?id={{todo.id_}}">{{todo.title_then|e}}</a>{% if todo.comment %} · {{todo.comment|e}}{% endif %}</td>
-</tr>
-{% endfor %}
-
-{% endfor %}
-</table>
-{% endblock %}
+++ /dev/null
-{% extends '_base.html' %}
-
-{% block content %}
-<h3>calendar</h3>
-
-<p><a href="/calendar">normal view</a></p>
-
-<form action="calendar_txt" method="GET">
-from <input name="start" class="date" value="{{start}}" />
-to <input name="end" class="date" value="{{end}}" />
-<input type="submit" value="OK" />
-</form>
-<table>
-
-<pre>{% for day in days %}{% if day.weekday == "Monday" %}
----{% endif %}{% if day.comment or day.calendarized_todos %}
-{{day.weekday|truncate(2,True,'',0)}} {{day.date}} {{day.comment|e}}{% endif %}{% if day.calendarized_todos%}{% for todo in day.calendarized_todos %}
-* {{todo.title_then|e}}{% if todo.comment %} / {{todo.comment|e}}{% endif %}{% endfor %}{% endif %}{% endfor %}
-</pre>
-{% endblock %}
+++ /dev/null
-{% extends '_base.html' %}
-{% import '_macros.html' as macros %}
-
-
-
-{% block content %}
-<h3>
-{% if is_new %}
-add NEW condition
-{% else %}
-edit condition of ID {{condition.id_}}
-{% endif %}
-</h3>
-<form action="condition?id={{condition.id_ or ''}}" method="POST">
-
-<table class="edit_table">
-<tr>
-<th>title</th>
-<td><input name="title" type="text" value="{{condition.title.newest|e}}" />{% if condition.id_ %} [<a href="condition_titles?id={{condition.id_}}">history</a>]{% endif %}</td>
-<tr/>
-<tr>
-<th>is active</th>
-<td><input name="is_active" type="checkbox" {% if condition.is_active %}checked{% endif %} /></td>
-<tr/>
-<tr>
-<th>description</th>
-<td><textarea name="description">{{condition.description.newest|e}}</textarea>{% if condition.id_ %} [<a href="condition_descriptions?id={{condition.id_}}">history</a>]{% endif %}</td>
-<tr/>
-<tr>
-<th>enables</th>
-<td>
-{% for process in enabled_processes %}
-<a href="process?id={{process.id_}}">{{process.title.newest|e}}</a><br />
-{% endfor %}
-</td>
-</tr>
-<tr>
-<th>disables</th>
-<td>
-{% for process in disabled_processes %}
-<a href="process?id={{process.id_}}">{{process.title.newest|e}}</a><br />
-{% endfor %}
-</td>
-</tr>
-<tr>
-<th>enabled by</th>
-<td>
-{% for process in enabling_processes %}
-<a href="process?id={{process.id_}}">{{process.title.newest|e}}</a><br />
-{% endfor %}
-</td>
-</tr>
-<tr>
-<th>disabled by</th>
-<td>
-{% for process in disabling_processes %}
-<a href="process?id={{process.id_}}">{{process.title.newest|e}}</a><br />
-{% endfor %}
-</td>
-</tr>
-</table>
-
-{{ macros.edit_buttons() }}
-{% endblock %}
-
+++ /dev/null
-{% extends '_base.html' %}
-{% import '_macros.html' as macros %}
-
-
-
-{% block content %}
-{{ macros.history_page("condition", condition, "description", condition.description, true) }}
-{% endblock %}
+++ /dev/null
-{% extends '_base.html' %}
-{% import '_macros.html' as macros %}
-
-
-
-{% block content %}
-{{ macros.history_page("condition", condition, "title", condition.title) }}
-{% endblock %}
+++ /dev/null
-{% extends '_base.html' %}
-
-{% block content %}
-<h3>conditions</h3>
-
-<form action="conditions" method="GET">
-<input type="submit" value="filter" />
-<input name="pattern" type="text" value="{{pattern}}" />
-</form>
-
-<table class="alternating">
-<tr>
-<th><a href="?sort_by={% if sort_by == "is_active" %}-{% endif %}is_active">active</a></th>
-<th><a href="?sort_by={% if sort_by == "title" %}-{% endif %}title">title</a></th>
-</tr>
-{% for condition in conditions %}
-<tr>
-<td>[{% if condition.is_active %}X{% else %} {% endif %}]</td>
-<td><a href="condition?id={{condition.id_}}">{{condition.title.newest}}</a></td>
-</tr>
-{% endfor %}
-</table>
-
-<p>
-<a href="condition">add</a>
-</p>
-
-{% endblock %}
+++ /dev/null
-{% extends '_base.html' %}
-{% import '_macros.html' as macros %}
-
-
-
-{% block css %}
-th {
- border: 1px solid black;
-}
-td.cond_line {
- padding: 0;
- border-top: 1px solid white;
-}
-td.cond_0 {
- background-color: #bfbfbf;
-}
-td.cond_1 {
- background-color: #dfdfdf;
-}
-td.cond_2 {
- background-color: fffff;
-}
-td.cond_shrink {
- max-width: 0px;
- white-space: nowrap;
- overflow: hidden;
- text-overflow: clip;
-}
-td.todo_line {
- border-bottom: 1px solid #bfbfbf;
- height: 1.7em;
-}
-tr.inactive > td.todo_line {
- background-color: #bfbfbf;
- border-bottom: 1px solid white;
-}
-tr.hidden_undone > td, tr.hidden_undone a {
- color: #9f9f9f;
-}
-td.left_border {
- border-left: 1px solid black;
-}
-td.right_border {
- border-right: 1px solid black;
-}
-input.ablers {
- width: 50em;
-}
-{% endblock %}
-
-
-
-{% macro show_node_undone(node, indent) %}
-{% if not node.todo.is_done %}
-<tr {% if node.seen or not node.todo.is_doable %}class="inactive"{% endif %}>
-{% if not node.seen %}
-<input type="hidden" name="todo_id" value="{{node.todo.id_}}" />
-{% endif %}
-
-{% for condition in conditions_present %}
-{% if condition in node.todo.conditions and not condition.is_active %}
-<td class="cond_line cond_{{loop.index0 % 3}}">
-+>
-{% elif condition in node.todo.blockers and condition.is_active %}
-<td class="cond_line cond_{{loop.index0 % 3}}">
-->
-{% else %}
-<td class="cond_line cond_{{loop.index0 % 3}} cond_shrink">
-|
-{% endif %}
-</td>
-{% endfor %}
-
-{% if node.seen %}
-<td class="todo_line left_border"></td>
-<td class="todo_line">{% if node.todo.effort %}{{ node.todo.effort }}{% endif %}</td>
-{% else %}
-<td class="todo_line left_border"><input name="done" type="checkbox" value="{{node.todo.id_}}" {% if not node.todo.is_doable %}disabled{% endif %}/></td>
-<td class="todo_line"><input name="effort" type="number" step=0.1 placeholder={{node.todo.effort_then}} value={{node.todo.effort}} /></td>
-{% endif %}
-<td class="todo_line right_border">
-{% for i in range(indent) %} {% endfor %} +
-{% if node.seen %}({% endif %}<a href="todo?id={{node.todo.id_}}">{{node.todo.title_then|e}}</a>{% if node.seen %}){% endif %}
-</td>
-
-{% for condition in conditions_present|reverse %}
-{% if condition in node.todo.enables %}
-<td class="cond_line cond_{{(conditions_present|length - loop.index) % 3}}">
-+>
-{% elif condition in node.todo.disables %}
-<td class="cond_line cond_{{(conditions_present|length - loop.index) % 3}}">
-->
-{% else %}
-<td class="cond_line cond_{{(conditions_present|length - loop.index) % 3}} cond_shrink">
- |
-{% endif %}
-</td>
-{% endfor %}
-
-<td colspan=2>
-{% if node.seen %}
-{{node.todo.comment|e}}
-{% else %}
-<input name="comment" type="text" value="{{node.todo.comment|e}}" />
-{% endif %}
-</td>
-
-</tr>
-{% endif %}
-
-{% if not node.seen %}
-{% for child in node.children %}
-{{ show_node_undone(child, indent+1) }}
-{% endfor %}
-{% endif %}
-
-{% endmacro %}
-
-
-
-{% macro show_node_done(node, indent, path) %}
-{% if node.todo.has_doneness_in_path %}
-<tr{% if not node.todo.is_done %} class="hidden_undone"{% endif %}>
-<td class="number">{{ '{:4.1f}'.format(node.todo.performed_effort) }}</td>
-<td class="number">{{ '{:4.1f}'.format(node.todo.tree_effort) }}</td>
-<td>
-{% for i in range(indent) %} {% endfor %} +
-{% if not node.todo.is_done %}({% endif %}{% if node.seen %}[{% endif %}<a href="todo?id={{node.todo.id_}}">{{node.todo.title_then|e}}</a>{% if node.seen %}]{% endif %}{% if not node.todo.is_done %}){% endif %}{% if node.todo.comment %} · {{node.todo.comment|e}}{% endif %}</td>
-</tr>
-{% if not node.seen %}
-{% for child in node.children %}
-{{ show_node_done(child, indent+1) }}
-{% endfor %}
-{% endif %}
-{% endif %}
-{% endmacro %}
-
-
-
-{% block content %}
-<h3>{{day.date}} / {{day.weekday}} ({{day.total_effort|round(1)}})</h3>
-<p>
-<a href="day?date={{day.prev_date}}">prev</a> | <a href="day?date={{day.next_date}}">next</a>
-</p>
-<form action="day?date={{day.date}}" method="POST">
-
-<p>
-comment:
-<input name="day_comment" type="text" value="{{day.comment|e}}" />
-<input type="submit" value="OK" /></td>
-</p>
-
-<h4>to do</h4>
-
-<p>
-add: <input type="text" name="new_todo" list="processes">
-</p>
-<p>
-make new todos
-<select name="make_type">
-<option value="full">with</option>
-<option value="empty"{% if make_type == "empty" %}selected {% endif %}>without</option>
-</select>
-descendants (i.e. adopt where possible, otherwise create anew)
-</p>
-
-<table>
-
-<tr>
-<th colspan={{ conditions_present|length + 3 + conditions_present|length }}>conditions</th>
-<th>add enabler</th>
-<th>add disabler</th>
-</tr>
-
-{% for condition in conditions_present %}
-{% set outer_loop = loop %}
-<tr>
-
-{% for _ in conditions_present %}
-{% if outer_loop.index > loop.index %}
-<td class="cond_line cond_{{loop.index0 % 3}} cond_shrink">|
-{% elif outer_loop.index < loop.index %}
-<td class="cond_line cond_{{outer_loop.index0 % 3}}">
-{% else %}
-<td class="cond_line cond_{{outer_loop.index0 % 3}} cond_shrink">/
-{% endif %}
-</td>
-{% endfor %}
-
-<td class="cond_line cond_{{loop.index0 % 3}}"><input type="checkbox" disabled{% if condition.is_active %} checked{% endif %}></td>
-<td colspan=2 class="cond_line cond_{{loop.index0 % 3}}"><a href="condition?id={{condition.id_}}">{{condition.title.at(day.date)|e}}</a></td>
-
-{% for _ in conditions_present %}
-{% if outer_loop.index0 + loop.index < conditions_present|length %}
-<td class="cond_line cond_{{outer_loop.index0 % 3}}">
-{% elif outer_loop.index0 + loop.index > conditions_present|length %}
-<td class="cond_line cond_{{(conditions_present|length - loop.index) % 3}} cond_shrink"> |
-{% else %}
-<td class="cond_line cond_{{outer_loop.index0 % 3}} cond_shrink"> \
-{% endif %}
-{% endfor %}
-
-{% set list_name = "todos_for_%s"|format(condition.id_) %}
-<td><input class="ablers" type="text" name="new_todo" list="{{list_name}}" autocomplete="off" /></td>
-{{ macros.datalist_of_titles(list_name, enablers_for[condition.id_]) }}
-</td>
-{% set list_name = "todos_against_%s"|format(condition.id_) %}
-<td><input class="ablers" type="text" name="new_todo" list="{{list_name}}" autocomplete="off" /></td>
-{{ macros.datalist_of_titles(list_name, disablers_for[condition.id_]) }}
-</td>
-</tr>
-{% endfor %}
-
-<tr>
-{% for condition in conditions_present %}
-<td class="cond_line cond_{{loop.index0 % 3}} cond_shrink">|</td>
-{% endfor %}
-<th colspan=3>doables</th>
-{% for condition in conditions_present %}
-<td class="cond_line cond_{{(conditions_present|length - loop.index) % 3}} cond_shrink"> |</td>
-{% endfor %}
-<th colspan=2>comments</th>
-</tr>
-{% for node in top_nodes %}
-{{ show_node_undone(node, 0) }}
-{% endfor %}
-
-</table>
-
-<h4>done</h4>
-
-<table class="alternating">
-<tr>
-<th colspan=2>effort</th><th rowspan=2>action · comment</th>
-</tr>
-<tr>
-<th>self</th><th>tree</th>
-</tr>
-{% for node in top_nodes %}
-{{ show_node_done(node, 0, []) }}
-{% endfor %}
-</table>
-
-</form>
-
-{{ macros.datalist_of_titles("processes", processes) }}
-{% endblock %}
+++ /dev/null
-{% extends '_base.html' %}
-
-
-
-{% block content %}
-<p>{{msg}}</p>
-{% endblock %}
-
+++ /dev/null
-{% extends '_base.html' %}
-{% import '_macros.html' as macros %}
-
-
-
-{% block css %}
-details > summary::after {
- content: '[+]';
-}
-details summary {
- list-style: none;
-}
-details[open] > summary::after {
- content: '[-]';
-}
-{% endblock %}
-
-
-
-{% macro step_with_steps(step_node, indent) %}
-<tr>
-<td>
-<input type="hidden" name="steps" value="{{step_node.step.id_}}" />
-{% if step_node.is_explicit %}
-<input type="checkbox" name="kept_steps" value="{{step_node.step.id_}}" checked />
-{% endif %}
-</td>
-
-{% if step_node.is_explicit and not step_node.seen %}
-<td colspan=2>
-<details>
-<summary>
-{% else %}
-<td>
-{% endif %}
-
-{% for i in range(indent) %}+{%endfor %}
-{% if step_node.is_suppressed %}<del>{% endif %}
-{% if step_node.seen %}
-<a href="process?id={{step_node.process.id_}}">({{step_node.process.title.newest|e}})</a>
-{% else %}
-<a href="process?id={{step_node.process.id_}}">{{step_node.process.title.newest|e}}</a>
-{% endif %}
-{% if step_node.is_suppressed %}</del>{% endif %}
-
-
-{% if step_node.is_explicit and not step_node.seen %}
-</summary>
-<div>add sub-step: <input name="new_step_to_{{step_id}}" list="process_candidates" autocomplete="off" size="100" /></div>
-</details>
-{% endif %}
-
-</td>
-{% if (not step_node.is_explicit) and (not step_node.seen) %}
-<td>
-<input type="checkbox" name="suppressed_steps" value="{{step_id}}" {% if step_node.is_suppressed %}checked{% endif %}> suppress
-</td>
-{% endif %}
-</tr>
-{% if step_node.is_explicit or not step_node.seen %}
-{% for substep in step_node.steps %}
-{{ step_with_steps(substep, indent+1) }}
-{% endfor %}
-{% endif %}
-{% endmacro %}
-
-
-
-{% block content %}
-<h3>
-{% if is_new %}
-add NEW process
-{% else %}
-edit process of ID {{process.id_}}
-{% endif %}
-</h3>
-<form action="process?id={{process.id_ or ''}}" method="POST">
-
-<table class="edit_table">
-<tr>
-<th>title</th>
-<td><input name="title" type="text" value="{{process.title.newest|e}}" />{% if process.id_ %} [<a href="process_titles?id={{process.id_}}">history</a>]{% endif %}</td>
-</tr>
-<tr>
-<th>effort</th>
-<td><input type="number" name="effort" step=0.1 value={{process.effort.newest}} />{% if process.id_ %} [<a href="process_efforts?id={{process.id_}}">history</a>]{% endif %}</td>
-</tr>
-<tr>
-<th>description</th>
-<td><textarea name="description">{{process.description.newest|e}}</textarea><br />{% if process.id_ %} [<a href="process_descriptions?id={{process.id_}}">history</a>]{% endif %}</td>
-</tr>
-<tr>
-<th>calendarize</th>
-<td><input type="checkbox" name="calendarize" {% if process.calendarize %}checked {% endif %}</td>
-</tr>
-<tr>
-<th>conditions</th>
-<td>{{ macros.simple_checkbox_table("conditions", process.conditions, "condition", "condition_candidates") }}</td>
-</tr>
-<tr>
-<th>blockers</th>
-<td>{{ macros.simple_checkbox_table("blockers", process.blockers, "condition", "condition_candidates") }}</td>
-</tr>
-<tr>
-<th>enables</th>
-<td>{{ macros.simple_checkbox_table("enables", process.enables, "condition", "condition_candidates") }}</td>
-</tr>
-<tr>
-<th>disables</th>
-<td>{{ macros.simple_checkbox_table("disables", process.disables, "condition", "condition_candidates") }}</td>
-</tr>
-<tr>
-<th>steps</th>
-<td>
-{% if steps %}
-<table>
-{% for step_node in steps %}
-{{ step_with_steps(step_node, 0) }}
-{% endfor %}
-</table>
-{% endif %}
-add: <input type="text" name="new_top_step" list="process_candidates" autocomplete="off" value="{{preset_top_step or ''}}" />
-</td>
-</tr>
-<tr>
-<th>step of</th>
-<td>{{ macros.simple_checkbox_table("step_of", owners, "process", "process_candidates") }}</td>
-</tr>
-<tr>
-<th>todos</th>
-<td>
-<a href="todos?process_id={{process.id_}}">{{n_todos}}</a><br />
-</td>
-</tr>
-</table>
-
-{{ macros.edit_buttons() }}
-</form>
-{{ macros.datalist_of_titles("condition_candidates", condition_candidates) }}
-{{ macros.datalist_of_titles("process_candidates", process_candidates) }}
-{% endblock %}
+++ /dev/null
-{% extends '_base.html' %}
-{% import '_macros.html' as macros %}
-
-
-
-{% block content %}
-{{ macros.history_page("process", process, "description", process.description, as_pre=true) }}
-{% endblock %}
-
+++ /dev/null
-{% extends '_base.html' %}
-{% import '_macros.html' as macros %}
-
-
-
-{% block content %}
-{{ macros.history_page("process", process, "effort", process.effort) }}
-{% endblock %}
+++ /dev/null
-{% extends '_base.html' %}
-{% import '_macros.html' as macros %}
-
-
-
-{% block content %}
-{{ macros.history_page("process", process, "title", process.title) }}
-{% endblock %}
+++ /dev/null
-{% extends '_base.html' %}
-
-{% block content %}
-<h3>processes</h3>
-
-<form action="processes" method="GET">
-<input type="submit" value="filter" />
-<input name="pattern" type="text" value="{{pattern}}" />
-</form>
-
-<table class="alternating">
-<tr>
-<th><a href="?sort_by={% if sort_by == "steps" %}-{% endif %}steps">steps</a></th>
-<th><a href="?sort_by={% if sort_by == "owners" %}-{% endif %}owners">owners</a></th>
-<th><a href="?sort_by={% if sort_by == "effort" %}-{% endif %}effort">effort</a></th>
-<th><a href="?sort_by={% if sort_by == "title" %}-{% endif %}title">title</a></th>
-</tr>
-{% for process in processes %}
-<tr>
-<td class="number">{{ process.explicit_steps|count }}</td>
-<td class="number">{{ process.n_owners }}</td>
-<td class="number">{{ process.effort.newest }}</td>
-<td><a href="process?id={{process.id_}}">{{process.title.newest}}</a></td>
-</tr>
-{% endfor %}
-</table>
-
-<p>
-<a href="process">add</a>
-</p>
-{% endblock %}
+++ /dev/null
-{% extends '_base.html' %}
-{% import '_macros.html' as macros %}
-
-
-
-{% block css %}
-select{ font-size: 0.5em; margin: 0; padding: 0; }
-{% endblock %}
-
-
-
-{% macro draw_tree_row(item, parent_todo, indent=0) %}
-<tr>
-<td>
-{% if item.todo %}
-{% if not item.process %}+{% else %} {% endif %}<input type="checkbox" name="adopt" value="{{item.todo.id_}}" checked {% if indent > 0 %}disabled{% endif %}/>
-{% endif %}
-</td>
-<td>
-{% for i in range(indent-1) %} {%endfor %}{% if indent > 0 %}·{% endif %}
-{% if item.todo %}
-<a href="todo?id={{item.todo.id_}}">{{item.todo.title_then|e}}</a>
-{% else %}
-{{item.process.title.newest|e}}
-{% if parent_todo %}
-· fill: <select name="step_filler_to_{{parent_todo.id_}}">
-<option value="ignore">--</option>
-<option value="make_{{item.process.id_}}">make empty</option>
-{% for adoptable in adoption_candidates_for[item.process.id_] %}
-<option value="{{adoptable.id_}}">adopt #{{adoptable.id_}}{% if adoptable.comment %} / {{adoptable.comment}}{% endif %}</option>
-{% endfor %}
-</select>
-{% endif %}
-
-{% endif %}
-</td>
-</tr>
-{% for child in item.children %}
-{{ draw_tree_row(child, item.todo, indent+1) }}
-{% endfor %}
-{% endmacro %}
-
-
-
-{% block content %}
-<h3>Todo: {{todo.title_then|e}}</h3>
-<form action="todo?id={{todo.id_}}" method="POST">
-
-<table class="edit_table">
-<tr>
-<th>day</th>
-<td><a href="day?date={{todo.date}}">{{todo.date}}</a></td>
-</tr>
-<tr>
-<th>process</th>
-<td><a href="process?id={{todo.process.id_}}">{{todo.process.title.newest|e}}</a></td>
-</tr>
-<tr>
-<th>done</th>
-<td><input type="checkbox" name="is_done" {% if todo.is_done %}checked {% endif %} {% if not todo.is_doable %}disabled {% endif %}/>
-{% if not todo.is_doable and todo.is_done %}<input type="hidden" name="is_done" value="1" />{% endif %}
-</td>
-</tr>
-<tr>
-<th>effort</th>
-<td><input type="number" name="effort" step=0.1 placeholder={{todo.effort_then}} value={{todo.effort}} /></td>
-</tr>
-<tr>
-<th>comment</th>
-<td><input name="comment" type="text" value="{{todo.comment|e}}"/></td>
-</tr>
-<tr>
-<th>calendarize</th>
-<td><input type="checkbox" name="calendarize" {% if todo.calendarize %}checked {% endif %}</td>
-</tr>
-<tr>
-<th>conditions</th>
-<td>{{ macros.simple_checkbox_table("conditions", todo.conditions, "condition", "condition_candidates") }}</td>
-</tr>
-<tr>
-<th>blockers</th>
-<td>{{ macros.simple_checkbox_table("blockers", todo.blockers, "condition", "condition_candidates") }}</td>
-</tr>
-<tr>
-<th>enables</th>
-<td>{{ macros.simple_checkbox_table("enables", todo.enables, "condition", "condition_candidates") }}</td>
-</tr>
-<tr>
-<th>disables</th>
-<td>{{ macros.simple_checkbox_table("disables", todo.disables, "condition", "condition_candidates") }}</td>
-</tr>
-<tr>
-<th>parents</th>
-<td>
-{% for parent in todo.parents %}
-<a href="todo?id={{parent.id_}}">{{parent.title_then|e}}</a><br />
-{% endfor %}
-</td>
-</tr>
-<tr>
-<th>descendants</th>
-<td>
-{% if steps_todo_to_process|length > 0 %}
-<table>
-{% for step in steps_todo_to_process %}
-{{ draw_tree_row(step, todo) }}
-{% endfor %}
-</table>
-{% endif %}
-adopt: <input type="text" name="adopt" list="todo_candidates" autocomplete="off" /><br />
-make empty: <input type="text" name="make_empty" list="process_candidates" autocomplete="off" /><br />
-make full: <input type="text" name="make_full" list="process_candidates" autocomplete="off" />
-</td>
-</tr>
-</table>
-
-{{ macros.edit_buttons() }}
-</form>
-{{ macros.datalist_of_titles("condition_candidates", condition_candidates) }}
-{{ macros.datalist_of_titles("process_candidates", process_candidates) }}
-{{ macros.datalist_of_titles("todo_candidates", todo_candidates, historical=true, with_comments=true) }}
-{% endblock %}
+++ /dev/null
-{% extends '_base.html' %}
-{% import '_macros.html' as macros %}
-
-
-
-{% block content %}
-<h3>todos</h3>
-
-<form action="todos" method="GET">
-from <input name="start" class="date" value="{{start}}" />
-to <input name="end" class="date" value="{{end}}" /><br />
-process <input name="process_id" type="text" value="{{process_id or ''}}" list="processes" /><br />
-in comment <input name="comment_pattern" type="text" value="{{comment_pattern}}" /><br />
-<input type="submit" value="filter" />
-</form>
-
-<table class="alternating">
-<tr>
-<th><a href="?sort_by={% if sort_by == "doneness" %}-{% endif %}doneness">done</a></th>
-<th><a href="?sort_by={% if sort_by == "date" %}-{% endif %}date">date</a></th>
-<th><a href="?sort_by={% if sort_by == "title" %}-{% endif %}title">title</a></th>
-<th><a href="?sort_by={% if sort_by == "comment" %}-{% endif %}comment">comment</a></th>
-</tr>
-{% for todo in todos %}
-<tr>
-<td>[{% if todo.is_done %}x{% else %} {% endif %}]</td>
-<td><a href="day?date={{todo.date}}">{{todo.date}}</a></td>
-<td><a href="todo?id={{todo.id_}}">{{todo.title_then}}</a></td>
-<td>{{todo.comment}}</td>
-</tr>
-{% endfor %}
-</table>
-{{ macros.datalist_of_titles("processes", all_processes) }}
-{% endblock %}
-
+++ /dev/null
-"""Test Conditions module."""
-from typing import Any
-from tests.utils import (TestCaseSansDB, TestCaseWithDB, TestCaseWithServer,
- Expected)
-from plomtask.conditions import Condition
-
-
-class TestsSansDB(TestCaseSansDB):
- """Tests requiring no DB setup."""
- checked_class = Condition
-
-
-class TestsWithDB(TestCaseWithDB):
- """Tests requiring DB, but not server setup."""
- checked_class = Condition
- default_init_kwargs = {'is_active': 0}
-
-
-class ExpectedGetConditions(Expected):
- """Builder of expectations for GET /conditions."""
- _default_dict = {'sort_by': 'title', 'pattern': ''}
-
- def recalc(self) -> None:
- """Update internal dictionary by subclass-specific rules."""
- super().recalc()
- self._fields['conditions'] = self.as_ids(self.lib_all('Condition'))
-
-
-class ExpectedGetCondition(Expected):
- """Builder of expectations for GET /condition."""
- _default_dict = {'is_new': False}
- _on_empty_make_temp = ('Condition', 'cond_as_dict')
-
- def __init__(self, id_: int | None, *args: Any, **kwargs: Any) -> None:
- self._fields = {'condition': id_}
- super().__init__(*args, **kwargs)
-
- def recalc(self) -> None:
- """Update internal dictionary by subclass-specific rules."""
- super().recalc()
- for p_field, c_field in [('conditions', 'enabled_processes'),
- ('disables', 'disabling_processes'),
- ('blockers', 'disabled_processes'),
- ('enables', 'enabling_processes')]:
- self._fields[c_field] = self.as_ids([
- p for p in self.lib_all('Process')
- if self._fields['condition'] in p[p_field]])
-
-
-class TestsWithServer(TestCaseWithServer):
- """Module tests against our HTTP server/handler (and database)."""
- checked_class = Condition
-
- def test_fail_POST_condition(self) -> None:
- """Test malformed/illegal POST /condition requests."""
- # check incomplete POST payloads
- valid_payload = {'title': '', 'description': ''}
- self.check_minimal_inputs('/condition', valid_payload)
- # check valid POST payload on bad paths
- self.check_post(valid_payload, '/condition?id=foo', 400)
- # check cannot delete depended-upon Condition
- self.post_exp_cond([], {})
- for key in ('conditions', 'blockers', 'enables', 'disables'):
- self.post_exp_process([], {key: [1]}, 1)
- self.check_post({'delete': ''}, '/condition?id=1', 500)
- self.post_exp_process([], {}, 1)
- self.post_exp_day([], {'new_todo': '1'})
- for key in ('conditions', 'blockers', 'enables', 'disables'):
- self.post_exp_todo([], {key: [1]}, 1)
- self.check_post({'delete': ''}, '/condition?id=1', 500)
-
- def test_POST_condition(self) -> None:
- """Test (valid) POST /condition and its effect on GET /condition[s]."""
- url_single, url_all = '/condition?id=1', '/conditions'
- exp_single, exp_all = ExpectedGetCondition(1), ExpectedGetConditions()
- all_exps = [exp_single, exp_all]
- # test valid POST's effect on single /condition and full /conditions
- self.post_exp_cond(all_exps, {}, post_to_id=False)
- self.check_json_get(url_single, exp_single)
- self.check_json_get(url_all, exp_all)
- # test (no) effect of invalid POST to existing Condition on /condition
- self.check_post({}, url_single, 400)
- self.check_json_get(url_single, exp_single)
- # test effect of POST changing title, description, and activeness
- self.post_exp_cond(all_exps, {'title': 'bar', 'description': 'oof',
- 'is_active': 1})
- self.check_json_get(url_single, exp_single)
- # test POST sans 'is_active' setting it negative
- self.post_exp_cond(all_exps, {})
- self.check_json_get(url_single, exp_single)
- # test deletion POST's effect, both to return id=1 into empty single,
- # full /conditions into empty list
- self.check_json_get(url_single, exp_single)
- self.post_exp_cond(all_exps, {'delete': ''}, redir_to_id=False)
- exp_single.set('is_new', True)
- self.check_json_get(url_single, exp_single)
- self.check_json_get(url_all, exp_all)
-
- def test_GET_condition(self) -> None:
- """More GET /condition testing, especially for Process relations."""
- # check expected default status codes
- self.check_get_defaults('/condition')
- # check 'is_new' set if id= absent or pointing to not-yet-existing ID
- exp = ExpectedGetCondition(None)
- exp.set('is_new', True)
- self.check_json_get('/condition', exp)
- exp = ExpectedGetCondition(1)
- exp.set('is_new', True)
- self.check_json_get('/condition?id=1', exp)
- # make Condition and two Processes that among them establish all
- # possible ConditionsRelations to it, check /condition displays all
- exp = ExpectedGetCondition(1)
- self.post_exp_cond([exp], {}, post_to_id=False)
- for i, p in enumerate([('conditions', 'disables'),
- ('enables', 'blockers')]):
- self.post_exp_process([exp], {k: [1] for k in p}, i+1)
- self.check_json_get('/condition?id=1', exp)
-
- def test_GET_conditions(self) -> None:
- """Test GET /conditions."""
- # test empty result on empty DB, default-settings on empty params
- exp = ExpectedGetConditions()
- self.check_json_get('/conditions', exp)
- # test 'sort_by' default to 'title' (even if set to something else, as
- # long as without handler) and 'pattern' get preserved
- exp.set('pattern', 'bar')
- self.check_json_get('/conditions?sort_by=foo&pattern=bar&foo=x', exp)
- exp.set('pattern', '')
- # test non-empty result, automatic (positive) sorting by title
- post_cond1 = {'is_active': 0, 'title': 'foo', 'description': 'oof'}
- post_cond2 = {'is_active': 0, 'title': 'bar', 'description': 'rab'}
- post_cond3 = {'is_active': 1, 'title': 'baz', 'description': 'zab'}
- for i, post in enumerate([post_cond1, post_cond2, post_cond3]):
- self.post_exp_cond([exp], post, i+1, post_to_id=False)
- self.check_filter(exp, 'conditions', 'sort_by', 'title', [2, 3, 1])
- # test other sortings
- self.check_filter(exp, 'conditions', 'sort_by', '-title', [1, 3, 2])
- self.check_filter(exp, 'conditions', 'sort_by', 'is_active', [1, 2, 3])
- self.check_filter(exp, 'conditions', 'sort_by', '-is_active',
- [3, 2, 1])
- exp.set('sort_by', 'title')
- # test pattern matching on title
- exp.lib_del('Condition', 1)
- self.check_filter(exp, 'conditions', 'pattern', 'ba', [2, 3])
- # test pattern matching on description
- exp.lib_wipe('Condition')
- exp.set_cond_from_post(1, post_cond1)
- self.check_filter(exp, 'conditions', 'pattern', 'of', [1])
+++ /dev/null
-"""Test Days module."""
-from datetime import date as dt_date, datetime, timedelta
-from typing import Any
-from tests.utils import (TestCaseSansDB, TestCaseWithDB, TestCaseWithServer,
- Expected, date_and_day_id, dt_date_from_day_id)
-from plomtask.dating import date_in_n_days as tested_date_in_n_days
-from plomtask.days import Day
-
-# Simply the ISO format for dates as used in plomtask.dating, but for testing
-# purposes we state our expectations here independently and explicitly
-TESTING_DATE_FORMAT = '%Y-%m-%d'
-
-
-def _testing_date_in_n_days(n: int) -> str:
- """Return in ISO format / TEST_DATE_FORMAT date from today + n days.
-
- As with TESTING_DATE_FORMAT, we assume this equal the original's code
- at plomtask.dating.date_in_n_days, but want to state our expectations
- explicitly to rule out importing issues from the original.
- """
- date = dt_date.today() + timedelta(days=n)
- return date.strftime(TESTING_DATE_FORMAT)
-
-
-def _days_n_for_date(date: str) -> int:
- return (dt_date.fromisoformat(date) - dt_date(2000, 1, 1)).days
-
-
-class TestsSansDB(TestCaseSansDB):
- """Days module tests not requiring DB setup."""
- checked_class = Day
-
- def test_date_in_n_days(self) -> None:
- """Test dating.date_in_n_days"""
- for n in [-100, -2, -1, 0, 1, 2, 1000]:
- date = datetime.now() + timedelta(days=n)
- self.assertEqual(tested_date_in_n_days(n),
- date.strftime(TESTING_DATE_FORMAT))
-
- def test_Day_date_weekday_neighbor_dates(self) -> None:
- """Test Day's date parsing and neighbourhood resolution."""
- self.assertEqual(dt_date(2000, 1, 2).isoformat(), Day(1).date)
- self.assertEqual(dt_date(2001, 1, 2).isoformat(), Day(367).date)
- self.assertEqual('Sunday', Day(1).weekday)
- self.assertEqual('March', Day(75).month_name)
- self.assertEqual('2000-12-31', Day(366).prev_date)
- self.assertEqual('2001-03-01', Day(424).next_date)
-
-
-class TestsWithDB(TestCaseWithDB):
- """Tests requiring DB, but not server setup."""
- checked_class = Day
-
- def test_Day_with_filled_gaps(self) -> None:
- """Test .with_filled_gaps."""
- day_ids = [n + 1 for n in range(9)]
- dt_dates = [dt_date_from_day_id(id_) for id_ in day_ids]
-
- def expect_within_full_range_as_commented(
- range_indexes: tuple[int, int],
- indexes_to_provide: list[int]
- ) -> None:
- start_i, end_i = range_indexes
- days_expected = [Day(n) for n in day_ids]
- to_remove = []
- for idx in indexes_to_provide:
- days_expected[idx] = Day(day_ids[idx], '#')
- days_expected[idx].save(self.db_conn)
- to_remove += [days_expected[idx]]
- days_expected = days_expected[start_i:end_i+1]
- days_result = Day.with_filled_gaps(
- self.db_conn, dt_dates[start_i], dt_dates[end_i])
- self.assertEqual(days_result, days_expected)
- for day in to_remove:
- day.remove(self.db_conn)
-
- # check provided Days recognizable in (full-range) interval
- expect_within_full_range_as_commented((0, 8), [0, 4, 8])
- # check limited range, but limiting Days provided
- expect_within_full_range_as_commented((2, 6), [2, 5, 6])
- # check Days within range but beyond provided Days also filled in
- expect_within_full_range_as_commented((1, 7), [2, 5])
- # check provided Days beyond range ignored
- expect_within_full_range_as_commented((3, 5), [1, 2, 4, 6, 7])
- # check inversion of start_date and end_date returns empty list
- expect_within_full_range_as_commented((5, 3), [2, 4, 6])
- # check empty provision still creates filler elements in interval
- expect_within_full_range_as_commented((3, 5), [])
- # check single-element selection creating only filler beyond provided
- expect_within_full_range_as_commented((1, 1), [2, 4, 6])
- # check (un-saved) filler Days don't show up in cache or DB
- day = Day(day_ids[3])
- day.save(self.db_conn)
- Day.with_filled_gaps(self.db_conn, dt_dates[0], dt_dates[-1])
- self.check_identity_with_cache_and_db([day])
-
-
-class ExpectedGetCalendar(Expected):
- """Builder of expectations for GET /calendar."""
-
- def __init__(self, start: int, end: int, *args: Any, **kwargs: Any
- ) -> None:
- today_dt = dt_date.today()
- today_iso = today_dt.isoformat()
- self._fields = {
- 'start': (today_dt + timedelta(days=start)).isoformat(),
- 'end': (today_dt + timedelta(days=end)).isoformat(),
- 'today': today_iso}
- self._fields['days'] = [
- _days_n_for_date(today_iso) + i for i in range(start, end+1)]
- super().__init__(*args, **kwargs)
- for day_id in self._fields['days']:
- self.lib_set('Day', [self.day_as_dict(day_id)])
-
-
-class ExpectedGetDay(Expected):
- """Builder of expectations for GET /day."""
- _default_dict = {'make_type': 'full'}
- _on_empty_make_temp = ('Day', 'day_as_dict')
-
- def __init__(self, day_id: int, *args: Any, **kwargs: Any) -> None:
- self._fields = {'day': day_id}
- super().__init__(*args, **kwargs)
-
- def recalc(self) -> None:
- super().recalc()
- todos = [t for t in self.lib_all('Todo')
- if t['day_id'] == self._fields['day']]
- self.lib_get('Day', self._fields['day'])['todos'] = self.as_ids(todos)
- self._fields['top_nodes'] = [
- {'children': [], 'seen': 0, 'todo': todo['id']}
- for todo in todos]
- for todo in todos:
- proc = self.lib_get('Process', todo['process_id'])
- for title in ['conditions', 'enables', 'blockers', 'disables']:
- todo[title] = proc[title]
- conds_present = set()
- for todo in todos:
- for title in ['conditions', 'enables', 'blockers', 'disables']:
- for cond_id in todo[title]:
- conds_present.add(cond_id)
- self._fields['conditions_present'] = list(conds_present)
- for prefix in ['en', 'dis']:
- blers = {}
- for cond_id in conds_present:
- blers[cond_id] = self.as_ids(
- [t for t in todos if cond_id in t[f'{prefix}ables']])
- self._fields[f'{prefix}ablers_for'] = blers
- self._fields['processes'] = self.as_ids(self.lib_all('Process'))
-
-
-class TestsWithServer(TestCaseWithServer):
- """Tests against our HTTP server/handler (and database)."""
- checked_class = Day
-
- def test_basic_GET_day(self) -> None:
- """Test basic (no Processes/Conditions/Todos) GET /day basics."""
- # check illegal date parameters
- self.check_get_defaults('/day', '2024-01-01', 'date')
- self.check_get('/day?date=2024-02-30', 400)
- # check undefined day
- today_iso = dt_date.today().isoformat()
- exp = ExpectedGetDay(_days_n_for_date(today_iso))
- self.check_json_get('/day', exp)
- # check defined day with make_type parameter
- date, day_id = date_and_day_id(1)
- exp = ExpectedGetDay(day_id)
- exp.set('make_type', 'bar')
- self.check_json_get(f'/day?date={date}&make_type=bar', exp)
- # check parsing of 'yesterday', 'today', 'tomorrow'
- for name, dist in [('yesterday', -1), ('today', 0), ('tomorrow', +1)]:
- exp = ExpectedGetDay(_days_n_for_date(today_iso) + dist)
- self.check_json_get(f'/day?date={name}', exp)
-
- def test_fail_POST_day(self) -> None:
- """Test malformed/illegal POST /day requests."""
- # check payloads lacking minimum expecteds
- url = '/day?date=2024-01-01'
- minimal_post = {'make_type': '', 'day_comment': ''}
- self.check_minimal_inputs(url, minimal_post)
- # to next check illegal new_todo values, we need an actual Process
- self.post_exp_process([], {}, 1)
- # check illegal new_todo values
- self.check_post(minimal_post | {'new_todo': ['foo']}, url, 400)
- self.check_post(minimal_post | {'new_todo': [1, 2]}, url, 404)
- # to next check illegal old_todo inputs, we need to first post Todo
- self.check_post(minimal_post | {'new_todo': [1]}, url, 302,
- '/day?date=2024-01-01&make_type=')
- # check illegal old_todo inputs (equal list lengths though)
- post = minimal_post | {'comment': ['foo'], 'effort': [3.3],
- 'done': [], 'todo_id': [1]}
- self.check_post(post, url, 302, '/day?date=2024-01-01&make_type=')
- post['todo_id'] = [2] # reference to non-existant Process
- self.check_post(post, url, 404)
- post['todo_id'] = ['a']
- self.check_post(post, url, 400)
- post['todo_id'] = [1]
- post['done'] = ['foo']
- self.check_post(post, url, 400)
- post['done'] = [2] # reference to non-posted todo_id
- self.check_post(post, url, 400)
- post['done'] = []
- post['effort'] = ['foo']
- self.check_post(post, url, 400)
- post['effort'] = [None]
- self.check_post(post, url, 400)
- post['effort'] = [3.3]
- # check illegal old_todo inputs: unequal list lengths
- post['comment'] = []
- self.check_post(post, url, 400)
- post['comment'] = ['foo', 'foo']
- self.check_post(post, url, 400)
- post['comment'] = ['foo']
- post['effort'] = []
- self.check_post(post, url, 400)
- post['effort'] = [3.3, 3.3]
- self.check_post(post, url, 400)
- post['effort'] = [3.3]
- post['todo_id'] = [1, 1]
- self.check_post(post, url, 400)
- post['todo_id'] = [1]
- # # check valid POST payload on bad paths
- self.check_post(post, '/day', 400)
- self.check_post(post, '/day?date=', 400)
- self.check_post(post, '/day?date=foo', 400)
-
- def test_basic_POST_day(self) -> None:
- """Test basic (no Processes/Conditions/Todos) POST /day.
-
- Check POST requests properly parse 'today', 'tomorrow', 'yesterday',
- and actual date strings; store 'day_comment'; preserve 'make_type'
- setting in redirect even if nonsensical; and allow '' as 'new_todo'.
- """
- for name, dist, test_str in [('2024-01-01', None, 'a'),
- ('today', 0, 'b'),
- ('yesterday', -1, 'c'),
- ('tomorrow', +1, 'd')]:
- date = name if dist is None else _testing_date_in_n_days(dist)
- post = {'day_comment': test_str, 'make_type': f'x:{test_str}',
- 'new_todo': ['', '']}
- post_url = f'/day?date={name}'
- redir_url = f'{post_url}&make_type={post["make_type"]}'
- self.check_post(post, post_url, 302, redir_url)
- day_id = _days_n_for_date(date)
- exp = ExpectedGetDay(day_id)
- exp.set_day_from_post(day_id, post)
- self.check_json_get(post_url, exp)
-
- def test_GET_day_with_processes_and_todos(self) -> None:
- """Test GET /day displaying Processes and Todos (no trees)."""
- date, day_id = date_and_day_id(1)
- exp = ExpectedGetDay(day_id)
- # check Processes get displayed in ['processes'] and ['_library'],
- # even without any Todos referencing them
- proc_posts = [{'title': 'foo', 'description': 'oof', 'effort': 1.1},
- {'title': 'bar', 'description': 'rab', 'effort': 0.9}]
- for i, proc_post in enumerate(proc_posts):
- self.post_exp_process([exp], proc_post, i+1)
- self.check_json_get(f'/day?date={date}', exp)
- # post Todos of either Process and check their display
- self.post_exp_day([exp], {'new_todo': [1, 2]})
- self.check_json_get(f'/day?date={date}', exp)
- # test malformed Todo manipulation posts
- post_day = {'day_comment': '', 'make_type': '', 'comment': [''],
- 'new_todo': [], 'done': [1], 'effort': [2.3]}
- self.check_post(post_day, f'/day?date={date}', 400) # no todo_id
- post_day['todo_id'] = [2] # not identifying Todo refered by done
- self.check_post(post_day, f'/day?date={date}', 400)
- post_day['todo_id'] = [1, 2] # imply range beyond that of effort etc.
- self.check_post(post_day, f'/day?date={date}', 400)
- post_day['comment'] = ['FOO', '']
- self.check_post(post_day, f'/day?date={date}', 400)
- post_day['effort'] = [2.3, '']
- post_day['comment'] = ['']
- self.check_post(post_day, f'/day?date={date}', 400)
- # add a comment to one Todo and set the other's doneness and effort
- post_day['comment'] = ['FOO', '']
- self.post_exp_day([exp], post_day)
- self.check_json_get(f'/day?date={date}', exp)
- # invert effort and comment between both Todos
- # (cannot invert doneness, /day only collects positive setting)
- post_day['comment'] = ['', 'FOO']
- post_day['effort'] = ['', 2.3]
- self.post_exp_day([exp], post_day)
- self.check_json_get(f'/day?date={date}', exp)
-
- def test_POST_day_todo_make_types(self) -> None:
- """Test behavior of POST /todo on 'make_type'='full' and 'empty'."""
- date, day_id = date_and_day_id(1)
- exp = ExpectedGetDay(day_id)
- # create two Processes, with second one step of first one
- self.post_exp_process([exp], {}, 2)
- self.post_exp_process([exp], {'new_top_step': 2}, 1)
- exp.lib_set('ProcessStep', [
- exp.procstep_as_dict(1, owner_id=1, step_process_id=2)])
- self.check_json_get(f'/day?date={date}', exp)
- # post Todo of adopting Process, with make_type=full
- self.post_exp_day([exp], {'make_type': 'full', 'new_todo': [1]})
- exp.lib_get('Todo', 1)['children'] = [2]
- exp.lib_set('Todo', [exp.todo_as_dict(2, 2)])
- top_nodes = [{'todo': 1,
- 'seen': 0,
- 'children': [{'todo': 2,
- 'seen': 0,
- 'children': []}]}]
- exp.force('top_nodes', top_nodes)
- self.check_json_get(f'/day?date={date}', exp)
- # post another Todo of adopting Process, expect to adopt existing
- self.post_exp_day([exp], {'make_type': 'full', 'new_todo': [1]})
- exp.lib_set('Todo', [exp.todo_as_dict(3, 1, children=[2])])
- top_nodes += [{'todo': 3,
- 'seen': 0,
- 'children': [{'todo': 2,
- 'seen': 1,
- 'children': []}]}]
- exp.force('top_nodes', top_nodes)
- self.check_json_get(f'/day?date={date}', exp)
- # post another Todo of adopting Process, no adopt with make_type=empty
- self.post_exp_day([exp], {'make_type': 'empty', 'new_todo': [1]})
- exp.lib_set('Todo', [exp.todo_as_dict(4, 1)])
- top_nodes += [{'todo': 4,
- 'seen': 0,
- 'children': []}]
- exp.force('top_nodes', top_nodes)
- self.check_json_get(f'/day?date={date}', exp)
-
- def test_POST_day_new_todo_order_commutative(self) -> None:
- """Check that order of 'new_todo' values in POST /day don't matter."""
- date, day_id = date_and_day_id(1)
- exp = ExpectedGetDay(day_id)
- self.post_exp_process([exp], {}, 2)
- self.post_exp_process([exp], {'new_top_step': 2}, 1)
- exp.lib_set('ProcessStep', [
- exp.procstep_as_dict(1, owner_id=1, step_process_id=2)])
- # make-full-day-post batch of Todos of both Processes in one order …,
- self.post_exp_day([exp], {'make_type': 'full', 'new_todo': [1, 2]})
- top_nodes: list[dict[str, Any]] = [{'todo': 1,
- 'seen': 0,
- 'children': [{'todo': 2,
- 'seen': 0,
- 'children': []}]}]
- exp.force('top_nodes', top_nodes)
- exp.lib_get('Todo', 1)['children'] = [2]
- self.check_json_get(f'/day?date={date}', exp)
- # … and then in the other, expecting same node tree / relations
- exp.lib_del('Day', day_id)
- date, day_id = date_and_day_id(2)
- exp.set('day', day_id)
- day_post = {'make_type': 'full', 'new_todo': [2, 1]}
- self.post_exp_day([exp], day_post, day_id)
- exp.lib_del('Todo', 1)
- exp.lib_del('Todo', 2)
- top_nodes[0]['todo'] = 3 # was: 1
- top_nodes[0]['children'][0]['todo'] = 4 # was: 2
- exp.lib_get('Todo', 3)['children'] = [4]
- self.check_json_get(f'/day?date={date}', exp)
-
- def test_POST_day_todo_deletion_by_negative_effort(self) -> None:
- """Test POST /day removal of Todos by setting negative effort."""
- date, day_id = date_and_day_id(1)
- exp = ExpectedGetDay(day_id)
- self.post_exp_process([exp], {}, 1)
- self.post_exp_day([exp], {'new_todo': [1]})
- # check cannot remove Todo if commented
- self.post_exp_day([exp],
- {'todo_id': [1], 'comment': ['foo'], 'effort': [-1]})
- self.check_json_get(f'/day?date={date}', exp)
- # check *can* remove Todo while getting done
- self.post_exp_day([exp],
- {'todo_id': [1], 'comment': [''], 'effort': [-1],
- 'done': [1]})
- exp.lib_del('Todo', 1)
- self.check_json_get(f'/day?date={date}', exp)
-
- def test_GET_day_with_conditions(self) -> None:
- """Test GET /day displaying Conditions and their relations."""
- date, day_id = date_and_day_id(1)
- exp = ExpectedGetDay(day_id)
- # check non-referenced Conditions not shown
- cond_posts = [{'is_active': 0, 'title': 'A', 'description': 'a'},
- {'is_active': 1, 'title': 'B', 'description': 'b'}]
- for i, cond_post in enumerate(cond_posts):
- self.check_post(cond_post, f'/condition?id={i+1}')
- self.check_json_get(f'/day?date={date}', exp)
- # add Processes with Conditions, check Conditions now shown
- for i, (c1, c2) in enumerate([(1, 2), (2, 1)]):
- post = {'conditions': [c1], 'disables': [c1],
- 'blockers': [c2], 'enables': [c2]}
- self.post_exp_process([exp], post, i+1)
- for i, cond_post in enumerate(cond_posts):
- exp.set_cond_from_post(i+1, cond_post)
- self.check_json_get(f'/day?date={date}', exp)
- # add Todos in relation to Conditions, check consequence relations
- self.post_exp_day([exp], {'new_todo': [1, 2]})
- self.check_json_get(f'/day?date={date}', exp)
-
- def test_GET_calendar(self) -> None:
- """Test GET /calendar responses based on various inputs, DB states."""
- # check illegal date range delimiters
- self.check_get('/calendar?start=foo', 400)
- self.check_get('/calendar?end=foo', 400)
- # check default range for expected selection/order without saved days
- exp = ExpectedGetCalendar(-1, 366)
- self.check_json_get('/calendar', exp)
- self.check_json_get('/calendar?start=&end=', exp)
- # check with named days as delimiters
- exp = ExpectedGetCalendar(-1, +1)
- self.check_json_get('/calendar?start=yesterday&end=tomorrow', exp)
- # check zero-element range
- exp = ExpectedGetCalendar(+1, 0)
- self.check_json_get('/calendar?start=tomorrow&end=today', exp)
- # check saved day shows up in results, proven by its comment
- start_date = _testing_date_in_n_days(-5)
- date = _testing_date_in_n_days(-2)
- end_date = _testing_date_in_n_days(+5)
- exp = ExpectedGetCalendar(-5, +5)
- self.post_exp_day([exp],
- {'day_comment': 'foo'}, _days_n_for_date(date))
- url = f'/calendar?start={start_date}&end={end_date}'
- self.check_json_get(url, exp)
+++ /dev/null
-"""Miscellaneous tests."""
-from typing import Callable
-from unittest import TestCase
-from tests.utils import TestCaseWithServer
-from plomtask.http import InputsParser
-from plomtask.exceptions import BadFormatException
-
-
-class TestsSansServer(TestCase):
- """Tests that do not require DB setup or a server."""
-
- def _test_parser(self,
- method: Callable,
- serialized: str,
- expected: object,
- method_args: list[object],
- fails: bool = False
- ) -> None:
- # pylint: disable=too-many-arguments
- parser = InputsParser(serialized)
- if fails:
- with self.assertRaises(BadFormatException):
- method(parser, *method_args)
- else:
- self.assertEqual(expected, method(parser, *method_args))
-
- def test_InputsParser_get_str_or_fail(self) -> None:
- """Test InputsParser.get_str."""
- m = InputsParser.get_str_or_fail
- self._test_parser(m, '', 0, ['foo'], fails=True)
- self._test_parser(m, '', 'bar', ['foo', 'bar'])
- self._test_parser(m, 'foo=', '', ['foo'])
- self._test_parser(m, 'foo=', '', ['foo', 'bar'])
- self._test_parser(m, 'foo=baz', 'baz', ['foo', 'bar'])
- self._test_parser(m, 'foo=baz&foo=quux', 'baz', ['foo', 'bar'])
- self._test_parser(m, 'foo=baz,quux', 'baz,quux', ['foo', 'bar'])
-
- def test_InputsParser_get_str(self) -> None:
- """Test InputsParser.get_str."""
- m = InputsParser.get_str
- self._test_parser(m, '', None, ['foo'])
- self._test_parser(m, '', 'bar', ['foo', 'bar'])
- self._test_parser(m, 'foo=', '', ['foo'])
- self._test_parser(m, 'foo=', '', ['foo', 'bar'])
- self._test_parser(m, 'foo=baz', 'baz', ['foo', 'bar'])
- self._test_parser(m, 'foo=baz&foo=quux', 'baz', ['foo', 'bar'])
- self._test_parser(m, 'foo=baz,quux', 'baz,quux', ['foo', 'bar'])
-
- def test_InputsParser_get_all_of_key_prefixed(self) -> None:
- """Test InputsParser.get_all_of_key_prefixed."""
- m = InputsParser.get_all_of_key_prefixed
- self._test_parser(m, '', {}, [''])
- self._test_parser(m, '', {}, ['foo'])
- self._test_parser(m, 'foo=bar', {'foo': ['bar']}, [''])
- self._test_parser(m, 'x=y&x=z', {'': ['y', 'z']}, ['x'])
- self._test_parser(m, 'xx=y&xx=Z', {'x': ['y', 'Z']}, ['x'])
- self._test_parser(m, 'xx=y', {}, ['xxx'])
- self._test_parser(m, 'xxx=x&xxy=y&xyy=z', {'x': ['x'], 'y': ['y']},
- ['xx'])
-
- def test_InputsParser_get_int_or_none(self) -> None:
- """Test InputsParser.get_int_or_none."""
- m = InputsParser.get_int_or_none
- self._test_parser(m, '', None, ['foo'])
- self._test_parser(m, 'foo=', None, ['foo'])
- self._test_parser(m, 'foo=0', 0, ['foo'])
- self._test_parser(m, 'foo=None', 0, ['foo'], fails=True)
- self._test_parser(m, 'foo=0.1', 0, ['foo'], fails=True)
- self._test_parser(m, 'foo=23', 23, ['foo'])
-
- def test_InputsParser_get_float_or_fail(self) -> None:
- """Test InputsParser.get_float_or_fail."""
- m = InputsParser.get_float_or_fail
- self._test_parser(m, '', 0, ['foo'], fails=True)
- self._test_parser(m, 'foo=', 0, ['foo'], fails=True)
- self._test_parser(m, 'foo=bar', 0, ['foo'], fails=True)
- self._test_parser(m, 'foo=0', 0, ['foo'])
- self._test_parser(m, 'foo=0.1', 0.1, ['foo'])
- self._test_parser(m, 'foo=1.23&foo=456', 1.23, ['foo'])
-
- def test_InputsParser_get_bool(self) -> None:
- """Test InputsParser.get_bool."""
- m = InputsParser.get_bool
- self._test_parser(m, '', 0, ['foo'])
- self._test_parser(m, 'val=foo', 0, ['foo'])
- self._test_parser(m, 'val=True', 0, ['foo'])
- self._test_parser(m, 'foo=', 0, ['foo'])
- self._test_parser(m, 'foo=None', 0, ['foo'])
- self._test_parser(m, 'foo=0', 0, ['foo'])
- self._test_parser(m, 'foo=bar', 0, ['foo'])
- self._test_parser(m, 'foo=bar&foo=baz', 0, ['foo'])
- self._test_parser(m, 'foo=False', 0, ['foo'])
- self._test_parser(m, 'foo=true', 1, ['foo'])
- self._test_parser(m, 'foo=True', 1, ['foo'])
- self._test_parser(m, 'foo=1', 1, ['foo'])
- self._test_parser(m, 'foo=on', 1, ['foo'])
-
- def test_InputsParser_get_all_str(self) -> None:
- """Test InputsParser.get_all_str."""
- m = InputsParser.get_all_str
- self._test_parser(m, '', [], ['foo'])
- self._test_parser(m, 'foo=', [''], ['foo'])
- self._test_parser(m, 'foo=bar', ['bar'], ['foo'])
- self._test_parser(m, 'foo=bar&foo=baz', ['bar', 'baz'], ['foo'])
-
- def test_InputsParser_get_all_int(self) -> None:
- """Test InputsParser.get_all_int."""
- m = InputsParser.get_all_int
- self._test_parser(m, '', [], ['foo'])
- self._test_parser(m, 'foo=', [], ['foo'])
- self._test_parser(m, 'foo=', 0, ['foo', True], fails=True)
- self._test_parser(m, 'foo=0', [0], ['foo'])
- self._test_parser(m, 'foo=0&foo=17', [0, 17], ['foo'])
- self._test_parser(m, 'foo=0.1&foo=17', 0, ['foo'], fails=True)
- self._test_parser(m, 'foo=None&foo=17', 0, ['foo'], fails=True)
-
-
-class TestsWithServer(TestCaseWithServer):
- """Tests against our HTTP server/handler (and database)."""
-
- def test_do_GET(self) -> None:
- """Test GET / redirect, and unknown targets failing."""
- self.conn.request('GET', '/')
- self.check_redirect('/day')
- self.check_get('/foo', 404)
-
- def test_do_POST(self) -> None:
- """Test POST to / and other unknown targets failing."""
- self.check_post({}, '/', 404)
- self.check_post({}, '/foo', 404)
+++ /dev/null
-"""Test Processes module."""
-from typing import Any
-from tests.utils import (TestCaseSansDB, TestCaseWithDB, TestCaseWithServer,
- Expected)
-from plomtask.processes import Process, ProcessStep
-from plomtask.exceptions import NotFoundException
-
-
-class TestsSansDB(TestCaseSansDB):
- """Module tests not requiring DB setup."""
- checked_class = Process
-
-
-class TestsSansDBProcessStep(TestCaseSansDB):
- """Module tests not requiring DB setup."""
- checked_class = ProcessStep
- default_init_kwargs = {'owner_id': 2, 'step_process_id': 3,
- 'parent_step_id': 4}
-
-
-class TestsWithDB(TestCaseWithDB):
- """Module tests requiring DB setup."""
- checked_class = Process
-
- def test_remove(self) -> None:
- """Test removal of Processes and ProcessSteps."""
- super().test_remove()
- p1, p2, p3 = Process(None), Process(None), Process(None)
- for p in [p1, p2, p3]:
- p.save(self.db_conn)
- assert isinstance(p1.id_, int)
- assert isinstance(p2.id_, int)
- assert isinstance(p3.id_, int)
- step = ProcessStep(None, p2.id_, p1.id_, None)
- p2.set_steps(self.db_conn, [step])
- step_id = step.id_
- p2.set_steps(self.db_conn, [])
- with self.assertRaises(NotFoundException):
- # check unset ProcessSteps actually cannot be found anymore
- assert step_id is not None
- ProcessStep.by_id(self.db_conn, step_id)
- p1.remove(self.db_conn)
- step = ProcessStep(None, p2.id_, p3.id_, None)
- p2.set_steps(self.db_conn, [step])
- step_id = step.id_
- # check _can_ remove Process pointed to by ProcessStep.owner_id, and …
- p2.remove(self.db_conn)
- with self.assertRaises(NotFoundException):
- # … being dis-owned eliminates ProcessStep
- assert step_id is not None
- ProcessStep.by_id(self.db_conn, step_id)
-
-
-class TestsWithDBForProcessStep(TestCaseWithDB):
- """Module tests requiring DB setup."""
- checked_class = ProcessStep
- default_init_kwargs = {'owner_id': 1, 'step_process_id': 2,
- 'parent_step_id': 3}
-
- def setUp(self) -> None:
- super().setUp()
- self.p1 = Process(1)
- self.p1.save(self.db_conn)
-
- def test_remove(self) -> None:
- """Test .remove and unsetting of owner's .explicit_steps entry."""
- p2 = Process(2)
- p2.save(self.db_conn)
- assert isinstance(self.p1.id_, int)
- assert isinstance(p2.id_, int)
- step = ProcessStep(None, self.p1.id_, p2.id_, None)
- self.p1.set_steps(self.db_conn, [step])
- step.remove(self.db_conn)
- self.assertEqual(self.p1.explicit_steps, [])
- self.check_identity_with_cache_and_db([])
-
-
-class ExpectedGetProcess(Expected):
- """Builder of expectations for GET /processes."""
- _default_dict = {'is_new': False, 'preset_top_step': None, 'n_todos': 0}
- _on_empty_make_temp = ('Process', 'proc_as_dict')
-
- def __init__(self,
- proc_id: int,
- *args: Any, **kwargs: Any) -> None:
- self._fields = {'process': proc_id, 'steps': []}
- super().__init__(*args, **kwargs)
-
- @staticmethod
- def stepnode_as_dict(step_id: int,
- proc_id: int,
- seen: bool = False,
- steps: None | list[dict[str, object]] = None,
- is_explicit: bool = True,
- is_suppressed: bool = False) -> dict[str, object]:
- # pylint: disable=too-many-arguments
- """Return JSON of ProcessStepNode to expect."""
- return {'step': step_id,
- 'process': proc_id,
- 'seen': seen,
- 'steps': steps if steps else [],
- 'is_explicit': is_explicit,
- 'is_suppressed': is_suppressed}
-
- def recalc(self) -> None:
- """Update internal dictionary by subclass-specific rules."""
- super().recalc()
- self._fields['process_candidates'] = self.as_ids(
- self.lib_all('Process'))
- self._fields['condition_candidates'] = self.as_ids(
- self.lib_all('Condition'))
- self._fields['owners'] = [
- s['owner_id'] for s in self.lib_all('ProcessStep')
- if s['step_process_id'] == self._fields['process']]
-
-
-class ExpectedGetProcesses(Expected):
- """Builder of expectations for GET /processes."""
- _default_dict = {'sort_by': 'title', 'pattern': ''}
-
- def recalc(self) -> None:
- """Update internal dictionary by subclass-specific rules."""
- super().recalc()
- self._fields['processes'] = self.as_ids(self.lib_all('Process'))
-
-
-class TestsWithServer(TestCaseWithServer):
- """Module tests against our HTTP server/handler (and database)."""
- checked_class = Process
-
- def test_fail_POST_process(self) -> None:
- """Test POST /process and its effect on the database."""
- valid_post = {'title': '', 'description': '', 'effort': 1.0}
- # check payloads lacking minimum expecteds
- self.check_minimal_inputs('/process', valid_post)
- # check payloads of bad data types
- self.check_post(valid_post | {'effort': ''}, '/process', 400)
- # check references to non-existant items
- self.check_post(valid_post | {'conditions': [1]}, '/process', 404)
- self.check_post(valid_post | {'disables': [1]}, '/process', 404)
- self.check_post(valid_post | {'blockers': [1]}, '/process', 404)
- self.check_post(valid_post | {'enables': [1]}, '/process', 404)
- self.check_post(valid_post | {'new_top_step': 2}, '/process', 404)
- # check deletion of non-existant
- self.check_post({'delete': ''}, '/process?id=1', 404)
-
- def test_basic_POST_process(self) -> None:
- """Test basic GET/POST /process operations."""
- # check on un-saved
- exp = ExpectedGetProcess(1)
- exp.force('process_candidates', [])
- exp.set('is_new', True)
- self.check_json_get('/process?id=1', exp)
- # check on minimal payload post
- exp = ExpectedGetProcess(1)
- self.post_exp_process([exp], {}, 1)
- self.check_json_get('/process?id=1', exp)
- # check boolean 'calendarize'
- self.post_exp_process([exp], {'calendarize': True}, 1)
- self.check_json_get('/process?id=1', exp)
- self.post_exp_process([exp], {}, 1)
- self.check_json_get('/process?id=1', exp)
- # check conditions posting
- for i in range(3):
- self.post_exp_cond([exp], {}, i+1)
- p = {'conditions': [1, 2], 'disables': [1],
- 'blockers': [3], 'enables': [2, 3]}
- self.post_exp_process([exp], p, 1)
- self.check_json_get('/process?id=1', exp)
- # check n_todos field
- self.post_exp_day([], {'new_todo': ['1']}, 1)
- self.post_exp_day([], {'new_todo': ['1']}, 2)
- exp.set('n_todos', 2)
- self.check_json_get('/process?id=1', exp)
- # check cannot delete if Todos to Process
- self.check_post({'delete': ''}, '/process?id=1', 500)
- # check cannot delete if some ProcessStep's .step_process_id
- self.post_exp_process([exp], {}, 2)
- self.post_exp_process([exp], {'new_top_step': 2}, 3)
- self.check_post({'delete': ''}, '/process?id=2', 500)
- # check successful deletion
- self.post_exp_process([exp], {}, 4)
- self.check_post({'delete': ''}, '/process?id=4', 302, '/processes')
- exp = ExpectedGetProcess(4)
- exp.set('is_new', True)
- for i in range(3):
- self.post_exp_cond([exp], {}, i+1)
- self.post_exp_process([exp], {}, i+1)
- exp.force('process_candidates', [1, 2, 3])
- self.check_json_get('/process?id=4', exp)
-
- def test_POST_process_steps(self) -> None:
- """Test behavior of ProcessStep posting."""
- # pylint: disable=too-many-statements
- url = '/process?id=1'
- exp = ExpectedGetProcess(1)
- self.post_exp_process([exp], {}, 1)
- # post first (top-level) step of proc2 to proc1 by 'step_of' in 2
- self.post_exp_process([exp], {'step_of': 1}, 2)
- exp.lib_set('ProcessStep',
- [exp.procstep_as_dict(1, owner_id=1, step_process_id=2)])
- exp.set('steps', [
- exp.stepnode_as_dict(
- step_id=1,
- proc_id=2)])
- self.check_json_get(url, exp)
- # post empty/absent steps list to process, expect clean slate, and old
- # step to completely disappear
- self.post_exp_process([exp], {}, 1)
- exp.lib_wipe('ProcessStep')
- exp.set('steps', [])
- self.check_json_get(url, exp)
- # post anew (as only step yet) step of proc2 to proc1 by 'new_top_step'
- self.post_exp_process([exp], {'new_top_step': 2}, 1)
- exp.lib_set('ProcessStep',
- [exp.procstep_as_dict(1, owner_id=1, step_process_id=2)])
- self.post_exp_process([exp], {'kept_steps': [1]}, 1)
- step_nodes = [exp.stepnode_as_dict(step_id=1, proc_id=2)]
- exp.set('steps', step_nodes)
- self.check_json_get(url, exp)
- # fail on zero-step recursion
- p_min = {'title': '', 'description': '', 'effort': 0}
- self.check_post(p_min | {'new_top_step': 1}, url, 400)
- self.check_post(p_min | {'step_of': 1}, url, 400)
- # post sibling steps
- self.post_exp_process([exp], {}, 3)
- self.post_exp_process([exp], {'kept_steps': [1], 'new_top_step': 3}, 1)
- exp.lib_set('ProcessStep',
- [exp.procstep_as_dict(2, owner_id=1, step_process_id=3)])
- step_nodes += [exp.stepnode_as_dict(step_id=2, proc_id=3)]
- self.check_json_get(url, exp)
- # # post implicit sub-step via post to proc2
- self.post_exp_process([exp], {}, 4)
- self.post_exp_process([exp], {'step_of': [1], 'new_top_step': 4}, 2)
- exp.lib_set('ProcessStep',
- [exp.procstep_as_dict(3, owner_id=2, step_process_id=4)])
- step_nodes[0]['steps'] = [
- exp.stepnode_as_dict(step_id=3, proc_id=4, is_explicit=False)]
- self.check_json_get(url, exp)
- # post explicit sub-step via post to proc1
- p = {'kept_steps': [1, 2], 'new_step_to_2': 4}
- self.post_exp_process([exp], p, 1)
- exp.lib_set('ProcessStep', [exp.procstep_as_dict(
- 4, owner_id=1, step_process_id=4, parent_step_id=2)])
- step_nodes[1]['steps'] = [
- exp.stepnode_as_dict(step_id=4, proc_id=4)]
- self.check_json_get(url, exp)
- # to ensure suppressed step nodes are hidden, add new step to proc4,
- # implicitly adding it as sub-step to the proc4 steps in proc1, but
- # suppress one of the proc4 occurences there, marking its
- # .is_suppressed *and* hiding the new step below it
- p = {'kept_steps': [1, 2, 4], 'suppressed_steps': [3]}
- self.post_exp_process([exp], {'step_of': [4]}, 5)
- self.post_exp_process([exp], p, 1)
- exp.lib_set('ProcessStep',
- [exp.procstep_as_dict(5, owner_id=4, step_process_id=5)])
- assert isinstance(step_nodes[0]['steps'], list)
- assert isinstance(step_nodes[1]['steps'], list)
- step_nodes[0]['steps'][0]['is_suppressed'] = True
- step_nodes[1]['steps'][0]['steps'] = [
- exp.stepnode_as_dict(step_id=5, proc_id=5, is_explicit=False)]
- self.check_json_get(url, exp)
- # ensure implicit steps' non-top explicit sub-steps are shown
- self.post_exp_process([exp], {}, 6)
- self.post_exp_process([exp], {'kept_steps': [5], 'step_of': [1, 2],
- 'new_step_to_5': 6}, 4)
- exp.lib_set('ProcessStep', [exp.procstep_as_dict(
- 6, owner_id=4, parent_step_id=5, step_process_id=6)])
- step_nodes[1]['steps'][0]['steps'][0]['steps'] = [
- exp.stepnode_as_dict(step_id=6, proc_id=6, is_explicit=False)]
- self.check_json_get(url, exp)
- # try to post sub-step to non-existing sub-step, expect it to become
- # top-level step instead
- p['new_step_to_9'] = 5
- self.post_exp_process([exp], p, 1)
- exp.lib_set('ProcessStep',
- [exp.procstep_as_dict(7, owner_id=1, step_process_id=5)])
- step_nodes += [
- exp.stepnode_as_dict(step_id=7, proc_id=5)]
- self.check_json_get(url, exp)
- del p['new_step_to_9']
- assert isinstance(p['kept_steps'], list)
- p['kept_steps'] += [7]
- # try to post sub-step to implicit sub-step, expect same result
- p['new_step_to_5'] = 5
- self.post_exp_process([exp], p, 1)
- exp.lib_set('ProcessStep',
- [exp.procstep_as_dict(8, owner_id=1, step_process_id=5)])
- step_nodes += [
- exp.stepnode_as_dict(step_id=8, proc_id=5)]
- self.check_json_get(url, exp)
- del p['new_step_to_5']
- p['kept_steps'] += [8]
- # post sub-step to explicit sub-step with implicit sub-step of same
- # step process ID, expect it to eliminate/replace implicit sub-step
- p['new_step_to_4'] = 5
- self.post_exp_process([exp], p, 1)
- step_nodes[1]['steps'][0]['steps'][0] = exp.stepnode_as_dict(
- step_id=9, proc_id=5)
- exp.lib_set('ProcessStep', [exp.procstep_as_dict(
- 9, owner_id=1, parent_step_id=4, step_process_id=5)])
- self.check_json_get(url, exp)
- del p['new_step_to_4']
- p['kept_steps'] += [9]
- # fail on single-step recursion via top step
- self.post_exp_process([exp], {}, 7)
- self.post_exp_process([exp], {'new_top_step': 1}, 7)
- exp.lib_set('ProcessStep', [exp.procstep_as_dict(
- 10, owner_id=7, step_process_id=1)])
- p['step_of'] = [7]
- self.check_post(p_min | p | {'new_top_step': 7}, url, 400)
- # fail on double-step recursion via top step
- self.post_exp_process([exp], {}, 8)
- self.post_exp_process([exp], {'new_top_step': 7}, 8)
- exp.lib_set('ProcessStep', [exp.procstep_as_dict(
- 11, owner_id=8, step_process_id=7)])
- self.check_post(p_min | p | {'new_top_step': 8}, url, 400)
- # fail on single- and double-step recursion via explicit sub-step
- self.check_post(p_min | p | {'new_step_to_8': 7}, url, 400)
- self.check_post(p_min | p | {'new_step_to_8': 8}, url, 400)
-
- def test_fail_GET_process(self) -> None:
- """Test invalid GET /process params."""
- # check for invalid IDs
- self.check_get_defaults('/process')
- # check we catch invalid base64
- self.check_get('/process?title_b64=foo', 400)
- # check failure on references to unknown processes; we create Process
- # of ID=1 here so we know the 404 comes from step_to=2 etc. (that tie
- # the Process displayed by /process to others), not from not finding
- # the main Process itself
- self.post_exp_process([], {}, 1)
- self.check_get('/process?id=1&step_to=2', 404)
- self.check_get('/process?id=1&has_step=2', 404)
-
- def test_GET_processes(self) -> None:
- """Test GET /processes."""
- # pylint: disable=too-many-statements
- # test empty result on empty DB, default-settings on empty params
- exp = ExpectedGetProcesses()
- self.check_json_get('/processes', exp)
- # test on meaningless non-empty params (incl. entirely un-used key),
- # that 'sort_by' default to 'title' (even if set to something else, as
- # long as without handler) and 'pattern' get preserved
- exp.set('pattern', 'bar')
- url = '/processes?sort_by=foo&pattern=bar&foo=x'
- self.check_json_get(url, exp)
- # test non-empty result, automatic (positive) sorting by title
- for i, t in enumerate([('foo', 'oof', 1.0, []),
- ('bar', 'rab', 1.1, [1]),
- ('baz', 'zab', 0.9, [1, 2])]):
- payload = {'title': t[0], 'description': t[1], 'effort': t[2],
- 'new_top_step': t[3]}
- self.post_exp_process([exp], payload, i+1)
- exp.lib_set('ProcessStep',
- [exp.procstep_as_dict(1, owner_id=2, step_process_id=1),
- exp.procstep_as_dict(2, owner_id=3, step_process_id=1),
- exp.procstep_as_dict(3, owner_id=3, step_process_id=2)])
- exp.set('pattern', '')
- self.check_filter(exp, 'processes', 'sort_by', 'title', [2, 3, 1])
- # test other sortings
- self.check_filter(exp, 'processes', 'sort_by', '-title', [1, 3, 2])
- self.check_filter(exp, 'processes', 'sort_by', 'effort', [3, 1, 2])
- self.check_filter(exp, 'processes', 'sort_by', '-effort', [2, 1, 3])
- self.check_filter(exp, 'processes', 'sort_by', 'steps', [1, 2, 3])
- self.check_filter(exp, 'processes', 'sort_by', '-steps', [3, 2, 1])
- self.check_filter(exp, 'processes', 'sort_by', 'owners', [3, 2, 1])
- self.check_filter(exp, 'processes', 'sort_by', '-owners', [1, 2, 3])
- # test pattern matching on title
- exp.set('sort_by', 'title')
- exp.lib_del('Process', 1)
- self.check_filter(exp, 'processes', 'pattern', 'ba', [2, 3])
- # test pattern matching on description
- exp.lib_wipe('Process')
- exp.lib_wipe('ProcessStep')
- self.post_exp_process([exp], {'description': 'oof', 'effort': 1.0}, 1)
- self.check_filter(exp, 'processes', 'pattern', 'of', [1])
+++ /dev/null
-"""Test Todos module."""
-from typing import Any
-from datetime import date as dt_date, timedelta
-from tests.utils import (TestCaseSansDB, TestCaseWithDB, TestCaseWithServer,
- Expected, date_and_day_id)
-from plomtask.todos import Todo
-from plomtask.processes import Process
-from plomtask.exceptions import BadFormatException, HandledException
-
-
-class TestsWithDB(TestCaseWithDB, TestCaseSansDB):
- """Tests requiring DB, but not server setup.
-
- NB: We subclass TestCaseSansDB too, to run any tests there that due to any
- Todo requiring a _saved_ Process wouldn't run without a DB.
- """
- checked_class = Todo
- default_init_kwargs = {'process': None, 'is_done': False, 'day_id': 1}
-
- def setUp(self) -> None:
- super().setUp()
- self.proc = Process(None)
- self.proc.save(self.db_conn)
- self.default_init_kwargs['process'] = self.proc
-
- def test_Todo_by_date(self) -> None:
- """Test findability of Todos by date."""
- date_1, day_id_1 = date_and_day_id(1)
- date_2, _ = date_and_day_id(2)
- t1 = Todo(None, self.proc, False, day_id_1)
- t1.save(self.db_conn)
- t2 = Todo(None, self.proc, False, day_id_1)
- t2.save(self.db_conn)
- self.assertEqual(Todo.by_date(self.db_conn, date_1), [t1, t2])
- self.assertEqual(Todo.by_date(self.db_conn, date_2), [])
- with self.assertRaises(BadFormatException):
- self.assertEqual(Todo.by_date(self.db_conn, 'foo'), [])
-
- def test_Todo_by_date_range_with_limits(self) -> None:
- """Test .by_date_range_with_limits."""
- # pylint: disable=too-many-locals
- f = Todo.by_date_range_with_limits
- # check illegal ranges
- legal_range = ('yesterday', 'tomorrow')
- for i in [0, 1]:
- for bad_date in ['foo', '2024-02-30', '2024-01-01 12:00:00']:
- date_range_l = list(legal_range[:])
- date_range_l[i] = bad_date
- with self.assertRaises(HandledException):
- f(self.db_conn, (date_range_l[0], date_range_l[1]))
- # check empty, translation of 'yesterday' and 'tomorrow'
- items, start, end = f(self.db_conn, legal_range)
- self.assertEqual(items, [])
- dt_today = dt_date.today()
- dt_yesterday = dt_today + timedelta(days=-1)
- dt_tomorrow = dt_today + timedelta(days=+1)
- self.assertEqual(start, dt_yesterday.isoformat())
- self.assertEqual(end, dt_tomorrow.isoformat())
- # prepare dated items for non-empty results
- kwargs = self.default_init_kwargs.copy()
- todos = []
- dates_and_day_ids = [date_and_day_id(i) for i in range(5)]
- for day_id in [t[1] for t in dates_and_day_ids[1:-1]]:
- kwargs['day_id'] = day_id
- todos += [Todo(None, **kwargs)]
- # check ranges still empty before saving
- date_range = (dates_and_day_ids[1][0], dates_and_day_ids[-2][0])
- self.assertEqual(f(self.db_conn, date_range)[0], [])
- # check all objs displayed within interval
- for todo in todos:
- todo.save(self.db_conn)
- self.assertEqual(f(self.db_conn, date_range)[0], todos)
- # check that only displayed what exists within interval
- date_range = (dates_and_day_ids[1][0], dates_and_day_ids[-3][0])
- expected = [todos[0], todos[1]]
- self.assertEqual(f(self.db_conn, date_range)[0], expected)
- date_range = (dates_and_day_ids[-2][0], dates_and_day_ids[-1][0])
- expected = [todos[2]]
- self.assertEqual(f(self.db_conn, date_range)[0], expected)
- # check that inverted interval displays nothing
- date_range = (dates_and_day_ids[-1][0], dates_and_day_ids[0][0])
- self.assertEqual(f(self.db_conn, date_range)[0], [])
- # check that "today" is interpreted, and single-element interval
- kwargs['day_id'] = (dt_today - dt_date(2000, 1, 1)).days
- todo_today = Todo(None, **kwargs)
- todo_today.save(self.db_conn)
- date_range = ('today', 'today')
- items, start, end = f(self.db_conn, date_range)
- self.assertEqual(start, dt_today.isoformat())
- self.assertEqual(start, end)
- self.assertEqual(items, [todo_today])
-
- def test_Todo_children(self) -> None:
- """Test Todo.children relations."""
- todo_1 = Todo(None, self.proc, False, 1)
- todo_2 = Todo(None, self.proc, False, 1)
- todo_2.save(self.db_conn)
- # check un-saved Todo cannot parent
- with self.assertRaises(HandledException):
- todo_1.add_child(todo_2)
- todo_1.save(self.db_conn)
- todo_3 = Todo(None, self.proc, False, 1)
- # check un-saved Todo cannot be parented
- with self.assertRaises(HandledException):
- todo_1.add_child(todo_3)
-
-
-class ExpectedGetTodo(Expected):
- """Builder of expectations for GET /todo."""
-
- def __init__(self,
- todo_id: int,
- *args: Any, **kwargs: Any) -> None:
- self._fields = {'todo': todo_id,
- 'steps_todo_to_process': []}
- super().__init__(*args, **kwargs)
-
- def recalc(self) -> None:
- """Update internal dictionary by subclass-specific rules."""
-
- def walk_steps(step: dict[str, Any]) -> None:
- if not step['todo']:
- proc_id = step['process']
- cands = self.as_ids(
- [t for t in todos if proc_id == t['process_id']
- and t['id'] in self._fields['todo_candidates']])
- self._fields['adoption_candidates_for'][str(proc_id)] = cands
- for child in step['children']:
- walk_steps(child)
-
- super().recalc()
- self.lib_wipe('Day')
- todos = self.lib_all('Todo')
- procs = self.lib_all('Process')
- conds = self.lib_all('Condition')
- self._fields['todo_candidates'] = self.as_ids(
- [t for t in todos if t['id'] != self._fields['todo']])
- self._fields['process_candidates'] = self.as_ids(procs)
- self._fields['condition_candidates'] = self.as_ids(conds)
- self._fields['adoption_candidates_for'] = {}
- for step in self._fields['steps_todo_to_process']:
- walk_steps(step)
-
- @staticmethod
- def step_as_dict(node_id: int,
- process: int | None = None,
- todo: int | None = None,
- fillable: bool = False,
- children: None | list[dict[str, object]] = None
- ) -> dict[str, object]:
- """Return JSON of TodoOrProcStepsNode to expect."""
- return {'node_id': node_id,
- 'children': children if children is not None else [],
- 'process': process,
- 'fillable': fillable,
- 'todo': todo}
-
-
-class TestsWithServer(TestCaseWithServer):
- """Tests against our HTTP server/handler (and database)."""
- checked_class = Todo
-
- def test_basic_fail_POST_todo(self) -> None:
- """Test basic malformed/illegal POST /todo requests."""
- self.post_exp_process([], {}, 1)
- # test we cannot just POST into non-existing Todo
- self.check_post({}, '/todo', 404)
- self.check_post({}, '/todo?id=FOO', 400)
- self.check_post({}, '/todo?id=0', 400)
- self.check_post({}, '/todo?id=1', 404)
- # test malformed values on existing Todo
- self.post_exp_day([], {'new_todo': [1]})
- for name in ['adopt', 'effort', 'make_full', 'make_empty',
- 'conditions', 'disables', 'blockers', 'enables']:
- self.check_post({name: 'x'}, '/todo?id=1', 400, '/todo')
- for prefix in ['make_', '']:
- for suffix in ['', 'x', '1.1']:
- self.check_post({'step_filler_to_1': [f'{prefix}{suffix}']},
- '/todo?id=1', 400, '/todo')
- for suffix in ['', 'x', '1.1']:
- self.check_post({'step_filler_to_{suffix}': ['1']},
- '/todo?id=1', 400, '/todo')
-
- def test_basic_POST_todo(self) -> None:
- """Test basic POST /todo manipulations."""
- exp = ExpectedGetTodo(1)
- self.post_exp_process([exp], {'calendarize': 0}, 1)
- self.post_exp_day([exp], {'new_todo': [1]})
- # test posting naked entity at first changes nothing
- self.check_json_get('/todo?id=1', exp)
- self.check_post({}, '/todo?id=1')
- self.check_json_get('/todo?id=1', exp)
- # test posting doneness, comment, calendarization, effort
- todo_post = {'is_done': 1, 'calendarize': 1,
- 'comment': 'foo', 'effort': 2.3}
- self.post_exp_todo([exp], todo_post, 1)
- self.check_json_get('/todo?id=1', exp)
- # test implicitly un-setting comment/calendarize/is_done by empty post
- self.post_exp_todo([exp], {}, 1)
- self.check_json_get('/todo?id=1', exp)
- # test effort post can be explicitly unset by "effort":"" post
- self.check_post({'effort': ''}, '/todo?id=1')
- exp.lib_get('Todo', 1)['effort'] = None
- self.check_json_get('/todo?id=1', exp)
- # test Condition posts
- c1_post = {'title': 'foo', 'description': 'oof', 'is_active': 0}
- c2_post = {'title': 'bar', 'description': 'rab', 'is_active': 1}
- self.post_exp_cond([exp], c1_post, 1)
- self.post_exp_cond([exp], c2_post, 2)
- self.check_json_get('/todo?id=1', exp)
- todo_post = {'conditions': [1], 'disables': [1],
- 'blockers': [2], 'enables': [2]}
- self.post_exp_todo([exp], todo_post, 1)
- self.check_json_get('/todo?id=1', exp)
-
- def test_POST_todo_deletion(self) -> None:
- """Test deletions via POST /todo."""
- exp = ExpectedGetTodo(1)
- self.post_exp_process([exp], {}, 1)
- # test failure of deletion on non-existing Todo
- self.check_post({'delete': ''}, '/todo?id=2', 404, '/')
- # test deletion of existing Todo
- self.post_exp_day([exp], {'new_todo': [1]})
- self.check_post({'delete': ''}, '/todo?id=1', 302, '/')
- self.check_get('/todo?id=1', 404)
- exp.lib_del('Todo', 1)
- # test deletion of adopted Todo
- self.post_exp_day([exp], {'new_todo': [1]})
- self.post_exp_day([exp], {'new_todo': [1]})
- self.check_post({'adopt': 2}, '/todo?id=1')
- self.check_post({'delete': ''}, '/todo?id=2', 302, '/')
- exp.lib_del('Todo', 2)
- self.check_get('/todo?id=2', 404)
- self.check_json_get('/todo?id=1', exp)
- # test deletion of adopting Todo
- self.post_exp_day([exp], {'new_todo': [1]})
- self.check_post({'adopt': 2}, '/todo?id=1')
- self.check_post({'delete': ''}, '/todo?id=1', 302, '/')
- exp.set('todo', 2)
- exp.lib_del('Todo', 1)
- self.check_json_get('/todo?id=2', exp)
- # test cannot delete Todo with comment or effort
- self.check_post({'comment': 'foo'}, '/todo?id=2')
- self.check_post({'delete': ''}, '/todo?id=2', 500, '/')
- self.check_post({'effort': 5}, '/todo?id=2')
- self.check_post({'delete': ''}, '/todo?id=2', 500, '/')
- # test deletion via effort < 0, but only if deletable
- self.check_post({'effort': -1, 'comment': 'foo'}, '/todo?id=2')
- self.check_post({}, '/todo?id=2')
- self.check_get('/todo?id=2', 404)
-
- def test_POST_todo_adoption(self) -> None:
- """Test adoption via POST /todo with "adopt"."""
- # post two Todos to Day, have first adopt second
- exp = ExpectedGetTodo(1)
- self.post_exp_process([exp], {}, 1)
- self.post_exp_day([exp], {'new_todo': [1]})
- self.post_exp_day([exp], {'new_todo': [1]})
- self.post_exp_todo([exp], {'adopt': 2}, 1)
- exp.set('steps_todo_to_process', [
- exp.step_as_dict(node_id=1, process=None, todo=2)])
- self.check_json_get('/todo?id=1', exp)
- # test Todo un-adopting by just not sending an adopt
- self.post_exp_todo([exp], {}, 1)
- exp.set('steps_todo_to_process', [])
- self.check_json_get('/todo?id=1', exp)
- # test fail on trying to adopt non-existing Todo
- self.check_post({'adopt': 3}, '/todo?id=1', 404)
- # test cannot self-adopt
- self.check_post({'adopt': 1}, '/todo?id=1', 400)
- # test cannot do 1-step circular adoption
- self.post_exp_todo([exp], {'adopt': 1}, 2)
- self.check_post({'adopt': 2}, '/todo?id=1', 400)
- # test cannot do 2-step circular adoption
- self.post_exp_day([exp], {'new_todo': [1]})
- self.post_exp_todo([exp], {'adopt': 2}, 3)
- self.check_post({'adopt': 3}, '/todo?id=1', 400)
- # test can adopt Todo into ProcessStep chain via its Process (with key
- # 'step_filler' equivalent to single-element 'adopt' if intable)
- self.post_exp_process([exp], {}, 2)
- self.post_exp_process([exp], {}, 3)
- self.post_exp_process([exp], {'new_top_step': [2, 3]}, 1)
- exp.lib_set('ProcessStep',
- [exp.procstep_as_dict(1, owner_id=1, step_process_id=2),
- exp.procstep_as_dict(2, owner_id=1, step_process_id=3)])
- slots = [
- exp.step_as_dict(node_id=1, process=2, todo=None, fillable=True),
- exp.step_as_dict(node_id=2, process=3, todo=None, fillable=True)]
- exp.set('steps_todo_to_process', slots)
- self.post_exp_day([exp], {'new_todo': [2]})
- self.post_exp_day([exp], {'new_todo': [3]})
- self.check_json_get('/todo?id=1', exp)
- self.post_exp_todo([exp], {'step_filler_to_1': 5, 'adopt': [4]}, 1)
- exp.lib_get('Todo', 1)['children'] += [5]
- slots[0]['todo'] = 4
- slots[1]['todo'] = 5
- self.check_json_get('/todo?id=1', exp)
- # test 'ignore' values for 'step_filler' are ignored, and intable
- # 'step_filler' values are interchangeable with those of 'adopt'
- todo_post = {'adopt': 5, 'step_filler_to_1': ['ignore', 4]}
- self.check_post(todo_post, '/todo?id=1')
- self.check_json_get('/todo?id=1', exp)
- # test cannot adopt into non-top-level elements of chain, instead
- # creating new top-level steps when adopting of respective Process
- self.post_exp_process([exp], {}, 4)
- self.post_exp_process([exp], {'new_top_step': 4, 'step_of': [1]}, 3)
- exp.lib_set('ProcessStep',
- [exp.procstep_as_dict(3, owner_id=3, step_process_id=4)])
- slots[1]['children'] = [exp.step_as_dict(
- node_id=3, process=4, todo=None, fillable=True)]
- self.post_exp_day([exp], {'new_todo': [4]})
- self.post_exp_todo([exp], {'adopt': [4, 5, 6]}, 1)
- slots += [exp.step_as_dict(
- node_id=4, process=None, todo=6, fillable=False)]
- self.check_json_get('/todo?id=1', exp)
-
- def test_POST_todo_make_empty(self) -> None:
- """Test creation via POST /todo "step_filler_to"/"make"."""
- # create chain of Processes
- exp = ExpectedGetTodo(1)
- self.post_exp_process([exp], {}, 1)
- for i in range(1, 4):
- self.post_exp_process([exp], {'new_top_step': i}, i+1)
- exp.lib_set('ProcessStep',
- [exp.procstep_as_dict(1, owner_id=2, step_process_id=1),
- exp.procstep_as_dict(2, owner_id=3, step_process_id=2),
- exp.procstep_as_dict(3, owner_id=4, step_process_id=3)])
- # post (childless) Todo of chain end, then make empty on next in line
- self.post_exp_day([exp], {'new_todo': [4]})
- slots = [exp.step_as_dict(
- node_id=1, process=3, todo=None, fillable=True,
- children=[exp.step_as_dict(
- node_id=2, process=2, todo=None, fillable=False,
- children=[exp.step_as_dict(
- node_id=3, process=1, todo=None, fillable=False)])])]
- exp.set('steps_todo_to_process', slots)
- self.check_json_get('/todo?id=1', exp)
- self.check_post({'step_filler_to_1': 'make_3'}, '/todo?id=1')
- exp.set_todo_from_post(2, {'process_id': 3})
- exp.set_todo_from_post(1, {'process_id': 4, 'children': [2]})
- slots[0]['todo'] = 2
- assert isinstance(slots[0]['children'], list)
- slots[0]['children'][0]['fillable'] = True
- self.check_json_get('/todo?id=1', exp)
- # make new top-level Todo without chain implied by its Process
- self.check_post({'make_empty': 2, 'adopt': [2]}, '/todo?id=1')
- exp.set_todo_from_post(3, {'process_id': 2})
- exp.set_todo_from_post(1, {'process_id': 4, 'children': [2, 3]})
- slots += [exp.step_as_dict(
- node_id=4, process=None, todo=3, fillable=False)]
- self.check_json_get('/todo?id=1', exp)
- # fail on trying to call make_empty on non-existing Process
- self.check_post({'make_full': 5}, '/todo?id=1', 404)
-
- def test_GET_todo(self) -> None:
- """Test GET /todo response codes."""
- # test malformed or illegal parameter values
- self.check_get_defaults('/todo')
- # test all existing Processes are shown as available
- exp = ExpectedGetTodo(1)
- self.post_exp_process([exp], {}, 1)
- self.post_exp_day([exp], {'new_todo': [1]})
- self.post_exp_process([exp], {}, 2)
- self.check_json_get('/todo?id=1', exp)
- # test chain of Processes shown as potential step nodes
- self.post_exp_process([exp], {}, 3)
- self.post_exp_process([exp], {}, 4)
- self.post_exp_process([exp], {'new_top_step': 2}, 1)
- self.post_exp_process([exp], {'new_top_step': 3, 'step_of': [1]}, 2)
- self.post_exp_process([exp], {'new_top_step': 4, 'step_of': [2]}, 3)
- exp.lib_set('ProcessStep', [
- exp.procstep_as_dict(1, owner_id=1, step_process_id=2),
- exp.procstep_as_dict(2, owner_id=2, step_process_id=3),
- exp.procstep_as_dict(3, owner_id=3, step_process_id=4)])
- slots = [exp.step_as_dict(
- node_id=1, process=2, todo=None, fillable=True,
- children=[exp.step_as_dict(
- node_id=2, process=3, todo=None, fillable=False,
- children=[exp.step_as_dict(
- node_id=3, process=4, todo=None, fillable=False)])])]
- exp.set('steps_todo_to_process', slots)
- self.check_json_get('/todo?id=1', exp)
- # test display of parallel chains
- proc_steps_post = {'new_top_step': 4, 'kept_steps': [1, 3]}
- self.post_exp_process([], proc_steps_post, 1)
- exp.lib_set('ProcessStep', [
- exp.procstep_as_dict(4, owner_id=1, step_process_id=4)])
- slots += [exp.step_as_dict(
- node_id=4, process=4, todo=None, fillable=True)]
- self.check_json_get('/todo?id=1', exp)
-
- def test_POST_todo_doneness_relations(self) -> None:
- """Test Todo.is_done Condition, adoption relations for /todo POSTs."""
- self.post_exp_process([], {}, 1)
- # test Todo with adoptee can only be set done if adoptee is done too
- self.post_exp_day([], {'new_todo': [1]})
- self.post_exp_day([], {'new_todo': [1]})
- self.check_post({'adopt': 2, 'is_done': 1}, '/todo?id=1', 400)
- self.check_post({'is_done': 1}, '/todo?id=2')
- self.check_post({'adopt': 2, 'is_done': 1}, '/todo?id=1', 302)
- # test Todo cannot be set undone with adopted Todo not done yet
- self.check_post({'is_done': 0}, '/todo?id=2')
- self.check_post({'adopt': 2, 'is_done': 0}, '/todo?id=1', 400)
- # test unadoption relieves block
- self.check_post({'is_done': 0}, '/todo?id=1', 302)
- # test Condition being set or unset can block doneness setting
- c1_post = {'title': '', 'description': '', 'is_active': 0}
- c2_post = {'title': '', 'description': '', 'is_active': 1}
- self.check_post(c1_post, '/condition', redir='/condition?id=1')
- self.check_post(c2_post, '/condition', redir='/condition?id=2')
- self.check_post({'conditions': [1], 'is_done': 1}, '/todo?id=1', 400)
- self.check_post({'is_done': 1}, '/todo?id=1', 302)
- self.check_post({'is_done': 0}, '/todo?id=1', 302)
- self.check_post({'blockers': [2], 'is_done': 1}, '/todo?id=1', 400)
- self.check_post({'is_done': 1}, '/todo?id=1', 302)
- # test setting Todo doneness can set/un-set Conditions, but only on
- # doneness change, not by mere passive state
- self.check_post({'is_done': 0}, '/todo?id=2', 302)
- self.check_post({'enables': [1], 'is_done': 1}, '/todo?id=1')
- self.check_post({'conditions': [1], 'is_done': 1}, '/todo?id=2', 400)
- self.check_post({'enables': [1], 'is_done': 0}, '/todo?id=1')
- self.check_post({'enables': [1], 'is_done': 1}, '/todo?id=1')
- self.check_post({'conditions': [1], 'is_done': 1}, '/todo?id=2')
- self.check_post({'blockers': [1], 'is_done': 0}, '/todo?id=2', 400)
- self.check_post({'disables': [1], 'is_done': 1}, '/todo?id=1')
- self.check_post({'blockers': [1], 'is_done': 0}, '/todo?id=2', 400)
- self.check_post({'disables': [1]}, '/todo?id=1')
- self.check_post({'disables': [1], 'is_done': 1}, '/todo?id=1')
- self.check_post({'blockers': [1]}, '/todo?id=2')
+++ /dev/null
-"""Shared test utilities."""
-# pylint: disable=too-many-lines
-from __future__ import annotations
-from datetime import datetime, date as dt_date, timedelta
-from unittest import TestCase
-from typing import Mapping, Any, Callable
-from threading import Thread
-from pathlib import Path
-from http.client import HTTPConnection
-from time import sleep
-from json import loads as json_loads, dumps as json_dumps
-from urllib.parse import urlencode
-from uuid import uuid4
-from os import remove as remove_file
-from pprint import pprint
-from plomtask.db import DatabaseFile, DatabaseConnection
-from plomtask.http import TaskHandler, TaskServer
-from plomtask.processes import Process, ProcessStep
-from plomtask.conditions import Condition
-from plomtask.days import Day
-from plomtask.todos import Todo
-from plomtask.versioned_attributes import VersionedAttribute, TIMESTAMP_FMT
-from plomtask.exceptions import NotFoundException, HandledException
-
-
-_VERSIONED_VALS: dict[str,
- list[str] | list[float]] = {'str': ['A', 'B'],
- 'float': [0.3, 1.1]}
-_VALID_TRUES = {True, 'True', 'true', '1', 'on'}
-
-
-def dt_date_from_day_id(day_id: int) -> dt_date:
- """Return datetime.date of adding day_id days to 2000-01-01."""
- return dt_date(2000, 1, 1) + timedelta(days=day_id)
-
-
-def date_and_day_id(day_id: int) -> tuple[str, int]:
- """Interpet day_id as n of days since millennium, return (date, day_id)."""
- return dt_date_from_day_id(day_id).isoformat(), day_id
-
-
-class TestCaseAugmented(TestCase):
- """Tester core providing helpful basic internal decorators and methods."""
- checked_class: Any
- default_init_kwargs: dict[str, Any] = {}
-
- @staticmethod
- def _run_on_versioned_attributes(f: Callable[..., None]
- ) -> Callable[..., None]:
- def wrapper(self: TestCase) -> None:
- assert isinstance(self, TestCaseAugmented)
- for attr_name in self.checked_class.to_save_versioned():
- default = self.checked_class.versioned_defaults[attr_name]
- owner = self.checked_class(None, **self.default_init_kwargs)
- attr = getattr(owner, attr_name)
- to_set = _VERSIONED_VALS[attr.value_type_name]
- f(self, owner, attr_name, attr, default, to_set)
- return wrapper
-
- @classmethod
- def _run_if_sans_db(cls, f: Callable[..., None]) -> Callable[..., None]:
- def wrapper(self: TestCaseSansDB) -> None:
- if issubclass(cls, TestCaseSansDB):
- f(self)
- return wrapper
-
- @classmethod
- def _run_if_with_db_but_not_server(cls,
- f: Callable[..., None]
- ) -> Callable[..., None]:
- def wrapper(self: TestCaseWithDB) -> None:
- if issubclass(cls, TestCaseWithDB) and\
- not issubclass(cls, TestCaseWithServer):
- f(self)
- return wrapper
-
- @classmethod
- def _make_from_defaults(cls, id_: int | None) -> Any:
- return cls.checked_class(id_, **cls.default_init_kwargs)
-
-
-class TestCaseSansDB(TestCaseAugmented):
- """Tests requiring no DB setup."""
- _legal_ids: list[int] = [1, 5]
- _illegal_ids: list[int] = [0]
-
- @TestCaseAugmented._run_if_sans_db
- def test_id_validation(self) -> None:
- """Test .id_ validation/setting."""
- for id_ in self._illegal_ids:
- with self.assertRaises(HandledException):
- self._make_from_defaults(id_)
- for id_ in self._legal_ids:
- obj = self._make_from_defaults(id_)
- self.assertEqual(obj.id_, id_)
-
- @TestCaseAugmented._run_if_sans_db
- @TestCaseAugmented._run_on_versioned_attributes
- def test_versioned_set(self,
- _: Any,
- __: str,
- attr: VersionedAttribute,
- default: str | float,
- to_set: list[str] | list[float]
- ) -> None:
- """Test VersionedAttribute.set() behaves as expected."""
- attr.set(default)
- self.assertEqual(list(attr.history.values()), [default])
- # check same value does not get set twice in a row,
- # and that not even its timestamp get updated
- timestamp = list(attr.history.keys())[0]
- attr.set(default)
- self.assertEqual(list(attr.history.values()), [default])
- self.assertEqual(list(attr.history.keys())[0], timestamp)
- # check that different value _will_ be set/added
- attr.set(to_set[0])
- timesorted_vals = [attr.history[t] for
- t in sorted(attr.history.keys())]
- expected = [default, to_set[0]]
- self.assertEqual(timesorted_vals, expected)
- # check that a previously used value can be set if not most recent
- attr.set(default)
- timesorted_vals = [attr.history[t] for
- t in sorted(attr.history.keys())]
- expected = [default, to_set[0], default]
- self.assertEqual(timesorted_vals, expected)
- # again check for same value not being set twice in a row, even for
- # later items
- attr.set(to_set[1])
- timesorted_vals = [attr.history[t] for
- t in sorted(attr.history.keys())]
- expected = [default, to_set[0], default, to_set[1]]
- self.assertEqual(timesorted_vals, expected)
- attr.set(to_set[1])
- self.assertEqual(timesorted_vals, expected)
-
- @TestCaseAugmented._run_if_sans_db
- @TestCaseAugmented._run_on_versioned_attributes
- def test_versioned_newest(self,
- _: Any,
- __: str,
- attr: VersionedAttribute,
- default: str | float,
- to_set: list[str] | list[float]
- ) -> None:
- """Test VersionedAttribute.newest."""
- # check .newest on empty history returns .default
- self.assertEqual(attr.newest, default)
- # check newest element always returned
- for v in [to_set[0], to_set[1]]:
- attr.set(v)
- self.assertEqual(attr.newest, v)
- # check newest element returned even if also early value
- attr.set(default)
- self.assertEqual(attr.newest, default)
-
- @TestCaseAugmented._run_if_sans_db
- @TestCaseAugmented._run_on_versioned_attributes
- def test_versioned_at(self,
- _: Any,
- __: str,
- attr: VersionedAttribute,
- default: str | float,
- to_set: list[str] | list[float]
- ) -> None:
- """Test .at() returns values nearest to queried time, or default."""
- # check .at() return default on empty history
- timestamp_a = datetime.now().strftime(TIMESTAMP_FMT)
- self.assertEqual(attr.at(timestamp_a), default)
- # check value exactly at timestamp returned
- attr.set(to_set[0])
- timestamp_b = list(attr.history.keys())[0]
- self.assertEqual(attr.at(timestamp_b), to_set[0])
- # check earliest value returned if exists, rather than default
- self.assertEqual(attr.at(timestamp_a), to_set[0])
- # check reverts to previous value for timestamps not indexed
- sleep(0.00001)
- timestamp_between = datetime.now().strftime(TIMESTAMP_FMT)
- sleep(0.00001)
- attr.set(to_set[1])
- timestamp_c = sorted(attr.history.keys())[-1]
- self.assertEqual(attr.at(timestamp_c), to_set[1])
- self.assertEqual(attr.at(timestamp_between), to_set[0])
- sleep(0.00001)
- timestamp_after_c = datetime.now().strftime(TIMESTAMP_FMT)
- self.assertEqual(attr.at(timestamp_after_c), to_set[1])
-
-
-class TestCaseWithDB(TestCaseAugmented):
- """Module tests not requiring DB setup."""
- _default_ids: tuple[int, int, int] = (1, 2, 3)
-
- def setUp(self) -> None:
- Condition.empty_cache()
- Day.empty_cache()
- Process.empty_cache()
- ProcessStep.empty_cache()
- Todo.empty_cache()
- db_path = Path(f'test_db:{uuid4()}')
- DatabaseFile.create(db_path)
- self.db_file = DatabaseFile(db_path)
- self.db_conn = DatabaseConnection(self.db_file)
-
- def tearDown(self) -> None:
- self.db_conn.close()
- remove_file(self.db_file.path)
-
- def _load_from_db(self, id_: int) -> list[object]:
- db_found: list[object] = []
- for row in self.db_conn.row_where(self.checked_class.table_name,
- 'id', id_):
- db_found += [self.checked_class.from_table_row(self.db_conn,
- row)]
- return db_found
-
- def _change_obj(self, obj: object) -> str:
- attr_name: str = self.checked_class.to_save_simples[-1]
- attr = getattr(obj, attr_name)
- new_attr: str | int | float | bool
- if isinstance(attr, (int, float)):
- new_attr = attr + 1
- elif isinstance(attr, str):
- new_attr = attr + '_'
- elif isinstance(attr, bool):
- new_attr = not attr
- setattr(obj, attr_name, new_attr)
- return attr_name
-
- def check_identity_with_cache_and_db(self, content: list[Any]) -> None:
- """Test both cache and DB equal content."""
- expected_cache = {}
- for item in content:
- expected_cache[item.id_] = item
- self.assertEqual(self.checked_class.get_cache(), expected_cache)
- hashes_content = [hash(x) for x in content]
- db_found: list[Any] = []
- for item in content:
- db_found += self._load_from_db(item.id_)
- hashes_db_found = [hash(x) for x in db_found]
- self.assertEqual(sorted(hashes_content), sorted(hashes_db_found))
-
- @TestCaseAugmented._run_if_with_db_but_not_server
- @TestCaseAugmented._run_on_versioned_attributes
- def test_saving_versioned_attributes(self,
- owner: Any,
- attr_name: str,
- attr: VersionedAttribute,
- _: str | float,
- to_set: list[str] | list[float]
- ) -> None:
- """Test storage and initialization of versioned attributes."""
-
- def retrieve_attr_vals(attr: VersionedAttribute) -> list[object]:
- attr_vals_saved: list[object] = []
- for row in self.db_conn.row_where(attr.table_name, 'parent',
- owner.id_):
- attr_vals_saved += [row[2]]
- return attr_vals_saved
-
- attr.set(to_set[0])
- # check that without attr.save() no rows in DB
- rows = self.db_conn.row_where(attr.table_name, 'parent', owner.id_)
- self.assertEqual([], rows)
- # fail saving attributes on non-saved owner
- with self.assertRaises(NotFoundException):
- attr.save(self.db_conn)
- # check owner.save() created entries as expected in attr table
- owner.save(self.db_conn)
- attr_vals_saved = retrieve_attr_vals(attr)
- self.assertEqual([to_set[0]], attr_vals_saved)
- # check changing attr val without save affects owner in memory …
- attr.set(to_set[1])
- cmp_attr = getattr(owner, attr_name)
- self.assertEqual(to_set, list(cmp_attr.history.values()))
- self.assertEqual(cmp_attr.history, attr.history)
- # … but does not yet affect DB
- attr_vals_saved = retrieve_attr_vals(attr)
- self.assertEqual([to_set[0]], attr_vals_saved)
- # check individual attr.save also stores new val to DB
- attr.save(self.db_conn)
- attr_vals_saved = retrieve_attr_vals(attr)
- self.assertEqual(to_set, attr_vals_saved)
-
- @TestCaseAugmented._run_if_with_db_but_not_server
- def test_saving_and_caching(self) -> None:
- """Test effects of .cache() and .save()."""
- id1 = self._default_ids[0]
- # check failure to cache without ID (if None-ID input possible)
- obj0 = self._make_from_defaults(None)
- with self.assertRaises(HandledException):
- obj0.cache()
- # check mere object init itself doesn't even store in cache
- obj1 = self._make_from_defaults(id1)
- self.assertEqual(self.checked_class.get_cache(), {})
- # check .cache() fills cache, but not DB
- obj1.cache()
- self.assertEqual(self.checked_class.get_cache(), {id1: obj1})
- found_in_db = self._load_from_db(id1)
- self.assertEqual(found_in_db, [])
- # check .save() sets ID, updates cache, and fills DB
- # (expect ID to be set to id1, despite obj1 already having that as ID:
- # it's generated by cursor.lastrowid on the DB table, and with obj1
- # not written there, obj2 should get it first!)
- obj2 = self._make_from_defaults(None)
- obj2.save(self.db_conn)
- self.assertEqual(self.checked_class.get_cache(), {id1: obj2})
- # NB: we'll only compare hashes because obj2 itself disappears on
- # .from_table_row-triggered database reload
- obj2_hash = hash(obj2)
- found_in_db += self._load_from_db(id1)
- self.assertEqual([hash(o) for o in found_in_db], [obj2_hash])
- # check we cannot overwrite obj2 with obj1 despite its same ID,
- # since it has disappeared now
- with self.assertRaises(HandledException):
- obj1.save(self.db_conn)
-
- @TestCaseAugmented._run_if_with_db_but_not_server
- def test_by_id(self) -> None:
- """Test .by_id()."""
- id1, id2, _ = self._default_ids
- # check failure if not yet saved
- obj1 = self._make_from_defaults(id1)
- with self.assertRaises(NotFoundException):
- self.checked_class.by_id(self.db_conn, id1)
- # check identity of cached and retrieved
- obj1.cache()
- self.assertEqual(obj1, self.checked_class.by_id(self.db_conn, id1))
- # check identity of saved and retrieved
- obj2 = self._make_from_defaults(id2)
- obj2.save(self.db_conn)
- self.assertEqual(obj2, self.checked_class.by_id(self.db_conn, id2))
-
- @TestCaseAugmented._run_if_with_db_but_not_server
- def test_by_id_or_create(self) -> None:
- """Test .by_id_or_create."""
- # check .by_id_or_create fails if wrong class
- if not self.checked_class.can_create_by_id:
- with self.assertRaises(HandledException):
- self.checked_class.by_id_or_create(self.db_conn, None)
- return
- # check ID input of None creates, on saving, ID=1,2,…
- for n in range(2):
- item = self.checked_class.by_id_or_create(self.db_conn, None)
- self.assertEqual(item.id_, None)
- item.save(self.db_conn)
- self.assertEqual(item.id_, n+1)
- # check .by_id_or_create acts like normal instantiation (sans saving)
- id_ = self._default_ids[2]
- item = self.checked_class.by_id_or_create(self.db_conn, id_)
- self.assertEqual(item.id_, id_)
- with self.assertRaises(NotFoundException):
- self.checked_class.by_id(self.db_conn, item.id_)
- self.assertEqual(self.checked_class(item.id_), item)
-
- @TestCaseAugmented._run_if_with_db_but_not_server
- def test_from_table_row(self) -> None:
- """Test .from_table_row() properly reads in class directly from DB."""
- obj = self._make_from_defaults(self._default_ids[0])
- obj.save(self.db_conn)
- for row in self.db_conn.row_where(self.checked_class.table_name,
- 'id', obj.id_):
- # check .from_table_row reproduces state saved, no matter if obj
- # later changed (with caching even)
- # NB: we'll only compare hashes because obj itself disappears on
- # .from_table_row-triggered database reload
- hash_original = hash(obj)
- attr_name = self._change_obj(obj)
- obj.cache()
- to_cmp = getattr(obj, attr_name)
- retrieved = self.checked_class.from_table_row(self.db_conn, row)
- self.assertNotEqual(to_cmp, getattr(retrieved, attr_name))
- self.assertEqual(hash_original, hash(retrieved))
- # check cache contains what .from_table_row just produced
- self.assertEqual({retrieved.id_: retrieved},
- self.checked_class.get_cache())
-
- @TestCaseAugmented._run_if_with_db_but_not_server
- @TestCaseAugmented._run_on_versioned_attributes
- def test_versioned_history_from_row(self,
- owner: Any,
- _: str,
- attr: VersionedAttribute,
- default: str | float,
- to_set: list[str] | list[float]
- ) -> None:
- """"Test VersionedAttribute.history_from_row() knows its DB rows."""
- attr.set(to_set[0])
- attr.set(to_set[1])
- owner.save(self.db_conn)
- # make empty VersionedAttribute, fill from rows, compare to owner's
- for row in self.db_conn.row_where(owner.table_name, 'id', owner.id_):
- loaded_attr = VersionedAttribute(owner, attr.table_name, default)
- for row in self.db_conn.row_where(attr.table_name, 'parent',
- owner.id_):
- loaded_attr.history_from_row(row)
- self.assertEqual(len(attr.history.keys()),
- len(loaded_attr.history.keys()))
- for timestamp, value in attr.history.items():
- self.assertEqual(value, loaded_attr.history[timestamp])
-
- @TestCaseAugmented._run_if_with_db_but_not_server
- def test_all(self) -> None:
- """Test .all() and its relation to cache and savings."""
- id1, id2, id3 = self._default_ids
- item1 = self._make_from_defaults(id1)
- item2 = self._make_from_defaults(id2)
- item3 = self._make_from_defaults(id3)
- # check .all() returns empty list on un-cached items
- self.assertEqual(self.checked_class.all(self.db_conn), [])
- # check that all() shows only cached/saved items
- item1.cache()
- item3.save(self.db_conn)
- self.assertEqual(sorted(self.checked_class.all(self.db_conn)),
- sorted([item1, item3]))
- item2.save(self.db_conn)
- self.assertEqual(sorted(self.checked_class.all(self.db_conn)),
- sorted([item1, item2, item3]))
-
- @TestCaseAugmented._run_if_with_db_but_not_server
- def test_singularity(self) -> None:
- """Test pointers made for single object keep pointing to it."""
- id1 = self._default_ids[0]
- obj = self._make_from_defaults(id1)
- obj.save(self.db_conn)
- # change object, expect retrieved through .by_id to carry change
- attr_name = self._change_obj(obj)
- new_attr = getattr(obj, attr_name)
- retrieved = self.checked_class.by_id(self.db_conn, id1)
- self.assertEqual(new_attr, getattr(retrieved, attr_name))
-
- @TestCaseAugmented._run_if_with_db_but_not_server
- @TestCaseAugmented._run_on_versioned_attributes
- def test_versioned_singularity(self,
- owner: Any,
- attr_name: str,
- attr: VersionedAttribute,
- _: str | float,
- to_set: list[str] | list[float]
- ) -> None:
- """Test singularity of VersionedAttributes on saving."""
- owner.save(self.db_conn)
- # change obj, expect retrieved through .by_id to carry change
- attr.set(to_set[0])
- retrieved = self.checked_class.by_id(self.db_conn, owner.id_)
- attr_retrieved = getattr(retrieved, attr_name)
- self.assertEqual(attr.history, attr_retrieved.history)
-
- @TestCaseAugmented._run_if_with_db_but_not_server
- def test_remove(self) -> None:
- """Test .remove() effects on DB and cache."""
- obj = self._make_from_defaults(self._default_ids[0])
- # check removal only works after saving
- with self.assertRaises(HandledException):
- obj.remove(self.db_conn)
- obj.save(self.db_conn)
- obj.remove(self.db_conn)
- # check access to obj fails after removal
- with self.assertRaises(HandledException):
- print(obj.id_)
- # check DB and cache now empty
- self.check_identity_with_cache_and_db([])
-
-
-class Expected:
- """Builder of (JSON-like) dict to compare against responses of test server.
-
- Collects all items and relations we expect expressed in the server's JSON
- responses and puts them into the proper json.dumps-friendly dict structure,
- accessibla via .as_dict, to compare them in TestsWithServer.check_json_get.
-
- On its own provides for .as_dict output only {"_library": …}, initialized
- from .__init__ and to be directly manipulated via the .lib* methods.
- Further structures of the expected response may be added and kept
- up-to-date by subclassing .__init__, .recalc, and .d.
-
- NB: Lots of expectations towards server behavior will be made explicit here
- (or in the subclasses) rather than in the actual TestCase methods' code.
- """
- _default_dict: dict[str, Any]
- _forced: dict[str, Any]
- _fields: dict[str, Any]
- _on_empty_make_temp: tuple[str, str]
-
- def __init__(self) -> None:
- for name in ['_default_dict', '_fields', '_forced']:
- if not hasattr(self, name):
- setattr(self, name, {})
- self._lib: dict[str, dict[int, dict[str, Any]]] = {}
- for k, v in self._default_dict.items():
- if k not in self._fields:
- self._fields[k] = v
-
- def recalc(self) -> None:
- """Update internal dictionary by subclass-specific rules."""
- todos = self.lib_all('Todo')
- for todo in todos:
- todo['parents'] = []
- for todo in todos:
- for child_id in todo['children']:
- self.lib_get('Todo', child_id)['parents'] += [todo['id']]
- todo['children'].sort()
- procsteps = self.lib_all('ProcessStep')
- procs = self.lib_all('Process')
- for proc in procs:
- proc['explicit_steps'] = [s['id'] for s in procsteps
- if s['owner_id'] == proc['id']]
-
- @property
- def as_dict(self) -> dict[str, Any]:
- """Return dict to compare against test server JSON responses."""
- make_temp = False
- if hasattr(self, '_on_empty_make_temp'):
- category, dicter = getattr(self, '_on_empty_make_temp')
- id_ = self._fields[category.lower()]
- make_temp = not bool(self.lib_get(category, id_))
- if make_temp:
- self.lib_set(category, [getattr(self, dicter)(id_)])
- self.recalc()
- d = {'_library': self._lib}
- for k, v in self._fields.items():
- # we expect everything sortable to be sorted
- if isinstance(v, list) and k not in self._forced:
- # NB: if we don't test for v being list, sorted() on an empty
- # dict may return an empty list
- try:
- v = sorted(v)
- except TypeError:
- pass
- d[k] = v
- for k, v in self._forced.items():
- d[k] = v
- if make_temp:
- json = json_dumps(d)
- id_ = id_ if id_ is not None else -1
- self.lib_del(category, id_)
- d = json_loads(json)
- return d
-
- def lib_get(self, category: str, id_: int) -> dict[str, Any]:
- """From library, return item of category and id_, or empty dict."""
- if category in self._lib and id_ in self._lib[category]:
- return self._lib[category][id_]
- return {}
-
- def lib_all(self, category: str) -> list[dict[str, Any]]:
- """From library, return items of category, or [] if none."""
- if category in self._lib:
- return list(self._lib[category].values())
- return []
-
- def lib_set(self, category: str, items: list[dict[str, object]]) -> None:
- """Update library for category with items."""
- if category not in self._lib:
- self._lib[category] = {}
- for item in items:
- id_ = item['id'] if item['id'] is not None else -1
- assert isinstance(id_, int)
- self._lib[category][id_] = item
-
- def lib_del(self, category: str, id_: int) -> None:
- """Remove category element of id_ from library."""
- del self._lib[category][id_]
- if 0 == len(self._lib[category]):
- del self._lib[category]
-
- def lib_wipe(self, category: str) -> None:
- """Remove category from library."""
- if category in self._lib:
- del self._lib[category]
-
- def set(self, field_name: str, value: object) -> None:
- """Set top-level .as_dict field."""
- self._fields[field_name] = value
-
- def force(self, field_name: str, value: object) -> None:
- """Set ._forced field to ensure value in .as_dict."""
- self._forced[field_name] = value
-
- @staticmethod
- def as_ids(items: list[dict[str, Any]]) -> list[int]:
- """Return list of only 'id' fields of items."""
- return [item['id'] for item in items]
-
- @staticmethod
- def day_as_dict(id_: int, comment: str = '') -> dict[str, object]:
- """Return JSON of Day to expect."""
- return {'id': id_, 'comment': comment, 'todos': []}
-
- def set_day_from_post(self, id_: int, d: dict[str, Any]) -> None:
- """Set Day of id_ in library based on POST dict d."""
- day = self.day_as_dict(id_)
- for k, v in d.items():
- if 'day_comment' == k:
- day['comment'] = v
- elif 'new_todo' == k:
- next_id = 1
- for todo in self.lib_all('Todo'):
- if next_id <= todo['id']:
- next_id = todo['id'] + 1
- for proc_id in sorted([id_ for id_ in v if id_]):
- todo = self.todo_as_dict(next_id, proc_id, id_)
- self.lib_set('Todo', [todo])
- next_id += 1
- elif 'done' == k:
- for todo_id in v:
- self.lib_get('Todo', todo_id)['is_done'] = True
- elif 'todo_id' == k:
- for i, todo_id in enumerate(v):
- t = self.lib_get('Todo', todo_id)
- if 'comment' in d:
- t['comment'] = d['comment'][i]
- if 'effort' in d:
- effort = d['effort'][i] if d['effort'][i] else None
- t['effort'] = effort
- self.lib_set('Day', [day])
-
- @staticmethod
- def cond_as_dict(id_: int = 1,
- is_active: bool = False,
- title: None | str = None,
- description: None | str = None,
- ) -> dict[str, object]:
- """Return JSON of Condition to expect."""
- versioned: dict[str, dict[str, object]]
- versioned = {'title': {}, 'description': {}}
- if title is not None:
- versioned['title']['0'] = title
- if description is not None:
- versioned['description']['0'] = description
- return {'id': id_, 'is_active': is_active, '_versioned': versioned}
-
- def set_cond_from_post(self, id_: int, d: dict[str, Any]) -> None:
- """Set Condition of id_ in library based on POST dict d."""
- if 'delete' in d:
- self.lib_del('Condition', id_)
- return
- cond = self.lib_get('Condition', id_)
- if cond:
- cond['is_active'] = 'is_active' in d and\
- d['is_active'] in _VALID_TRUES
- for category in ['title', 'description']:
- history = cond['_versioned'][category]
- if len(history) > 0:
- last_i = sorted([int(k) for k in history.keys()])[-1]
- if d[category] != history[str(last_i)]:
- history[str(last_i + 1)] = d[category]
- else:
- history['0'] = d[category]
- else:
- cond = self.cond_as_dict(id_, **d)
- self.lib_set('Condition', [cond])
-
- @staticmethod
- def todo_as_dict(id_: int = 1,
- process_id: int = 1,
- day_id: int = 1,
- conditions: None | list[int] = None,
- disables: None | list[int] = None,
- blockers: None | list[int] = None,
- enables: None | list[int] = None,
- calendarize: bool = False,
- comment: str = '',
- is_done: bool = False,
- effort: float | None = None,
- children: list[int] | None = None,
- parents: list[int] | None = None,
- ) -> dict[str, object]:
- """Return JSON of Todo to expect."""
- # pylint: disable=too-many-arguments
- d = {'id': id_,
- 'day_id': day_id,
- 'process_id': process_id,
- 'is_done': is_done,
- 'calendarize': calendarize,
- 'comment': comment,
- 'children': children if children else [],
- 'parents': parents if parents else [],
- 'effort': effort,
- 'conditions': conditions if conditions else [],
- 'disables': disables if disables else [],
- 'blockers': blockers if blockers else [],
- 'enables': enables if enables else []}
- return d
-
- def set_todo_from_post(self, id_: int, d: dict[str, Any]) -> None:
- """Set Todo of id_ in library based on POST dict d."""
- corrected_kwargs: dict[str, Any] = {
- 'children': [], 'is_done': 0, 'calendarize': 0, 'comment': ''}
- for k, v in d.items():
- if k.startswith('step_filler_to_'):
- continue
- if 'adopt' == k:
- new_children = v if isinstance(v, list) else [v]
- corrected_kwargs['children'] += new_children
- continue
- if k in {'is_done', 'calendarize'} and v in _VALID_TRUES:
- v = True
- corrected_kwargs[k] = v
- todo = self.lib_get('Todo', id_)
- if todo:
- for k, v in corrected_kwargs.items():
- todo[k] = v
- else:
- todo = self.todo_as_dict(id_, **corrected_kwargs)
- self.lib_set('Todo', [todo])
-
- @staticmethod
- def procstep_as_dict(id_: int,
- owner_id: int,
- step_process_id: int,
- parent_step_id: int | None = None
- ) -> dict[str, object]:
- """Return JSON of ProcessStep to expect."""
- return {'id': id_,
- 'owner_id': owner_id,
- 'step_process_id': step_process_id,
- 'parent_step_id': parent_step_id}
-
- @staticmethod
- def proc_as_dict(id_: int = 1,
- title: None | str = None,
- description: None | str = None,
- effort: None | float = None,
- conditions: None | list[int] = None,
- disables: None | list[int] = None,
- blockers: None | list[int] = None,
- enables: None | list[int] = None,
- explicit_steps: None | list[int] = None,
- suppressed_steps: None | list[int] = None
- ) -> dict[str, object]:
- """Return JSON of Process to expect."""
- # pylint: disable=too-many-arguments
- versioned: dict[str, dict[str, object]]
- versioned = {'title': {}, 'description': {}, 'effort': {}}
- if title is not None:
- versioned['title']['0'] = title
- if description is not None:
- versioned['description']['0'] = description
- if effort is not None:
- versioned['effort']['0'] = effort
- d = {'id': id_,
- 'calendarize': False,
- 'suppressed_steps': suppressed_steps if suppressed_steps else [],
- 'explicit_steps': explicit_steps if explicit_steps else [],
- '_versioned': versioned,
- 'conditions': conditions if conditions else [],
- 'disables': disables if disables else [],
- 'enables': enables if enables else [],
- 'blockers': blockers if blockers else []}
- return d
-
- def set_proc_from_post(self, id_: int, d: dict[str, Any]) -> None:
- """Set Process of id_ in library based on POST dict d."""
- proc = self.lib_get('Process', id_)
- if proc:
- for category in ['title', 'description', 'effort']:
- history = proc['_versioned'][category]
- if len(history) > 0:
- last_i = sorted([int(k) for k in history.keys()])[-1]
- if d[category] != history[str(last_i)]:
- history[str(last_i + 1)] = d[category]
- else:
- history['0'] = d[category]
- else:
- proc = self.proc_as_dict(id_,
- d['title'], d['description'], d['effort'])
- ignore = {'title', 'description', 'effort', 'new_top_step', 'step_of',
- 'kept_steps'}
- proc['calendarize'] = False
- for k, v in d.items():
- if k in ignore\
- or k.startswith('step_') or k.startswith('new_step_to'):
- continue
- if k in {'calendarize'} and v in _VALID_TRUES:
- v = True
- elif k in {'suppressed_steps', 'explicit_steps', 'conditions',
- 'disables', 'enables', 'blockers'}:
- if not isinstance(v, list):
- v = [v]
- proc[k] = v
- self.lib_set('Process', [proc])
-
-
-class TestCaseWithServer(TestCaseWithDB):
- """Module tests against our HTTP server/handler (and database)."""
-
- def setUp(self) -> None:
- super().setUp()
- self.httpd = TaskServer(self.db_file, ('localhost', 0), TaskHandler)
- self.server_thread = Thread(target=self.httpd.serve_forever)
- self.server_thread.daemon = True
- self.server_thread.start()
- self.conn = HTTPConnection(str(self.httpd.server_address[0]),
- self.httpd.server_address[1])
- self.httpd.render_mode = 'json'
-
- def tearDown(self) -> None:
- self.httpd.shutdown()
- self.httpd.server_close()
- self.server_thread.join()
- super().tearDown()
-
- def post_exp_cond(self,
- exps: list[Expected],
- payload: dict[str, object],
- id_: int = 1,
- post_to_id: bool = True,
- redir_to_id: bool = True
- ) -> None:
- """POST /condition(s), appropriately update Expecteds."""
- # pylint: disable=too-many-arguments
- target = f'/condition?id={id_}' if post_to_id else '/condition'
- redir = f'/condition?id={id_}' if redir_to_id else '/conditions'
- if 'title' not in payload:
- payload['title'] = 'foo'
- if 'description' not in payload:
- payload['description'] = 'foo'
- self.check_post(payload, target, redir=redir)
- for exp in exps:
- exp.set_cond_from_post(id_, payload)
-
- def post_exp_day(self,
- exps: list[Expected],
- payload: dict[str, Any],
- day_id: int = 1
- ) -> None:
- """POST /day, appropriately update Expecteds."""
- if 'make_type' not in payload:
- payload['make_type'] = 'empty'
- if 'day_comment' not in payload:
- payload['day_comment'] = ''
- date = dt_date_from_day_id(day_id).isoformat()
- target = f'/day?date={date}'
- redir_to = f'{target}&make_type={payload["make_type"]}'
- self.check_post(payload, target, 302, redir_to)
- for exp in exps:
- exp.set_day_from_post(day_id, payload)
-
- def post_exp_process(self,
- exps: list[Expected],
- payload: dict[str, Any],
- id_: int,
- ) -> dict[str, object]:
- """POST /process, appropriately update Expecteds."""
- if 'title' not in payload:
- payload['title'] = 'foo'
- if 'description' not in payload:
- payload['description'] = 'foo'
- if 'effort' not in payload:
- payload['effort'] = 1.1
- self.check_post(payload, f'/process?id={id_}',
- redir=f'/process?id={id_}')
- for exp in exps:
- exp.set_proc_from_post(id_, payload)
- return payload
-
- def post_exp_todo(self,
- exps: list[Expected],
- payload: dict[str, Any],
- id_: int,
- ) -> None:
- """POST /todo, appropriately updated Expecteds."""
- self.check_post(payload, f'/todo?id={id_}')
- for exp in exps:
- exp.set_todo_from_post(id_, payload)
-
- def check_filter(self, exp: Expected, category: str, key: str,
- val: str, list_ids: list[int]) -> None:
- """Check GET /{category}?{key}={val} sorts to list_ids."""
- # pylint: disable=too-many-arguments
- exp.set(key, val)
- exp.force(category, list_ids)
- self.check_json_get(f'/{category}?{key}={val}', exp)
-
- def check_redirect(self, target: str) -> None:
- """Check that self.conn answers with a 302 redirect to target."""
- response = self.conn.getresponse()
- self.assertEqual(response.status, 302)
- self.assertEqual(response.getheader('Location'), target)
-
- def check_get(self, target: str, expected_code: int) -> None:
- """Check that a GET to target yields expected_code."""
- self.conn.request('GET', target)
- self.assertEqual(self.conn.getresponse().status, expected_code)
-
- def check_minimal_inputs(self,
- url: str,
- minimal_inputs: dict[str, Any]
- ) -> None:
- """Check that url 400's unless all of minimal_inputs provided."""
- for to_hide in minimal_inputs.keys():
- to_post = {k: v for k, v in minimal_inputs.items() if k != to_hide}
- self.check_post(to_post, url, 400)
-
- def check_post(self, data: Mapping[str, object], target: str,
- expected_code: int = 302, redir: str = '') -> None:
- """Check that POST of data to target yields expected_code."""
- encoded_form_data = urlencode(data, doseq=True).encode('utf-8')
- headers = {'Content-Type': 'application/x-www-form-urlencoded',
- 'Content-Length': str(len(encoded_form_data))}
- self.conn.request('POST', target,
- body=encoded_form_data, headers=headers)
- if 302 == expected_code:
- redir = target if redir == '' else redir
- self.check_redirect(redir)
- else:
- self.assertEqual(self.conn.getresponse().status, expected_code)
-
- def check_get_defaults(self,
- path: str,
- default_id: str = '1',
- id_name: str = 'id'
- ) -> None:
- """Some standard model paths to test."""
- nonexist_status = 200 if self.checked_class.can_create_by_id else 404
- self.check_get(path, nonexist_status)
- self.check_get(f'{path}?{id_name}=', 400)
- self.check_get(f'{path}?{id_name}=foo', 400)
- self.check_get(f'/{path}?{id_name}=0', 400)
- self.check_get(f'{path}?{id_name}={default_id}', nonexist_status)
-
- def check_json_get(self, path: str, expected: Expected) -> None:
- """Compare JSON on GET path with expected.
-
- To simplify comparison of VersionedAttribute histories, transforms
- timestamp keys of VersionedAttribute history keys into (strings of)
- integers counting chronologically forward from 0.
- """
-
- def rewrite_history_keys_in(item: Any) -> Any:
- if isinstance(item, dict):
- if '_versioned' in item.keys():
- for category in item['_versioned']:
- vals = item['_versioned'][category].values()
- history = {}
- for i, val in enumerate(vals):
- history[str(i)] = val
- item['_versioned'][category] = history
- for category in list(item.keys()):
- rewrite_history_keys_in(item[category])
- elif isinstance(item, list):
- item[:] = [rewrite_history_keys_in(i) for i in item]
- return item
-
- def walk_diffs(path: str, cmp1: object, cmp2: object) -> None:
- # pylint: disable=too-many-branches
- def warn(intro: str, val: object) -> None:
- if isinstance(val, (str, int, float)):
- print(intro, val)
- else:
- print(intro)
- pprint(val)
- if cmp1 != cmp2:
- if isinstance(cmp1, dict) and isinstance(cmp2, dict):
- for k, v in cmp1.items():
- if k not in cmp2:
- warn(f'DIFF {path}: retrieved lacks {k}', v)
- elif v != cmp2[k]:
- walk_diffs(f'{path}:{k}', v, cmp2[k])
- for k in [k for k in cmp2.keys() if k not in cmp1]:
- warn(f'DIFF {path}: expected lacks retrieved\'s {k}',
- cmp2[k])
- elif isinstance(cmp1, list) and isinstance(cmp2, list):
- for i, v1 in enumerate(cmp1):
- if i >= len(cmp2):
- warn(f'DIFF {path}[{i}] retrieved misses:', v1)
- elif v1 != cmp2[i]:
- walk_diffs(f'{path}[{i}]', v1, cmp2[i])
- if len(cmp2) > len(cmp1):
- for i, v2 in enumerate(cmp2[len(cmp1):]):
- warn(f'DIFF {path}[{len(cmp1)+i}] misses:', v2)
- else:
- warn(f'DIFF {path} – for expected:', cmp1)
- warn('… and for retrieved:', cmp2)
-
- self.conn.request('GET', path)
- response = self.conn.getresponse()
- self.assertEqual(response.status, 200)
- retrieved = json_loads(response.read().decode())
- rewrite_history_keys_in(retrieved)
- # to convert ._lib int keys to str
- cmp = json_loads(json_dumps(expected.as_dict))
- try:
- self.assertEqual(cmp, retrieved)
- except AssertionError as e:
- print('EXPECTED:')
- pprint(cmp)
- print('RETRIEVED:')
- pprint(retrieved)
- walk_diffs('', cmp, retrieved)
- raise e