Add TaskHandler code to actually make previous commit work. (master)
author    Christian Heller <c.heller@plomlompom.de>
          Sat, 10 Aug 2024 03:32:55 +0000 (05:32 +0200)
committer Christian Heller <c.heller@plomlompom.de>
          Sat, 10 Aug 2024 03:32:55 +0000 (05:32 +0200)
18 files changed:
plomtask/conditions.py
plomtask/days.py
plomtask/db.py
plomtask/http.py
plomtask/misc.py [new file with mode: 0644]
plomtask/processes.py
plomtask/todos.py
plomtask/versioned_attributes.py
templates/calendar_txt.html [new file with mode: 0644]
templates/process.html
templates/todo.html
tests/conditions.py
tests/days.py
tests/misc.py
tests/processes.py
tests/todos.py
tests/utils.py
tests/versioned_attributes.py [deleted file]

diff --git a/plomtask/conditions.py b/plomtask/conditions.py
index 15dcb9df623c60378485632ce3bebc4c30f03d47..8d4160424423cf6ddfccb06b5b04d77e2bea4b15 100644 (file)
@@ -8,8 +8,8 @@ from plomtask.exceptions import HandledException
 class Condition(BaseModel[int]):
     """Non-Process dependency for ProcessSteps and Todos."""
     table_name = 'conditions'
-    to_save = ['is_active']
-    to_save_versioned = ['title', 'description']
+    to_save_simples = ['is_active']
+    versioned_defaults = {'title': 'UNNAMED', 'description': ''}
     to_search = ['title.newest', 'description.newest']
     can_create_by_id = True
     sorters = {'is_active': lambda c: c.is_active,
@@ -18,9 +18,10 @@ class Condition(BaseModel[int]):
     def __init__(self, id_: int | None, is_active: bool = False) -> None:
         super().__init__(id_)
         self.is_active = is_active
-        self.title = VersionedAttribute(self, 'condition_titles', 'UNNAMED')
-        self.description = VersionedAttribute(self, 'condition_descriptions',
-                                              '')
+        for name in ['title', 'description']:
+            attr = VersionedAttribute(self, f'condition_{name}s',
+                                      self.versioned_defaults[name])
+            setattr(self, name, attr)
 
     def remove(self, db_conn: DatabaseConnection) -> None:
         """Remove from DB, with VersionedAttributes.
@@ -41,6 +42,7 @@ class Condition(BaseModel[int]):
 
 class ConditionsRelations:
     """Methods for handling relations to Conditions, for Todo and Process."""
+    # pylint: disable=too-few-public-methods
 
     def __init__(self) -> None:
         self.conditions: list[Condition] = []
@@ -48,26 +50,21 @@ class ConditionsRelations:
         self.enables: list[Condition] = []
         self.disables: list[Condition] = []
 
-    def set_conditions(self, db_conn: DatabaseConnection, ids: list[int],
-                       target: str = 'conditions') -> None:
-        """Set self.[target] to Conditions identified by ids."""
-        target_list = getattr(self, target)
-        while len(target_list) > 0:
-            target_list.pop()
-        for id_ in ids:
-            target_list += [Condition.by_id(db_conn, id_)]
-
-    def set_blockers(self, db_conn: DatabaseConnection,
-                     ids: list[int]) -> None:
-        """Set self.enables to Conditions identified by ids."""
-        self.set_conditions(db_conn, ids, 'blockers')
-
-    def set_enables(self, db_conn: DatabaseConnection,
-                    ids: list[int]) -> None:
-        """Set self.enables to Conditions identified by ids."""
-        self.set_conditions(db_conn, ids, 'enables')
-
-    def set_disables(self, db_conn: DatabaseConnection,
-                     ids: list[int]) -> None:
-        """Set self.disables to Conditions identified by ids."""
-        self.set_conditions(db_conn, ids, 'disables')
+    def set_condition_relations(self,
+                                db_conn: DatabaseConnection,
+                                ids_conditions: list[int],
+                                ids_blockers: list[int],
+                                ids_enables: list[int],
+                                ids_disables: list[int]
+                                ) -> None:
+        """Set owned Condition lists to those identified by respective IDs."""
+        # pylint: disable=too-many-arguments
+        for ids, target in [(ids_conditions, 'conditions'),
+                            (ids_blockers, 'blockers'),
+                            (ids_enables, 'enables'),
+                            (ids_disables, 'disables')]:
+            target_list = getattr(self, target)
+            while len(target_list) > 0:
+                target_list.pop()
+            for id_ in ids:
+                target_list += [Condition.by_id(db_conn, id_)]
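A quick usage sketch of the consolidated setter above, which replaces the four removed per-list setters. The call is illustrative: db_conn stands for an open DatabaseConnection, todo for any ConditionsRelations instance (a Todo or Process), and the ID lists are made up.

    # One call now clears and refills all four Condition lists at once.
    todo.set_condition_relations(db_conn,
                                 ids_conditions=[1, 2],  # refills .conditions
                                 ids_blockers=[],        # empties .blockers
                                 ids_enables=[3],        # refills .enables
                                 ids_disables=[4])       # refills .disables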
diff --git a/plomtask/days.py b/plomtask/days.py
index 23201301bbe792042a361d3f970415c622d80627..3d9d04182e740d843ffe4c7ec98c68abd6c3a93d 100644 (file)
@@ -11,7 +11,7 @@ from plomtask.dating import (DATE_FORMAT, valid_date)
 class Day(BaseModel[str]):
     """Individual days defined by their dates."""
     table_name = 'days'
-    to_save = ['comment']
+    to_save_simples = ['comment']
     add_to_dict = ['todos']
     can_create_by_id = True
 
@@ -50,20 +50,15 @@ class Day(BaseModel[str]):
             day.todos = Todo.by_date(db_conn, day.id_)
         return day
 
-    @classmethod
-    def by_date_range_filled(cls, db_conn: DatabaseConnection,
-                             start: str, end: str) -> list[Day]:
-        """Return days existing and non-existing between dates start/end."""
-        ret = cls.by_date_range_with_limits(db_conn, (start, end), 'id')
-        days, start_date, end_date = ret
-        return cls.with_filled_gaps(days, start_date, end_date)
-
     @classmethod
     def with_filled_gaps(cls, days: list[Day], start_date: str, end_date: str
                          ) -> list[Day]:
-        """In days, fill with (un-saved) Days gaps between start/end_date."""
+        """In days, fill with (un-stored) Days gaps between start/end_date."""
+        days = days[:]
+        start_date, end_date = valid_date(start_date), valid_date(end_date)
         if start_date > end_date:
-            return days
+            return []
+        days = [d for d in days if d.date >= start_date and d.date <= end_date]
         days.sort()
         if start_date not in [d.date for d in days]:
             days[:] = [Day(start_date)] + days
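A sketch of the reworked with_filled_gaps above, assuming the project's DATE_FORMAT is an ISO-style 'YYYY-MM-DD' string; the concrete dates are illustrative.

    # The classmethod now copies the input list, validates both limit dates,
    # drops Days outside the closed interval, and fills whatever is missing
    # with fresh, un-stored Day objects.
    stored = [Day('2024-08-09'), Day('2024-08-11')]
    days = Day.with_filled_gaps(stored, '2024-08-08', '2024-08-12')
    # -> five Days covering 2024-08-08 through 2024-08-12, in order;
    #    an empty list if start_date > end_date.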
diff --git a/plomtask/db.py b/plomtask/db.py
index 13cdaef5b9c7d3e992f8c92730a9979b9eee2d73..ee5f3b99f04e66460ef4e0a90c5d10d600210881 100644 (file)
@@ -232,9 +232,9 @@ BaseModelInstance = TypeVar('BaseModelInstance', bound='BaseModel[Any]')
 class BaseModel(Generic[BaseModelId]):
     """Template for most of the models we use/derive from the DB."""
     table_name = ''
-    to_save: list[str] = []
-    to_save_versioned: list[str] = []
+    to_save_simples: list[str] = []
     to_save_relations: list[tuple[str, str, str, int]] = []
+    versioned_defaults: dict[str, str | float] = {}
     add_to_dict: list[str] = []
     id_: None | BaseModelId
     cache_: dict[BaseModelId, Self]
@@ -253,11 +253,12 @@ class BaseModel(Generic[BaseModelId]):
         self.id_ = id_
 
     def __hash__(self) -> int:
-        hashable = [self.id_] + [getattr(self, name) for name in self.to_save]
+        hashable = [self.id_] + [getattr(self, name)
+                                 for name in self.to_save_simples]
         for definition in self.to_save_relations:
             attr = getattr(self, definition[2])
             hashable += [tuple(rel.id_ for rel in attr)]
-        for name in self.to_save_versioned:
+        for name in self.to_save_versioned():
             hashable += [hash(getattr(self, name))]
         return hash(tuple(hashable))
 
@@ -274,62 +275,35 @@ class BaseModel(Generic[BaseModelId]):
         assert isinstance(other.id_, int)
         return self.id_ < other.id_
 
+    @classmethod
+    def to_save_versioned(cls) -> list[str]:
+        """Return keys of cls.versioned_defaults assuming we wanna save 'em."""
+        return list(cls.versioned_defaults.keys())
+
     @property
-    def as_dict(self) -> dict[str, object]:
-        """Return self as (json.dumps-compatible) dict."""
-        library: dict[str, dict[str | int, object]] = {}
-        d: dict[str, object] = {'id': self.id_, '_library': library}
-        for to_save in self.to_save:
-            attr = getattr(self, to_save)
-            if hasattr(attr, 'as_dict_into_reference'):
-                d[to_save] = attr.as_dict_into_reference(library)
-            else:
-                d[to_save] = attr
-        if len(self.to_save_versioned) > 0:
+    def as_dict_and_refs(self) -> tuple[dict[str, object],
+                                        list[BaseModel[int] | BaseModel[str]]]:
+        """Return self as json.dumps-ready dict, list of referenced objects."""
+        d: dict[str, object] = {'id': self.id_}
+        refs: list[BaseModel[int] | BaseModel[str]] = []
+        for to_save in self.to_save_simples:
+            d[to_save] = getattr(self, to_save)
+        if len(self.to_save_versioned()) > 0:
             d['_versioned'] = {}
-        for k in self.to_save_versioned:
+        for k in self.to_save_versioned():
             attr = getattr(self, k)
             assert isinstance(d['_versioned'], dict)
             d['_versioned'][k] = attr.history
-        for r in self.to_save_relations:
-            attr_name = r[2]
-            l: list[int | str] = []
-            for rel in getattr(self, attr_name):
-                l += [rel.as_dict_into_reference(library)]
-            d[attr_name] = l
-        for k in self.add_to_dict:
-            d[k] = [x.as_dict_into_reference(library)
-                    for x in getattr(self, k)]
-        return d
-
-    def as_dict_into_reference(self,
-                               library: dict[str, dict[str | int, object]]
-                               ) -> int | str:
-        """Return self.id_ while writing .as_dict into library."""
-        def into_library(library: dict[str, dict[str | int, object]],
-                         cls_name: str,
-                         id_: str | int,
-                         d: dict[str, object]
-                         ) -> None:
-            if cls_name not in library:
-                library[cls_name] = {}
-            if id_ in library[cls_name]:
-                if library[cls_name][id_] != d:
-                    msg = 'Unexpected inequality of entries for ' +\
-                            f'_library at: {cls_name}/{id_}'
-                    raise HandledException(msg)
-            else:
-                library[cls_name][id_] = d
-        as_dict = self.as_dict
-        assert isinstance(as_dict['_library'], dict)
-        for cls_name, dict_of_objs in as_dict['_library'].items():
-            for id_, obj in dict_of_objs.items():
-                into_library(library, cls_name, id_, obj)
-        del as_dict['_library']
-        assert self.id_ is not None
-        into_library(library, self.__class__.__name__, self.id_, as_dict)
-        assert isinstance(as_dict['id'], (int, str))
-        return as_dict['id']
+        rels_to_collect = [rel[2] for rel in self.to_save_relations]
+        rels_to_collect += self.add_to_dict
+        for attr_name in rels_to_collect:
+            rel_list = []
+            for item in getattr(self, attr_name):
+                rel_list += [item.id_]
+                if item not in refs:
+                    refs += [item]
+            d[attr_name] = rel_list
+        return d, refs
 
     @classmethod
     def name_lowercase(cls) -> str:
@@ -339,13 +313,18 @@ class BaseModel(Generic[BaseModelId]):
     @classmethod
     def sort_by(cls, seq: list[Any], sort_key: str, default: str = 'title'
                 ) -> str:
-        """Sort cls list by cls.sorters[sort_key] (reverse if '-'-prefixed)."""
+        """Sort cls list by cls.sorters[sort_key] (reverse if '-'-prefixed).
+
+        Before cls.sorters[sort_key] is applied, seq is sorted by .id_, to
+        ensure predictability where parts of seq are of same sort value.
+        """
         reverse = False
         if len(sort_key) > 1 and '-' == sort_key[0]:
             sort_key = sort_key[1:]
             reverse = True
         if sort_key not in cls.sorters:
             sort_key = default
+        seq.sort(key=lambda x: x.id_, reverse=reverse)
         sorter: Callable[..., Any] = cls.sorters[sort_key]
         seq.sort(key=sorter, reverse=reverse)
         if reverse:
@@ -361,7 +340,8 @@ class BaseModel(Generic[BaseModelId]):
     def __getattribute__(self, name: str) -> Any:
         """Ensure fail if ._disappear() was called, except to check ._exists"""
         if name != '_exists' and not super().__getattribute__('_exists'):
-            raise HandledException('Object does not exist.')
+            msg = f'Object for attribute does not exist: {name}'
+            raise HandledException(msg)
         return super().__getattribute__(name)
 
     def _disappear(self) -> None:
@@ -386,10 +366,11 @@ class BaseModel(Generic[BaseModelId]):
         cls.cache_ = {}
 
     @classmethod
-    def get_cache(cls: type[BaseModelInstance]) -> dict[Any, BaseModel[Any]]:
+    def get_cache(cls: type[BaseModelInstance]
+                  ) -> dict[Any, BaseModelInstance]:
         """Get cache dictionary, create it if not yet existing."""
         if not hasattr(cls, 'cache_'):
         """Get cache dictionary, create it if not yet existing."""
         if not hasattr(cls, 'cache_'):
-            d: dict[Any, BaseModel[Any]] = {}
+            d: dict[Any, BaseModelInstance] = {}
             cls.cache_ = d
         return cls.cache_
 
@@ -438,7 +419,7 @@ class BaseModel(Generic[BaseModelId]):
         """Make from DB row (sans relations), update DB cache with it."""
         obj = cls(*row)
         assert obj.id_ is not None
         """Make from DB row (sans relations), update DB cache with it."""
         obj = cls(*row)
         assert obj.id_ is not None
-        for attr_name in cls.to_save_versioned:
+        for attr_name in cls.to_save_versioned():
             attr = getattr(obj, attr_name)
             table_name = attr.table_name
             for row_ in db_conn.row_where(table_name, 'parent', obj.id_):
@@ -498,7 +479,7 @@ class BaseModel(Generic[BaseModelId]):
                 item = cls.by_id(db_conn, id_)
                 assert item.id_ is not None
                 items[item.id_] = item
-        return list(items.values())
+        return sorted(list(items.values()))
 
     @classmethod
     def by_date_range_with_limits(cls: type[BaseModelInstance],
@@ -507,7 +488,7 @@ class BaseModel(Generic[BaseModelId]):
                                   date_col: str = 'day'
                                   ) -> tuple[list[BaseModelInstance], str,
                                              str]:
-        """Return list of items in database within (open) date_range interval.
+        """Return list of items in DB within (closed) date_range interval.
 
         If no range values provided, defaults them to 'yesterday' and
         'tomorrow'. Knows to properly interpret these and 'today' as value.
@@ -549,7 +530,7 @@ class BaseModel(Generic[BaseModelId]):
         """Write self to DB and cache and ensure .id_.
 
         Write both to DB, and to cache. To DB, write .id_ and attributes
         """Write self to DB and cache and ensure .id_.
 
         Write both to DB, and to cache. To DB, write .id_ and attributes
-        listed in cls.to_save[_versioned|_relations].
+        listed in cls.to_save_[simples|versioned|_relations].
 
         Ensure self.id_ by setting it to what the DB command returns as the
         last saved row's ID (cursor.lastrowid), EXCEPT if self.id_ already
@@ -557,14 +538,14 @@ class BaseModel(Generic[BaseModelId]):
         only the case with the Day class, where it's to be a date string.
         """
         values = tuple([self.id_] + [getattr(self, key)
-                                     for key in self.to_save])
+                                     for key in self.to_save_simples])
         table_name = self.table_name
         cursor = db_conn.exec_on_vals(f'REPLACE INTO {table_name} VALUES',
                                       values)
         if not isinstance(self.id_, str):
             self.id_ = cursor.lastrowid  # type: ignore[assignment]
         self.cache()
-        for attr_name in self.to_save_versioned:
+        for attr_name in self.to_save_versioned():
             getattr(self, attr_name).save(db_conn)
         for table, column, attr_name, key_index in self.to_save_relations:
             assert isinstance(self.id_, (int, str))
@@ -576,7 +557,7 @@ class BaseModel(Generic[BaseModelId]):
         """Remove from DB and cache, including dependencies."""
         if self.id_ is None or self._get_cached(self.id_) is None:
             raise HandledException('cannot remove unsaved item')
         """Remove from DB and cache, including dependencies."""
         if self.id_ is None or self._get_cached(self.id_) is None:
             raise HandledException('cannot remove unsaved item')
-        for attr_name in self.to_save_versioned:
+        for attr_name in self.to_save_versioned():
             getattr(self, attr_name).remove(db_conn)
         for table, column, attr_name, _ in self.to_save_relations:
             db_conn.delete_where(table, column, self.id_)
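A sketch of the two BaseModel changes above, using Condition as declared in this commit; the instance is built without a database connection, and the shown dict follows from Condition's to_save_simples and versioned_defaults (history contents abbreviated).

    Condition.to_save_versioned()    # -> ['title', 'description'],
                                     #    derived from versioned_defaults
    cond = Condition(None)
    d, refs = cond.as_dict_and_refs  # a property, not a method call
    # d    -> {'id': None, 'is_active': False,
    #          '_versioned': {'title': {...}, 'description': {...}}}
    # refs -> []  (Condition lists no relation attributes in this diff)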
diff --git a/plomtask/http.py b/plomtask/http.py
index b7040f76fa9c3c1d58b0ceedc58fabf02752f616..e307f1429f93227f7b960d2cad3d55b95e3d737c 100644 (file)
@@ -1,6 +1,5 @@
 """Web server stuff."""
 from __future__ import annotations
 """Web server stuff."""
 from __future__ import annotations
-from dataclasses import dataclass
 from typing import Any, Callable
 from base64 import b64encode, b64decode
 from binascii import Error as binascii_Exception
@@ -14,10 +13,11 @@ from plomtask.dating import date_in_n_days
 from plomtask.days import Day
 from plomtask.exceptions import (HandledException, BadFormatException,
                                  NotFoundException)
-from plomtask.db import DatabaseConnection, DatabaseFile
+from plomtask.db import DatabaseConnection, DatabaseFile, BaseModel
 from plomtask.processes import Process, ProcessStep, ProcessStepsNode
 from plomtask.conditions import Condition
-from plomtask.todos import Todo
+from plomtask.todos import Todo, TodoOrProcStepNode
+from plomtask.misc import DictableNode
 
 TEMPLATES_DIR = 'templates'
 
@@ -29,113 +29,79 @@ class TaskServer(HTTPServer):
                  *args: Any, **kwargs: Any) -> None:
         super().__init__(*args, **kwargs)
         self.db = db_file
-        self.headers: list[tuple[str, str]] = []
-        self._render_mode = 'html'
-        self._jinja = JinjaEnv(loader=JinjaFSLoader(TEMPLATES_DIR))
-
-    def set_json_mode(self) -> None:
-        """Make server send JSON instead of HTML responses."""
-        self._render_mode = 'json'
-        self.headers += [('Content-Type', 'application/json')]
-
-    @staticmethod
-    def ctx_to_json(ctx: dict[str, object]) -> str:
-        """Render ctx into JSON string."""
-        def walk_ctx(node: object) -> Any:
-            if hasattr(node, 'as_dict_into_reference'):
-                if hasattr(node, 'id_') and node.id_ is not None:
-                    return node.as_dict_into_reference(library)
-            if hasattr(node, 'as_dict'):
-                return node.as_dict
-            if isinstance(node, (list, tuple)):
-                return [walk_ctx(x) for x in node]
-            if isinstance(node, dict):
-                d = {}
-                for k, v in node.items():
-                    d[k] = walk_ctx(v)
-                return d
-            if isinstance(node, HandledException):
-                return str(node)
-            return node
-        library: dict[str, dict[str | int, object]] = {}
-        for k, v in ctx.items():
-            ctx[k] = walk_ctx(v)
-        ctx['_library'] = library
-        return json_dumps(ctx)
-
-    def render(self, ctx: dict[str, object], tmpl_name: str = '') -> str:
-        """Render ctx according to self._render_mode.."""
-        tmpl_name = f'{tmpl_name}.{self._render_mode}'
-        if 'html' == self._render_mode:
-            template = self._jinja.get_template(tmpl_name)
-            return template.render(ctx)
-        return self.__class__.ctx_to_json(ctx)
+        self.render_mode = 'html'
+        self.jinja = JinjaEnv(loader=JinjaFSLoader(TEMPLATES_DIR))
 
 
 class InputsParser:
     """Wrapper for validating and retrieving dict-like HTTP inputs."""
 
-    def __init__(self, dict_: dict[str, list[str]],
-                 strictness: bool = True) -> None:
+    def __init__(self, dict_: dict[str, list[str]]) -> None:
         self.inputs = dict_
-        self.strict = strictness
 
-    def get_str(self, key: str, default: str = '',
-                ignore_strict: bool = False) -> str:
-        """Retrieve single/first string value of key, or default."""
-        if key not in self.inputs.keys() or 0 == len(self.inputs[key]):
-            if self.strict and not ignore_strict:
-                raise BadFormatException(f'no value found for key {key}')
-            return default
-        return self.inputs[key][0]
-
-    def get_first_strings_starting(self, prefix: str) -> dict[str, str]:
-        """Retrieve dict of (first) strings at key starting with prefix."""
-        ret = {}
-        for key in [k for k in self.inputs.keys() if k.startswith(prefix)]:
-            ret[key] = self.inputs[key][0]
-        return ret
+    def get_all_str(self, key: str) -> list[str]:
+        """Retrieve list of string values at key (empty if no key)."""
+        if key not in self.inputs.keys():
+            return []
+        return self.inputs[key]
 
-    def get_int(self, key: str) -> int:
-        """Retrieve single/first value of key as int, error if empty."""
-        val = self.get_int_or_none(key)
-        if val is None:
-            raise BadFormatException(f'unexpected empty value for: {key}')
-        return val
+    def get_all_int(self, key: str) -> list[int]:
+        """Retrieve list of int values at key."""
+        all_str = self.get_all_str(key)
+        try:
+            return [int(s) for s in all_str if len(s) > 0]
+        except ValueError as e:
+            msg = f'cannot int a form field value for key {key} in: {all_str}'
+            raise BadFormatException(msg) from e
+
+    def get_str(self, key: str, default: str | None = None) -> str | None:
+        """Retrieve single/first string value of key, or default."""
+        vals = self.get_all_str(key)
+        if vals:
+            return vals[0]
+        return default
+
+    def get_str_or_fail(self, key: str, default: str | None = None) -> str:
+        """Retrieve first string value of key, if none: fail or default."""
+        vals = self.get_all_str(key)
+        if not vals:
+            if default is not None:
+                return default
+            raise BadFormatException(f'no value found for key: {key}')
+        return vals[0]
 
     def get_int_or_none(self, key: str) -> int | None:
         """Retrieve single/first value of key as int, return None if empty."""
-        val = self.get_str(key, ignore_strict=True)
+        val = self.get_str_or_fail(key, '')
         if val == '':
             return None
         try:
             return int(val)
-        except ValueError as e:
+        except (ValueError, TypeError) as e:
             msg = f'cannot int form field value for key {key}: {val}'
             raise BadFormatException(msg) from e
 
-    def get_float(self, key: str) -> float:
-        """Retrieve float value of key from self.postvars."""
+    def get_bool_or_none(self, key: str) -> bool | None:
+        """Return value to key if truish; if no value to key, None."""
         val = self.get_str(key)
-        try:
-            return float(val)
-        except ValueError as e:
-            msg = f'cannot float form field value for key {key}: {val}'
-            raise BadFormatException(msg) from e
+        if val is None:
+            return None
+        return val in {'True', 'true', '1', 'on'}
 
-    def get_all_str(self, key: str) -> list[str]:
-        """Retrieve list of string values at key."""
-        if key not in self.inputs.keys():
-            return []
-        return self.inputs[key]
+    def get_all_of_key_prefixed(self, key_prefix: str) -> dict[str, list[str]]:
+        """Retrieve dict of strings at keys starting with key_prefix."""
+        ret = {}
+        for key in [k for k in self.inputs.keys() if k.startswith(key_prefix)]:
+            ret[key[len(key_prefix):]] = self.inputs[key]
+        return ret
 
-    def get_all_int(self, key: str) -> list[int]:
-        """Retrieve list of int values at key."""
-        all_str = self.get_all_str(key)
+    def get_float_or_fail(self, key: str) -> float:
+        """Retrieve float value of key from self.postvars, fail if none."""
+        val = self.get_str_or_fail(key)
         try:
-            return [int(s) for s in all_str if len(s) > 0]
+            return float(val)
         except ValueError as e:
-            msg = f'cannot int a form field value for key {key} in: {all_str}'
+            msg = f'cannot float form field value for key {key}: {val}'
             raise BadFormatException(msg) from e
 
     def get_all_floats_or_nones(self, key: str) -> list[float | None]:
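A minimal sketch of the reshaped InputsParser API; the query dict mimics the str-to-list-of-str shape that parse_qs produces, and all keys and values are made up.

    form = InputsParser({'title': ['Foo'], 'effort': ['2.5'], 'done': ['on'],
                         'at:2024-08-10 03:32:55': ['2024-08-10 03:33:00']})
    form.get_str('missing')                # None instead of raising
    form.get_str_or_fail('missing', 'x')   # 'x'; without a default this
                                           #      raises BadFormatException
    form.get_float_or_fail('effort')       # 2.5
    form.get_bool_or_none('done')          # True ('on' counts as truish)
    form.get_all_of_key_prefixed('at:')    # {'2024-08-10 03:32:55':
                                           #  ['2024-08-10 03:33:00']}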
@@ -157,24 +123,81 @@ class TaskHandler(BaseHTTPRequestHandler):
     """Handles single HTTP request."""
     # pylint: disable=too-many-public-methods
     server: TaskServer
     """Handles single HTTP request."""
     # pylint: disable=too-many-public-methods
     server: TaskServer
-    conn: DatabaseConnection
+    _conn: DatabaseConnection
     _site: str
-    _form_data: InputsParser
+    _form: InputsParser
     _params: InputsParser
 
-    def _send_page(self,
-                   ctx: dict[str, Any],
-                   tmpl_name: str,
-                   code: int = 200
-                   ) -> None:
-        """Send ctx as proper HTTP response."""
-        body = self.server.render(ctx, tmpl_name)
+    def _send_page(
+            self, ctx: dict[str, Any], tmpl_name: str, code: int = 200
+            ) -> None:
+        """HTTP-send ctx as HTML or JSON, as defined by .server.render_mode.
+
+        The differentiation by .server.render_mode serves to allow easily
+        comparable JSON responses for automatic testing.
+        """
+        body: str
+        headers: list[tuple[str, str]] = []
+        if 'html' == self.server.render_mode:
+            tmpl = self.server.jinja.get_template(f'{tmpl_name}.html')
+            body = tmpl.render(ctx)
+        else:
+            body = self._ctx_to_json(ctx)
+            headers += [('Content-Type', 'application/json')]
         self.send_response(code)
-        for header_tuple in self.server.headers:
+        for header_tuple in headers:
             self.send_header(*header_tuple)
         self.end_headers()
         self.wfile.write(bytes(body, 'utf-8'))
 
+    def _ctx_to_json(self, ctx: dict[str, object]) -> str:
+        """Render ctx into JSON string.
+
+        Flattens any objects that json.dumps might not want to serialize, and
+        turns occurrences of BaseModel objects into listings of their .id_, to
+        be resolved to a full dict inside a top-level '_library' dictionary,
+        to avoid endless and circular nesting.
+        """
+
+        def flatten(node: object) -> object:
+
+            def update_library_with(
+                    item: BaseModel[int] | BaseModel[str]) -> None:
+                cls_name = item.__class__.__name__
+                if cls_name not in library:
+                    library[cls_name] = {}
+                if item.id_ not in library[cls_name]:
+                    d, refs = item.as_dict_and_refs
+                    id_key = '?' if item.id_ is None else item.id_
+                    library[cls_name][id_key] = d
+                    for ref in refs:
+                        update_library_with(ref)
+
+            if isinstance(node, BaseModel):
+                update_library_with(node)
+                return node.id_
+            if isinstance(node, DictableNode):
+                d, refs = node.as_dict_and_refs
+                for ref in refs:
+                    update_library_with(ref)
+                return d
+            if isinstance(node, (list, tuple)):
+                return [flatten(item) for item in node]
+            if isinstance(node, dict):
+                d = {}
+                for k, v in node.items():
+                    d[k] = flatten(v)
+                return d
+            if isinstance(node, HandledException):
+                return str(node)
+            return node
+
+        library: dict[str, dict[str | int, object]] = {}
+        for k, v in ctx.items():
+            ctx[k] = flatten(v)
+        ctx['_library'] = library
+        return json_dumps(ctx)
+
     @staticmethod
     def _request_wrapper(http_method: str, not_found_msg: str
                          ) -> Callable[..., Callable[[TaskHandler], None]]:
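The new _ctx_to_json above replaces the old per-object as_dict/_library plumbing. An illustrative sketch of the JSON it produces for a ctx holding one stored Condition with id_ 1; field values and history contents are abbreviated.

    # Given a TaskHandler instance named handler and a stored Condition cond
    # with id_ 1, handler._ctx_to_json({'condition': cond}) yields roughly:
    # {"condition": 1,
    #  "_library": {"Condition": {"1": {"id": 1, "is_active": false,
    #                                   "_versioned": {"title": {...},
    #                                                  "description": {...}}}}}}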
@@ -217,11 +240,13 @@ class TaskHandler(BaseHTTPRequestHandler):
                 # (because pylint here fails to detect the use of wrapper as a
                 # method to self with respective access privileges)
                 try:
-                    self.conn = DatabaseConnection(self.server.db)
+                    self._conn = DatabaseConnection(self.server.db)
                     parsed_url = urlparse(self.path)
                     self._site = path_split(parsed_url.path)[1]
-                    params = parse_qs(parsed_url.query, strict_parsing=True)
-                    self._params = InputsParser(params, False)
+                    params = parse_qs(parsed_url.query,
+                                      keep_blank_values=True,
+                                      strict_parsing=True)
+                    self._params = InputsParser(params)
                     handler_name = f'do_{http_method}_{self._site}'
                     if hasattr(self, handler_name):
                         handler = getattr(self, handler_name)
@@ -241,7 +266,7 @@ class TaskHandler(BaseHTTPRequestHandler):
                     ctx = {'msg': error}
                     self._send_page(ctx, 'msg', error.http_code)
                 finally:
-                    self.conn.close()
+                    self._conn.close()
             return wrapper
         return decorator
 
@@ -261,10 +286,10 @@ class TaskHandler(BaseHTTPRequestHandler):
         """Handle POST with handler, prepare redirection to result."""
         length = int(self.headers['content-length'])
         postvars = parse_qs(self.rfile.read(length).decode(),
         """Handle POST with handler, prepare redirection to result."""
         length = int(self.headers['content-length'])
         postvars = parse_qs(self.rfile.read(length).decode(),
-                            keep_blank_values=True, strict_parsing=True)
-        self._form_data = InputsParser(postvars)
+                            keep_blank_values=True)
+        self._form = InputsParser(postvars)
         redir_target = handler()
-        self.conn.commit()
+        self._conn.commit()
         return redir_target
 
     # GET handlers
@@ -281,9 +306,9 @@ class TaskHandler(BaseHTTPRequestHandler):
                 # method to self with respective access privileges)
                 id_ = self._params.get_int_or_none('id')
                 if target_class.can_create_by_id:
-                    item = target_class.by_id_or_create(self.conn, id_)
+                    item = target_class.by_id_or_create(self._conn, id_)
                 else:
-                    item = target_class.by_id(self.conn, id_)
+                    item = target_class.by_id(self._conn, id_)
                 return f(self, item)
             return wrapper
         return decorator
@@ -299,12 +324,12 @@ class TaskHandler(BaseHTTPRequestHandler):
         same, the only difference being the HTML template they are rendered to,
         which .do_GET selects from their method name.
         """
-        start = self._params.get_str('start')
-        end = self._params.get_str('end')
-        if not end:
-            end = date_in_n_days(366)
-        ret = Day.by_date_range_with_limits(self.conn, (start, end), 'id')
-        days, start, end = ret
+        start = self._params.get_str_or_fail('start', '')
+        end = self._params.get_str_or_fail('end', '')
+        end = end if end != '' else date_in_n_days(366)
+        #
+        days, start, end = Day.by_date_range_with_limits(self._conn,
+                                                         (start, end), 'id')
         days = Day.with_filled_gaps(days, start, end)
         today = date_in_n_days(0)
         return {'start': start, 'end': end, 'days': days, 'today': today}
@@ -319,9 +344,10 @@ class TaskHandler(BaseHTTPRequestHandler):
 
     def do_GET_day(self) -> dict[str, object]:
         """Show single Day of ?date=."""
-        date = self._params.get_str('date', date_in_n_days(0))
-        day = Day.by_id_or_create(self.conn, date)
-        make_type = self._params.get_str('make_type')
+        date = self._params.get_str_or_fail('date', date_in_n_days(0))
+        make_type = self._params.get_str_or_fail('make_type', 'full')
+        #
+        day = Day.by_id_or_create(self._conn, date)
         conditions_present = []
         enablers_for = {}
         disablers_for = {}
@@ -330,10 +356,10 @@ class TaskHandler(BaseHTTPRequestHandler):
                 if condition not in conditions_present:
                     conditions_present += [condition]
                     enablers_for[condition.id_] = [p for p in
-                                                   Process.all(self.conn)
+                                                   Process.all(self._conn)
                                                    if condition in p.enables]
                     disablers_for[condition.id_] = [p for p in
-                                                    Process.all(self.conn)
+                                                    Process.all(self._conn)
                                                     if condition in p.disables]
         seen_todos: set[int] = set()
         top_nodes = [t.get_step_tree(seen_todos)
@@ -344,33 +370,27 @@ class TaskHandler(BaseHTTPRequestHandler):
                 'enablers_for': enablers_for,
                 'disablers_for': disablers_for,
                 'conditions_present': conditions_present,
-                'processes': Process.all(self.conn)}
+                'processes': Process.all(self._conn)}
 
     @_get_item(Todo)
     def do_GET_todo(self, todo: Todo) -> dict[str, object]:
         """Show single Todo of ?id=."""
 
-        @dataclass
-        class TodoStepsNode:
-            """Collect what's useful for Todo steps tree display."""
-            id_: int
-            todo: Todo | None
-            process: Process | None
-            children: list[TodoStepsNode]  # pylint: disable=undefined-variable
-            fillable: bool = False
-
-        def walk_process_steps(id_: int,
+        def walk_process_steps(node_id: int,
                                process_step_nodes: list[ProcessStepsNode],
-                               steps_nodes: list[TodoStepsNode]) -> None:
+                               steps_nodes: list[TodoOrProcStepNode]) -> int:
             for process_step_node in process_step_nodes:
-                id_ += 1
-                node = TodoStepsNode(id_, None, process_step_node.process, [])
+                node_id += 1
+                proc = Process.by_id(self._conn,
+                                     process_step_node.step.step_process_id)
+                node = TodoOrProcStepNode(node_id, None, proc, [])
                 steps_nodes += [node]
-                walk_process_steps(id_, list(process_step_node.steps.values()),
-                                   node.children)
+                node_id = walk_process_steps(
+                        node_id, process_step_node.steps, node.children)
+            return node_id
 
-        def walk_todo_steps(id_: int, todos: list[Todo],
-                            steps_nodes: list[TodoStepsNode]) -> None:
+        def walk_todo_steps(node_id: int, todos: list[Todo],
+                            steps_nodes: list[TodoOrProcStepNode]) -> int:
             for todo in todos:
                 matched = False
                 for match in [item for item in steps_nodes
@@ -380,15 +400,18 @@ class TaskHandler(BaseHTTPRequestHandler):
                     matched = True
                     for child in match.children:
                         child.fillable = True
-                    walk_todo_steps(id_, todo.children, match.children)
+                    node_id = walk_todo_steps(
+                            node_id, todo.children, match.children)
                 if not matched:
-                    id_ += 1
-                    node = TodoStepsNode(id_, todo, None, [])
+                    node_id += 1
+                    node = TodoOrProcStepNode(node_id, todo, None, [])
                     steps_nodes += [node]
-                    walk_todo_steps(id_, todo.children, node.children)
+                    node_id = walk_todo_steps(
+                            node_id, todo.children, node.children)
+            return node_id
 
-        def collect_adoptables_keys(steps_nodes: list[TodoStepsNode]
-                                    ) -> set[int]:
+        def collect_adoptables_keys(
+                steps_nodes: list[TodoOrProcStepNode]) -> set[int]:
             ids = set()
             for node in steps_nodes:
                 if not node.todo:
@@ -399,37 +422,37 @@ class TaskHandler(BaseHTTPRequestHandler):
             return ids
 
         todo_steps = [step.todo for step in todo.get_step_tree(set()).children]
-        process_tree = todo.process.get_steps(self.conn, None)
-        steps_todo_to_process: list[TodoStepsNode] = []
-        walk_process_steps(0, list(process_tree.values()),
-                           steps_todo_to_process)
+        process_tree = todo.process.get_steps(self._conn, None)
+        steps_todo_to_process: list[TodoOrProcStepNode] = []
+        last_node_id = walk_process_steps(0, process_tree,
+                                          steps_todo_to_process)
         for steps_node in steps_todo_to_process:
             steps_node.fillable = True
-        walk_todo_steps(len(steps_todo_to_process), todo_steps,
-                        steps_todo_to_process)
+        walk_todo_steps(last_node_id, todo_steps, steps_todo_to_process)
         adoptables: dict[int, list[Todo]] = {}
-        any_adoptables = [Todo.by_id(self.conn, t.id_)
-                          for t in Todo.by_date(self.conn, todo.date)
+        any_adoptables = [Todo.by_id(self._conn, t.id_)
+                          for t in Todo.by_date(self._conn, todo.date)
                           if t.id_ is not None
                           and t != todo]
         for id_ in collect_adoptables_keys(steps_todo_to_process):
             adoptables[id_] = [t for t in any_adoptables
                                if t.process.id_ == id_]
-        return {'todo': todo, 'steps_todo_to_process': steps_todo_to_process,
+        return {'todo': todo,
+                'steps_todo_to_process': steps_todo_to_process,
                 'adoption_candidates_for': adoptables,
-                'process_candidates': Process.all(self.conn),
+                'process_candidates': sorted(Process.all(self._conn)),
                 'todo_candidates': any_adoptables,
-                'condition_candidates': Condition.all(self.conn)}
+                'condition_candidates': Condition.all(self._conn)}
 
     def do_GET_todos(self) -> dict[str, object]:
         """Show Todos from ?start= to ?end=, of ?process=, ?comment= pattern"""
 
     def do_GET_todos(self) -> dict[str, object]:
         """Show Todos from ?start= to ?end=, of ?process=, ?comment= pattern"""
-        sort_by = self._params.get_str('sort_by')
-        start = self._params.get_str('start')
-        end = self._params.get_str('end')
+        sort_by = self._params.get_str_or_fail('sort_by', 'title')
+        start = self._params.get_str_or_fail('start', '')
+        end = self._params.get_str_or_fail('end', '')
         process_id = self._params.get_int_or_none('process_id')
-        comment_pattern = self._params.get_str('comment_pattern')
-        todos = []
-        ret = Todo.by_date_range_with_limits(self.conn, (start, end))
+        comment_pattern = self._params.get_str_or_fail('comment_pattern', '')
+        #
+        ret = Todo.by_date_range_with_limits(self._conn, (start, end))
         todos_by_date_range, start, end = ret
         todos = [t for t in todos_by_date_range
                  if comment_pattern in t.comment
@@ -437,13 +460,14 @@ class TaskHandler(BaseHTTPRequestHandler):
         sort_by = Todo.sort_by(todos, sort_by)
         return {'start': start, 'end': end, 'process_id': process_id,
                 'comment_pattern': comment_pattern, 'todos': todos,
-                'all_processes': Process.all(self.conn), 'sort_by': sort_by}
+                'all_processes': Process.all(self._conn), 'sort_by': sort_by}
 
     def do_GET_conditions(self) -> dict[str, object]:
         """Show all Conditions."""
-        pattern = self._params.get_str('pattern')
-        sort_by = self._params.get_str('sort_by')
-        conditions = Condition.matching(self.conn, pattern)
+        pattern = self._params.get_str_or_fail('pattern', '')
+        sort_by = self._params.get_str_or_fail('sort_by', 'title')
+        #
+        conditions = Condition.matching(self._conn, pattern)
         sort_by = Condition.sort_by(conditions, sort_by)
         return {'conditions': conditions,
                 'sort_by': sort_by,
@@ -452,7 +476,7 @@ class TaskHandler(BaseHTTPRequestHandler):
     @_get_item(Condition)
     def do_GET_condition(self, c: Condition) -> dict[str, object]:
         """Show Condition of ?id=."""
-        ps = Process.all(self.conn)
+        ps = Process.all(self._conn)
         return {'condition': c, 'is_new': c.id_ is None,
                 'enabled_processes': [p for p in ps if c in p.conditions],
                 'disabled_processes': [p for p in ps if c in p.blockers],
@@ -475,26 +499,30 @@ class TaskHandler(BaseHTTPRequestHandler):
         owner_ids = self._params.get_all_int('step_to')
         owned_ids = self._params.get_all_int('has_step')
         title_64 = self._params.get_str('title_b64')
+        title_new = None
         if title_64:
             try:
-                title = b64decode(title_64.encode()).decode()
+                title_new = b64decode(title_64.encode()).decode()
             except binascii_Exception as exc:
                 msg = 'invalid base64 for ?title_b64='
                 raise BadFormatException(msg) from exc
-            process.title.set(title)
+        #
+        if title_new:
+            process.title.set(title_new)
         preset_top_step = None
-        owners = process.used_as_step_by(self.conn)
+        owners = process.used_as_step_by(self._conn)
         for step_id in owner_ids:
-            owners += [Process.by_id(self.conn, step_id)]
+            owners += [Process.by_id(self._conn, step_id)]
         for process_id in owned_ids:
-            Process.by_id(self.conn, process_id)  # to ensure ID exists
+            Process.by_id(self._conn, process_id)  # to ensure ID exists
             preset_top_step = process_id
         return {'process': process, 'is_new': process.id_ is None,
                 'preset_top_step': preset_top_step,
-                'steps': process.get_steps(self.conn), 'owners': owners,
-                'n_todos': len(Todo.by_process_id(self.conn, process.id_)),
-                'process_candidates': Process.all(self.conn),
-                'condition_candidates': Condition.all(self.conn)}
+                'steps': process.get_steps(self._conn),
+                'owners': owners,
+                'n_todos': len(Todo.by_process_id(self._conn, process.id_)),
+                'process_candidates': Process.all(self._conn),
+                'condition_candidates': Condition.all(self._conn)}
 
     @_get_item(Process)
     def do_GET_process_titles(self, p: Process) -> dict[str, object]:
@@ -513,9 +541,10 @@ class TaskHandler(BaseHTTPRequestHandler):
 
     def do_GET_processes(self) -> dict[str, object]:
         """Show all Processes."""
-        pattern = self._params.get_str('pattern')
-        sort_by = self._params.get_str('sort_by')
-        processes = Process.matching(self.conn, pattern)
+        pattern = self._params.get_str_or_fail('pattern', '')
+        sort_by = self._params.get_str_or_fail('sort_by', 'title')
+        #
+        processes = Process.matching(self._conn, pattern)
         sort_by = Process.sort_by(processes, sort_by)
         return {'processes': processes, 'sort_by': sort_by, 'pattern': pattern}
 
@@ -531,18 +560,18 @@ class TaskHandler(BaseHTTPRequestHandler):
                 # (because pylint here fails to detect the use of wrapper as a
                 # method to self with respective access privileges)
                 id_ = self._params.get_int_or_none('id')
-                for _ in self._form_data.get_all_str('delete'):
+                for _ in self._form.get_all_str('delete'):
                     if id_ is None:
                         msg = 'trying to delete non-saved ' +\
                                 f'{target_class.__name__}'
                         raise NotFoundException(msg)
-                    item = target_class.by_id(self.conn, id_)
-                    item.remove(self.conn)
+                    item = target_class.by_id(self._conn, id_)
+                    item.remove(self._conn)
                     return redir_target
                 if target_class.can_create_by_id:
-                    item = target_class.by_id_or_create(self.conn, id_)
+                    item = target_class.by_id_or_create(self._conn, id_)
                 else:
-                    item = target_class.by_id(self.conn, id_)
+                    item = target_class.by_id(self._conn, id_)
                 return f(self, item)
             return wrapper
         return decorator
@@ -550,106 +579,133 @@ class TaskHandler(BaseHTTPRequestHandler):
     def _change_versioned_timestamps(self, cls: Any, attr_name: str) -> str:
         """Update history timestamps for VersionedAttribute."""
         id_ = self._params.get_int_or_none('id')
-        item = cls.by_id(self.conn, id_)
+        item = cls.by_id(self._conn, id_)
         attr = getattr(item, attr_name)
-        for k, v in self._form_data.get_first_strings_starting('at:').items():
-            old = k[3:]
-            if old[19:] != v:
-                attr.reset_timestamp(old, f'{v}.0')
-        attr.save(self.conn)
+        for k, vals in self._form.get_all_of_key_prefixed('at:').items():
+            if k[19:] != vals[0]:
+                attr.reset_timestamp(k, f'{vals[0]}.0')
+        attr.save(self._conn)
         return f'/{cls.name_lowercase()}_{attr_name}s?id={item.id_}'
 
     def do_POST_day(self) -> str:
         """Update or insert Day of date and Todos mapped to it."""
         # pylint: disable=too-many-locals
-        date = self._params.get_str('date')
-        day_comment = self._form_data.get_str('day_comment')
-        make_type = self._form_data.get_str('make_type')
-        old_todos = self._form_data.get_all_int('todo_id')
-        new_todos = self._form_data.get_all_int('new_todo')
-        comments = self._form_data.get_all_str('comment')
-        efforts = self._form_data.get_all_floats_or_nones('effort')
-        done_todos = self._form_data.get_all_int('done')
-        for _ in [id_ for id_ in done_todos if id_ not in old_todos]:
-            raise BadFormatException('"done" field refers to unknown Todo')
+        date = self._params.get_str_or_fail('date')
+        day_comment = self._form.get_str_or_fail('day_comment')
+        make_type = self._form.get_str_or_fail('make_type')
+        old_todos = self._form.get_all_int('todo_id')
+        new_todos_by_process = self._form.get_all_int('new_todo')
+        comments = self._form.get_all_str('comment')
+        efforts = self._form.get_all_floats_or_nones('effort')
+        done_todos = self._form.get_all_int('done')
         is_done = [t_id in done_todos for t_id in old_todos]
         if not (len(old_todos) == len(is_done) == len(comments)
                 == len(efforts)):
             msg = 'not equal number each of number of todo_id, comments, ' +\
                     'and efforts inputs'
             raise BadFormatException(msg)
-        day = Day.by_id_or_create(self.conn, date)
+        for _ in [id_ for id_ in done_todos if id_ not in old_todos]:
+            raise BadFormatException('"done" field refers to unknown Todo')
+        #
+        day = Day.by_id_or_create(self._conn, date)
         day.comment = day_comment
-        day.save(self.conn)
-        for process_id in sorted(new_todos):
-            if 'empty' == make_type:
-                process = Process.by_id(self.conn, process_id)
-                todo = Todo(None, process, False, date)
-                todo.save(self.conn)
-            else:
-                Todo.create_with_children(self.conn, process_id, date)
+        day.save(self._conn)
+        new_todos = []
+        for process_id in sorted(new_todos_by_process):
+            process = Process.by_id(self._conn, process_id)
+            todo = Todo(None, process, False, date)
+            todo.save(self._conn)
+            new_todos += [todo]
+        if 'full' == make_type:
+            for todo in new_todos:
+                todo.ensure_children(self._conn)
         for i, todo_id in enumerate(old_todos):
-            todo = Todo.by_id(self.conn, todo_id)
+            todo = Todo.by_id(self._conn, todo_id)
             todo.is_done = is_done[i]
             todo.comment = comments[i]
             todo.effort = efforts[i]
-            todo.save(self.conn)
+            todo.save(self._conn)
         return f'/day?date={date}&make_type={make_type}'
 
     @_delete_or_post(Todo, '/')
     def do_POST_todo(self, todo: Todo) -> str:
         """Update Todo and its children."""
         # pylint: disable=too-many-locals
-        adopted_child_ids = self._form_data.get_all_int('adopt')
-        processes_to_make_full = self._form_data.get_all_int('make_full')
-        processes_to_make_empty = self._form_data.get_all_int('make_empty')
-        fill_fors = self._form_data.get_first_strings_starting('fill_for_')
-        effort = self._form_data.get_str('effort', ignore_strict=True)
-        conditions = self._form_data.get_all_int('conditions')
-        disables = self._form_data.get_all_int('disables')
-        blockers = self._form_data.get_all_int('blockers')
-        enables = self._form_data.get_all_int('enables')
-        is_done = len(self._form_data.get_all_str('done')) > 0
-        calendarize = len(self._form_data.get_all_str('calendarize')) > 0
-        comment = self._form_data.get_str('comment', ignore_strict=True)
-        for v in fill_fors.values():
-            if v.startswith('make_empty_'):
-                processes_to_make_empty += [int(v[11:])]
-            elif v.startswith('make_full_'):
-                processes_to_make_full += [int(v[10:])]
-            elif v != 'ignore':
-                adopted_child_ids += [int(v)]
-        to_remove = []
-        for child in todo.children:
-            assert isinstance(child.id_, int)
-            if child.id_ not in adopted_child_ids:
-                to_remove += [child.id_]
-        for id_ in to_remove:
-            child = Todo.by_id(self.conn, id_)
-            todo.remove_child(child)
-        for child_id in adopted_child_ids:
-            if child_id in [c.id_ for c in todo.children]:
-                continue
-            child = Todo.by_id(self.conn, child_id)
-            todo.add_child(child)
-        for process_id in processes_to_make_empty:
-            process = Process.by_id(self.conn, process_id)
-            made = Todo(None, process, False, todo.date)
-            made.save(self.conn)
-            todo.add_child(made)
-        for process_id in processes_to_make_full:
-            made = Todo.create_with_children(self.conn, process_id, todo.date)
-            todo.add_child(made)
-        todo.effort = float(effort) if effort else None
-        todo.set_conditions(self.conn, conditions)
-        todo.set_blockers(self.conn, blockers)
-        todo.set_enables(self.conn, enables)
-        todo.set_disables(self.conn, disables)
-        todo.is_done = is_done
-        todo.calendarize = calendarize
-        todo.comment = comment
-        todo.save(self.conn)
-        return f'/todo?id={todo.id_}'
+        # pylint: disable=too-many-branches
+        # pylint: disable=too-many-statements
+        assert todo.id_ is not None
+        adoptees = [(id_, todo.id_) for id_ in self._form.get_all_int('adopt')]
+        to_make = {'full': [(id_, todo.id_)
+                            for id_ in self._form.get_all_int('make_full')],
+                   'empty': [(id_, todo.id_)
+                             for id_ in self._form.get_all_int('make_empty')]}
+        step_fillers_to = self._form.get_all_of_key_prefixed('step_filler_to_')
+        to_update: dict[str, Any] = {
+            'comment': self._form.get_str_or_fail('comment', '')}
+        for k in ('is_done', 'calendarize'):
+            v = self._form.get_bool_or_none(k)
+            if v is not None:
+                to_update[k] = v
+        cond_rels = [self._form.get_all_int(name) for name in
+                     ['conditions', 'blockers', 'enables', 'disables']]
+        effort_or_not = self._form.get_str('effort')
+        if effort_or_not is not None:
+            if effort_or_not == '':
+                to_update['effort'] = None
+            else:
+                try:
+                    to_update['effort'] = float(effort_or_not)
+                except ValueError as e:
+                    msg = 'cannot float form field value for key: effort'
+                    raise BadFormatException(msg) from e
+        for k, fillers in step_fillers_to.items():
+            try:
+                parent_id = int(k)
+            except ValueError as e:
+                msg = f'bad step_filler_to_ key: {k}'
+                raise BadFormatException(msg) from e
+            for filler in [f for f in fillers if f != 'ignore']:
+                target_id: int
+                prefix = 'make_'
+                to_int = filler[5:] if filler.startswith(prefix) else filler
+                try:
+                    target_id = int(to_int)
+                except ValueError as e:
+                    msg = f'bad fill_for target: {filler}'
+                    raise BadFormatException(msg) from e
+                if filler.startswith(prefix):
+                    to_make['empty'] += [(target_id, parent_id)]
+                else:
+                    adoptees += [(target_id, parent_id)]
+        #
+        todo.set_condition_relations(self._conn, *cond_rels)
+        for parent in [Todo.by_id(self._conn, a[1])
+                       for a in adoptees] + [todo]:
+            for child in parent.children:
+                if child not in [t[0] for t in adoptees
+                                 if t[0] == child.id_ and t[1] == parent.id_]:
+                    parent.remove_child(child)
+                    parent.save(self._conn)
+        for child_id, parent_id in adoptees:
+            parent = Todo.by_id(self._conn, parent_id)
+            if child_id not in [c.id_ for c in parent.children]:
+                parent.add_child(Todo.by_id(self._conn, child_id))
+                parent.save(self._conn)
+        todo.update_attrs(**to_update)
+        for approach, make_data in to_make.items():
+            for process_id, parent_id in make_data:
+                parent = Todo.by_id(self._conn, parent_id)
+                process = Process.by_id(self._conn, process_id)
+                made = Todo(None, process, False, todo.date)
+                made.save(self._conn)
+                if 'full' == approach:
+                    made.ensure_children(self._conn)
+                parent.add_child(made)
+                parent.save(self._conn)
+        # todo.save() may destroy Todo if .effort < 0, so retrieve .id_ early
+        url = f'/todo?id={todo.id_}'
+        todo.save(self._conn)
+        return url
 
     def do_POST_process_descriptions(self) -> str:
         """Update history timestamps for Process.description."""
@@ -667,71 +723,53 @@ class TaskHandler(BaseHTTPRequestHandler):
     def do_POST_process(self, process: Process) -> str:
         """Update or insert Process of ?id= and fields defined in postvars."""
         # pylint: disable=too-many-locals
-        # pylint: disable=too-many-statements
-        title = self._form_data.get_str('title')
-        description = self._form_data.get_str('description')
-        effort = self._form_data.get_float('effort')
-        conditions = self._form_data.get_all_int('conditions')
-        blockers = self._form_data.get_all_int('blockers')
-        enables = self._form_data.get_all_int('enables')
-        disables = self._form_data.get_all_int('disables')
-        calendarize = self._form_data.get_all_str('calendarize') != []
-        suppresses = self._form_data.get_all_int('suppresses')
-        step_of = self._form_data.get_all_str('step_of')
-        keep_steps = self._form_data.get_all_int('keep_step')
-        step_ids = self._form_data.get_all_int('steps')
-        new_top_steps = self._form_data.get_all_str('new_top_step')
-        step_process_id_to = {}
-        step_parent_id_to = {}
+
+        def id_or_title(l_id_or_title: list[str]) -> tuple[str, list[int]]:
+            l_ids, title = [], ''
+            for id_or_title in l_id_or_title:
+                try:
+                    l_ids += [int(id_or_title)]
+                except ValueError:
+                    title = id_or_title
+            return title, l_ids
+
+        versioned = {'title': self._form.get_str_or_fail('title'),
+                     'description': self._form.get_str_or_fail('description'),
+                     'effort': self._form.get_float_or_fail('effort')}
+        cond_rels = [self._form.get_all_int(s) for s
+                     in ['conditions', 'blockers', 'enables', 'disables']]
+        calendarize = self._form.get_bool_or_none('calendarize')
+        step_of = self._form.get_all_str('step_of')
+        suppressions = self._form.get_all_int('suppresses')
+        kept_steps = self._form.get_all_int('kept_steps')
+        new_top_step_procs = self._form.get_all_str('new_top_step')
         new_steps_to = {}
-        for step_id in step_ids:
+        for step_id in kept_steps:
             name = f'new_step_to_{step_id}'
-            new_steps_to[step_id] = self._form_data.get_all_int(name)
-        for step_id in keep_steps:
-            name = f'step_{step_id}_process_id'
-            step_process_id_to[step_id] = self._form_data.get_int(name)
-            name = f'step_{step_id}_parent_id'
-            step_parent_id_to[step_id] = self._form_data.get_int_or_none(name)
-        process.title.set(title)
-        process.description.set(description)
-        process.effort.set(effort)
-        process.set_conditions(self.conn, conditions)
-        process.set_blockers(self.conn, blockers)
-        process.set_enables(self.conn, enables)
-        process.set_disables(self.conn, disables)
-        process.calendarize = calendarize
-        process.save(self.conn)
+            new_steps_to[step_id] = self._form.get_all_int(name)
+        new_owner_title, owners_to_set = id_or_title(step_of)
+        new_step_title, new_top_step_proc_ids = id_or_title(new_top_step_procs)
+        #
+        for k, v in versioned.items():
+            getattr(process, k).set(v)
+        if calendarize is not None:
+            process.calendarize = calendarize
+        process.save(self._conn)
         assert isinstance(process.id_, int)
-        new_step_title = None
-        steps: list[ProcessStep] = []
-        for step_id in keep_steps:
-            if step_id not in step_ids:
-                raise BadFormatException('trying to keep unknown step')
-            step = ProcessStep(step_id, process.id_,
-                               step_process_id_to[step_id],
-                               step_parent_id_to[step_id])
-            steps += [step]
-        for step_id in step_ids:
-            new = [ProcessStep(None, process.id_, step_process_id, step_id)
-                   for step_process_id in new_steps_to[step_id]]
-            steps += new
-        for step_identifier in new_top_steps:
-            try:
-                step_process_id = int(step_identifier)
-                step = ProcessStep(None, process.id_, step_process_id, None)
-                steps += [step]
-            except ValueError:
-                new_step_title = step_identifier
-        process.set_steps(self.conn, steps)
-        process.set_step_suppressions(self.conn, suppresses)
-        owners_to_set = []
-        new_owner_title = None
-        for owner_identifier in step_of:
-            try:
-                owners_to_set += [int(owner_identifier)]
-            except ValueError:
-                new_owner_title = owner_identifier
-        process.set_owners(self.conn, owners_to_set)
+        # set relations to Conditions and ProcessSteps / other Processes
+        process.set_condition_relations(self._conn, *cond_rels)
+        owned_steps = []
+        for step_id in kept_steps:
+            owned_steps += [ProcessStep.by_id(self._conn, step_id)]
+            owned_steps += [  # new sub-steps
+                    ProcessStep(None, process.id_, step_process_id, step_id)
+                    for step_process_id in new_steps_to[step_id]]
+        for step_process_id in new_top_step_proc_ids:
+            owned_steps += [ProcessStep(None, process.id_, step_process_id,
+                                        None)]
+        process.set_step_relations(self._conn, owners_to_set, suppressions,
+                                   owned_steps)
+        # encode titles for potential newly-to-create Processes up or down
         params = f'id={process.id_}'
         if new_step_title:
             title_b64_encoded = b64encode(new_step_title.encode()).decode()
@@ -739,7 +777,7 @@ class TaskHandler(BaseHTTPRequestHandler):
         elif new_owner_title:
             title_b64_encoded = b64encode(new_owner_title.encode()).decode()
             params = f'has_step={process.id_}&title_b64={title_b64_encoded}'
-        process.save(self.conn)
+        process.save(self._conn)
         return f'/process?{params}'
 
     def do_POST_condition_descriptions(self) -> str:
@@ -753,11 +791,13 @@ class TaskHandler(BaseHTTPRequestHandler):
     @_delete_or_post(Condition, '/conditions')
     def do_POST_condition(self, condition: Condition) -> str:
         """Update/insert Condition of ?id= and fields defined in postvars."""
-        is_active = self._form_data.get_str('is_active') == 'True'
-        title = self._form_data.get_str('title')
-        description = self._form_data.get_str('description')
-        condition.is_active = is_active
+        title = self._form.get_str_or_fail('title')
+        description = self._form.get_str_or_fail('description')
+        is_active = self._form.get_bool_or_none('is_active')
+        #
+        if is_active is not None:
+            condition.is_active = is_active
         condition.title.set(title)
         condition.description.set(description)
-        condition.save(self.conn)
+        condition.save(self._conn)
         return f'/condition?id={condition.id_}'
diff --git a/plomtask/misc.py b/plomtask/misc.py
new file mode 100644 (file)
index 0000000..fa79bf5
--- /dev/null
@@ -0,0 +1,33 @@
+"""What doesn't fit elsewhere so far."""
+from typing import Any
+
+
+class DictableNode:
+    """Template for display chain nodes providing .as_dict_and_refs."""
+    # pylint: disable=too-few-public-methods
+    _to_dict: list[str] = []
+
+    def __init__(self, *args: Any) -> None:
+        for i, arg in enumerate(args):
+            setattr(self, self._to_dict[i], arg)
+
+    @property
+    def as_dict_and_refs(self) -> tuple[dict[str, object], list[Any]]:
+        """Return self as json.dumps-ready dict, list of referenced objects."""
+        d = {}
+        refs = []
+        for name in self._to_dict:
+            attr = getattr(self, name)
+            if hasattr(attr, 'id_'):
+                d[name] = attr.id_
+                continue
+            if isinstance(attr, list):
+                d[name] = []
+                for item in attr:
+                    item_d, item_refs = item.as_dict_and_refs
+                    d[name] += [item_d]
+                    for item_ref in [r for r in item_refs if r not in refs]:
+                        refs += [item_ref]
+                continue
+            d[name] = attr
+        return d, refs
diff --git a/plomtask/processes.py b/plomtask/processes.py
index bb1de3a4a3356415473bc652d650e202886eb01b..23eb624353b656c08f38b3c1c1150c0f48f9a4d5 100644 (file)
@@ -1,8 +1,8 @@
 """Collecting Processes and Process-related items."""
 from __future__ import annotations
-from dataclasses import dataclass
 from typing import Set, Any
 from sqlite3 import Row
+from plomtask.misc import DictableNode
 from plomtask.db import DatabaseConnection, BaseModel
 from plomtask.versioned_attributes import VersionedAttribute
 from plomtask.conditions import Condition, ConditionsRelations
@@ -10,23 +10,24 @@ from plomtask.exceptions import (NotFoundException, BadFormatException,
                                  HandledException)
 
 
-@dataclass
-class ProcessStepsNode:
+class ProcessStepsNode(DictableNode):
     """Collects what's useful to know for ProcessSteps tree display."""
+    # pylint: disable=too-few-public-methods
+    step: ProcessStep
     process: Process
-    parent_id: int | None
     is_explicit: bool
-    steps: dict[int, ProcessStepsNode]
+    steps: list[ProcessStepsNode]
     seen: bool = False
     is_suppressed: bool = False
+    _to_dict = ['step', 'process', 'is_explicit', 'steps', 'seen',
+                'is_suppressed']
 
 
 class Process(BaseModel[int], ConditionsRelations):
     """Template for, and metadata for, Todos, and their arrangements."""
     # pylint: disable=too-many-instance-attributes
     table_name = 'processes'
-    to_save = ['calendarize']
-    to_save_versioned = ['title', 'description', 'effort']
+    to_save_simples = ['calendarize']
     to_save_relations = [('process_conditions', 'process', 'conditions', 0),
                          ('process_blockers', 'process', 'blockers', 0),
                          ('process_enables', 'process', 'enables', 0),
@@ -34,6 +35,7 @@ class Process(BaseModel[int], ConditionsRelations):
                          ('process_step_suppressions', 'process',
                           'suppressed_steps', 0)]
     add_to_dict = ['explicit_steps']
+    versioned_defaults = {'title': 'UNNAMED', 'description': '', 'effort': 1.0}
     to_search = ['title.newest', 'description.newest']
     can_create_by_id = True
     sorters = {'steps': lambda p: len(p.explicit_steps),
@@ -44,9 +46,10 @@ class Process(BaseModel[int], ConditionsRelations):
     def __init__(self, id_: int | None, calendarize: bool = False) -> None:
         BaseModel.__init__(self, id_)
         ConditionsRelations.__init__(self)
-        self.title = VersionedAttribute(self, 'process_titles', 'UNNAMED')
-        self.description = VersionedAttribute(self, 'process_descriptions', '')
-        self.effort = VersionedAttribute(self, 'process_efforts', 1.0)
+        for name in ['title', 'description', 'effort']:
+            attr = VersionedAttribute(self, f'process_{name}s',
+                                      self.versioned_defaults[name])
+            setattr(self, name, attr)
         self.explicit_steps: list[ProcessStep] = []
         self.suppressed_steps: list[ProcessStep] = []
         self.calendarize = calendarize
@@ -86,7 +89,7 @@ class Process(BaseModel[int], ConditionsRelations):
         return [self.__class__.by_id(db_conn, id_) for id_ in owner_ids]
 
     def get_steps(self, db_conn: DatabaseConnection, external_owner:
-                  Process | None = None) -> dict[int, ProcessStepsNode]:
+                  Process | None = None) -> list[ProcessStepsNode]:
         """Return tree of depended-on explicit and implicit ProcessSteps."""
 
         def make_node(step: ProcessStep, suppressed: bool) -> ProcessStepsNode:
@@ -94,26 +97,26 @@ class Process(BaseModel[int], ConditionsRelations):
             if external_owner is not None:
                 is_explicit = step.owner_id == external_owner.id_
             process = self.__class__.by_id(db_conn, step.step_process_id)
-            step_steps = {}
+            step_steps = []
             if not suppressed:
                 step_steps = process.get_steps(db_conn, external_owner)
-            return ProcessStepsNode(process, step.parent_step_id,
-                                    is_explicit, step_steps, False, suppressed)
+            return ProcessStepsNode(step, process, is_explicit, step_steps,
+                                    False, suppressed)
 
-        def walk_steps(node_id: int, node: ProcessStepsNode) -> None:
-            node.seen = node_id in seen_step_ids
-            seen_step_ids.add(node_id)
+        def walk_steps(node: ProcessStepsNode) -> None:
+            node.seen = node.step.id_ in seen_step_ids
+            assert isinstance(node.step.id_, int)
+            seen_step_ids.add(node.step.id_)
             if node.is_suppressed:
                 return
             explicit_children = [s for s in self.explicit_steps
-                                 if s.parent_step_id == node_id]
+                                 if s.parent_step_id == node.step.id_]
             for child in explicit_children:
-                assert isinstance(child.id_, int)
-                node.steps[child.id_] = make_node(child, False)
-            for id_, step in node.steps.items():
-                walk_steps(id_, step)
+                node.steps += [make_node(child, False)]
+            for step in node.steps:
+                walk_steps(step)
 
-        steps: dict[int, ProcessStepsNode] = {}
+        step_nodes: list[ProcessStepsNode] = []
         seen_step_ids: Set[int] = set()
         if external_owner is None:
             external_owner = self
@@ -121,21 +124,59 @@ class Process(BaseModel[int], ConditionsRelations):
                      if s.parent_step_id is None]:
             assert isinstance(step.id_, int)
             new_node = make_node(step, step in external_owner.suppressed_steps)
-            steps[step.id_] = new_node
-        for step_id, step_node in steps.items():
-            walk_steps(step_id, step_node)
-        return steps
+            step_nodes += [new_node]
+        for step_node in step_nodes:
+            walk_steps(step_node)
+        return step_nodes
+
+    def set_step_relations(self,
+                           db_conn: DatabaseConnection,
+                           owners: list[int],
+                           suppressions: list[int],
+                           owned_steps: list[ProcessStep]
+                           ) -> None:
+        """Set step owners, suppressions, and owned steps."""
+        self._set_owners(db_conn, owners)
+        self._set_step_suppressions(db_conn, suppressions)
+        self.set_steps(db_conn, owned_steps)
 
-    def set_step_suppressions(self, db_conn: DatabaseConnection,
-                              step_ids: list[int]) -> None:
+    def _set_step_suppressions(self,
+                               db_conn: DatabaseConnection,
+                               step_ids: list[int]
+                               ) -> None:
         """Set self.suppressed_steps from step_ids."""
         assert isinstance(self.id_, int)
         db_conn.delete_where('process_step_suppressions', 'process', self.id_)
         self.suppressed_steps = [ProcessStep.by_id(db_conn, s)
                                  for s in step_ids]
 
-    def set_steps(self, db_conn: DatabaseConnection,
-                  steps: list[ProcessStep]) -> None:
+    def _set_owners(self,
+                    db_conn: DatabaseConnection,
+                    owner_ids: list[int]
+                    ) -> None:
+        """Re-set owners to those identified in owner_ids."""
+        owners_old = self.used_as_step_by(db_conn)
+        losers = [o for o in owners_old if o.id_ not in owner_ids]
+        owners_old_ids = [o.id_ for o in owners_old]
+        winners = [Process.by_id(db_conn, id_) for id_ in owner_ids
+                   if id_ not in owners_old_ids]
+        steps_to_remove = []
+        for loser in losers:
+            steps_to_remove += [s for s in loser.explicit_steps
+                                if s.step_process_id == self.id_]
+        for step in steps_to_remove:
+            step.remove(db_conn)
+        for winner in winners:
+            assert isinstance(winner.id_, int)
+            assert isinstance(self.id_, int)
+            new_step = ProcessStep(None, winner.id_, self.id_, None)
+            new_explicit_steps = winner.explicit_steps + [new_step]
+            winner.set_steps(db_conn, new_explicit_steps)
+
+    def set_steps(self,
+                  db_conn: DatabaseConnection,
+                  steps: list[ProcessStep]
+                  ) -> None:
         """Set self.explicit_steps in bulk.
 
         Checks against recursion, and turns into top-level steps any of
@@ -163,27 +204,6 @@ class Process(BaseModel[int], ConditionsRelations):
             walk_steps(step)
             step.save(db_conn)
 
-    def set_owners(self, db_conn: DatabaseConnection,
-                   owner_ids: list[int]) -> None:
-        """Re-set owners to those identified in owner_ids."""
-        owners_old = self.used_as_step_by(db_conn)
-        losers = [o for o in owners_old if o.id_ not in owner_ids]
-        owners_old_ids = [o.id_ for o in owners_old]
-        winners = [Process.by_id(db_conn, id_) for id_ in owner_ids
-                   if id_ not in owners_old_ids]
-        steps_to_remove = []
-        for loser in losers:
-            steps_to_remove += [s for s in loser.explicit_steps
-                                if s.step_process_id == self.id_]
-        for step in steps_to_remove:
-            step.remove(db_conn)
-        for winner in winners:
-            assert isinstance(winner.id_, int)
-            assert isinstance(self.id_, int)
-            new_step = ProcessStep(None, winner.id_, self.id_, None)
-            new_explicit_steps = winner.explicit_steps + [new_step]
-            winner.set_steps(db_conn, new_explicit_steps)
-
     def save(self, db_conn: DatabaseConnection) -> None:
         """Add (or re-write) self and connected items to DB."""
         super().save(db_conn)
@@ -210,7 +230,7 @@ class Process(BaseModel[int], ConditionsRelations):
 class ProcessStep(BaseModel[int]):
     """Sub-unit of Processes."""
     table_name = 'process_steps'
-    to_save = ['owner_id', 'step_process_id', 'parent_step_id']
+    to_save_simples = ['owner_id', 'step_process_id', 'parent_step_id']
 
     def __init__(self, id_: int | None, owner_id: int, step_process_id: int,
                  parent_step_id: int | None) -> None:
diff --git a/plomtask/todos.py b/plomtask/todos.py
index f5388b58f25ec1237b65b751c8fd5fa352160ddf..9f9fdb4aadb3c9912b258453dbe507e96f6c4503 100644 (file)
@@ -2,6 +2,7 @@
 from __future__ import annotations
 from typing import Any, Set
 from sqlite3 import Row
+from plomtask.misc import DictableNode
 from plomtask.db import DatabaseConnection, BaseModel
 from plomtask.processes import Process, ProcessStepsNode
 from plomtask.versioned_attributes import VersionedAttribute
@@ -11,27 +12,24 @@ from plomtask.exceptions import (NotFoundException, BadFormatException,
 from plomtask.dating import valid_date
 
 
-class TodoNode:
+class TodoNode(DictableNode):
     """Collects what's useful to know for Todo/Condition tree display."""
     # pylint: disable=too-few-public-methods
     todo: Todo
     seen: bool
     children: list[TodoNode]
+    _to_dict = ['todo', 'seen', 'children']
 
-    def __init__(self,
-                 todo: Todo,
-                 seen: bool,
-                 children: list[TodoNode]) -> None:
-        self.todo = todo
-        self.seen = seen
-        self.children = children
 
-    @property
-    def as_dict(self) -> dict[str, object]:
-        """Return self as (json.dumps-coompatible) dict."""
-        return {'todo': self.todo.id_,
-                'seen': self.seen,
-                'children': [c.as_dict for c in self.children]}
+class TodoOrProcStepNode(DictableNode):
+    """Collect what's useful for Todo-or-ProcessStep tree display."""
+    # pylint: disable=too-few-public-methods
+    node_id: int
+    todo: Todo | None
+    process: Process | None
+    children: list[TodoOrProcStepNode]  # pylint: disable=undefined-variable
+    fillable: bool = False
+    _to_dict = ['node_id', 'todo', 'process', 'children', 'fillable']
 
 
 class Todo(BaseModel[int], ConditionsRelations):
@@ -39,8 +37,8 @@ class Todo(BaseModel[int], ConditionsRelations):
     # pylint: disable=too-many-instance-attributes
     # pylint: disable=too-many-public-methods
     table_name = 'todos'
-    to_save = ['process_id', 'is_done', 'date', 'comment', 'effort',
-               'calendarize']
+    to_save_simples = ['process_id', 'is_done', 'date', 'comment', 'effort',
+                       'calendarize']
     to_save_relations = [('todo_conditions', 'todo', 'conditions', 0),
                          ('todo_blockers', 'todo', 'blockers', 0),
                          ('todo_enables', 'todo', 'enables', 0),
@@ -89,36 +87,31 @@ class Todo(BaseModel[int], ConditionsRelations):
         todos, _, _ = cls.by_date_range_with_limits(db_conn, date_range)
         return todos
 
-    @classmethod
-    def create_with_children(cls, db_conn: DatabaseConnection,
-                             process_id: int, date: str) -> Todo:
-        """Create Todo of process for date, ensure children."""
-
-        def key_order_func(n: ProcessStepsNode) -> int:
-            assert isinstance(n.process.id_, int)
-            return n.process.id_
+    def ensure_children(self, db_conn: DatabaseConnection) -> None:
+        """Ensure Todo children (create or adopt) demanded by Process chain."""
 
         def walk_steps(parent: Todo, step_node: ProcessStepsNode) -> Todo:
-            adoptables = [t for t in cls.by_date(db_conn, date)
+            adoptables = [t for t in Todo.by_date(db_conn, parent.date)
                           if (t not in parent.children)
                           and (t != parent)
-                          and step_node.process == t.process]
+                          and step_node.process.id_ == t.process_id]
             satisfier = None
             for adoptable in adoptables:
                 satisfier = adoptable
                 break
             if not satisfier:
-                satisfier = cls(None, step_node.process, False, date)
+                satisfier = Todo(None, step_node.process, False, parent.date)
                 satisfier.save(db_conn)
-            sub_step_nodes = list(step_node.steps.values())
-            sub_step_nodes.sort(key=key_order_func)
+            sub_step_nodes = sorted(
+                    step_node.steps,
+                    key=lambda s: s.process.id_ if s.process.id_ else 0)
             for sub_node in sub_step_nodes:
                 if sub_node.is_suppressed:
                     continue
                 n_slots = len([n for n in sub_step_nodes
                                if n.process == sub_node.process])
                 filled_slots = len([t for t in satisfier.children
-                                    if t.process == sub_node.process])
+                                    if t.process.id_ == sub_node.process.id_])
                 # if we did not newly create satisfier, it may already fill
                 # some step dependencies, so only fill what remains open
                 if n_slots - filled_slots > 0:
@@ -126,16 +119,13 @@ class Todo(BaseModel[int], ConditionsRelations):
             satisfier.save(db_conn)
             return satisfier
 
-        process = Process.by_id(db_conn, process_id)
-        todo = cls(None, process, False, date)
-        todo.save(db_conn)
+        process = Process.by_id(db_conn, self.process_id)
         steps_tree = process.get_steps(db_conn)
-        for step_node in steps_tree.values():
+        for step_node in steps_tree:
             if step_node.is_suppressed:
                 continue
-            todo.add_child(walk_steps(todo, step_node))
-        todo.save(db_conn)
-        return todo
+            self.add_child(walk_steps(self, step_node))
+        self.save(db_conn)
 
     @classmethod
     def from_table_row(cls, db_conn: DatabaseConnection,
@@ -207,8 +197,9 @@ class Todo(BaseModel[int], ConditionsRelations):
         return 0
 
     @property
-    def process_id(self) -> int | str | None:
+    def process_id(self) -> int:
         """Needed for super().save to save Processes as attributes."""
+        assert isinstance(self.process.id_, int)
         return self.process.id_
 
     @property
@@ -231,6 +222,7 @@ class Todo(BaseModel[int], ConditionsRelations):
     @property
     def title(self) -> VersionedAttribute:
         """Shortcut to .process.title."""
+        assert isinstance(self.process.title, VersionedAttribute)
         return self.process.title
 
     @property
@@ -311,6 +303,11 @@ class Todo(BaseModel[int], ConditionsRelations):
         self.children.remove(child)
         child.parents.remove(self)
 
+    def update_attrs(self, **kwargs: Any) -> None:
+        """Update self's attributes listed in kwargs."""
+        for k, v in kwargs.items():
+            setattr(self, k, v)
+
     def save(self, db_conn: DatabaseConnection) -> None:
         """On save calls, also check if auto-deletion by effort < 0."""
         if self.effort and self.effort < 0 and self.is_deletable:
diff --git a/plomtask/versioned_attributes.py b/plomtask/versioned_attributes.py
index 8861c9834ff3924d6459ced5cb9c69629424bb45..f5e17f3a848dd08f9066155af036a90c2c1b0941 100644 (file)
@@ -17,12 +17,16 @@ class VersionedAttribute:
                  parent: Any, table_name: str, default: str | float) -> None:
         self.parent = parent
         self.table_name = table_name
-        self.default = default
+        self._default = default
         self.history: dict[str, str | float] = {}
+        # NB: For tighter mypy testing, we might prefer self.history to be
+        # dict[str, float] | dict[str, str] instead, but my current coding
+        # knowledge only manages to make that work by adding much further
+        # complexity, so let's leave it at that for now …
 
     def __hash__(self) -> int:
         history_tuples = tuple((k, v) for k, v in self.history.items())
-        hashable = (self.parent.id_, self.table_name, self.default,
+        hashable = (self.parent.id_, self.table_name, self._default,
                     history_tuples)
         return hash(hashable)
 
@@ -31,11 +35,16 @@ class VersionedAttribute:
         """Return most recent timestamp."""
         return sorted(self.history.keys())[-1]
 
+    @property
+    def value_type_name(self) -> str:
+        """Return string of name of attribute value type."""
+        return type(self._default).__name__
+
     @property
     def newest(self) -> str | float:
-        """Return most recent value, or self.default if self.history empty."""
+        """Return most recent value, or self._default if self.history empty."""
         if 0 == len(self.history):
-            return self.default
+            return self._default
         return self.history[self._newest_timestamp]
 
     def reset_timestamp(self, old_str: str, new_str: str) -> None:
@@ -89,7 +98,7 @@ class VersionedAttribute:
             queried_time += ' 23:59:59.999'
         sorted_timestamps = sorted(self.history.keys())
         if 0 == len(sorted_timestamps):
-            return self.default
+            return self._default
         selected_timestamp = sorted_timestamps[0]
         for timestamp in sorted_timestamps[1:]:
             if timestamp > queried_time:
diff --git a/templates/calendar_txt.html b/templates/calendar_txt.html
new file mode 100644 (file)
index 0000000..567ddd8
--- /dev/null
@@ -0,0 +1,20 @@
+{% extends '_base.html' %}
+
+{% block content %}
+<h3>calendar</h3>
+
+<p><a href="/calendar">normal view</a></p>
+
+<form action="calendar_txt" method="GET">
+from <input name="start" class="date" value="{{start}}" />
+to <input name="end" class="date" value="{{end}}" />
+<input type="submit" value="OK" />
+</form>
+<table>
+
+<pre>{% for day in days %}{% if day.weekday == "Monday" %}
+---{% endif %}{% if day.comment or day.calendarized_todos %}
+{{day.weekday|truncate(2,True,'',0)}} {{day.date}} {{day.comment|e}}{% endif %}{% if day.calendarized_todos%}{% for todo in day.calendarized_todos %}
+* {{todo.title_then|e}}{% if todo.comment %} / {{todo.comment|e}}{% endif %}{% endfor %}{% endif %}{% endfor %}
+</pre>
+{% endblock %}
diff --git a/templates/process.html b/templates/process.html
index 7bb503eeafd2a68e21987021a7db0a110c4e548b..a4029dc333193ae95c20e93624d0a478b91dd4c1 100644 (file)
@@ -17,14 +17,12 @@ details[open] > summary::after {
 
 
 
-{% macro step_with_steps(step_id, step_node, indent) %}
+{% macro step_with_steps(step_node, indent) %}
 <tr>
 <td>
-<input type="hidden" name="steps" value="{{step_id}}" />
+<input type="hidden" name="steps" value="{{step_node.step.id_}}" />
 {% if step_node.is_explicit %}
-<input type="checkbox" name="keep_step" value="{{step_id}}" checked />
-<input type="hidden" name="step_{{step_id}}_process_id" value="{{step_node.process.id_}}" />
-<input type="hidden" name="step_{{step_id}}_parent_id" value="{{step_node.parent_id or ''}}" />
+<input type="checkbox" name="kept_steps" value="{{step_node.step.id_}}" checked />
 {% endif %}
 </td>
 
@@ -60,8 +58,8 @@ details[open] > summary::after {
 {% endif %}
 </tr>
 {% if step_node.is_explicit or not step_node.seen %}
-{% for substep_id, substep in step_node.steps.items() %}
-{{ step_with_steps(substep_id, substep, indent+1) }}
+{% for substep in step_node.steps %}
+{{ step_with_steps(substep, indent+1) }}
 {% endfor %}
 {% endif %}
 {% endmacro %}
@@ -116,8 +114,8 @@ edit process of ID {{process.id_}}
 <td>
 {% if steps %}
 <table>
-{% for step_id, step_node in steps.items() %}
-{{ step_with_steps(step_id, step_node, 0) }}
+{% for step_node in steps %}
+{{ step_with_steps(step_node, 0) }}
 {% endfor %}
 </table>
 {% endif %}
diff --git a/templates/todo.html b/templates/todo.html
index fea931ab83ddf57536ab375ce3773a3f656204ce..de5dbd27c9a0b9ca6299cdb48ea283464129c077 100644 (file)
@@ -22,21 +22,21 @@ select{ font-size: 0.5em; margin: 0; padding: 0; }
 <a href="todo?id={{item.todo.id_}}">{{item.todo.title_then|e}}</a>
 {% else %}
 {{item.process.title.newest|e}}
-{% if indent == 0 %}
-· fill: <select name="fill_for_{{item.id_}}">
+{% if parent_todo %}
+· fill: <select name="step_filler_to_{{parent_todo.id_}}">
 <option value="ignore">--</option>
-<option value="make_empty_{{item.process.id_}}">make empty</option>
-<option value="make_full_{{item.process.id_}}">make full</option>
+<option value="make_{{item.process.id_}}">make empty</option>
 {% for adoptable in adoption_candidates_for[item.process.id_] %}
 <option value="{{adoptable.id_}}">adopt #{{adoptable.id_}}{% if adoptable.comment %} / {{adoptable.comment}}{% endif %}</option>
 {% endfor %}
 </select>
 {% endif %}
+
 {% endif %}
 </td>
 </tr>
 {% for child in item.children %}
-{{ draw_tree_row(child, item, indent+1) }}
+{{ draw_tree_row(child, item.todo, indent+1) }}
 {% endfor %}
 {% endmacro %}
 
@@ -57,8 +57,8 @@ select{ font-size: 0.5em; margin: 0; padding: 0; }
 </tr>
 <tr>
 <th>done</th>
-<td><input type="checkbox" name="done" {% if todo.is_done %}checked {% endif %} {% if not todo.is_doable %}disabled {% endif %}/>
-{% if not todo.is_doable and todo.is_done %}<input type="hidden" name="done" value="1" />{% endif %}
+<td><input type="checkbox" name="is_done" {% if todo.is_done %}checked {% endif %} {% if not todo.is_doable %}disabled {% endif %}/>
+{% if not todo.is_doable and todo.is_done %}<input type="hidden" name="is_done" value="1" />{% endif %}
 </td>
 </tr>
 <tr>
diff --git a/tests/conditions.py b/tests/conditions.py
index bf04f7b674b84ab2dcad9ebe349400809d449187..6feda94fb8bc9007773bb0f47f08ed6dfd49d545 100644 (file)
@@ -1,5 +1,7 @@
 """Test Conditions module."""
-from tests.utils import TestCaseWithDB, TestCaseWithServer, TestCaseSansDB
+from typing import Any
+from tests.utils import (TestCaseSansDB, TestCaseWithDB, TestCaseWithServer,
+                         Expected)
 from plomtask.conditions import Condition
 from plomtask.processes import Process
 from plomtask.todos import Todo
@@ -9,14 +11,12 @@ from plomtask.exceptions import HandledException
 class TestsSansDB(TestCaseSansDB):
     """Tests requiring no DB setup."""
     checked_class = Condition
-    versioned_defaults_to_test = {'title': 'UNNAMED', 'description': ''}
 
 
 class TestsWithDB(TestCaseWithDB):
     """Tests requiring DB, but not server setup."""
     checked_class = Condition
-    default_init_kwargs = {'is_active': False}
-    test_versioneds = {'title': str, 'description': str}
+    default_init_kwargs = {'is_active': 0}
 
     def test_remove(self) -> None:
         """Test .remove() effects on DB and cache."""
@@ -24,165 +24,137 @@ class TestsWithDB(TestCaseWithDB):
         proc = Process(None)
         proc.save(self.db_conn)
         todo = Todo(None, proc, False, '2024-01-01')
+        todo.save(self.db_conn)
+        # check condition can only be deleted if not depended upon
         for depender in (proc, todo):
-            assert hasattr(depender, 'save')
-            assert hasattr(depender, 'set_conditions')
             c = Condition(None)
             c.save(self.db_conn)
-            depender.save(self.db_conn)
-            depender.set_conditions(self.db_conn, [c.id_], 'conditions')
+            assert isinstance(c.id_, int)
+            depender.set_condition_relations(self.db_conn, [c.id_], [], [], [])
             depender.save(self.db_conn)
             with self.assertRaises(HandledException):
                 c.remove(self.db_conn)
-            depender.set_conditions(self.db_conn, [], 'conditions')
+            depender.set_condition_relations(self.db_conn, [], [], [], [])
             depender.save(self.db_conn)
             c.remove(self.db_conn)
 
 
+class ExpectedGetConditions(Expected):
+    """Builder of expectations for GET /conditions."""
+    _default_dict = {'sort_by': 'title', 'pattern': ''}
+
+    def recalc(self) -> None:
+        """Update internal dictionary by subclass-specific rules."""
+        super().recalc()
+        self._fields['conditions'] = self.as_ids(self.lib_all('Condition'))
+
+
+class ExpectedGetCondition(Expected):
+    """Builder of expectations for GET /condition."""
+    _on_empty_make_temp = ('Condition', 'cond_as_dict')
+
+    def __init__(self, id_: int, *args: Any, **kwargs: Any) -> None:
+        self._fields = {'condition': id_}
+        super().__init__(*args, **kwargs)
+
+    def recalc(self) -> None:
+        """Update internal dictionary by subclass-specific rules."""
+        super().recalc()
+        for p_field, c_field in [('conditions', 'enabled_processes'),
+                                 ('disables', 'disabling_processes'),
+                                 ('blockers', 'disabled_processes'),
+                                 ('enables', 'enabling_processes')]:
+            self._fields[c_field] = self.as_ids([
+                p for p in self.lib_all('Process')
+                if self._fields['condition'] in p[p_field]])
+        self._fields['is_new'] = False
+
+
 class TestsWithServer(TestCaseWithServer):
     """Module tests against our HTTP server/handler (and database)."""
 
-    @classmethod
-    def GET_condition_dict(cls, cond: dict[str, object]) -> dict[str, object]:
-        """Return JSON of GET /condition to expect."""
-        return {'is_new': False,
-                'enabled_processes': [],
-                'disabled_processes': [],
-                'enabling_processes': [],
-                'disabling_processes': [],
-                'condition': cond['id'],
-                '_library': {'Condition': cls.as_refs([cond])}}
-
-    @classmethod
-    def GET_conditions_dict(cls, conds: list[dict[str, object]]
-                            ) -> dict[str, object]:
-        """Return JSON of GET /conditions to expect."""
-        library = {'Condition': cls.as_refs(conds)} if conds else {}
-        d: dict[str, object] = {'conditions': cls.as_id_list(conds),
-                                'sort_by': 'title',
-                                'pattern': '',
-                                '_library': library}
-        return d
-
     def test_fail_POST_condition(self) -> None:
         """Test malformed/illegal POST /condition requests."""
-        # check invalid POST payloads
+        # check incomplete POST payloads
         url = '/condition'
         self.check_post({}, url, 400)
         self.check_post({'title': ''}, url, 400)
-        self.check_post({'title': '', 'description': ''}, url, 400)
-        self.check_post({'title': '', 'is_active': False}, url, 400)
-        self.check_post({'description': '', 'is_active': False}, url, 400)
+        self.check_post({'title': '', 'is_active': 0}, url, 400)
+        self.check_post({'description': '', 'is_active': 0}, url, 400)
         # check valid POST payload on bad paths
-        valid_payload = {'title': '', 'description': '', 'is_active': False}
+        valid_payload = {'title': '', 'description': '', 'is_active': 0}
         self.check_post(valid_payload, '/condition?id=foo', 400)
 
-    def test_do_POST_condition(self) -> None:
+    def test_POST_condition(self) -> None:
         """Test (valid) POST /condition and its effect on GET /condition[s]."""
         """Test (valid) POST /condition and its effect on GET /condition[s]."""
-        # test valid POST's effect on …
-        post = {'title': 'foo', 'description': 'oof', 'is_active': False}
-        self.check_post(post, '/condition', 302, '/condition?id=1')
-        # … single /condition
-        cond = self.cond_as_dict(titles=['foo'], descriptions=['oof'])
-        assert isinstance(cond['_versioned'], dict)
-        expected_single = self.GET_condition_dict(cond)
-        self.check_json_get('/condition?id=1', expected_single)
-        # … full /conditions
-        expected_all = self.GET_conditions_dict([cond])
-        self.check_json_get('/conditions', expected_all)
+        exp_single = ExpectedGetCondition(1)
+        exp_all = ExpectedGetConditions()
+        all_exps = [exp_single, exp_all]
+        # test valid POST's effect on single /condition and full /conditions
+        post = {'title': 'foo', 'description': 'oof', 'is_active': 0}
+        self.post_exp_cond(all_exps, 1, post, '', '?id=1')
+        self.check_json_get('/condition?id=1', exp_single)
+        self.check_json_get('/conditions', exp_all)
         # test (no) effect of invalid POST to existing Condition on /condition
         self.check_post({}, '/condition?id=1', 400)
-        self.check_json_get('/condition?id=1', expected_single)
+        self.check_json_get('/condition?id=1', exp_single)
         # test effect of POST changing title and activeness
-        post = {'title': 'bar', 'description': 'oof', 'is_active': True}
-        self.check_post(post, '/condition?id=1', 302)
-        cond['_versioned']['title'][1] = 'bar'
-        cond['is_active'] = True
-        self.check_json_get('/condition?id=1', expected_single)
-        # test deletion POST's effect on …
-        self.check_post({'delete': ''}, '/condition?id=1', 302, '/conditions')
-        cond = self.cond_as_dict()
-        assert isinstance(expected_single['_library'], dict)
-        expected_single['_library']['Condition'] = self.as_refs([cond])
-        self.check_json_get('/condition?id=1', expected_single)
-        # … full /conditions
-        expected_all['conditions'] = []
-        expected_all['_library'] = {}
-        self.check_json_get('/conditions', expected_all)
-
-    def test_do_GET_condition(self) -> None:
+        post = {'title': 'bar', 'description': 'oof', 'is_active': 1}
+        self.post_exp_cond(all_exps, 1, post, '?id=1', '?id=1')
+        self.check_json_get('/condition?id=1', exp_single)
+        self.check_json_get('/conditions', exp_all)
+        # test deletion POST's effect, turning single /condition?id=1 into an
+        # empty one and full /conditions into an empty list
+        self.post_exp_cond(all_exps, 1, {'delete': ''}, '?id=1', 's')
+        self.check_json_get('/condition?id=1', exp_single)
+        self.check_json_get('/conditions', exp_all)
+
+    def test_GET_condition(self) -> None:
         """More GET /condition testing, especially for Process relations."""
         # check expected default status codes
         self.check_get_defaults('/condition')
         # make Condition and two Processes that among them establish all
         """More GET /condition testing, especially for Process relations."""
         # check expected default status codes
         self.check_get_defaults('/condition')
         # make Condition and two Processes that among them establish all
-        # possible ConditionsRelations to it, …
-        cond_post = {'title': 'foo', 'description': 'oof', 'is_active': False}
-        self.check_post(cond_post, '/condition', 302, '/condition?id=1')
-        proc1_post = {'title': 'A', 'description': '', 'effort': 1.0,
+        # possible ConditionsRelations to it, check /condition displays all
+        exp = ExpectedGetCondition(1)
+        cond_post = {'title': 'foo', 'description': 'oof', 'is_active': 0}
+        self.post_exp_cond([exp], 1, cond_post, '', '?id=1')
+        proc1_post = {'title': 'A', 'description': '', 'effort': 1.1,
                       'conditions': [1], 'disables': [1]}
-        proc2_post = {'title': 'B', 'description': '', 'effort': 1.0,
+        proc2_post = {'title': 'B', 'description': '', 'effort': 0.9,
                       'enables': [1], 'blockers': [1]}
-        self.post_process(1, proc1_post)
-        self.post_process(2, proc2_post)
-        # … then check /condition displays all these properly.
-        cond = self.cond_as_dict(titles=['foo'], descriptions=['oof'])
-        assert isinstance(cond['id'], int)
-        proc1 = self.proc_as_dict(conditions=[cond['id']],
-                                  disables=[cond['id']])
-        proc2 = self.proc_as_dict(2, 'B',
-                                  blockers=[cond['id']],
-                                  enables=[cond['id']])
-        expected = self.GET_condition_dict(cond)
-        assert isinstance(expected['_library'], dict)
-        expected['enabled_processes'] = self.as_id_list([proc1])
-        expected['disabled_processes'] = self.as_id_list([proc2])
-        expected['enabling_processes'] = self.as_id_list([proc2])
-        expected['disabling_processes'] = self.as_id_list([proc1])
-        expected['_library']['Process'] = self.as_refs([proc1, proc2])
-        self.check_json_get('/condition?id=1', expected)
-
-    def test_do_GET_conditions(self) -> None:
+        self.post_exp_process([exp], proc1_post, 1)
+        self.post_exp_process([exp], proc2_post, 2)
+        self.check_json_get('/condition?id=1', exp)
+
+    def test_GET_conditions(self) -> None:
         """Test GET /conditions."""
         # test empty result on empty DB, default-settings on empty params
         """Test GET /conditions."""
         # test empty result on empty DB, default-settings on empty params
-        expected = self.GET_conditions_dict([])
-        self.check_json_get('/conditions', expected)
-        # test on meaningless non-empty params (incl. entirely un-used key),
+        exp = ExpectedGetConditions()
+        self.check_json_get('/conditions', exp)
+        # test ignorance of meaningless non-empty params (incl. unknown key),
         # that 'sort_by' default to 'title' (even if set to something else, as
         # long as without handler) and 'pattern' get preserved
-        expected['pattern'] = 'bar'  # preserved despite zero effect!
-        url = '/conditions?sort_by=foo&pattern=bar&foo=x'
-        self.check_json_get(url, expected)
+        exp.set('pattern', 'bar')
+        exp.set('sort_by', 'title')  # for clarity (already default)
+        self.check_json_get('/conditions?sort_by=foo&pattern=bar&foo=x', exp)
         # test non-empty result, automatic (positive) sorting by title
-        post1 = {'is_active': False, 'title': 'foo', 'description': 'oof'}
-        post2 = {'is_active': False, 'title': 'bar', 'description': 'rab'}
-        post3 = {'is_active': True, 'title': 'baz', 'description': 'zab'}
-        self.check_post(post1, '/condition', 302, '/condition?id=1')
-        self.check_post(post2, '/condition', 302, '/condition?id=2')
-        self.check_post(post3, '/condition', 302, '/condition?id=3')
-        cond1 = self.cond_as_dict(1, False, ['foo'], ['oof'])
-        cond2 = self.cond_as_dict(2, False, ['bar'], ['rab'])
-        cond3 = self.cond_as_dict(3, True, ['baz'], ['zab'])
-        expected = self.GET_conditions_dict([cond2, cond3, cond1])
-        self.check_json_get('/conditions', expected)
+        exp.set('pattern', '')
+        post_cond1 = {'is_active': 0, 'title': 'foo', 'description': 'oof'}
+        post_cond2 = {'is_active': 0, 'title': 'bar', 'description': 'rab'}
+        post_cond3 = {'is_active': 1, 'title': 'baz', 'description': 'zab'}
+        for i, post in enumerate([post_cond1, post_cond2, post_cond3]):
+            self.post_exp_cond([exp], i+1, post, '', f'?id={i+1}')
+        self.check_filter(exp, 'conditions', 'sort_by', 'title', [2, 3, 1])
         # test other sortings
-        # (NB: by .is_active has two items of =False, their order currently
-        # is not explicitly made predictable, so mail fail until we do)
-        expected['conditions'] = self.as_id_list([cond1, cond3, cond2])
-        expected['sort_by'] = '-title'
-        self.check_json_get('/conditions?sort_by=-title', expected)
-        expected['conditions'] = self.as_id_list([cond1, cond2, cond3])
-        expected['sort_by'] = 'is_active'
-        self.check_json_get('/conditions?sort_by=is_active', expected)
-        expected['conditions'] = self.as_id_list([cond3, cond1, cond2])
-        expected['sort_by'] = '-is_active'
-        self.check_json_get('/conditions?sort_by=-is_active', expected)
+        self.check_filter(exp, 'conditions', 'sort_by', '-title', [1, 3, 2])
+        self.check_filter(exp, 'conditions', 'sort_by', 'is_active', [1, 2, 3])
+        self.check_filter(exp, 'conditions', 'sort_by', '-is_active',
+                          [3, 2, 1])
         # test pattern matching on title
-        expected = self.GET_conditions_dict([cond2, cond3])
-        expected['pattern'] = 'ba'
-        self.check_json_get('/conditions?pattern=ba', expected)
+        exp.set('sort_by', 'title')
+        exp.lib_del('Condition', 1)
+        self.check_filter(exp, 'conditions', 'pattern', 'ba', [2, 3])
         # test pattern matching on description
-        assert isinstance(expected['_library'], dict)
-        expected['conditions'] = self.as_id_list([cond1])
-        expected['_library']['Condition'] = self.as_refs([cond1])
-        expected['pattern'] = 'oo'
-        self.check_json_get('/conditions?pattern=oo', expected)
+        exp.lib_wipe('Condition')
+        exp.set_cond_from_post(1, post_cond1)
+        self.check_filter(exp, 'conditions', 'pattern', 'of', [1])
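
The tests above rely on an Expected helper class from tests/utils.py whose
definition is outside this excerpt. For orientation only, here is a minimal
sketch of the interface those tests appear to use; the method names are taken
from the calls in the diff, while the field and library handling is assumed
rather than the project's actual implementation:

from typing import Any


class ExpectedSketch:
    """Assumed shape of tests.utils.Expected, inferred from its use above."""
    _default_dict: dict[str, Any] = {}

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        # extra args tolerated only for subclass pass-through in this sketch;
        # subclasses may have pre-seeded self._fields before calling super()
        self._fields: dict[str, Any] = getattr(self, '_fields', {})
        for key, value in self._default_dict.items():
            self._fields.setdefault(key, value)
        self._library: dict[str, dict[Any, dict[str, Any]]] = {}
        self._forced: dict[str, Any] = {}

    def set(self, name: str, value: Any) -> None:
        """Set a top-level field of the expected JSON."""
        self._fields[name] = value

    def force(self, name: str, value: Any) -> None:
        """Pin a field so recalc() keeps it as given."""
        self._forced[name] = value

    def lib_set(self, category: str, items: list[dict[str, Any]]) -> None:
        """Record items in the expected '_library', keyed by their 'id'."""
        self._library.setdefault(category, {})
        for item in items:
            self._library[category][item['id']] = item

    def lib_get(self, category: str, id_: Any) -> dict[str, Any]:
        """Return a recorded library item (for mutation by tests)."""
        return self._library[category][id_]

    def lib_del(self, category: str, id_: Any) -> None:
        """Drop one item from the expected library."""
        del self._library[category][id_]

    def lib_wipe(self, category: str) -> None:
        """Drop a whole category from the expected library."""
        self._library.pop(category, None)

    def lib_all(self, category: str) -> list[dict[str, Any]]:
        """Return all recorded items of a category."""
        return list(self._library.get(category, {}).values())

    @staticmethod
    def as_ids(items: list[dict[str, Any]]) -> list[Any]:
        """Map library items to their 'id' values."""
        return [item['id'] for item in items]

    def recalc(self) -> None:
        """Re-derive dependent fields; subclasses extend this."""
        self._fields.update(self._forced)

Subclasses such as ExpectedGetCondition then seed self._fields in __init__ and
extend recalc() to derive response fields from the recorded library, as the
diff above shows.
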
index 8e3768c660937b5ba32078ee13d03cf275aa57fa..aac150b91e62ac56a7ed6837172fa0b3fd16c797 100644 (file)
 """Test Days module."""
 """Test Days module."""
-from unittest import TestCase
-from datetime import datetime
-from typing import Callable
-from tests.utils import TestCaseWithDB, TestCaseWithServer
-from plomtask.dating import date_in_n_days
+from datetime import datetime, timedelta
+from typing import Any
+from tests.utils import (TestCaseSansDB, TestCaseWithDB, TestCaseWithServer,
+                         Expected)
+from plomtask.dating import date_in_n_days as tested_date_in_n_days
 from plomtask.days import Day
 
+# so far the same as plomtask.dating.DATE_FORMAT, but for testing purposes we
+# want to explicitly state our expectations here independently from that
+TESTING_DATE_FORMAT = '%Y-%m-%d'
 
 
-class TestsSansDB(TestCase):
+
+def _testing_date_in_n_days(n: int) -> str:
+    """Return in TEST_DATE_FORMAT date from today + n days.
+
+    As with TESTING_DATE_FORMAT, we assume this equals the original code
+    at plomtask.dating.date_in_n_days, but want to state our expectations
+    explicitly to rule out importing issues from the original.
+    """
+    date = datetime.now() + timedelta(days=n)
+    return date.strftime(TESTING_DATE_FORMAT)
+
+
+class TestsSansDB(TestCaseSansDB):
     """Days module tests not requiring DB setup."""
     """Days module tests not requiring DB setup."""
-    legal_ids = ['2024-01-01']
-    illegal_ids = ['foo', '2024-02-30', '2024-02-01 23:00:00']
+    checked_class = Day
+    legal_ids = ['2024-01-01', '2024-02-29']
+    illegal_ids = ['foo', '2023-02-29', '2024-02-30', '2024-02-01 23:00:00']
+
+    def test_date_in_n_days(self) -> None:
+        """Test dating.date_in_n_days"""
+        for n in [-100, -2, -1, 0, 1, 2, 1000]:
+            date = datetime.now() + timedelta(days=n)
+            self.assertEqual(tested_date_in_n_days(n),
+                             date.strftime(TESTING_DATE_FORMAT))
 
     def test_Day_datetime_weekday_neighbor_dates(self) -> None:
-        """Test Day's date parsing."""
+        """Test Day's date parsing and neighbourhood resolution."""
         self.assertEqual(datetime(2024, 5, 1), Day('2024-05-01').datetime)
         self.assertEqual('Sunday', Day('2024-03-17').weekday)
         self.assertEqual('March', Day('2024-03-17').month_name)
         self.assertEqual('2023-12-31', Day('2024-01-01').prev_date)
         self.assertEqual('2023-03-01', Day('2023-02-28').next_date)
 
-    def test_Day_sorting(self) -> None:
-        """Test sorting by .__lt__ and Day.__eq__."""
-        day1 = Day('2024-01-01')
-        day2 = Day('2024-01-02')
-        day3 = Day('2024-01-03')
-        days = [day3, day1, day2]
-        self.assertEqual(sorted(days), [day1, day2, day3])
-
 
 class TestsWithDB(TestCaseWithDB):
     """Tests requiring DB, but not server setup."""
     checked_class = Day
     default_ids = ('2024-01-01', '2024-01-02', '2024-01-03')
 
-    def test_Day_by_date_range_filled(self) -> None:
-        """Test Day.by_date_range_filled."""
-        date1, date2, date3 = self.default_ids
-        day1 = Day(date1)
-        day2 = Day(date2)
-        day3 = Day(date3)
-        for day in [day1, day2, day3]:
-            day.save(self.db_conn)
-        # check date range includes limiter days
-        self.assertEqual(Day.by_date_range_filled(self.db_conn, date1, date3),
-                         [day1, day2, day3])
-        # check first date range value excludes what's earlier
-        self.assertEqual(Day.by_date_range_filled(self.db_conn, date2, date3),
-                         [day2, day3])
-        # check second date range value excludes what's later
-        self.assertEqual(Day.by_date_range_filled(self.db_conn, date1, date2),
-                         [day1, day2])
-        # check swapped (impossible) date range returns emptiness
-        self.assertEqual(Day.by_date_range_filled(self.db_conn, date3, date1),
-                         [])
-        # check fill_gaps= instantiates unsaved dates within date range
-        # (but does not store them)
-        day5 = Day('2024-01-05')
-        day6 = Day('2024-01-06')
-        day6.save(self.db_conn)
-        day7 = Day('2024-01-07')
-        self.assertEqual(Day.by_date_range_filled(self.db_conn,
-                                                  day5.date, day7.date),
-                         [day5, day6, day7])
-        self.check_identity_with_cache_and_db([day1, day2, day3, day6])
-        # check 'today' is interpreted as today's date
-        today = Day(date_in_n_days(0))
-        self.assertEqual(Day.by_date_range_filled(self.db_conn,
-                                                  'today', 'today'),
-                         [today])
-        prev_day = Day(date_in_n_days(-1))
-        next_day = Day(date_in_n_days(1))
-        self.assertEqual(Day.by_date_range_filled(self.db_conn,
-                                                  'yesterday', 'tomorrow'),
-                         [prev_day, today, next_day])
+    def test_Day_by_date_range_with_limits(self) -> None:
+        """Test .by_date_range_with_limits."""
+        self.check_by_date_range_with_limits('id', set_id_field=False)
+
+    def test_Day_with_filled_gaps(self) -> None:
+        """Test .with_filled_gaps."""
+
+        def test(range_indexes: tuple[int, int], indexes_to_provide: list[int]
+                 ) -> None:
+            start_i, end_i = range_indexes
+            days_provided = []
+            days_expected = days_sans_comment[:]
+            for i in indexes_to_provide:
+                day_with_comment = days_with_comment[i]
+                days_provided += [day_with_comment]
+                days_expected[i] = day_with_comment
+            days_expected = days_expected[start_i:end_i+1]
+            start, end = dates[start_i], dates[end_i]
+            days_result = self.checked_class.with_filled_gaps(days_provided,
+                                                              start, end)
+            self.assertEqual(days_result, days_expected)
+
+        # as provided Days we use those from days_with_comment, telling them
+        # apart from same-dated mere filler Days by the latter's lack of a
+        # comment (i.e. identity with the respective days_sans_comment entry)
+        dates = [f'2024-02-0{n+1}' for n in range(9)]
+        days_with_comment = [Day(date, comment=date[-1:]) for date in dates]
+        days_sans_comment = [Day(date, comment='') for date in dates]
+        # check provided Days recognizable in (full-range) interval
+        test((0, 8), [0, 4, 8])
+        # check limited range, but limiting Days provided
+        test((2, 6), [2, 5, 6])
+        # check Days within range but beyond provided Days also filled in
+        test((1, 7), [2, 5])
+        # check provided Days beyond range ignored
+        test((3, 5), [1, 2, 4, 6, 7])
+        # check inversion of start_date and end_date returns empty list
+        test((5, 3), [2, 4, 6])
+        # check empty provision still creates filler elements in interval
+        test((3, 5), [])
+        # check single-element selection creating only filler beyond provided
+        test((1, 1), [2, 4, 6])
+        # check (un-saved) filler Days don't show up in cache or DB
+        # dates = [f'2024-02-0{n}' for n in range(1, 6)]
+        day = Day(dates[3])
+        day.save(self.db_conn)
+        self.checked_class.with_filled_gaps([day], dates[0], dates[-1])
+        self.check_identity_with_cache_and_db([day])
+        # check 'today', 'yesterday', 'tomorrow' are interpreted
+        yesterday = Day('yesterday')
+        tomorrow = Day('tomorrow')
+        today = Day('today')
+        result = self.checked_class.with_filled_gaps([today], 'yesterday',
+                                                     'tomorrow')
+        self.assertEqual(result, [yesterday, today, tomorrow])
+
+
+class ExpectedGetCalendar(Expected):
+    """Builder of expectations for GET /calendar."""
+
+    def __init__(self, start: int, end: int, *args: Any, **kwargs: Any
+                 ) -> None:
+        self._fields = {'start': _testing_date_in_n_days(start),
+                        'end': _testing_date_in_n_days(end),
+                        'today': _testing_date_in_n_days(0)}
+        self._fields['days'] = [_testing_date_in_n_days(i)
+                                for i in range(start, end+1)]
+        super().__init__(*args, **kwargs)
+        for date in self._fields['days']:
+            self.lib_set('Day', [self.day_as_dict(date)])
+
+
+class ExpectedGetDay(Expected):
+    """Builder of expectations for GET /day."""
+    _default_dict = {'make_type': 'full'}
+    _on_empty_make_temp = ('Day', 'day_as_dict')
+
+    def __init__(self, date: str, *args: Any, **kwargs: Any) -> None:
+        self._fields = {'day': date}
+        super().__init__(*args, **kwargs)
+
+    def recalc(self) -> None:
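+        """Update internal dictionary by subclass-specific rules."""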
+        super().recalc()
+        todos = [t for t in self.lib_all('Todo')
+                 if t['date'] == self._fields['day']]
+        self.lib_get('Day', self._fields['day'])['todos'] = self.as_ids(todos)
+        self._fields['top_nodes'] = [
+                {'children': [], 'seen': 0, 'todo': todo['id']}
+                for todo in todos]
+        for todo in todos:
+            proc = self.lib_get('Process', todo['process_id'])
+            for title in ['conditions', 'enables', 'blockers', 'disables']:
+                todo[title] = proc[title]
+        conds_present = set()
+        for todo in todos:
+            for title in ['conditions', 'enables', 'blockers', 'disables']:
+                for cond_id in todo[title]:
+                    conds_present.add(cond_id)
+        self._fields['conditions_present'] = list(conds_present)
+        for prefix in ['en', 'dis']:
+            blers = {}
+            for cond_id in conds_present:
+                blers[str(cond_id)] = self.as_ids(
+                        [t for t in todos if cond_id in t[f'{prefix}ables']])
+            self._fields[f'{prefix}ablers_for'] = blers
+        self._fields['processes'] = self.as_ids(self.lib_all('Process'))
 
 
 class TestsWithServer(TestCaseWithServer):
     """Tests against our HTTP server/handler (and database)."""
 
-    @classmethod
-    def GET_day_dict(cls, date: str) -> dict[str, object]:
-        """Return JSON of GET /day to expect."""
-        # day: dict[str, object] = {'id': date, 'comment': '', 'todos': []}
-        day = cls._day_as_dict(date)
-        d: dict[str, object] = {'day': date,
-                                'top_nodes': [],
-                                'make_type': '',
-                                'enablers_for': {},
-                                'disablers_for': {},
-                                'conditions_present': [],
-                                'processes': [],
-                                '_library': {'Day': cls.as_refs([day])}}
-        return d
-
-    @classmethod
-    def GET_calendar_dict(cls, start: int, end: int) -> dict[str, object]:
-        """Return JSON of GET /calendar to expect."""
-        today_date = date_in_n_days(0)
-        start_date = date_in_n_days(start)
-        end_date = date_in_n_days(end)
-        dates = [date_in_n_days(i) for i in range(start, end+1)]
-        days = [cls._day_as_dict(d) for d in dates]
-        library = {'Day': cls.as_refs(days)} if len(days) > 0 else {}
-        return {'today': today_date, 'start': start_date, 'end': end_date,
-                'days': dates, '_library': library}
-
-    @staticmethod
-    def _todo_as_dict(id_: int = 1,
-                      process_id: int = 1,
-                      date: str = '2024-01-01',
-                      conditions: None | list[int] = None,
-                      disables: None | list[int] = None,
-                      blockers: None | list[int] = None,
-                      enables: None | list[int] = None
-                      ) -> dict[str, object]:
-        """Return JSON of Todo to expect."""
-        # pylint: disable=too-many-arguments
-        d = {'id': id_,
-             'date': date,
-             'process_id': process_id,
-             'is_done': False,
-             'calendarize': False,
-             'comment': '',
-             'children': [],
-             'parents': [],
-             'effort': None,
-             'conditions': conditions if conditions else [],
-             'disables': disables if disables else [],
-             'blockers': blockers if blockers else [],
-             'enables': enables if enables else []}
-        return d
-
-    @staticmethod
-    def _todo_node_as_dict(todo_id: int) -> dict[str, object]:
-        """Return JSON of TodoNode to expect."""
-        return {'children': [], 'seen': False, 'todo': todo_id}
-
-    @staticmethod
-    def _day_as_dict(date: str) -> dict[str, object]:
-        return {'id': date, 'comment': '', 'todos': []}
-
-    @staticmethod
-    def _post_batch(list_of_args: list[list[object]],
-                    names_of_simples: list[str],
-                    names_of_versioneds: list[str],
-                    f_as_dict: Callable[..., dict[str, object]],
-                    f_to_post: Callable[..., None | dict[str, object]]
-                    ) -> list[dict[str, object]]:
-        """Post expected=f_as_dict(*args) as input to f_to_post, for many."""
-        expecteds = []
-        for args in list_of_args:
-            expecteds += [f_as_dict(*args)]
-        for expected in expecteds:
-            assert isinstance(expected['_versioned'], dict)
-            post = {}
-            for name in names_of_simples:
-                post[name] = expected[name]
-            for name in names_of_versioneds:
-                post[name] = expected['_versioned'][name][0]
-            f_to_post(expected['id'], post)
-        return expecteds
-
-    def _post_day(self, params: str = '',
-                  form_data: None | dict[str, object] = None,
-                  redir_to: str = '',
-                  status: int = 302,
-                  ) -> None:
-        """POST /day?{params} with form_data."""
-        if not form_data:
-            form_data = {'day_comment': '', 'make_type': ''}
-        target = f'/day?{params}'
-        if not redir_to:
-            redir_to = f'{target}&make_type={form_data["make_type"]}'
-        self.check_post(form_data, target, status, redir_to)
-
     def test_basic_GET_day(self) -> None:
         """Test basic (no Processes/Conditions/Todos) GET /day basics."""
         # check illegal date parameters
+        self.check_get('/day?date=', 400)
         self.check_get('/day?date=foo', 400)
         self.check_get('/day?date=2024-02-30', 400)
         # check undefined day
-        date = date_in_n_days(0)
-        expected = self.GET_day_dict(date)
-        self.check_json_get('/day', expected)
-        # NB: GET ?date="today"/"yesterday"/"tomorrow" in test_basic_POST_day
-        # check 'make_type' GET parameter affects immediate reply, but …
+        date = _testing_date_in_n_days(0)
+        exp = ExpectedGetDay(date)
+        self.check_json_get('/day', exp)
+        # check defined day, with and without make_type parameter
         date = '2024-01-01'
         date = '2024-01-01'
-        expected = self.GET_day_dict(date)
-        expected['make_type'] = 'bar'
-        self.check_json_get(f'/day?date={date}&make_type=bar', expected)
-        # … not any following, …
-        expected['make_type'] = ''
-        self.check_json_get(f'/day?date={date}', expected)
-        # … not even when part of a POST request
-        post: dict[str, object] = {'day_comment': '', 'make_type': 'foo'}
-        self._post_day(f'date={date}', post)
-        self.check_json_get(f'/day?date={date}', expected)
+        exp = ExpectedGetDay(date)
+        exp.set('make_type', 'bar')
+        self.check_json_get(f'/day?date={date}&make_type=bar', exp)
+        # check parsing of 'yesterday', 'today', 'tomorrow'
+        for name, dist in [('yesterday', -1), ('today', 0), ('tomorrow', +1)]:
+            date = _testing_date_in_n_days(dist)
+            exp = ExpectedGetDay(date)
+            self.check_json_get(f'/day?date={name}', exp)
 
     def test_fail_POST_day(self) -> None:
         """Test malformed/illegal POST /day requests."""
@@ -206,7 +189,7 @@ class TestsWithServer(TestCaseWithServer):
         self.check_post({'day_comment': ''}, url, 400)
         self.check_post({'make_type': ''}, url, 400)
         # to next check illegal new_todo values, we need an actual Process
-        self.post_process(1)
+        self.post_exp_process([], {}, 1)
         # check illegal new_todo values
         post: dict[str, object]
         post = {'make_type': '', 'day_comment': '', 'new_todo': ['foo']}
@@ -254,122 +237,175 @@ class TestsWithServer(TestCaseWithServer):
         self.check_post(post, '/day?date=foo', 400)
 
     def test_basic_POST_day(self) -> None:
-        """Test basic (no Todos) POST /day.
+        """Test basic (no Processes/Conditions/Todos) POST /day.
 
-        Check POST (& GET!) requests properly parse 'today', 'tomorrow',
-        'yesterday', and actual date strings;
+        Check POST requests properly parse 'today', 'tomorrow', 'yesterday',
+        and actual date strings;
         preserve 'make_type' setting in redirect even if nonsensical;
         preserve 'make_type' setting in redirect even if nonsensical;
-        and store 'day_comment'
+        and store 'day_comment'.
         """
         for name, dist, test_str in [('2024-01-01', None, 'a'),
                                      ('today', 0, 'b'),
                                      ('yesterday', -1, 'c'),
                                      ('tomorrow', +1, 'd')]:
         """
         for name, dist, test_str in [('2024-01-01', None, 'a'),
                                      ('today', 0, 'b'),
                                      ('yesterday', -1, 'c'),
                                      ('tomorrow', +1, 'd')]:
-            date = name if dist is None else date_in_n_days(dist)
+            date = name if dist is None else _testing_date_in_n_days(dist)
             post = {'day_comment': test_str, 'make_type': f'x:{test_str}'}
             post_url = f'/day?date={name}'
             redir_url = f'{post_url}&make_type={post["make_type"]}'
             self.check_post(post, post_url, 302, redir_url)
-            expected = self.GET_day_dict(date)
-            assert isinstance(expected['_library'], dict)
-            expected['_library']['Day'][date]['comment'] = test_str
-            self.check_json_get(post_url, expected)
+            exp = ExpectedGetDay(date)
+            exp.set_day_from_post(date, post)
+            self.check_json_get(post_url, exp)
 
     def test_GET_day_with_processes_and_todos(self) -> None:
         """Test GET /day displaying Processes and Todos (no trees)."""
         date = '2024-01-01'
-        # check Processes get displayed in ['processes'] and ['_library']
-        procs_data = [[1, 'foo', 'oof', 1.1], [2, 'bar', 'rab', 0.9]]
-        procs_expected = self._post_batch(procs_data, [],
-                                          ['title', 'description', 'effort'],
-                                          self.proc_as_dict, self.post_process)
-        expected = self.GET_day_dict(date)
-        assert isinstance(expected['_library'], dict)
-        expected['processes'] = self.as_id_list(procs_expected)
-        expected['_library']['Process'] = self.as_refs(procs_expected)
-        self._post_day(f'date={date}')
-        self.check_json_get(f'/day?date={date}', expected)
+        exp = ExpectedGetDay(date)
+        # check Processes get displayed in ['processes'] and ['_library'],
+        # even without any Todos referencing them
+        proc_posts = [{'title': 'foo', 'description': 'oof', 'effort': 1.1},
+                      {'title': 'bar', 'description': 'rab', 'effort': 0.9}]
+        for i, proc_post in enumerate(proc_posts):
+            self.post_exp_process([exp], proc_post, i+1)
+        self.check_json_get(f'/day?date={date}', exp)
         # post Todos of either process and check their display
-        post_day: dict[str, object]
-        post_day = {'day_comment': '', 'make_type': '', 'new_todo': [1, 2]}
-        todos = [self._todo_as_dict(1, 1, date),
-                 self._todo_as_dict(2, 2, date)]
-        expected['_library']['Todo'] = self.as_refs(todos)
-        expected['_library']['Day'][date]['todos'] = self.as_id_list(todos)
-        nodes = [self._todo_node_as_dict(1), self._todo_node_as_dict(2)]
-        expected['top_nodes'] = nodes
-        self._post_day(f'date={date}', post_day)
-        self.check_json_get(f'/day?date={date}', expected)
+        self.post_exp_day([exp], {'new_todo': [1, 2]})
+        self.check_json_get(f'/day?date={date}', exp)
+        # test malformed Todo manipulation posts
+        post_day = {'day_comment': '', 'make_type': '', 'comment': [''],
+                    'new_todo': [], 'done': [1], 'effort': [2.3]}
+        self.check_post(post_day, f'/day?date={date}', 400)  # no todo_id
+        post_day['todo_id'] = [2]  # not identifying Todo referred to by done
+        self.check_post(post_day, f'/day?date={date}', 400)
+        post_day['todo_id'] = [1, 2]  # imply range beyond that of effort etc.
+        self.check_post(post_day, f'/day?date={date}', 400)
+        post_day['comment'] = ['FOO', '']
+        self.check_post(post_day, f'/day?date={date}', 400)
+        post_day['effort'] = [2.3, '']
+        post_day['comment'] = ['']
+        self.check_post(post_day, f'/day?date={date}', 400)
         # add a comment to one Todo and set the other's doneness and effort
-        post_day = {'day_comment': '', 'make_type': '', 'new_todo': [],
-                    'todo_id': [1, 2], 'done': [2], 'comment': ['FOO', ''],
-                    'effort': [2.3, '']}
-        expected['_library']['Todo']['1']['comment'] = 'FOO'
-        expected['_library']['Todo']['1']['effort'] = 2.3
-        expected['_library']['Todo']['2']['is_done'] = True
-        self._post_day(f'date={date}', post_day)
-        self.check_json_get(f'/day?date={date}', expected)
+        post_day['comment'] = ['FOO', '']
+        self.post_exp_day([exp], post_day)
+        self.check_json_get(f'/day?date={date}', exp)
+        # invert effort and comment between both Todos
+        # (cannot invert doneness, /day only collects positive setting)
+        post_day['comment'] = ['', 'FOO']
+        post_day['effort'] = ['', 2.3]
+        self.post_exp_day([exp], post_day)
+        self.check_json_get(f'/day?date={date}', exp)
+
+    def test_POST_day_todo_make_types(self) -> None:
+        """Test behavior of POST /todo on 'make_type'='full' and 'empty'."""
+        date = '2024-01-01'
+        exp = ExpectedGetDay(date)
+        # create two Processes, with second one step of first one
+        self.post_exp_process([exp], {}, 2)
+        self.post_exp_process([exp], {'new_top_step': 2}, 1)
+        exp.lib_set('ProcessStep', [exp.procstep_as_dict(1, 1, 2, None)])
+        self.check_json_get(f'/day?date={date}', exp)
+        # post Todo of adopting Process, with make_type=full
+        self.post_exp_day([exp], {'make_type': 'full', 'new_todo': [1]})
+        exp.lib_get('Todo', 1)['children'] = [2]
+        exp.lib_set('Todo', [exp.todo_as_dict(2, 2)])
+        top_nodes = [{'todo': 1,
+                      'seen': 0,
+                      'children': [{'todo': 2,
+                                    'seen': 0,
+                                    'children': []}]}]
+        exp.force('top_nodes', top_nodes)
+        self.check_json_get(f'/day?date={date}', exp)
+        # post another Todo of adopting Process, expect to adopt existing
+        self.post_exp_day([exp], {'make_type': 'full', 'new_todo': [1]})
+        exp.lib_set('Todo', [exp.todo_as_dict(3, 1, children=[2])])
+        top_nodes += [{'todo': 3,
+                       'seen': 0,
+                       'children': [{'todo': 2,
+                                     'seen': 1,
+                                     'children': []}]}]
+        exp.force('top_nodes', top_nodes)
+        self.check_json_get(f'/day?date={date}', exp)
+        # post another Todo of adopting Process, make_type=empty
+        self.post_exp_day([exp], {'make_type': 'empty', 'new_todo': [1]})
+        exp.lib_set('Todo', [exp.todo_as_dict(4, 1)])
+        top_nodes += [{'todo': 4,
+                       'seen': 0,
+                       'children': []}]
+        exp.force('top_nodes', top_nodes)
+        self.check_json_get(f'/day?date={date}', exp)
+
+    def test_POST_day_new_todo_order_commutative(self) -> None:
+        """Check that order of 'new_todo' values in POST /day don't matter."""
+        date = '2024-01-01'
+        exp = ExpectedGetDay(date)
+        self.post_exp_process([exp], {}, 2)
+        self.post_exp_process([exp], {'new_top_step': 2}, 1)
+        exp.lib_set('ProcessStep', [exp.procstep_as_dict(1, 1, 2, None)])
+        # make-full-day-post batch of Todos of both Processes in one order …,
+        self.post_exp_day([exp], {'make_type': 'full', 'new_todo': [1, 2]})
+        top_nodes: list[dict[str, Any]] = [{'todo': 1,
+                                            'seen': 0,
+                                            'children': [{'todo': 2,
+                                                          'seen': 0,
+                                                          'children': []}]}]
+        exp.force('top_nodes', top_nodes)
+        exp.lib_get('Todo', 1)['children'] = [2]
+        self.check_json_get(f'/day?date={date}', exp)
+        # … and then in the other, expecting same node tree / relations
+        exp.lib_del('Day', date)
+        date = '2024-01-02'
+        exp.set('day', date)
+        day_post = {'make_type': 'full', 'new_todo': [2, 1]}
+        self.post_exp_day([exp], day_post, date)
+        exp.lib_del('Todo', 1)
+        exp.lib_del('Todo', 2)
+        top_nodes[0]['todo'] = 3  # was: 1
+        top_nodes[0]['children'][0]['todo'] = 4  # was: 2
+        exp.lib_get('Todo', 3)['children'] = [4]
+        self.check_json_get(f'/day?date={date}', exp)
 
     def test_GET_day_with_conditions(self) -> None:
         """Test GET /day displaying Conditions and their relations."""
         date = '2024-01-01'
-        # add Process with Conditions and their Todos, check display
-        conds_data = [[1, False, ['A'], ['a']], [2, True, ['B'], ['b']]]
-        conds_expected = self._post_batch(
-                conds_data, ['is_active'], ['title', 'description'],
-                self.cond_as_dict,
-                lambda x, y: self.check_post(y, f'/condition?id={x}', 302))
-        cond_names = ['conditions', 'disables', 'blockers', 'enables']
-        procs_data = [[1, 'foo', 'oof', 1.1, [1], [1], [2], [2]],
-                      [2, 'bar', 'rab', 0.9, [2], [2], [1], [1]]]
-        procs_expected = self._post_batch(procs_data, cond_names,
-                                          ['title', 'description', 'effort'],
-                                          self.proc_as_dict, self.post_process)
-        expected = self.GET_day_dict(date)
-        assert isinstance(expected['_library'], dict)
-        expected['processes'] = self.as_id_list(procs_expected)
-        expected['_library']['Process'] = self.as_refs(procs_expected)
-        expected['_library']['Condition'] = self.as_refs(conds_expected)
-        self._post_day(f'date={date}')
-        self.check_json_get(f'/day?date={date}', expected)
-        # add Todos in relation to Conditions, check consequences
-        post_day: dict[str, object]
-        post_day = {'day_comment': '', 'make_type': '', 'new_todo': [1, 2]}
-        todos = [self._todo_as_dict(1, 1, date, [1], [1], [2], [2]),
-                 self._todo_as_dict(2, 2, date, [2], [2], [1], [1])]
-        expected['_library']['Todo'] = self.as_refs(todos)
-        expected['_library']['Day'][date]['todos'] = self.as_id_list(todos)
-        nodes = [self._todo_node_as_dict(1), self._todo_node_as_dict(2)]
-        expected['top_nodes'] = nodes
-        expected['disablers_for'] = {'1': [1], '2': [2]}
-        expected['enablers_for'] = {'1': [2], '2': [1]}
-        expected['conditions_present'] = self.as_id_list(conds_expected)
-        self._post_day(f'date={date}', post_day)
-        self.check_json_get(f'/day?date={date}', expected)
+        exp = ExpectedGetDay(date)
+        # check non-referenced Conditions not shown
+        cond_posts = [{'is_active': 0, 'title': 'A', 'description': 'a'},
+                      {'is_active': 1, 'title': 'B', 'description': 'b'}]
+        for i, cond_post in enumerate(cond_posts):
+            self.check_post(cond_post, f'/condition?id={i+1}')
+        self.check_json_get(f'/day?date={date}', exp)
+        # add Processes with Conditions, check Conditions now shown
+        for i, (c1, c2) in enumerate([(1, 2), (2, 1)]):
+            post = {'conditions': [c1], 'disables': [c1],
+                    'blockers': [c2], 'enables': [c2]}
+            self.post_exp_process([exp], post, i+1)
+        for i, cond_post in enumerate(cond_posts):
+            exp.set_cond_from_post(i+1, cond_post)
+        self.check_json_get(f'/day?date={date}', exp)
+        # add Todos in relation to Conditions, check consequence relations
+        self.post_exp_day([exp], {'new_todo': [1, 2]})
+        self.check_json_get(f'/day?date={date}', exp)
 
     def test_GET_calendar(self) -> None:
         """Test GET /calendar responses based on various inputs, DB states."""
         # check illegal date range delimiters
         self.check_get('/calendar?start=foo', 400)
         self.check_get('/calendar?end=foo', 400)
-        # check default range without saved days
-        expected = self.GET_calendar_dict(-1, 366)
-        self.check_json_get('/calendar', expected)
-        self.check_json_get('/calendar?start=&end=', expected)
-        # check named days as delimiters
-        expected = self.GET_calendar_dict(-1, +1)
-        self.check_json_get('/calendar?start=yesterday&end=tomorrow', expected)
+        # check default range for expected selection/order without saved days
+        exp = ExpectedGetCalendar(-1, 366)
+        self.check_json_get('/calendar', exp)
+        self.check_json_get('/calendar?start=&end=', exp)
+        # check with named days as delimiters
+        exp = ExpectedGetCalendar(-1, +1)
+        self.check_json_get('/calendar?start=yesterday&end=tomorrow', exp)
         # check zero-element range
         # check zero-element range
-        expected = self.GET_calendar_dict(+1, 0)
-        self.check_json_get('/calendar?start=tomorrow&end=today', expected)
-        # check saved day shows up in results with proven by its comment
-        post_day: dict[str, object] = {'day_comment': 'foo', 'make_type': ''}
-        date1 = date_in_n_days(-2)
-        self._post_day(f'date={date1}', post_day)
-        start_date = date_in_n_days(-5)
-        end_date = date_in_n_days(+5)
+        exp = ExpectedGetCalendar(+1, 0)
+        self.check_json_get('/calendar?start=tomorrow&end=today', exp)
+        # check saved day shows up in results, proven by its comment
+        start_date = _testing_date_in_n_days(-5)
+        date = _testing_date_in_n_days(-2)
+        end_date = _testing_date_in_n_days(+5)
+        exp = ExpectedGetCalendar(-5, +5)
+        self.post_exp_day([exp], {'day_comment': 'foo'}, date)
         url = f'/calendar?start={start_date}&end={end_date}'
         url = f'/calendar?start={start_date}&end={end_date}'
-        expected = self.GET_calendar_dict(-5, +5)
-        assert isinstance(expected['_library'], dict)
-        expected['_library']['Day'][date1]['comment'] = post_day['day_comment']
-        self.check_json_get(url, expected)
+        self.check_json_get(url, exp)
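
For orientation, the with_filled_gaps behaviour pinned down by
test_Day_with_filled_gaps above amounts to roughly the following sketch. It is
not the project's implementation: parsing of 'yesterday'/'today'/'tomorrow' is
left out, and treating Day's .date attribute as its identifying date string is
an assumption carried over from the older test code replaced in this commit:

from datetime import datetime, timedelta

from plomtask.days import Day


def with_filled_gaps_sketch(provided: list[Day], start_date: str,
                            end_date: str) -> list[Day]:
    """Keep provided Days inside [start_date, end_date], fill the rest.

    Provided Days outside the range are ignored, every date in the range
    without a provided Day gets a fresh (unsaved) filler Day, and an inverted
    range yields an empty list, matching the expectations tested above.
    """
    fmt = '%Y-%m-%d'  # same assumption as TESTING_DATE_FORMAT above
    start = datetime.strptime(start_date, fmt)
    end = datetime.strptime(end_date, fmt)
    if start > end:
        return []
    by_date = {day.date: day for day in provided}  # .date assumed as Day's ID
    result = []
    current = start
    while current <= end:
        date_str = current.strftime(fmt)
        result.append(by_date.get(date_str, Day(date_str)))  # filler if absent
        current += timedelta(days=1)
    return result
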
index a27f0d0a1f8c0a3330be0e6c6906e3a7d6d53fd2..86474c7204cab11d2624a73a88a82554362b7df5 100644 (file)
@@ -8,143 +8,156 @@ from plomtask.exceptions import BadFormatException
 class TestsSansServer(TestCase):
     """Tests that do not require DB setup or a server."""
 
-    def test_InputsParser_get_str(self) -> None:
-        """Test InputsParser.get_str on strict and non-strictk."""
-        parser = InputsParser({}, False)
-        self.assertEqual('', parser.get_str('foo'))
-        self.assertEqual('bar', parser.get_str('foo', 'bar'))
-        parser.strict = True
+    def test_InputsParser_get_str_or_fail(self) -> None:
+        """Test InputsParser.get_str."""
+        parser = InputsParser({})
         with self.assertRaises(BadFormatException):
         with self.assertRaises(BadFormatException):
-            parser.get_str('foo')
+            parser.get_str_or_fail('foo')
+        self.assertEqual('bar', parser.get_str_or_fail('foo', 'bar'))
+        parser = InputsParser({'foo': []})
         with self.assertRaises(BadFormatException):
         with self.assertRaises(BadFormatException):
-            parser.get_str('foo', 'bar')
-        parser = InputsParser({'foo': []}, False)
-        self.assertEqual('bar', parser.get_str('foo', 'bar'))
-        with self.assertRaises(BadFormatException):
-            InputsParser({'foo': []}, True).get_str('foo', 'bar')
-        for strictness in (False, True):
-            parser = InputsParser({'foo': ['baz']}, strictness)
-            self.assertEqual('baz', parser.get_str('foo', 'bar'))
-            parser = InputsParser({'foo': ['baz', 'quux']}, strictness)
-            self.assertEqual('baz', parser.get_str('foo', 'bar'))
+            parser.get_str_or_fail('foo')
+        self.assertEqual('bar', parser.get_str_or_fail('foo', 'bar'))
+        parser = InputsParser({'foo': ['baz']})
+        self.assertEqual('baz', parser.get_str_or_fail('foo', 'bar'))
+        parser = InputsParser({'foo': ['baz', 'quux']})
+        self.assertEqual('baz', parser.get_str_or_fail('foo', 'bar'))
 
-    def test_InputsParser_get_first_strings_starting(self) -> None:
-        """Test InputsParser.get_first_strings_starting [non-]strict."""
-        for strictness in (False, True):
-            parser = InputsParser({}, strictness)
-            self.assertEqual({},
-                             parser.get_first_strings_starting(''))
-            parser = InputsParser({}, strictness)
-            self.assertEqual({},
-                             parser.get_first_strings_starting('foo'))
-            parser = InputsParser({'foo': ['bar']}, strictness)
-            self.assertEqual({'foo': 'bar'},
-                             parser.get_first_strings_starting(''))
-            parser = InputsParser({'x': ['y']}, strictness)
-            self.assertEqual({'x': 'y'},
-                             parser.get_first_strings_starting('x'))
-            parser = InputsParser({'xx': ['y']}, strictness)
-            self.assertEqual({'xx': 'y'},
-                             parser.get_first_strings_starting('x'))
-            parser = InputsParser({'xx': ['y']}, strictness)
-            self.assertEqual({},
-                             parser.get_first_strings_starting('xxx'))
-            d = {'xxx': ['x'], 'xxy': ['y'], 'xyy': ['z']}
-            parser = InputsParser(d, strictness)
-            self.assertEqual({'xxx': 'x', 'xxy': 'y'},
-                             parser.get_first_strings_starting('xx'))
-            d = {'xxx': ['x', 'y', 'z'], 'xxy': ['y', 'z']}
-            parser = InputsParser(d, strictness)
-            self.assertEqual({'xxx': 'x', 'xxy': 'y'},
-                             parser.get_first_strings_starting('xx'))
+    def test_InputsParser_get_str(self) -> None:
+        """Test InputsParser.get_str."""
+        parser = InputsParser({})
+        self.assertEqual(None, parser.get_str('foo'))
+        self.assertEqual('bar', parser.get_str('foo', 'bar'))
+        parser = InputsParser({'foo': []})
+        self.assertEqual(None, parser.get_str('foo'))
+        self.assertEqual('bar', parser.get_str('foo', 'bar'))
+        parser = InputsParser({'foo': ['baz']})
+        self.assertEqual('baz', parser.get_str('foo', 'bar'))
+        parser = InputsParser({'foo': ['baz', 'quux']})
+        self.assertEqual('baz', parser.get_str('foo', 'bar'))
 
 
-        """Test InputsParser.get_int on strict and non-strict."""
-        for strictness in (False, True):
-            with self.assertRaises(BadFormatException):
-                InputsParser({}, strictness).get_int('foo')
-            with self.assertRaises(BadFormatException):
-                InputsParser({'foo': []}, strictness).get_int('foo')
-            with self.assertRaises(BadFormatException):
-                InputsParser({'foo': ['']}, strictness).get_int('foo')
-            with self.assertRaises(BadFormatException):
-                InputsParser({'foo': ['bar']}, strictness).get_int('foo')
-            with self.assertRaises(BadFormatException):
-                InputsParser({'foo': ['0.1']}).get_int('foo')
-            parser = InputsParser({'foo': ['0']}, strictness)
-            self.assertEqual(0, parser.get_int('foo'))
-            parser = InputsParser({'foo': ['17', '23']}, strictness)
-            self.assertEqual(17, parser.get_int('foo'))
+    def test_InputsParser_get_all_of_key_prefixed(self) -> None:
+        """Test InputsParser.get_all_of_key_prefixed."""
+        parser = InputsParser({})
+        self.assertEqual({},
+                         parser.get_all_of_key_prefixed(''))
+        self.assertEqual({},
+                         parser.get_all_of_key_prefixed('foo'))
+        parser = InputsParser({'foo': ['bar']})
+        self.assertEqual({'foo': ['bar']},
+                         parser.get_all_of_key_prefixed(''))
+        parser = InputsParser({'x': ['y', 'z']})
+        self.assertEqual({'': ['y', 'z']},
+                         parser.get_all_of_key_prefixed('x'))
+        parser = InputsParser({'xx': ['y', 'Z']})
+        self.assertEqual({'x': ['y', 'Z']},
+                         parser.get_all_of_key_prefixed('x'))
+        parser = InputsParser({'xx': ['y']})
+        self.assertEqual({},
+                         parser.get_all_of_key_prefixed('xxx'))
+        parser = InputsParser({'xxx': ['x'], 'xxy': ['y'], 'xyy': ['z']})
+        self.assertEqual({'x': ['x'], 'y': ['y']},
+                         parser.get_all_of_key_prefixed('xx'))
+        parser = InputsParser({'xxx': ['x', 'y'], 'xxy': ['y', 'z']})
+        self.assertEqual({'x': ['x', 'y'], 'y': ['y', 'z']},
+                         parser.get_all_of_key_prefixed('xx'))
 
     def test_InputsParser_get_int_or_none(self) -> None:
-        """Test InputsParser.get_int_or_none on strict and non-strict."""
-        for strictness in (False, True):
-            parser = InputsParser({}, strictness)
-            self.assertEqual(None, parser.get_int_or_none('foo'))
-            parser = InputsParser({'foo': []}, strictness)
-            self.assertEqual(None, parser.get_int_or_none('foo'))
-            parser = InputsParser({'foo': ['']}, strictness)
-            self.assertEqual(None, parser.get_int_or_none('foo'))
-            parser = InputsParser({'foo': ['0']}, strictness)
-            self.assertEqual(0, parser.get_int_or_none('foo'))
-            with self.assertRaises(BadFormatException):
-                InputsParser({'foo': ['None']},
-                             strictness).get_int_or_none('foo')
-            with self.assertRaises(BadFormatException):
-                InputsParser({'foo': ['0.1']},
-                             strictness).get_int_or_none('foo')
-            parser = InputsParser({'foo': ['23']}, strictness)
-            self.assertEqual(23, parser.get_int_or_none('foo'))
+        """Test InputsParser.get_int_or_none."""
+        parser = InputsParser({})
+        self.assertEqual(None, parser.get_int_or_none('foo'))
+        parser = InputsParser({'foo': []})
+        self.assertEqual(None, parser.get_int_or_none('foo'))
+        parser = InputsParser({'foo': ['']})
+        self.assertEqual(None, parser.get_int_or_none('foo'))
+        parser = InputsParser({'foo': ['0']})
+        self.assertEqual(0, parser.get_int_or_none('foo'))
+        with self.assertRaises(BadFormatException):
+            InputsParser({'foo': ['None']}).get_int_or_none('foo')
+        with self.assertRaises(BadFormatException):
+            InputsParser({'foo': ['0.1']}).get_int_or_none('foo')
+        parser = InputsParser({'foo': ['23']})
+        self.assertEqual(23, parser.get_int_or_none('foo'))
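
# A minimal sketch of the behavior these tests imply for
# InputsParser.get_int_or_none (an assumption, not the actual implementation):
# absent keys, empty value lists and empty strings yield None, while anything
# else that is not an integer raises BadFormatException.
from plomtask.exceptions import BadFormatException

def get_int_or_none_sketch(inputs: dict[str, list[str]],
                           key: str) -> int | None:
    if key not in inputs or not inputs[key] or inputs[key][0] == '':
        return None
    try:
        return int(inputs[key][0])
    except ValueError as exc:
        raise BadFormatException(f'{key} not parseable as int') from exc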
 
 
-    def test_InputsParser_get_float(self) -> None:
-        """Test InputsParser.get_float on strict and non-strict."""
-        for strictness in (False, True):
-            with self.assertRaises(BadFormatException):
-                InputsParser({}, strictness).get_float('foo')
-            with self.assertRaises(BadFormatException):
-                InputsParser({'foo': []}, strictness).get_float('foo')
-            with self.assertRaises(BadFormatException):
-                InputsParser({'foo': ['']}, strictness).get_float('foo')
-            with self.assertRaises(BadFormatException):
-                InputsParser({'foo': ['bar']}, strictness).get_float('foo')
-            parser = InputsParser({'foo': ['0']}, strictness)
-            self.assertEqual(0, parser.get_float('foo'))
-            parser = InputsParser({'foo': ['0.1']}, strictness)
-            self.assertEqual(0.1, parser.get_float('foo'))
-            parser = InputsParser({'foo': ['1.23', '456']}, strictness)
-            self.assertEqual(1.23, parser.get_float('foo'))
+    def test_InputsParser_get_float_or_fail(self) -> None:
+        """Test InputsParser.get_float_or_fail."""
+        with self.assertRaises(BadFormatException):
+            InputsParser({}).get_float_or_fail('foo')
+        with self.assertRaises(BadFormatException):
+            InputsParser({'foo': ['']}).get_float_or_fail('foo')
+        with self.assertRaises(BadFormatException):
+            InputsParser({'foo': ['bar']}).get_float_or_fail('foo')
+        parser = InputsParser({'foo': ['0']})
+        self.assertEqual(0, parser.get_float_or_fail('foo'))
+        parser = InputsParser({'foo': ['0.1']})
+        self.assertEqual(0.1, parser.get_float_or_fail('foo'))
+        parser = InputsParser({'foo': ['1.23', '456']})
+        self.assertEqual(1.23, parser.get_float_or_fail('foo'))
+        with self.assertRaises(BadFormatException):
+            InputsParser({}).get_float_or_fail('foo')
+        with self.assertRaises(BadFormatException):
+            InputsParser({'foo': []}).get_float_or_fail('foo')
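
# A minimal sketch of the behavior these tests imply for
# InputsParser.get_float_or_fail (an assumption, not the actual
# implementation): anything absent, empty, or non-numeric raises
# BadFormatException.
from plomtask.exceptions import BadFormatException

def get_float_or_fail_sketch(inputs: dict[str, list[str]],
                             key: str) -> float:
    try:
        return float(inputs[key][0])
    except (KeyError, IndexError, ValueError) as exc:
        raise BadFormatException(f'{key} not parseable as float') from exc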
+
+    def test_InputsParser_get_bool_or_none(self) -> None:
+        """Test InputsParser.get_all_str."""
+        parser = InputsParser({})
+        self.assertEqual(None, parser.get_bool_or_none('foo'))
+        parser = InputsParser({'val': ['foo']})
+        self.assertEqual(None, parser.get_bool_or_none('foo'))
+        parser = InputsParser({'val': ['True']})
+        self.assertEqual(None, parser.get_bool_or_none('foo'))
+        parser = InputsParser({'foo': []})
+        self.assertEqual(None, parser.get_bool_or_none('foo'))
+        parser = InputsParser({'foo': ['None']})
+        self.assertEqual(False, parser.get_bool_or_none('foo'))
+        parser = InputsParser({'foo': ['0']})
+        self.assertEqual(False, parser.get_bool_or_none('foo'))
+        parser = InputsParser({'foo': ['']})
+        self.assertEqual(False, parser.get_bool_or_none('foo'))
+        parser = InputsParser({'foo': ['bar']})
+        self.assertEqual(False, parser.get_bool_or_none('foo'))
+        parser = InputsParser({'foo': ['bar', 'baz']})
+        self.assertEqual(False, parser.get_bool_or_none('foo'))
+        parser = InputsParser({'foo': ['False']})
+        self.assertEqual(False, parser.get_bool_or_none('foo'))
+        parser = InputsParser({'foo': ['true']})
+        self.assertEqual(True, parser.get_bool_or_none('foo'))
+        parser = InputsParser({'foo': ['True']})
+        self.assertEqual(True, parser.get_bool_or_none('foo'))
+        parser = InputsParser({'foo': ['1']})
+        self.assertEqual(True, parser.get_bool_or_none('foo'))
+        parser = InputsParser({'foo': ['on']})
+        self.assertEqual(True, parser.get_bool_or_none('foo'))
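
# A minimal sketch of the behavior these tests imply for
# InputsParser.get_bool_or_none (an assumption, not the actual
# implementation): absent keys and empty value lists yield None; only the
# usual HTML-form truth spellings count as True, everything else as False.
def get_bool_or_none_sketch(inputs: dict[str, list[str]],
                            key: str) -> bool | None:
    if key not in inputs or not inputs[key]:
        return None
    return inputs[key][0].lower() in ('true', '1', 'on')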
 
     def test_InputsParser_get_all_str(self) -> None:
 
     def test_InputsParser_get_all_str(self) -> None:
-        """Test InputsParser.get_all_str on strict and non-strict."""
-        for strictness in (False, True):
-            parser = InputsParser({}, strictness)
-            self.assertEqual([], parser.get_all_str('foo'))
-            parser = InputsParser({'foo': []}, strictness)
-            self.assertEqual([], parser.get_all_str('foo'))
-            parser = InputsParser({'foo': ['bar']}, strictness)
-            self.assertEqual(['bar'], parser.get_all_str('foo'))
-            parser = InputsParser({'foo': ['bar', 'baz']}, strictness)
-            self.assertEqual(['bar', 'baz'], parser.get_all_str('foo'))
+        """Test InputsParser.get_all_str."""
+        parser = InputsParser({})
+        self.assertEqual([], parser.get_all_str('foo'))
+        parser = InputsParser({'foo': []})
+        self.assertEqual([], parser.get_all_str('foo'))
+        parser = InputsParser({'foo': ['bar']})
+        self.assertEqual(['bar'], parser.get_all_str('foo'))
+        parser = InputsParser({'foo': ['bar', 'baz']})
+        self.assertEqual(['bar', 'baz'], parser.get_all_str('foo'))
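
# A minimal sketch of the behavior these tests imply for
# InputsParser.get_all_str (an assumption, not the actual implementation):
# all values stored under the key, or an empty list.
def get_all_str_sketch(inputs: dict[str, list[str]], key: str) -> list[str]:
    return inputs.get(key, [])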
 
     def test_InputsParser_strict_get_all_int(self) -> None:
 
     def test_InputsParser_strict_get_all_int(self) -> None:
-        """Test InputsParser.get_all_int on strict and non-strict."""
-        for strictness in (False, True):
-            parser = InputsParser({}, strictness)
-            self.assertEqual([], parser.get_all_int('foo'))
-            parser = InputsParser({'foo': []}, strictness)
-            self.assertEqual([], parser.get_all_int('foo'))
-            parser = InputsParser({'foo': ['']}, strictness)
-            self.assertEqual([], parser.get_all_int('foo'))
-            parser = InputsParser({'foo': ['0']}, strictness)
-            self.assertEqual([0], parser.get_all_int('foo'))
-            parser = InputsParser({'foo': ['0', '17']}, strictness)
-            self.assertEqual([0, 17], parser.get_all_int('foo'))
-            parser = InputsParser({'foo': ['0.1', '17']}, strictness)
-            with self.assertRaises(BadFormatException):
-                parser.get_all_int('foo')
-            parser = InputsParser({'foo': ['None', '17']}, strictness)
-            with self.assertRaises(BadFormatException):
-                parser.get_all_int('foo')
+        """Test InputsParser.get_all_int."""
+        parser = InputsParser({})
+        self.assertEqual([], parser.get_all_int('foo'))
+        parser = InputsParser({'foo': []})
+        self.assertEqual([], parser.get_all_int('foo'))
+        parser = InputsParser({'foo': ['']})
+        self.assertEqual([], parser.get_all_int('foo'))
+        parser = InputsParser({'foo': ['0']})
+        self.assertEqual([0], parser.get_all_int('foo'))
+        parser = InputsParser({'foo': ['0', '17']})
+        self.assertEqual([0, 17], parser.get_all_int('foo'))
+        parser = InputsParser({'foo': ['0.1', '17']})
+        with self.assertRaises(BadFormatException):
+            parser.get_all_int('foo')
+        parser = InputsParser({'foo': ['None', '17']})
+        with self.assertRaises(BadFormatException):
+            parser.get_all_int('foo')
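
# A minimal sketch of the behavior these tests imply for
# InputsParser.get_all_int (an assumption, not the actual implementation):
# empty strings are skipped, non-integer strings raise BadFormatException.
from plomtask.exceptions import BadFormatException

def get_all_int_sketch(inputs: dict[str, list[str]], key: str) -> list[int]:
    try:
        return [int(value) for value in inputs.get(key, []) if value != '']
    except ValueError as exc:
        raise BadFormatException(f'{key} not parseable as integers') from exc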
 
 
 class TestsWithServer(TestCaseWithServer):
 
 
 class TestsWithServer(TestCaseWithServer):
index 1b20e217d077d826765f5a83c9a2b3250de38ba2..422c28324a9b04c79c5ee96cb50ec1032f7ead96 100644 (file)
@@ -1,29 +1,27 @@
 """Test Processes module."""
 from typing import Any
 """Test Processes module."""
 from typing import Any
-from tests.utils import TestCaseWithDB, TestCaseWithServer, TestCaseSansDB
-from plomtask.processes import Process, ProcessStep, ProcessStepsNode
+from tests.utils import (TestCaseSansDB, TestCaseWithDB, TestCaseWithServer,
+                         Expected)
+from plomtask.processes import Process, ProcessStep
 from plomtask.conditions import Condition
 from plomtask.conditions import Condition
-from plomtask.exceptions import HandledException, NotFoundException
-from plomtask.todos import Todo
+from plomtask.exceptions import NotFoundException
 
 
 class TestsSansDB(TestCaseSansDB):
     """Module tests not requiring DB setup."""
     checked_class = Process
 
 
 class TestsSansDB(TestCaseSansDB):
     """Module tests not requiring DB setup."""
     checked_class = Process
-    versioned_defaults_to_test = {'title': 'UNNAMED', 'description': '',
-                                  'effort': 1.0}
 
 
 class TestsSansDBProcessStep(TestCaseSansDB):
     """Module tests not requiring DB setup."""
     checked_class = ProcessStep
 
 
 class TestsSansDBProcessStep(TestCaseSansDB):
     """Module tests not requiring DB setup."""
     checked_class = ProcessStep
-    default_init_args = [2, 3, 4]
+    default_init_kwargs = {'owner_id': 2, 'step_process_id': 3,
+                           'parent_step_id': 4}
 
 
 class TestsWithDB(TestCaseWithDB):
     """Module tests requiring DB setup."""
     checked_class = Process
 
 
 class TestsWithDB(TestCaseWithDB):
     """Module tests requiring DB setup."""
     checked_class = Process
-    test_versioneds = {'title': str, 'description': str, 'effort': float}
 
     def three_processes(self) -> tuple[Process, Process, Process]:
         """Return three saved processes."""
 
     def three_processes(self) -> tuple[Process, Process, Process]:
         """Return three saved processes."""
@@ -45,12 +43,10 @@ class TestsWithDB(TestCaseWithDB):
         set_1 = [c1, c2]
         set_2 = [c2, c3]
         set_3 = [c1, c3]
         set_1 = [c1, c2]
         set_2 = [c2, c3]
         set_3 = [c1, c3]
-        p.set_conditions(self.db_conn, [c.id_ for c in set_1
-                                        if isinstance(c.id_, int)])
-        p.set_enables(self.db_conn, [c.id_ for c in set_2
-                                     if isinstance(c.id_, int)])
-        p.set_disables(self.db_conn, [c.id_ for c in set_3
-                                      if isinstance(c.id_, int)])
+        conds = [c.id_ for c in set_1 if isinstance(c.id_, int)]
+        enables = [c.id_ for c in set_2 if isinstance(c.id_, int)]
+        disables = [c.id_ for c in set_3 if isinstance(c.id_, int)]
+        p.set_condition_relations(self.db_conn, conds, [], enables, disables)
         p.save(self.db_conn)
         return p, set_1, set_2, set_3
 
         p.save(self.db_conn)
         return p, set_1, set_2, set_3
 
@@ -76,107 +72,114 @@ class TestsWithDB(TestCaseWithDB):
             self.assertEqual(sorted(r.enables), sorted(set2))
             self.assertEqual(sorted(r.disables), sorted(set3))
 
             self.assertEqual(sorted(r.enables), sorted(set2))
             self.assertEqual(sorted(r.disables), sorted(set3))
 
-    def test_Process_steps(self) -> None:
-        """Test addition, nesting, and non-recursion of ProcessSteps"""
-        # pylint: disable=too-many-locals
-        # pylint: disable=too-many-statements
-        p1, p2, p3 = self.three_processes()
-        assert isinstance(p1.id_, int)
-        assert isinstance(p2.id_, int)
-        assert isinstance(p3.id_, int)
-        steps_p1: list[ProcessStep] = []
-        # add step of process p2 as first (top-level) step to p1
-        s_p2_to_p1 = ProcessStep(None, p1.id_, p2.id_, None)
-        steps_p1 += [s_p2_to_p1]
-        p1.set_steps(self.db_conn, steps_p1)
-        p1_dict: dict[int, ProcessStepsNode] = {}
-        p1_dict[1] = ProcessStepsNode(p2, None, True, {})
-        self.assertEqual(p1.get_steps(self.db_conn, None), p1_dict)
-        # add step of process p3 as second (top-level) step to p1
-        s_p3_to_p1 = ProcessStep(None, p1.id_, p3.id_, None)
-        steps_p1 += [s_p3_to_p1]
-        p1.set_steps(self.db_conn, steps_p1)
-        p1_dict[2] = ProcessStepsNode(p3, None, True, {})
-        self.assertEqual(p1.get_steps(self.db_conn, None), p1_dict)
-        # add step of process p3 as first (top-level) step to p2,
-        steps_p2: list[ProcessStep] = []
-        s_p3_to_p2 = ProcessStep(None, p2.id_, p3.id_, None)
-        steps_p2 += [s_p3_to_p2]
-        p2.set_steps(self.db_conn, steps_p2)
-        # expect it as implicit sub-step of p1's second (p3) step
-        p2_dict = {3: ProcessStepsNode(p3, None, False, {})}
-        p1_dict[1].steps[3] = p2_dict[3]
-        self.assertEqual(p1.get_steps(self.db_conn, None), p1_dict)
-        # add step of process p2 as explicit sub-step to p1's second sub-step
-        s_p2_to_p1_first = ProcessStep(None, p1.id_, p2.id_, s_p3_to_p1.id_)
-        steps_p1 += [s_p2_to_p1_first]
-        p1.set_steps(self.db_conn, steps_p1)
-        seen_3 = ProcessStepsNode(p3, None, False, {}, False)
-        p1_dict[1].steps[3].seen = True
-        p1_dict[2].steps[4] = ProcessStepsNode(p2, s_p3_to_p1.id_, True,
-                                               {3: seen_3})
-        self.assertEqual(p1.get_steps(self.db_conn, None), p1_dict)
-        # add step of process p3 as explicit sub-step to non-existing p1
-        # sub-step (of id=999), expect it to become another p1 top-level step
-        s_p3_to_p1_999 = ProcessStep(None, p1.id_, p3.id_, 999)
-        steps_p1 += [s_p3_to_p1_999]
-        p1.set_steps(self.db_conn, steps_p1)
-        p1_dict[5] = ProcessStepsNode(p3, None, True, {})
-        self.assertEqual(p1.get_steps(self.db_conn, None), p1_dict)
-        # add step of process p3 as explicit sub-step to p1's implicit p3
-        # sub-step, expect it to become another p1 top-level step
-        s_p3_to_p1_impl_p3 = ProcessStep(None, p1.id_, p3.id_, s_p3_to_p2.id_)
-        steps_p1 += [s_p3_to_p1_impl_p3]
-        p1.set_steps(self.db_conn, steps_p1)
-        p1_dict[6] = ProcessStepsNode(p3, None, True, {})
-        self.assertEqual(p1.get_steps(self.db_conn, None), p1_dict)
-        self.assertEqual(p1.used_as_step_by(self.db_conn), [])
-        self.assertEqual(p2.used_as_step_by(self.db_conn), [p1])
-        self.assertEqual(p3.used_as_step_by(self.db_conn), [p1, p2])
-        # # add step of process p3 as explicit sub-step to p1's first sub-step,
-        # # expect it to eliminate implicit p3 sub-step
-        # s_p3_to_p1_first_explicit = ProcessStep(None, p1.id_, p3.id_,
-        #                                         s_p2_to_p1.id_)
-        # p1_dict[1].steps = {7: ProcessStepsNode(p3, 1, True, {})}
-        # p1_dict[2].steps[4].steps[3].seen = False
-        # steps_p1 += [s_p3_to_p1_first_explicit]
-        # p1.set_steps(self.db_conn, steps_p1)
-        # self.assertEqual(p1.get_steps(self.db_conn, None), p1_dict)
-        # ensure implicit steps non-top explicit steps are shown
-        s_p3_to_p2_first = ProcessStep(None, p2.id_, p3.id_, s_p3_to_p2.id_)
-        steps_p2 += [s_p3_to_p2_first]
-        p2.set_steps(self.db_conn, steps_p2)
-        p1_dict[1].steps[3].steps[7] = ProcessStepsNode(p3, 3, False, {}, True)
-        p1_dict[2].steps[4].steps[3].steps[7] = ProcessStepsNode(p3, 3, False,
-                                                                 {}, False)
-        self.assertEqual(p1.get_steps(self.db_conn, None), p1_dict)
-        # ensure suppressed step nodes are hidden
-        assert isinstance(s_p3_to_p2.id_, int)
-        p1.set_step_suppressions(self.db_conn, [s_p3_to_p2.id_])
-        p1_dict[1].steps[3].steps = {}
-        p1_dict[1].steps[3].is_suppressed = True
-        p1_dict[2].steps[4].steps[3].steps = {}
-        p1_dict[2].steps[4].steps[3].is_suppressed = True
-        self.assertEqual(p1.get_steps(self.db_conn), p1_dict)
+    # def test_Process_steps(self) -> None:
+    #     """Test addition, nesting, and non-recursion of ProcessSteps"""
+    #     # pylint: disable=too-many-locals
+    #     # pylint: disable=too-many-statements
+    #     p1, p2, p3 = self.three_processes()
+    #     assert isinstance(p1.id_, int)
+    #     assert isinstance(p2.id_, int)
+    #     assert isinstance(p3.id_, int)
+    #     steps_p1: list[ProcessStep] = []
+    #     # add step of process p2 as first (top-level) step to p1
+    #     s_p2_to_p1 = ProcessStep(None, p1.id_, p2.id_, None)
+    #     steps_p1 += [s_p2_to_p1]
+    #     p1.set_steps(self.db_conn, steps_p1)
+    #     p1_dict: dict[int, ProcessStepsNode] = {}
+    #     p1_dict[1] = ProcessStepsNode(p2, None, True, {})
+    #     self.assertEqual(p1.get_steps(self.db_conn, None), p1_dict)
+    #     # add step of process p3 as second (top-level) step to p1
+    #     s_p3_to_p1 = ProcessStep(None, p1.id_, p3.id_, None)
+    #     steps_p1 += [s_p3_to_p1]
+    #     p1.set_steps(self.db_conn, steps_p1)
+    #     p1_dict[2] = ProcessStepsNode(p3, None, True, {})
+    #     self.assertEqual(p1.get_steps(self.db_conn, None), p1_dict)
+    #     # add step of process p3 as first (top-level) step to p2,
+    #     steps_p2: list[ProcessStep] = []
+    #     s_p3_to_p2 = ProcessStep(None, p2.id_, p3.id_, None)
+    #     steps_p2 += [s_p3_to_p2]
+    #     p2.set_steps(self.db_conn, steps_p2)
+    #     # expect it as implicit sub-step of p1's second (p3) step
+    #     p2_dict = {3: ProcessStepsNode(p3, None, False, {})}
+    #     p1_dict[1].steps[3] = p2_dict[3]
+    #     self.assertEqual(p1.get_steps(self.db_conn, None), p1_dict)
+    #     # add step of process p2 as explicit sub-step to p1's second sub-step
+    #     s_p2_to_p1_first = ProcessStep(None, p1.id_, p2.id_, s_p3_to_p1.id_)
+    #     steps_p1 += [s_p2_to_p1_first]
+    #     p1.set_steps(self.db_conn, steps_p1)
+    #     seen_3 = ProcessStepsNode(p3, None, False, {}, False)
+    #     p1_dict[1].steps[3].seen = True
+    #     p1_dict[2].steps[4] = ProcessStepsNode(p2, s_p3_to_p1.id_, True,
+    #                                            {3: seen_3})
+    #     self.assertEqual(p1.get_steps(self.db_conn, None), p1_dict)
+    #     # add step of process p3 as explicit sub-step to non-existing p1
+    #     # sub-step (of id=999), expect it to become another p1 top-level step
+    #     s_p3_to_p1_999 = ProcessStep(None, p1.id_, p3.id_, 999)
+    #     steps_p1 += [s_p3_to_p1_999]
+    #     p1.set_steps(self.db_conn, steps_p1)
+    #     p1_dict[5] = ProcessStepsNode(p3, None, True, {})
+    #     self.assertEqual(p1.get_steps(self.db_conn, None), p1_dict)
+    #     # add step of process p3 as explicit sub-step to p1's implicit p3
+    #     # sub-step, expect it to become another p1 top-level step
+    #     s_p3_to_p1_impl_p3 = ProcessStep(None, p1.id_, p3.id_,
+    #                                      s_p3_to_p2.id_)
+    #     steps_p1 += [s_p3_to_p1_impl_p3]
+    #     p1.set_steps(self.db_conn, steps_p1)
+    #     p1_dict[6] = ProcessStepsNode(p3, None, True, {})
+    #     self.assertEqual(p1.get_steps(self.db_conn, None), p1_dict)
+    #     self.assertEqual(p1.used_as_step_by(self.db_conn), [])
+    #     self.assertEqual(p2.used_as_step_by(self.db_conn), [p1])
+    #     self.assertEqual(p3.used_as_step_by(self.db_conn), [p1, p2])
+    #     # # add step of process p3 as explicit sub-step to p1's first
+    #     # # sub-step, expect it to eliminate implicit p3 sub-step
+    #     # s_p3_to_p1_first_explicit = ProcessStep(None, p1.id_, p3.id_,
+    #     #                                         s_p2_to_p1.id_)
+    #     # p1_dict[1].steps = {7: ProcessStepsNode(p3, 1, True, {})}
+    #     # p1_dict[2].steps[4].steps[3].seen = False
+    #     # steps_p1 += [s_p3_to_p1_first_explicit]
+    #     # p1.set_steps(self.db_conn, steps_p1)
+    #     # self.assertEqual(p1.get_steps(self.db_conn, None), p1_dict)
+    #     # ensure implicit steps of non-top explicit steps are shown
+    #     s_p3_to_p2_first = ProcessStep(None, p2.id_, p3.id_, s_p3_to_p2.id_)
+    #     steps_p2 += [s_p3_to_p2_first]
+    #     p2.set_steps(self.db_conn, steps_p2)
+    #     p1_dict[1].steps[3].steps[7] = ProcessStepsNode(p3, 3, False, {},
+    #                                                     True)
+    #     p1_dict[2].steps[4].steps[3].steps[7] = ProcessStepsNode(
+    #             p3, 3, False, {}, False)
+    #     self.assertEqual(p1.get_steps(self.db_conn, None), p1_dict)
+    #     # ensure suppressed step nodes are hidden
+    #     assert isinstance(s_p3_to_p2.id_, int)
+    #     p1.set_step_suppressions(self.db_conn, [s_p3_to_p2.id_])
+    #     p1_dict[1].steps[3].steps = {}
+    #     p1_dict[1].steps[3].is_suppressed = True
+    #     p1_dict[2].steps[4].steps[3].steps = {}
+    #     p1_dict[2].steps[4].steps[3].is_suppressed = True
+    #     self.assertEqual(p1.get_steps(self.db_conn), p1_dict)
 
     def test_Process_conditions(self) -> None:
         """Test setting Process.conditions/enables/disables."""
         p = Process(None)
         p.save(self.db_conn)
 
     def test_Process_conditions(self) -> None:
         """Test setting Process.conditions/enables/disables."""
         p = Process(None)
         p.save(self.db_conn)
-        for target in ('conditions', 'enables', 'disables'):
-            method = getattr(p, f'set_{target}')
+        targets = ['conditions', 'blockers', 'enables', 'disables']
+        for i, target in enumerate(targets):
             c1, c2 = Condition(None), Condition(None)
             c1.save(self.db_conn)
             c2.save(self.db_conn)
             assert isinstance(c1.id_, int)
             assert isinstance(c2.id_, int)
             c1, c2 = Condition(None), Condition(None)
             c1.save(self.db_conn)
             c2.save(self.db_conn)
             assert isinstance(c1.id_, int)
             assert isinstance(c2.id_, int)
-            method(self.db_conn, [])
+            args: list[list[int]] = [[], [], [], []]
+            args[i] = []
+            p.set_condition_relations(self.db_conn, *args)
             self.assertEqual(getattr(p, target), [])
             self.assertEqual(getattr(p, target), [])
-            method(self.db_conn, [c1.id_])
+            args[i] = [c1.id_]
+            p.set_condition_relations(self.db_conn, *args)
             self.assertEqual(getattr(p, target), [c1])
             self.assertEqual(getattr(p, target), [c1])
-            method(self.db_conn, [c2.id_])
+            args[i] = [c2.id_]
+            p.set_condition_relations(self.db_conn, *args)
             self.assertEqual(getattr(p, target), [c2])
             self.assertEqual(getattr(p, target), [c2])
-            method(self.db_conn, [c1.id_, c2.id_])
+            args[i] = [c1.id_, c2.id_]
+            p.set_condition_relations(self.db_conn, *args)
             self.assertEqual(getattr(p, target), [c1, c2])
 
     def test_remove(self) -> None:
             self.assertEqual(getattr(p, target), [c1, c2])
 
     def test_remove(self) -> None:
@@ -189,26 +192,21 @@ class TestsWithDB(TestCaseWithDB):
         step = ProcessStep(None, p2.id_, p1.id_, None)
         p2.set_steps(self.db_conn, [step])
         step_id = step.id_
         step = ProcessStep(None, p2.id_, p1.id_, None)
         p2.set_steps(self.db_conn, [step])
         step_id = step.id_
-        with self.assertRaises(HandledException):
-            p1.remove(self.db_conn)
         p2.set_steps(self.db_conn, [])
         with self.assertRaises(NotFoundException):
         p2.set_steps(self.db_conn, [])
         with self.assertRaises(NotFoundException):
+            # check unset ProcessSteps actually cannot be found anymore
             assert step_id is not None
             ProcessStep.by_id(self.db_conn, step_id)
         p1.remove(self.db_conn)
         step = ProcessStep(None, p2.id_, p3.id_, None)
         p2.set_steps(self.db_conn, [step])
         step_id = step.id_
             assert step_id is not None
             ProcessStep.by_id(self.db_conn, step_id)
         p1.remove(self.db_conn)
         step = ProcessStep(None, p2.id_, p3.id_, None)
         p2.set_steps(self.db_conn, [step])
         step_id = step.id_
+        # check _can_ remove Process pointed to by ProcessStep.owner_id, and …
         p2.remove(self.db_conn)
         with self.assertRaises(NotFoundException):
         p2.remove(self.db_conn)
         with self.assertRaises(NotFoundException):
+            # … being dis-owned eliminates ProcessStep
             assert step_id is not None
             ProcessStep.by_id(self.db_conn, step_id)
             assert step_id is not None
             ProcessStep.by_id(self.db_conn, step_id)
-        todo = Todo(None, p3, False, '2024-01-01')
-        todo.save(self.db_conn)
-        with self.assertRaises(HandledException):
-            p3.remove(self.db_conn)
-        todo.remove(self.db_conn)
-        p3.remove(self.db_conn)
 
 
 class TestsWithDBForProcessStep(TestCaseWithDB):
 
 
 class TestsWithDBForProcessStep(TestCaseWithDB):
@@ -235,155 +233,176 @@ class TestsWithDBForProcessStep(TestCaseWithDB):
         self.check_identity_with_cache_and_db([])
 
 
         self.check_identity_with_cache_and_db([])
 
 
+class ExpectedGetProcess(Expected):
+    """Builder of expectations for GET /processes."""
+    _default_dict = {'is_new': False, 'preset_top_step': None, 'n_todos': 0}
+    _on_empty_make_temp = ('Process', 'proc_as_dict')
+
+    def __init__(self,
+                 proc_id: int,
+                 *args: Any, **kwargs: Any) -> None:
+        self._fields = {'process': proc_id, 'steps': []}
+        super().__init__(*args, **kwargs)
+
+    @staticmethod
+    def stepnode_as_dict(step_id: int,
+                         proc_id: int,
+                         seen: bool = False,
+                         steps: None | list[dict[str, object]] = None,
+                         is_explicit: bool = True,
+                         is_suppressed: bool = False) -> dict[str, object]:
+        # pylint: disable=too-many-arguments
+        """Return JSON of ProcessStepNode to expect."""
+        return {'step': step_id,
+                'process': proc_id,
+                'seen': seen,
+                'steps': steps if steps else [],
+                'is_explicit': is_explicit,
+                'is_suppressed': is_suppressed}
+
+    def recalc(self) -> None:
+        """Update internal dictionary by subclass-specific rules."""
+        super().recalc()
+        self._fields['process_candidates'] = self.as_ids(
+                self.lib_all('Process'))
+        self._fields['condition_candidates'] = self.as_ids(
+                self.lib_all('Condition'))
+        self._fields['owners'] = [
+                s['owner_id'] for s in self.lib_all('ProcessStep')
+                if s['step_process_id'] == self._fields['process']]
+
+
+class ExpectedGetProcesses(Expected):
+    """Builder of expectations for GET /processes."""
+    _default_dict = {'sort_by': 'title', 'pattern': ''}
+
+    def recalc(self) -> None:
+        """Update internal dictionary by subclass-specific rules."""
+        super().recalc()
+        self._fields['processes'] = self.as_ids(self.lib_all('Process'))
+
+
 class TestsWithServer(TestCaseWithServer):
     """Module tests against our HTTP server/handler (and database)."""
 
 class TestsWithServer(TestCaseWithServer):
     """Module tests against our HTTP server/handler (and database)."""
 
-    def test_do_POST_process(self) -> None:
+    def _post_process(self, id_: int = 1,
+                      form_data: dict[str, Any] | None = None
+                      ) -> dict[str, Any]:
+        """POST basic Process."""
+        if not form_data:
+            form_data = {'title': 'foo', 'description': 'foo', 'effort': 1.1}
+        self.check_post(form_data, f'/process?id={id_}',
+                        redir=f'/process?id={id_}')
+        return form_data
+
+    def test_fail_POST_process(self) -> None:
         """Test POST /process and its effect on the database."""
         """Test POST /process and its effect on the database."""
-        self.assertEqual(0, len(Process.all(self.db_conn)))
-        form_data = self.post_process()
-        self.assertEqual(1, len(Process.all(self.db_conn)))
-        self.check_post(form_data, '/process?id=FOO', 400)
-        self.check_post(form_data | {'effort': 'foo'}, '/process?id=', 400)
-        self.check_post({}, '/process?id=', 400)
-        self.check_post({'title': '', 'description': ''}, '/process?id=', 400)
-        self.check_post({'title': '', 'effort': 1.1}, '/process?id=', 400)
-        self.check_post({'description': '', 'effort': 1.0},
-                        '/process?id=', 400)
-        self.assertEqual(1, len(Process.all(self.db_conn)))
-        form_data = {'title': 'foo', 'description': 'foo', 'effort': 1.0}
-        self.post_process(2, form_data | {'conditions': []})
-        self.check_post(form_data | {'conditions': [1]}, '/process?id=', 404)
-        self.check_post({'title': 'foo', 'description': 'foo',
-                         'is_active': False},
-                        '/condition', 302, '/condition?id=1')
-        self.post_process(3, form_data | {'conditions': [1]})
-        self.post_process(4, form_data | {'disables': [1]})
-        self.post_process(5, form_data | {'enables': [1]})
-        form_data['delete'] = ''
-        self.check_post(form_data, '/process?id=', 404)
-        self.check_post(form_data, '/process?id=6', 404)
-        self.check_post(form_data, '/process?id=5', 302, '/processes')
-
-    def test_do_POST_process_steps(self) -> None:
+        valid_post = {'title': '', 'description': '', 'effort': 1.0}
+        # check payloads lacking minimum expecteds
+        self.check_post({}, '/process', 400)
+        self.check_post({'title': '', 'description': ''}, '/process', 400)
+        self.check_post({'title': '', 'effort': 1}, '/process', 400)
+        self.check_post({'description': '', 'effort': 1}, '/process', 400)
+        # check payloads of bad data types
+        self.check_post(valid_post | {'effort': ''}, '/process', 400)
+        # check references to non-existent items
+        self.check_post(valid_post | {'conditions': [1]}, '/process', 404)
+        self.check_post(valid_post | {'disables': [1]}, '/process', 404)
+        self.check_post(valid_post | {'blockers': [1]}, '/process', 404)
+        self.check_post(valid_post | {'enables': [1]}, '/process', 404)
+        self.check_post(valid_post | {'new_top_step': 2}, '/process', 404)
+        # check deletion of non-existent Process
+        self.check_post({'delete': ''}, '/process?id=1', 404)
+
+    def test_basic_POST_process(self) -> None:
+        """Test basic GET/POST /process operations."""
+        # check on un-saved
+        exp = ExpectedGetProcess(1)
+        exp.force('process_candidates', [])
+        self.check_json_get('/process?id=1', exp)
+        # check on minimal payload post
+        valid_post = {'title': 'foo', 'description': 'oof', 'effort': 2.3}
+        exp.unforce('process_candidates')
+        self.post_exp_process([exp], valid_post, 1)
+        self.check_json_get('/process?id=1', exp)
+        # check n_todos field
+        self.post_exp_day([], {'new_todo': ['1']}, '2024-01-01')
+        self.post_exp_day([], {'new_todo': ['1']}, '2024-01-02')
+        exp.set('n_todos', 2)
+        self.check_json_get('/process?id=1', exp)
+        # check cannot delete if Todos reference the Process
+        self.check_post({'delete': ''}, '/process?id=1', 500)
+        # check cannot delete if referenced by ProcessStep.step_process_id
+        self.post_exp_process([exp], valid_post, 2)
+        self.post_exp_process([exp], valid_post | {'new_top_step': 2}, 3)
+        self.check_post({'delete': ''}, '/process?id=2', 500)
+        # check successful deletion
+        self.post_exp_process([exp], valid_post, 4)
+        self.check_post({'delete': ''}, '/process?id=4', 302, '/processes')
+        exp = ExpectedGetProcess(4)
+        exp.set_proc_from_post(1, valid_post)
+        exp.set_proc_from_post(2, valid_post)
+        exp.set_proc_from_post(3, valid_post)
+        exp.lib_set('ProcessStep', [exp.procstep_as_dict(1, 3, 2)])
+        exp.force('process_candidates', [1, 2, 3])
+        self.check_json_get('/process?id=4', exp)
+
+    def test_POST_process_steps(self) -> None:
         """Test behavior of ProcessStep posting."""
         # pylint: disable=too-many-statements
         """Test behavior of ProcessStep posting."""
         # pylint: disable=too-many-statements
-        form_data_1 = self.post_process(1)
-        self.post_process(2)
-        self.post_process(3)
-        # post first (top-level) step of process 2 to process 1
-        form_data_1['new_top_step'] = [2]
-        self.post_process(1, form_data_1)
-        retrieved_process = Process.by_id(self.db_conn, 1)
-        self.assertEqual(len(retrieved_process.explicit_steps), 1)
-        retrieved_step = retrieved_process.explicit_steps[0]
-        retrieved_step_id = retrieved_step.id_
-        self.assertEqual(retrieved_step.step_process_id, 2)
-        self.assertEqual(retrieved_step.owner_id, 1)
-        self.assertEqual(retrieved_step.parent_step_id, None)
-        # post empty steps list to process, expect clean slate, and old step to
-        # completely disappear
-        form_data_1['new_top_step'] = []
-        self.post_process(1, form_data_1)
-        retrieved_process = Process.by_id(self.db_conn, 1)
-        self.assertEqual(retrieved_process.explicit_steps, [])
-        assert retrieved_step_id is not None
-        with self.assertRaises(NotFoundException):
-            ProcessStep.by_id(self.db_conn, retrieved_step_id)
-        # post new first (top_level) step of process 3 to process 1
-        form_data_1['new_top_step'] = [3]
-        self.post_process(1, form_data_1)
-        retrieved_process = Process.by_id(self.db_conn, 1)
-        retrieved_step = retrieved_process.explicit_steps[0]
-        self.assertEqual(retrieved_step.step_process_id, 3)
-        self.assertEqual(retrieved_step.owner_id, 1)
-        self.assertEqual(retrieved_step.parent_step_id, None)
-        # post to process steps list without keeps, expect clean slate
-        form_data_1['new_top_step'] = []
-        form_data_1['steps'] = [retrieved_step.id_]
-        self.post_process(1, form_data_1)
-        retrieved_process = Process.by_id(self.db_conn, 1)
-        self.assertEqual(retrieved_process.explicit_steps, [])
-        # post to process empty steps list but keep, expect 400
-        form_data_1['steps'] = []
-        form_data_1['keep_step'] = [retrieved_step_id]
-        self.check_post(form_data_1, '/process?id=1', 400, '/process?id=1')
-        # post to process steps list with keep on non-created step, expect 400
-        form_data_1['steps'] = [retrieved_step_id]
-        form_data_1['keep_step'] = [retrieved_step_id]
-        self.check_post(form_data_1, '/process?id=1', 400, '/process?id=1')
-        # post to process steps list with keep and process ID, expect 200
-        form_data_1[f'step_{retrieved_step_id}_process_id'] = [2]
-        self.post_process(1, form_data_1)
-        retrieved_process = Process.by_id(self.db_conn, 1)
-        self.assertEqual(len(retrieved_process.explicit_steps), 1)
-        retrieved_step = retrieved_process.explicit_steps[0]
-        self.assertEqual(retrieved_step.step_process_id, 2)
-        self.assertEqual(retrieved_step.owner_id, 1)
-        self.assertEqual(retrieved_step.parent_step_id, None)
-        # post nonsense, expect 400 and preservation of previous state
-        form_data_1['steps'] = ['foo']
-        form_data_1['keep_step'] = []
-        self.check_post(form_data_1, '/process?id=1', 400, '/process?id=1')
-        retrieved_process = Process.by_id(self.db_conn, 1)
-        self.assertEqual(len(retrieved_process.explicit_steps), 1)
-        retrieved_step = retrieved_process.explicit_steps[0]
-        self.assertEqual(retrieved_step.step_process_id, 2)
-        self.assertEqual(retrieved_step.owner_id, 1)
-        self.assertEqual(retrieved_step.parent_step_id, None)
-        # post to process steps list with keep and process ID, expect 200
-        form_data_1['new_top_step'] = [3]
-        form_data_1['steps'] = [retrieved_step.id_]
-        form_data_1['keep_step'] = [retrieved_step.id_]
-        self.post_process(1, form_data_1)
-        retrieved_process = Process.by_id(self.db_conn, 1)
-        self.assertEqual(len(retrieved_process.explicit_steps), 2)
-        retrieved_step_0 = retrieved_process.explicit_steps[1]
-        self.assertEqual(retrieved_step_0.step_process_id, 3)
-        self.assertEqual(retrieved_step_0.owner_id, 1)
-        self.assertEqual(retrieved_step_0.parent_step_id, None)
-        retrieved_step_1 = retrieved_process.explicit_steps[0]
-        self.assertEqual(retrieved_step_1.step_process_id, 2)
-        self.assertEqual(retrieved_step_1.owner_id, 1)
-        self.assertEqual(retrieved_step_1.parent_step_id, None)
-        # post to process steps list with keeps etc., but trigger recursion
-        form_data_1['new_top_step'] = []
-        form_data_1['steps'] = [retrieved_step_0.id_, retrieved_step_1.id_]
-        form_data_1['keep_step'] = [retrieved_step_0.id_, retrieved_step_1.id_]
-        form_data_1[f'step_{retrieved_step_0.id_}_process_id'] = [2]
-        form_data_1[f'step_{retrieved_step_1.id_}_process_id'] = [1]
-        self.check_post(form_data_1, '/process?id=1', 400, '/process?id=1')
-        # check previous status preserved despite failed steps setting
-        retrieved_process = Process.by_id(self.db_conn, 1)
-        self.assertEqual(len(retrieved_process.explicit_steps), 2)
-        retrieved_step_0 = retrieved_process.explicit_steps[0]
-        self.assertEqual(retrieved_step_0.step_process_id, 2)
-        self.assertEqual(retrieved_step_0.owner_id, 1)
-        self.assertEqual(retrieved_step_0.parent_step_id, None)
-        retrieved_step_1 = retrieved_process.explicit_steps[1]
-        self.assertEqual(retrieved_step_1.step_process_id, 3)
-        self.assertEqual(retrieved_step_1.owner_id, 1)
-        self.assertEqual(retrieved_step_1.parent_step_id, None)
-        # post sub-step to step
-        form_data_1[f'step_{retrieved_step_0.id_}_process_id'] = [3]
-        form_data_1[f'new_step_to_{retrieved_step_0.id_}'] = [3]
-        self.post_process(1, form_data_1)
-        retrieved_process = Process.by_id(self.db_conn, 1)
-        self.assertEqual(len(retrieved_process.explicit_steps), 3)
-        retrieved_step_0 = retrieved_process.explicit_steps[1]
-        self.assertEqual(retrieved_step_0.step_process_id, 2)
-        self.assertEqual(retrieved_step_0.owner_id, 1)
-        self.assertEqual(retrieved_step_0.parent_step_id, None)
-        retrieved_step_1 = retrieved_process.explicit_steps[0]
-        self.assertEqual(retrieved_step_1.step_process_id, 3)
-        self.assertEqual(retrieved_step_1.owner_id, 1)
-        self.assertEqual(retrieved_step_1.parent_step_id, None)
-        retrieved_step_2 = retrieved_process.explicit_steps[2]
-        self.assertEqual(retrieved_step_2.step_process_id, 3)
-        self.assertEqual(retrieved_step_2.owner_id, 1)
-        self.assertEqual(retrieved_step_2.parent_step_id, retrieved_step_1.id_)
-
-    def test_do_GET(self) -> None:
+        url = '/process?id=1'
+        exp = ExpectedGetProcess(1)
+        self.post_exp_process([exp], {}, 1)
+        # post first (top-level) step of proc 2 to proc 1 by 'step_of' in 2
+        self.post_exp_process([exp], {'step_of': 1}, 2)
+        exp.lib_set('ProcessStep', [exp.procstep_as_dict(1, 1, 2)])
+        exp.set('steps', [exp.stepnode_as_dict(1, 2)])
+        self.check_json_get(url, exp)
+        # post empty/absent steps list to process, expect clean slate, and old
+        # step to completely disappear
+        self.post_exp_process([exp], {}, 1)
+        exp.lib_wipe('ProcessStep')
+        exp.set('steps', [])
+        self.check_json_get(url, exp)
+        # post new step of proc2 to proc1 by 'new_top_step'
+        self.post_exp_process([exp], {'new_top_step': 2}, 1)
+        exp.lib_set('ProcessStep', [exp.procstep_as_dict(1, 1, 2)])
+        self.post_exp_process([exp], {'kept_steps': [1]}, 1)
+        exp.set('steps', [exp.stepnode_as_dict(1, 2)])
+        self.check_json_get(url, exp)
+        # fail on single- and multi-step recursion
+        p_min = {'title': '', 'description': '', 'effort': 0}
+        self.check_post(p_min | {'new_top_step': 1}, url, 400)
+        self.check_post(p_min | {'step_of': 1}, url, 400)
+        self.post_exp_process([exp], {'new_top_step': 1}, 2)
+        self.check_post(p_min | {'step_of': 2, 'new_top_step': 2}, url, 400)
+        self.post_exp_process([exp], {}, 3)
+        self.post_exp_process([exp], {'step_of': 3}, 4)
+        self.check_post(p_min | {'new_top_step': 3, 'step_of': 4}, url, 400)
+        # post sibling steps
+        self.post_exp_process([exp], {}, 4)
+        self.post_exp_process([exp], {'new_top_step': 4}, 1)
+        self.post_exp_process([exp], {'kept_steps': [1], 'new_top_step': 4}, 1)
+        exp.lib_set('ProcessStep', [exp.procstep_as_dict(1, 1, 4),
+                                    exp.procstep_as_dict(2, 1, 4)])
+        exp.set('steps', [exp.stepnode_as_dict(1, 4),
+                          exp.stepnode_as_dict(2, 4)])
+        self.check_json_get(url, exp)
+        # post sub-step chain
+        p = {'kept_steps': [1, 2], 'new_step_to_2': 4}
+        self.post_exp_process([exp], p, 1)
+        exp.lib_set('ProcessStep', [exp.procstep_as_dict(3, 1, 4, 2)])
+        exp.set('steps', [exp.stepnode_as_dict(1, 4),
+                          exp.stepnode_as_dict(2, 4, steps=[
+                              exp.stepnode_as_dict(3, 4)])])
+        self.check_json_get(url, exp)
+        # fail posting sub-step that would cause recursion
+        self.post_exp_process([exp], {}, 6)
+        self.post_exp_process([exp], {'new_top_step': 6}, 5)
+        p = p_min | {'kept_steps': [1, 2, 3], 'new_step_to_2': 5, 'step_of': 6}
+        self.check_post(p, url, 400)
+
+    def test_GET(self) -> None:
         """Test /process and /processes response codes."""
         self.check_get('/process', 200)
         self.check_get('/process?id=', 200)
         """Test /process and /processes response codes."""
         self.check_get('/process', 200)
         self.check_get('/process?id=', 200)
@@ -402,109 +421,50 @@ class TestsWithServer(TestCaseWithServer):
         # of ID=1 here so we know the 404 comes from step_to=2 etc. (that tie
         # the Process displayed by /process to others), not from not finding
         # the main Process itself
         # of ID=1 here so we know the 404 comes from step_to=2 etc. (that tie
         # the Process displayed by /process to others), not from not finding
         # the main Process itself
-        self.post_process(1)
+        self.post_exp_process([], {}, 1)
         self.check_get('/process?id=1&step_to=2', 404)
         self.check_get('/process?id=1&has_step=2', 404)
 
         self.check_get('/process?id=1&step_to=2', 404)
         self.check_get('/process?id=1&has_step=2', 404)
 
-    @classmethod
-    def GET_processes_dict(cls, procs: list[dict[str, object]]
-                           ) -> dict[str, object]:
-        """Return JSON of GET /processes to expect."""
-        library = {'Process': cls.as_refs(procs)} if procs else {}
-        d: dict[str, object] = {'processes': cls.as_id_list(procs),
-                                'sort_by': 'title',
-                                'pattern': '',
-                                '_library': library}
-        return d
-
-    @staticmethod
-    def procstep_as_dict(id_: int,
-                         owner_id: int,
-                         step_process_id: int,
-                         parent_step_id: int | None = None
-                         ) -> dict[str, object]:
-        """Return JSON of Process to expect."""
-        return {'id': id_,
-                'owner_id': owner_id,
-                'step_process_id': step_process_id,
-                'parent_step_id': parent_step_id}
-
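
# The exp.procstep_as_dict calls below presumably keep the argument order of
# the helper removed above (id_, owner_id, step_process_id, parent_step_id),
# now provided by the shared Expected machinery in tests.utils; as a
# reference, a sketch with the same dict shape (an assumption, not the
# tests.utils implementation):
def procstep_as_dict_sketch(id_: int, owner_id: int, step_process_id: int,
                            parent_step_id: int | None = None
                            ) -> dict[str, object]:
    return {'id': id_,
            'owner_id': owner_id,
            'step_process_id': step_process_id,
            'parent_step_id': parent_step_id}
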
     def test_GET_processes(self) -> None:
         """Test GET /processes."""
         # pylint: disable=too-many-statements
         # test empty result on empty DB, default-settings on empty params
     def test_GET_processes(self) -> None:
         """Test GET /processes."""
         # pylint: disable=too-many-statements
         # test empty result on empty DB, default-settings on empty params
-        expected = self.GET_processes_dict([])
-        self.check_json_get('/processes', expected)
+        exp = ExpectedGetProcesses()
+        self.check_json_get('/processes', exp)
         # test on meaningless non-empty params (incl. entirely un-used key),
         # that 'sort_by' default to 'title' (even if set to something else, as
         # long as without handler) and 'pattern' get preserved
         # test on meaningless non-empty params (incl. entirely un-used key),
         # that 'sort_by' default to 'title' (even if set to something else, as
         # long as without handler) and 'pattern' get preserved
-        expected['pattern'] = 'bar'  # preserved despite zero effect!
+        exp.set('pattern', 'bar')
         url = '/processes?sort_by=foo&pattern=bar&foo=x'
         url = '/processes?sort_by=foo&pattern=bar&foo=x'
-        self.check_json_get(url, expected)
+        self.check_json_get(url, exp)
         # test non-empty result, automatic (positive) sorting by title
         # test non-empty result, automatic (positive) sorting by title
-        post1: dict[str, Any]
-        post2: dict[str, Any]
-        post3: dict[str, Any]
-        post1 = {'title': 'foo', 'description': 'oof', 'effort': 1.0}
-        post2 = {'title': 'bar', 'description': 'rab', 'effort': 1.1}
-        post2['new_top_step'] = 1
-        post3 = {'title': 'baz', 'description': 'zab', 'effort': 0.9}
-        post3['new_top_step'] = 1
-        self.post_process(1, post1)
-        self.post_process(2, post2)
-        self.post_process(3, post3)
-        post3['new_top_step'] = 2
-        post3['keep_step'] = 2
-        post3['steps'] = [2]
-        post3['step_2_process_id'] = 1
-        self.post_process(3, post3)
-        proc1 = self.proc_as_dict(1, post1['title'],
-                                  post1['description'], post1['effort'])
-        proc2 = self.proc_as_dict(2, post2['title'],
-                                  post2['description'], post2['effort'])
-        proc3 = self.proc_as_dict(3, post3['title'],
-                                  post3['description'], post3['effort'])
-        proc2['explicit_steps'] = [1]
-        proc3['explicit_steps'] = [2, 3]
-        step1 = self.procstep_as_dict(1, 2, 1)
-        step2 = self.procstep_as_dict(2, 3, 1)
-        step3 = self.procstep_as_dict(3, 3, 2)
-        expected = self.GET_processes_dict([proc2, proc3, proc1])
-        assert isinstance(expected['_library'], dict)
-        expected['_library']['ProcessStep'] = self.as_refs([step1, step2,
-                                                            step3])
-        self.check_json_get('/processes', expected)
+        proc1_post = {'title': 'foo', 'description': 'oof', 'effort': 1.0}
+        self.post_exp_process([exp], proc1_post, 1)
+        proc2_post = {'title': 'bar', 'description': 'rab', 'effort': 1.1}
+        self.post_exp_process([exp], proc2_post | {'new_top_step': [1]}, 2)
+        proc3_post = {'title': 'baz', 'description': 'zab', 'effort': 0.9}
+        self.post_exp_process([exp], proc3_post | {'new_top_step': [1]}, 3)
+        proc3_post = proc3_post | {'new_top_step': [2], 'kept_steps': [2]}
+        self.post_exp_process([exp], proc3_post, 3)
+        exp.lib_set('ProcessStep', [exp.procstep_as_dict(1, 2, 1),
+                                    exp.procstep_as_dict(2, 3, 1),
+                                    exp.procstep_as_dict(3, 3, 2)])
+        exp.set('pattern', '')
+        self.check_filter(exp, 'processes', 'sort_by', 'title', [2, 3, 1])
         # test other sortings
         # test other sortings
-        expected['sort_by'] = '-title'
-        expected['processes'] = self.as_id_list([proc1, proc3, proc2])
-        self.check_json_get('/processes?sort_by=-title', expected)
-        expected['sort_by'] = 'effort'
-        expected['processes'] = self.as_id_list([proc3, proc1, proc2])
-        self.check_json_get('/processes?sort_by=effort', expected)
-        expected['sort_by'] = '-effort'
-        expected['processes'] = self.as_id_list([proc2, proc1, proc3])
-        self.check_json_get('/processes?sort_by=-effort', expected)
-        expected['sort_by'] = 'steps'
-        expected['processes'] = self.as_id_list([proc1, proc2, proc3])
-        self.check_json_get('/processes?sort_by=steps', expected)
-        expected['sort_by'] = '-steps'
-        expected['processes'] = self.as_id_list([proc3, proc2, proc1])
-        self.check_json_get('/processes?sort_by=-steps', expected)
-        expected['sort_by'] = 'owners'
-        expected['processes'] = self.as_id_list([proc3, proc2, proc1])
-        self.check_json_get('/processes?sort_by=owners', expected)
-        expected['sort_by'] = '-owners'
-        expected['processes'] = self.as_id_list([proc1, proc2, proc3])
-        self.check_json_get('/processes?sort_by=-owners', expected)
+        self.check_filter(exp, 'processes', 'sort_by', '-title', [1, 3, 2])
+        self.check_filter(exp, 'processes', 'sort_by', 'effort', [3, 1, 2])
+        self.check_filter(exp, 'processes', 'sort_by', '-effort', [2, 1, 3])
+        self.check_filter(exp, 'processes', 'sort_by', 'steps', [1, 2, 3])
+        self.check_filter(exp, 'processes', 'sort_by', '-steps', [3, 2, 1])
+        self.check_filter(exp, 'processes', 'sort_by', 'owners', [3, 2, 1])
+        self.check_filter(exp, 'processes', 'sort_by', '-owners', [1, 2, 3])
         # test pattern matching on title
         # test pattern matching on title
-        expected = self.GET_processes_dict([proc2, proc3])
-        assert isinstance(expected['_library'], dict)
-        expected['pattern'] = 'ba'
-        expected['_library']['ProcessStep'] = self.as_refs([step1, step2,
-                                                            step3])
-        self.check_json_get('/processes?pattern=ba', expected)
+        exp.set('sort_by', 'title')
+        exp.lib_del('Process', '1')
+        self.check_filter(exp, 'processes', 'pattern', 'ba', [2, 3])
         # test pattern matching on description
         # test pattern matching on description
-        expected['processes'] = self.as_id_list([proc1])
-        expected['_library'] = {'Process': self.as_refs([proc1])}
-        expected['pattern'] = 'of'
-        self.check_json_get('/processes?pattern=of', expected)
+        exp.lib_wipe('Process')
+        exp.lib_wipe('ProcessStep')
+        self.post_exp_process([exp], {'description': 'oof', 'effort': 1.0}, 1)
+        self.check_filter(exp, 'processes', 'pattern', 'of', [1])
index dd57ee4c0c28cfc73d3d9c08dd6c18ab2dd7cd7b..2ecf3b845d4f0128712dfdd63510a683c2d9aeaf 100644 (file)
@@ -1,5 +1,7 @@
 """Test Todos module."""
 """Test Todos module."""
-from tests.utils import TestCaseSansDB, TestCaseWithDB, TestCaseWithServer
+from typing import Any
+from tests.utils import (TestCaseSansDB, TestCaseWithDB, TestCaseWithServer,
+                         Expected)
 from plomtask.todos import Todo, TodoNode
 from plomtask.processes import Process, ProcessStep
 from plomtask.conditions import Condition
 from plomtask.todos import Todo, TodoNode
 from plomtask.processes import Process, ProcessStep
 from plomtask.conditions import Condition
@@ -10,15 +12,12 @@ from plomtask.exceptions import (NotFoundException, BadFormatException,
 class TestsWithDB(TestCaseWithDB, TestCaseSansDB):
     """Tests requiring DB, but not server setup.
 
 class TestsWithDB(TestCaseWithDB, TestCaseSansDB):
     """Tests requiring DB, but not server setup.
 
-    NB: We subclass TestCaseSansDB too, to pull in its .test_id_validation,
-    which for Todo wouldn't run without a DB being set up due to the need for
-    Processes with set IDs.
+    NB: We subclass TestCaseSansDB too, to also run its tests; since every
+    Todo requires a _saved_ Process, they would not run without a DB set up.
     """
     checked_class = Todo
     default_init_kwargs = {'process': None, 'is_done': False,
                            'date': '2024-01-01'}
     """
     checked_class = Todo
     default_init_kwargs = {'process': None, 'is_done': False,
                            'date': '2024-01-01'}
-    # solely used for TestCaseSansDB.test_id_setting
-    default_init_args = [None, False, '2024-01-01']
 
     def setUp(self) -> None:
         super().setUp()
 
     def setUp(self) -> None:
         super().setUp()
@@ -31,7 +30,6 @@ class TestsWithDB(TestCaseWithDB, TestCaseSansDB):
         self.cond2 = Condition(None)
         self.cond2.save(self.db_conn)
         self.default_init_kwargs['process'] = self.proc
         self.cond2 = Condition(None)
         self.cond2.save(self.db_conn)
         self.default_init_kwargs['process'] = self.proc
-        self.default_init_args[0] = self.proc
 
     def test_Todo_init(self) -> None:
         """Test creation of Todo and what they default to."""
 
     def test_Todo_init(self) -> None:
         """Test creation of Todo and what they default to."""
@@ -41,9 +39,9 @@ class TestsWithDB(TestCaseWithDB, TestCaseSansDB):
         process.save(self.db_conn)
         assert isinstance(self.cond1.id_, int)
         assert isinstance(self.cond2.id_, int)
         process.save(self.db_conn)
         assert isinstance(self.cond1.id_, int)
         assert isinstance(self.cond2.id_, int)
-        process.set_conditions(self.db_conn, [self.cond1.id_, self.cond2.id_])
-        process.set_enables(self.db_conn, [self.cond1.id_])
-        process.set_disables(self.db_conn, [self.cond2.id_])
+        process.set_condition_relations(self.db_conn,
+                                        [self.cond1.id_, self.cond2.id_], [],
+                                        [self.cond1.id_], [self.cond2.id_])
         todo_no_id = Todo(None, process, False, self.date1)
         self.assertEqual(todo_no_id.conditions, [self.cond1, self.cond2])
         self.assertEqual(todo_no_id.enables, [self.cond1])
         todo_no_id = Todo(None, process, False, self.date1)
         self.assertEqual(todo_no_id.conditions, [self.cond1, self.cond2])
         self.assertEqual(todo_no_id.enables, [self.cond1])
@@ -64,14 +62,18 @@ class TestsWithDB(TestCaseWithDB, TestCaseSansDB):
         with self.assertRaises(BadFormatException):
             self.assertEqual(Todo.by_date(self.db_conn, 'foo'), [])
 
         with self.assertRaises(BadFormatException):
             self.assertEqual(Todo.by_date(self.db_conn, 'foo'), [])
 
+    def test_Todo_by_date_range_with_limits(self) -> None:
+        """Test .by_date_range_with_limits."""
+        self.check_by_date_range_with_limits('day')
+
     def test_Todo_on_conditions(self) -> None:
         """Test effect of Todos on Conditions."""
         assert isinstance(self.cond1.id_, int)
         assert isinstance(self.cond2.id_, int)
         todo = Todo(None, self.proc, False, self.date1)
         todo.save(self.db_conn)
     def test_Todo_on_conditions(self) -> None:
         """Test effect of Todos on Conditions."""
         assert isinstance(self.cond1.id_, int)
         assert isinstance(self.cond2.id_, int)
         todo = Todo(None, self.proc, False, self.date1)
         todo.save(self.db_conn)
-        todo.set_enables(self.db_conn, [self.cond1.id_])
-        todo.set_disables(self.db_conn, [self.cond2.id_])
+        todo.set_condition_relations(self.db_conn, [], [],
+                                     [self.cond1.id_], [self.cond2.id_])
         todo.is_done = True
         self.assertEqual(self.cond1.is_active, True)
         self.assertEqual(self.cond2.is_active, False)
@@ -112,7 +114,8 @@ class TestsWithDB(TestCaseWithDB, TestCaseSansDB):
         todo_1.is_done = True
         todo_2.is_done = True
         todo_2.is_done = False
-        todo_2.set_conditions(self.db_conn, [self.cond1.id_])
+        todo_2.set_condition_relations(
+                self.db_conn, [self.cond1.id_], [], [], [])
         with self.assertRaises(BadFormatException):
             todo_2.is_done = True
         self.cond1.is_active = True
@@ -120,16 +123,24 @@ class TestsWithDB(TestCaseWithDB, TestCaseSansDB):
 
     def test_Todo_step_tree(self) -> None:
         """Test self-configuration of TodoStepsNode tree for Day view."""
+
+        def todo_node_as_dict(node: TodoNode) -> dict[str, object]:
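+            # recursively flatten a TodoNode tree into comparable dicts of
+            # Todo ID, seen flag, and children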
+            return {'todo': node.todo.id_, 'seen': node.seen,
+                    'children': [todo_node_as_dict(c) for c in node.children]}
+
         todo_1 = Todo(None, self.proc, False, self.date1)
         todo_1.save(self.db_conn)
         assert isinstance(todo_1.id_, int)
         # test minimum
         node_0 = TodoNode(todo_1, False, [])
-        self.assertEqual(todo_1.get_step_tree(set()).as_dict, node_0.as_dict)
+        cmp_0_dict = todo_node_as_dict(todo_1.get_step_tree(set()))
+        cmp_1_dict = todo_node_as_dict(node_0)
+        self.assertEqual(cmp_0_dict, cmp_1_dict)
         # test non-empty seen_todos does something
         node_0.seen = True
-        self.assertEqual(todo_1.get_step_tree({todo_1.id_}).as_dict,
-                         node_0.as_dict)
+        cmp_0_dict = todo_node_as_dict(todo_1.get_step_tree({todo_1.id_}))
+        cmp_1_dict = todo_node_as_dict(node_0)
+        self.assertEqual(cmp_0_dict, cmp_1_dict)
         # test child shows up
         todo_2 = Todo(None, self.proc, False, self.date1)
         todo_2.save(self.db_conn)
@@ -138,7 +149,9 @@ class TestsWithDB(TestCaseWithDB, TestCaseSansDB):
         node_2 = TodoNode(todo_2, False, [])
         node_0.children = [node_2]
         node_0.seen = False
-        self.assertEqual(todo_1.get_step_tree(set()).as_dict, node_0.as_dict)
+        cmp_0_dict = todo_node_as_dict(todo_1.get_step_tree(set()))
+        cmp_1_dict = todo_node_as_dict(node_0)
+        self.assertEqual(cmp_0_dict, cmp_1_dict)
         # test child shows up with child
         todo_3 = Todo(None, self.proc, False, self.date1)
         todo_3.save(self.db_conn)
@@ -146,15 +159,19 @@ class TestsWithDB(TestCaseWithDB, TestCaseSansDB):
         todo_2.add_child(todo_3)
         node_3 = TodoNode(todo_3, False, [])
         node_2.children = [node_3]
-        self.assertEqual(todo_1.get_step_tree(set()).as_dict, node_0.as_dict)
+        cmp_0_dict = todo_node_as_dict(todo_1.get_step_tree(set()))
+        cmp_1_dict = todo_node_as_dict(node_0)
+        self.assertEqual(cmp_0_dict, cmp_1_dict)
         # test same todo can be child-ed multiple times at different locations
         todo_1.add_child(todo_3)
         node_4 = TodoNode(todo_3, True, [])
         node_0.children += [node_4]
-        self.assertEqual(todo_1.get_step_tree(set()).as_dict, node_0.as_dict)
+        cmp_0_dict = todo_node_as_dict(todo_1.get_step_tree(set()))
+        cmp_1_dict = todo_node_as_dict(node_0)
+        self.assertEqual(cmp_0_dict, cmp_1_dict)
 
 
-    def test_Todo_create_with_children(self) -> None:
-        """Test parenthood guaranteeds of Todo.create_with_children."""
+    def test_Todo_ensure_children(self) -> None:
+        """Test parenthood guarantees of Todo.ensure_children."""
         assert isinstance(self.proc.id_, int)
         proc2 = Process(None)
         proc2.save(self.db_conn)
@@ -178,12 +195,15 @@ class TestsWithDB(TestCaseWithDB, TestCaseSansDB):
         todo_ignore.save(self.db_conn)
         self.assertEqual(todo_ignore.children, [])
         # test create_with_children on step-less does nothing
-        todo_1 = Todo.create_with_children(self.db_conn, self.proc.id_,
-                                           self.date1)
+        todo_1 = Todo(None, self.proc, False, self.date1)
+        todo_1.save(self.db_conn)
+        todo_1.ensure_children(self.db_conn)
         self.assertEqual(todo_1.children, [])
         self.assertEqual(len(Todo.all(self.db_conn)), 2)
         # test create_with_children adopts and creates, and down tree too
-        todo_2 = Todo.create_with_children(self.db_conn, proc2.id_, self.date1)
+        todo_2 = Todo(None, proc2, False, self.date1)
+        todo_2.save(self.db_conn)
+        todo_2.ensure_children(self.db_conn)
         self.assertEqual(3, len(todo_2.children))
         self.assertEqual(todo_1, todo_2.children[0])
         self.assertEqual(self.proc, todo_2.children[2].process)
@@ -192,238 +212,325 @@ class TestsWithDB(TestCaseWithDB, TestCaseSansDB):
         self.assertEqual(len(todo_3.children), 1)
         self.assertEqual(todo_3.children[0].process, proc4)
 
-    def test_Todo_remove(self) -> None:
-        """Test removal."""
-        todo_1 = Todo(None, self.proc, False, self.date1)
-        todo_1.save(self.db_conn)
-        assert todo_1.id_ is not None
-        todo_0 = Todo(None, self.proc, False, self.date1)
-        todo_0.save(self.db_conn)
-        todo_0.add_child(todo_1)
-        todo_2 = Todo(None, self.proc, False, self.date1)
-        todo_2.save(self.db_conn)
-        todo_1.add_child(todo_2)
-        todo_1_id = todo_1.id_
-        todo_1.remove(self.db_conn)
-        with self.assertRaises(NotFoundException):
-            Todo.by_id(self.db_conn, todo_1_id)
-        self.assertEqual(todo_0.children, [])
-        self.assertEqual(todo_2.parents, [])
-        todo_2.comment = 'foo'
-        with self.assertRaises(HandledException):
-            todo_2.remove(self.db_conn)
-        todo_2.comment = ''
-        todo_2.effort = 5
-        with self.assertRaises(HandledException):
-            todo_2.remove(self.db_conn)
 
 
-    def test_Todo_autoremoval(self) -> None:
-        """"Test automatic removal for Todo.effort < 0."""
-        todo_1 = Todo(None, self.proc, False, self.date1)
-        todo_1.save(self.db_conn)
-        todo_1.comment = 'foo'
-        todo_1.effort = -0.1
-        todo_1.save(self.db_conn)
-        assert todo_1.id_ is not None
-        Todo.by_id(self.db_conn, todo_1.id_)
-        todo_1.comment = ''
-        todo_1_id = todo_1.id_
-        todo_1.save(self.db_conn)
-        with self.assertRaises(NotFoundException):
-            Todo.by_id(self.db_conn, todo_1_id)
+class ExpectedGetTodo(Expected):
+    """Builder of expectations for GET /todo."""
 
 
+    def __init__(self,
+                 todo_id: int,
+                 *args: Any, **kwargs: Any) -> None:
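+        # seed the expected GET /todo payload with the inspected Todo's ID
+        # and an initially empty steps_todo_to_process list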
+        self._fields = {'todo': todo_id,
+                        'steps_todo_to_process': []}
+        super().__init__(*args, **kwargs)
 
 
-class TestsWithServer(TestCaseWithServer):
-    """Tests against our HTTP server/handler (and database)."""
+    def recalc(self) -> None:
+        """Update internal dictionary by subclass-specific rules."""
 
 
-    def test_do_POST_day(self) -> None:
-        """Test Todo posting of POST /day."""
-        self.post_process()
-        self.post_process(2)
-        proc = Process.by_id(self.db_conn, 1)
-        proc2 = Process.by_id(self.db_conn, 2)
-        form_data = {'day_comment': '', 'make_type': 'full'}
-        self.check_post(form_data, '/day?date=2024-01-01&make_type=full', 302)
-        self.assertEqual(Todo.by_date(self.db_conn, '2024-01-01'), [])
-        proc = Process.by_id(self.db_conn, 1)
-        form_data['new_todo'] = str(proc.id_)
-        self.check_post(form_data, '/day?date=2024-01-01&make_type=full', 302)
-        todos = Todo.by_date(self.db_conn, '2024-01-01')
-        self.assertEqual(1, len(todos))
-        todo1 = todos[0]
-        self.assertEqual(todo1.id_, 1)
-        proc = Process.by_id(self.db_conn, 1)
-        self.assertEqual(todo1.process.id_, proc.id_)
-        self.assertEqual(todo1.is_done, False)
-        proc2 = Process.by_id(self.db_conn, 2)
-        form_data['new_todo'] = str(proc2.id_)
-        self.check_post(form_data, '/day?date=2024-01-01&make_type=full', 302)
-        todos = Todo.by_date(self.db_conn, '2024-01-01')
-        todo1 = todos[1]
-        self.assertEqual(todo1.id_, 2)
-        proc2 = Process.by_id(self.db_conn, 1)
-        todo1 = Todo.by_date(self.db_conn, '2024-01-01')[0]
-        self.assertEqual(todo1.id_, 1)
-        self.assertEqual(todo1.process.id_, proc2.id_)
-        self.assertEqual(todo1.is_done, False)
+        def walk_steps(step: dict[str, Any]) -> None:
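+            # for every process step not yet filled by a Todo, collect which
+            # of the remaining candidate Todos share its Process, then recurse
+            # into its children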
+            if not step['todo']:
+                proc_id = step['process']
+                cands = self.as_ids(
+                        [t for t in todos if proc_id == t['process_id']
+                         and t['id'] in self._fields['todo_candidates']])
+                self._fields['adoption_candidates_for'][str(proc_id)] = cands
+            for child in step['children']:
+                walk_steps(child)
 
 
-    def test_do_POST_todo(self) -> None:
-        """Test POST /todo."""
-        def post_and_reload(form_data: dict[str, object], status: int = 302,
-                            redir_url: str = '/todo?id=1') -> Todo:
-            self.check_post(form_data, '/todo?id=1', status, redir_url)
-            return Todo.by_date(self.db_conn, '2024-01-01')[0]
-        # test minimum
-        self.post_process()
-        self.check_post({'day_comment': '', 'new_todo': 1,
-                         'make_type': 'full'},
-                        '/day?date=2024-01-01&make_type=full', 302)
-        # test posting to bad URLs
-        self.check_post({}, '/todo=', 404)
-        self.check_post({}, '/todo?id=', 404)
-        self.check_post({}, '/todo?id=FOO', 400)
-        self.check_post({}, '/todo?id=0', 404)
-        # test posting naked entity
-        todo1 = post_and_reload({})
-        self.assertEqual(todo1.children, [])
-        self.assertEqual(todo1.parents, [])
-        self.assertEqual(todo1.is_done, False)
-        # test posting doneness
-        todo1 = post_and_reload({'done': ''})
-        self.assertEqual(todo1.is_done, True)
-        # test implicitly posting non-doneness
-        todo1 = post_and_reload({})
-        self.assertEqual(todo1.is_done, False)
-        # test malformed adoptions
-        self.check_post({'adopt': 'foo'}, '/todo?id=1', 400)
-        self.check_post({'adopt': 1}, '/todo?id=1', 400)
-        self.check_post({'adopt': 2}, '/todo?id=1', 404)
-        # test posting second todo of same process
-        self.check_post({'day_comment': '', 'new_todo': 1,
-                         'make_type': 'full'},
-                        '/day?date=2024-01-01&make_type=full', 302)
-        # test todo 1 adopting todo 2
-        todo1 = post_and_reload({'adopt': 2})
-        todo2 = Todo.by_date(self.db_conn, '2024-01-01')[1]
-        self.assertEqual(todo1.children, [todo2])
-        self.assertEqual(todo1.parents, [])
-        self.assertEqual(todo2.children, [])
-        self.assertEqual(todo2.parents, [todo1])
-        # test todo1 cannot be set done with todo2 not done yet
-        todo1 = post_and_reload({'done': '', 'adopt': 2}, 400)
-        self.assertEqual(todo1.is_done, False)
-        # test todo1 un-adopting todo 2 by just not sending an adopt
-        todo1 = post_and_reload({}, 302)
-        todo2 = Todo.by_date(self.db_conn, '2024-01-01')[1]
-        self.assertEqual(todo1.children, [])
-        self.assertEqual(todo1.parents, [])
-        self.assertEqual(todo2.children, [])
-        self.assertEqual(todo2.parents, [])
-        # test todo1 deletion
-        todo1 = post_and_reload({'delete': ''}, 302, '/')
+        super().recalc()
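+        # wipe Day entries, then recalculate candidate ID lists from the
+        # Todos, Processes, and Conditions currently in the library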
+        self.lib_wipe('Day')
+        todos = self.lib_all('Todo')
+        procs = self.lib_all('Process')
+        conds = self.lib_all('Condition')
+        self._fields['todo_candidates'] = self.as_ids(
+                [t for t in todos if t['id'] != self._fields['todo']])
+        self._fields['process_candidates'] = self.as_ids(procs)
+        self._fields['condition_candidates'] = self.as_ids(conds)
+        self._fields['adoption_candidates_for'] = {}
+        for step in self._fields['steps_todo_to_process']:
+            walk_steps(step)
+
+    @staticmethod
+    def step_as_dict(node_id: int,
+                     children: list[dict[str, object]],
+                     process: int | None = None,
+                     todo: int | None = None,
+                     fillable: bool = False,
+                     ) -> dict[str, object]:
+        """Return JSON of TodoOrProcStepsNode to expect."""
+        return {'node_id': node_id,
+                'children': children,
+                'process': process,
+                'fillable': fillable,
+                'todo': todo}
 
 
-    def test_do_POST_day_todo_adoption(self) -> None:
-        """Test Todos posted to Day view may adopt existing Todos."""
-        form_data = self.post_process()
-        form_data = self.post_process(2, form_data | {'new_top_step': 1})
-        form_data = {'day_comment': '', 'new_todo': 1, 'make_type': 'full'}
-        self.check_post(form_data, '/day?date=2024-01-01&make_type=full', 302)
-        form_data['new_todo'] = 2
-        self.check_post(form_data, '/day?date=2024-01-01&make_type=full', 302)
-        todo1 = Todo.by_date(self.db_conn, '2024-01-01')[0]
-        todo2 = Todo.by_date(self.db_conn, '2024-01-01')[1]
-        self.assertEqual(todo1.children, [])
-        self.assertEqual(todo1.parents, [todo2])
-        self.assertEqual(todo2.children, [todo1])
-        self.assertEqual(todo2.parents, [])
 
 
-    def test_do_POST_day_todo_multiple(self) -> None:
-        """Test multiple Todos can be posted to Day view."""
-        form_data = self.post_process()
-        form_data = self.post_process(2)
-        form_data = {'day_comment': '', 'new_todo': [1, 2],
-                     'make_type': 'full'}
-        self.check_post(form_data, '/day?date=2024-01-01&make_type=full', 302)
-        todo1 = Todo.by_date(self.db_conn, '2024-01-01')[0]
-        todo2 = Todo.by_date(self.db_conn, '2024-01-01')[1]
-        self.assertEqual(todo1.process.id_, 1)
-        self.assertEqual(todo2.process.id_, 2)
+class TestsWithServer(TestCaseWithServer):
+    """Tests against our HTTP server/handler (and database)."""
 
 
-    def test_do_POST_day_todo_multiple_inner_adoption(self) -> None:
-        """Test multiple Todos can be posted to Day view w. inner adoption."""
+    def _post_exp_todo(
+            self, id_: int, payload: dict[str, Any], exp: Expected) -> None:
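+        # POST the payload to /todo and mirror it into the expectations
+        # object, so later check_json_get() calls compare against the
+        # updated state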
+        self.check_post(payload, f'/todo?id={id_}')
+        exp.set_todo_from_post(id_, payload)
 
 
-        def key_order_func(t: Todo) -> int:
-            assert isinstance(t.process.id_, int)
-            return t.process.id_
+    def test_basic_fail_POST_todo(self) -> None:
+        """Test basic malformed/illegal POST /todo requests."""
+        self.post_exp_process([], {}, 1)
+        # test we cannot just POST into non-existing Todo
+        self.check_post({}, '/todo', 404)
+        self.check_post({}, '/todo?id=FOO', 400)
+        self.check_post({}, '/todo?id=0', 404)
+        self.check_post({}, '/todo?id=1', 404)
+        # test malformed values on existing Todo
+        self.post_exp_day([], {'new_todo': [1]})
+        for name in ['adopt', 'effort', 'make_full', 'make_empty',
+                     'conditions', 'disables', 'blockers', 'enables']:
+            self.check_post({name: 'x'}, '/todo?id=1', 400, '/todo')
+        for prefix in ['make_', '']:
+            for suffix in ['', 'x', '1.1']:
+                self.check_post({'step_filler_to_1': [f'{prefix}{suffix}']},
+                                 '/todo?id=1', 400, '/todo')
+        for suffix in ['', 'x', '1.1']:
+            self.check_post({f'step_filler_to_{suffix}': ['1']},
+                            '/todo?id=1', 400, '/todo')
 
 
-        def check_adoption(date: str, new_todos: list[int]) -> None:
-            form_data = {'day_comment': '', 'new_todo': new_todos,
-                         'make_type': 'full'}
-            self.check_post(form_data, f'/day?date={date}&make_type=full', 302)
-            day_todos = Todo.by_date(self.db_conn, date)
-            day_todos.sort(key=key_order_func)
-            todo1 = day_todos[0]
-            todo2 = day_todos[1]
-            self.assertEqual(todo1.children, [])
-            self.assertEqual(todo1.parents, [todo2])
-            self.assertEqual(todo2.children, [todo1])
-            self.assertEqual(todo2.parents, [])
+    def test_basic_POST_todo(self) -> None:
+        """Test basic POST /todo manipulations."""
+        exp = ExpectedGetTodo(1)
+        self.post_exp_process([exp], {'calendarize': 0}, 1)
+        self.post_exp_day([exp], {'new_todo': [1]})
+        # test posting naked entity at first changes nothing
+        self.check_json_get('/todo?id=1', exp)
+        self.check_post({}, '/todo?id=1')
+        self.check_json_get('/todo?id=1', exp)
+        # test posting doneness, comment, calendarization, effort
+        todo_post = {'is_done': 1, 'calendarize': 1,
+                     'comment': 'foo', 'effort': 2.3}
+        self._post_exp_todo(1, todo_post, exp)
+        self.check_json_get('/todo?id=1', exp)
+        # test implicitly un-setting (only) comment by empty post
+        self.check_post({}, '/todo?id=1')
+        exp.lib_get('Todo', 1)['comment'] = ''
+        self.check_json_get('/todo?id=1', exp)
+        # test effort post can be explicitly unset by "effort":"" post
+        self.check_post({'effort': ''}, '/todo?id=1')
+        exp.lib_get('Todo', 1)['effort'] = None
+        self.check_json_get('/todo?id=1', exp)
+        # test Condition posts
+        c1_post = {'title': 'foo', 'description': 'oof', 'is_active': 0}
+        c2_post = {'title': 'bar', 'description': 'rab', 'is_active': 1}
+        self.post_exp_cond([exp], 1, c1_post, '?id=1', '?id=1')
+        self.post_exp_cond([exp], 2, c2_post, '?id=2', '?id=2')
+        self.check_json_get('/todo?id=1', exp)
+        todo_post = {'conditions': [1], 'disables': [1],
+                     'blockers': [2], 'enables': [2]}
+        self._post_exp_todo(1, todo_post, exp)
+        self.check_json_get('/todo?id=1', exp)
 
 
-        def check_nesting_adoption(process_id: int, date: str,
-                                   new_top_steps: list[int]) -> None:
-            form_data = {'title': '', 'description': '', 'effort': 1,
-                         'step_of': [2]}
-            form_data = self.post_process(1, form_data)
-            form_data['new_top_step'] = new_top_steps
-            form_data['step_of'] = []
-            form_data = self.post_process(process_id, form_data)
-            form_data = {'day_comment': '', 'new_todo': [process_id],
-                         'make_type': 'full'}
-            self.check_post(form_data, f'/day?date={date}&make_type=full', 302)
-            day_todos = Todo.by_date(self.db_conn, date)
-            day_todos.sort(key=key_order_func, reverse=True)
-            self.assertEqual(len(day_todos), 3)
-            todo1 = day_todos[0]  # process of process_id
-            todo2 = day_todos[1]  # process 2
-            todo3 = day_todos[2]  # process 1
-            self.assertEqual(sorted(todo1.children), sorted([todo2, todo3]))
-            self.assertEqual(todo1.parents, [])
-            self.assertEqual(todo2.children, [todo3])
-            self.assertEqual(todo2.parents, [todo1])
-            self.assertEqual(todo3.children, [])
-            self.assertEqual(sorted(todo3.parents), sorted([todo2, todo1]))
+    def test_POST_todo_deletion(self) -> None:
+        """Test deletions via POST /todo."""
+        exp = ExpectedGetTodo(1)
+        self.post_exp_process([exp], {}, 1)
+        # test failure of deletion on non-existing Todo
+        self.check_post({'delete': ''}, '/todo?id=2', 404, '/')
+        # test deletion of existing Todo
+        self.post_exp_day([exp], {'new_todo': [1]})
+        self.check_post({'delete': ''}, '/todo?id=1', 302, '/')
+        self.check_get('/todo?id=1', 404)
+        exp.lib_del('Todo', 1)
+        # test deletion of adopted Todo
+        self.post_exp_day([exp], {'new_todo': [1]})
+        self.post_exp_day([exp], {'new_todo': [1]})
+        self.check_post({'adopt': 2}, '/todo?id=1')
+        self.check_post({'delete': ''}, '/todo?id=2', 302, '/')
+        exp.lib_del('Todo', 2)
+        self.check_get('/todo?id=2', 404)
+        self.check_json_get('/todo?id=1', exp)
+        # test deletion of adopting Todo
+        self.post_exp_day([exp], {'new_todo': [1]})
+        self.check_post({'adopt': 2}, '/todo?id=1')
+        self.check_post({'delete': ''}, '/todo?id=1', 302, '/')
+        exp.set('todo', 2)
+        exp.lib_del('Todo', 1)
+        self.check_json_get('/todo?id=2', exp)
+        # test cannot delete Todo with comment or effort
+        self.check_post({'comment': 'foo'}, '/todo?id=2')
+        self.check_post({'delete': ''}, '/todo?id=2', 500, '/')
+        self.check_post({'effort': 5}, '/todo?id=2')
+        self.check_post({'delete': ''}, '/todo?id=2', 500, '/')
+        # test deletion via effort < 0, but only if deletable
+        self.check_post({'effort': -1, 'comment': 'foo'}, '/todo?id=2')
+        self.check_post({}, '/todo?id=2')
+        self.check_get('/todo?id=2', 404)
 
 
-        form_data = self.post_process()
-        form_data = self.post_process(2, form_data | {'new_top_step': 1})
-        check_adoption('2024-01-01', [1, 2])
-        check_adoption('2024-01-02', [2, 1])
-        check_nesting_adoption(3, '2024-01-03', [1, 2])
-        check_nesting_adoption(4, '2024-01-04', [2, 1])
+    def test_POST_todo_adoption(self) -> None:
+        """Test adoption via POST /todo with "adopt"."""
+        # post two Todos to Day, have first adopt second
+        exp = ExpectedGetTodo(1)
+        self.post_exp_process([exp], {}, 1)
+        self.post_exp_day([exp], {'new_todo': [1]})
+        self.post_exp_day([exp], {'new_todo': [1]})
+        self._post_exp_todo(1, {'adopt': 2}, exp)
+        exp.set('steps_todo_to_process', [exp.step_as_dict(1, [], todo=2)])
+        self.check_json_get('/todo?id=1', exp)
+        # test Todo un-adopting by just not sending an adopt
+        self._post_exp_todo(1, {}, exp)
+        exp.set('steps_todo_to_process', [])
+        self.check_json_get('/todo?id=1', exp)
+        # test fail on trying to adopt non-existing Todo
+        self.check_post({'adopt': 3}, '/todo?id=1', 404)
+        # test cannot self-adopt
+        self.check_post({'adopt': 1}, '/todo?id=1', 400)
+        # test cannot do 1-step circular adoption
+        self._post_exp_todo(2, {'adopt': 1}, exp)
+        self.check_post({'adopt': 2}, '/todo?id=1', 400)
+        # test cannot do 2-step circular adoption
+        self.post_exp_day([exp], {'new_todo': [1]})
+        self._post_exp_todo(3, {'adopt': 2}, exp)
+        self.check_post({'adopt': 3}, '/todo?id=1', 400)
+        # test can adopt Todo into ProcessStep chain via its Process (with key
+        # 'step_filler' equivalent to single-element 'adopt' if intable)
+        self.post_exp_process([exp], {}, 2)
+        self.post_exp_process([exp], {}, 3)
+        self.post_exp_process([exp], {'new_top_step': [2, 3]}, 1)
+        exp.lib_set('ProcessStep', [exp.procstep_as_dict(1, 1, 2),
+                                    exp.procstep_as_dict(2, 1, 3)])
+        step1_proc2 = exp.step_as_dict(1, [], 2, None, True)
+        step2_proc3 = exp.step_as_dict(2, [], 3, None, True)
+        exp.set('steps_todo_to_process', [step1_proc2, step2_proc3])
+        self.post_exp_day([exp], {'new_todo': [2]})
+        self.post_exp_day([exp], {'new_todo': [3]})
+        self.check_json_get('/todo?id=1', exp)
+        self._post_exp_todo(1, {'step_filler_to_1': 5, 'adopt': [4]}, exp)
+        exp.lib_get('Todo', 1)['children'] += [5]
+        step1_proc2 = exp.step_as_dict(1, [], 2, 4, True)
+        step2_proc3 = exp.step_as_dict(2, [], 3, 5, True)
+        exp.set('steps_todo_to_process', [step1_proc2, step2_proc3])
+        self.check_json_get('/todo?id=1', exp)
+        # test 'ignore' values for 'step_filler' are ignored, and intable
+        # 'step_filler' values are interchangeable with those of 'adopt'
+        todo_post = {'adopt': 5, 'step_filler_to_1': ['ignore', 4]}
+        self.check_post(todo_post, '/todo?id=1')
+        self.check_json_get('/todo?id=1', exp)
+        # test cannot adopt into non-top-level elements of chain, instead
+        # creating new top-level steps when adopting Todos of that Process
+        self.post_exp_process([exp], {}, 4)
+        self.post_exp_process([exp], {'new_top_step': 4, 'step_of': [1]}, 3)
+        exp.lib_set('ProcessStep', [exp.procstep_as_dict(3, 3, 4)])
+        step3_proc4 = exp.step_as_dict(3, [], 4, None, True)
+        step2_proc3 = exp.step_as_dict(2, [step3_proc4], 3, 5, True)
+        exp.set('steps_todo_to_process', [step1_proc2, step2_proc3])
+        self.post_exp_day([exp], {'new_todo': [4]})
+        self._post_exp_todo(1, {'adopt': [4, 5, 6]}, exp)
+        step4_todo6 = exp.step_as_dict(4, [], None, 6, False)
+        exp.set('steps_todo_to_process', [step1_proc2, step2_proc3,
+                                          step4_todo6])
+        self.check_json_get('/todo?id=1', exp)
 
 
-    def test_do_POST_day_todo_doneness(self) -> None:
-        """Test Todo doneness can be posted to Day view."""
-        self.post_process()
-        form_data = {'day_comment': '', 'new_todo': [1], 'make_type': 'full'}
-        self.check_post(form_data, '/day?date=2024-01-01&make_type=full', 302)
-        todo = Todo.by_date(self.db_conn, '2024-01-01')[0]
-        form_data = {'day_comment': '', 'todo_id': [1], 'make_type': 'full',
-                     'comment': [''], 'done': [], 'effort': ['']}
-        self.check_post(form_data, '/day?date=2024-01-01&make_type=full', 302)
-        todo = Todo.by_date(self.db_conn, '2024-01-01')[0]
-        self.assertEqual(todo.is_done, False)
-        form_data = {'day_comment': '', 'todo_id': [1], 'done': [1],
-                     'make_type': 'full', 'comment': [''], 'effort': ['']}
-        self.check_post(form_data, '/day?date=2024-01-01&make_type=full', 302)
-        todo = Todo.by_date(self.db_conn, '2024-01-01')[0]
-        self.assertEqual(todo.is_done, True)
+    def test_POST_todo_make_empty(self) -> None:
+        """Test creation via POST /todo "step_filler_to"/"make"."""
+        # create chain of Processes
+        exp = ExpectedGetTodo(1)
+        self.post_exp_process([exp], {}, 1)
+        for i in range(1, 4):
+            self.post_exp_process([exp], {'new_top_step': i}, i+1)
+        exp.lib_set('ProcessStep', [exp.procstep_as_dict(1, 2, 1),
+                                    exp.procstep_as_dict(2, 3, 2),
+                                    exp.procstep_as_dict(3, 4, 3)])
+        # post (childless) Todo of chain end, then make empty on next in line
+        self.post_exp_day([exp], {'new_todo': [4]})
+        step3_proc1 = exp.step_as_dict(3, [], 1)
+        step2_proc2 = exp.step_as_dict(2, [step3_proc1], 2)
+        step1_proc3 = exp.step_as_dict(1, [step2_proc2], 3, None, True)
+        exp.set('steps_todo_to_process', [step1_proc3])
+        self.check_json_get('/todo?id=1', exp)
+        self.check_post({'step_filler_to_1': 'make_3'}, '/todo?id=1')
+        exp.set_todo_from_post(2, {'process_id': 3})
+        exp.set_todo_from_post(1, {'process_id': 4, 'children': [2]})
+        step2_proc2 = exp.step_as_dict(2, [step3_proc1], 2, None, True)
+        step1_proc3 = exp.step_as_dict(1, [step2_proc2], 3, 2, True)
+        exp.set('steps_todo_to_process', [step1_proc3])
+        self.check_json_get('/todo?id=1', exp)
+        # make new top-level Todo without chain implied by its Process
+        self.check_post({'make_empty': 2, 'adopt': [2]}, '/todo?id=1')
+        exp.set_todo_from_post(3, {'process_id': 2})
+        exp.set_todo_from_post(1, {'process_id': 4, 'children': [2, 3]})
+        step4_todo3 = exp.step_as_dict(4, [], None, 3)
+        exp.set('steps_todo_to_process', [step1_proc3, step4_todo3])
+        self.check_json_get('/todo?id=1', exp)
+        # fail on trying to call make_empty on non-existing Process
+        self.check_post({'make_full': 5}, '/todo?id=1', 404)
 
 
-    def test_do_GET_todo(self) -> None:
+    def test_GET_todo(self) -> None:
         """Test GET /todo response codes."""
         """Test GET /todo response codes."""
-        self.post_process()
-        form_data = {'day_comment': '', 'new_todo': 1, 'make_type': 'full'}
-        self.check_post(form_data, '/day?date=2024-01-01&make_type=full', 302)
+        # test malformed or illegal parameter values
         self.check_get('/todo', 404)
         self.check_get('/todo?id=', 404)
         self.check_get('/todo?id=foo', 400)
         self.check_get('/todo?id=0', 404)
-        self.check_get('/todo?id=1', 200)
+        self.check_get('/todo?id=2', 404)
+        # test all existing Processes are shown as available
+        exp = ExpectedGetTodo(1)
+        self.post_exp_process([exp], {}, 1)
+        self.post_exp_day([exp], {'new_todo': [1]})
+        self.post_exp_process([exp], {}, 2)
+        self.check_json_get('/todo?id=1', exp)
+        # test chain of Processes shown as potential step nodes
+        self.post_exp_process([exp], {}, 3)
+        self.post_exp_process([exp], {}, 4)
+        self.post_exp_process([exp], {'new_top_step': 2}, 1)
+        self.post_exp_process([exp], {'new_top_step': 3, 'step_of': [1]}, 2)
+        self.post_exp_process([exp], {'new_top_step': 4, 'step_of': [2]}, 3)
+        exp.lib_set('ProcessStep', [exp.procstep_as_dict(1, 1, 2, None),
+                                    exp.procstep_as_dict(2, 2, 3, None),
+                                    exp.procstep_as_dict(3, 3, 4, None)])
+        step3_proc4 = exp.step_as_dict(3, [], 4)
+        step2_proc3 = exp.step_as_dict(2, [step3_proc4], 3)
+        step1_proc2 = exp.step_as_dict(1, [step2_proc3], 2, fillable=True)
+        exp.set('steps_todo_to_process', [step1_proc2])
+        self.check_json_get('/todo?id=1', exp)
+        # test display of parallel chains
+        proc_steps_post = {'new_top_step': 4, 'kept_steps': [1, 3]}
+        self.post_exp_process([], proc_steps_post, 1)
+        step4_proc4 = exp.step_as_dict(4, [], 4, fillable=True)
+        exp.lib_set('ProcessStep', [exp.procstep_as_dict(4, 1, 4, None)])
+        exp.set('steps_todo_to_process', [step1_proc2, step4_proc4])
+        self.check_json_get('/todo?id=1', exp)
+
+    def test_POST_todo_doneness_relations(self) -> None:
+        """Test Todo.is_done Condition, adoption relations for /todo POSTs."""
+        self.post_exp_process([], {}, 1)
+        # test Todo with adoptee can only be set done if adoptee is done too
+        self.post_exp_day([], {'new_todo': [1]})
+        self.post_exp_day([], {'new_todo': [1]})
+        self.check_post({'adopt': 2, 'is_done': 1}, '/todo?id=1', 400)
+        self.check_post({'is_done': 1}, '/todo?id=2')
+        self.check_post({'adopt': 2, 'is_done': 1}, '/todo?id=1', 302)
+        # test Todo cannot be set undone with adopted Todo not done yet
+        self.check_post({'is_done': 0}, '/todo?id=2')
+        self.check_post({'adopt': 2, 'is_done': 0}, '/todo?id=1', 400)
+        # test unadoption relieves block
+        self.check_post({'is_done': 0}, '/todo?id=1', 302)
+        # test Condition being set or unset can block doneness setting
+        c1_post = {'title': '', 'description': '', 'is_active': 0}
+        c2_post = {'title': '', 'description': '', 'is_active': 1}
+        self.check_post(c1_post, '/condition', redir='/condition?id=1')
+        self.check_post(c2_post, '/condition', redir='/condition?id=2')
+        self.check_post({'conditions': [1], 'is_done': 1}, '/todo?id=1', 400)
+        self.check_post({'is_done': 1}, '/todo?id=1', 302)
+        self.check_post({'is_done': 0}, '/todo?id=1', 302)
+        self.check_post({'blockers': [2], 'is_done': 1}, '/todo?id=1', 400)
+        self.check_post({'is_done': 1}, '/todo?id=1', 302)
+        # test setting Todo doneness can set/un-set Conditions, but only on
+        # doneness change, not by mere passive state
+        self.check_post({'is_done': 0}, '/todo?id=2', 302)
+        self.check_post({'enables': [1], 'is_done': 1}, '/todo?id=1')
+        self.check_post({'conditions': [1], 'is_done': 1}, '/todo?id=2', 400)
+        self.check_post({'enables': [1], 'is_done': 0}, '/todo?id=1')
+        self.check_post({'enables': [1], 'is_done': 1}, '/todo?id=1')
+        self.check_post({'conditions': [1], 'is_done': 1}, '/todo?id=2')
+        self.check_post({'blockers': [1], 'is_done': 0}, '/todo?id=2', 400)
+        self.check_post({'disables': [1], 'is_done': 1}, '/todo?id=1')
+        self.check_post({'blockers': [1], 'is_done': 0}, '/todo?id=2', 400)
+        self.check_post({'disables': [1]}, '/todo?id=1')
+        self.check_post({'disables': [1], 'is_done': 1}, '/todo?id=1')
+        self.check_post({'blockers': [1]}, '/todo?id=2')
index 665436873c27af704a13827715d3c795e04e1fe1..d1b6eac52ea08c196afc7501d10224de138879ec 100644 (file)
 """Shared test utilities."""
 """Shared test utilities."""
+# pylint: disable=too-many-lines
 from __future__ import annotations
 from unittest import TestCase
 from typing import Mapping, Any, Callable
 from threading import Thread
 from http.client import HTTPConnection
-from json import loads as json_loads
+from datetime import datetime, timedelta
+from time import sleep
+from json import loads as json_loads, dumps as json_dumps
 from urllib.parse import urlencode
 from uuid import uuid4
 from os import remove as remove_file
+from pprint import pprint
 from plomtask.db import DatabaseFile, DatabaseConnection
 from plomtask.http import TaskHandler, TaskServer
 from plomtask.processes import Process, ProcessStep
 from plomtask.conditions import Condition
 from plomtask.days import Day
+from plomtask.dating import DATE_FORMAT
 from plomtask.todos import Todo
+from plomtask.versioned_attributes import VersionedAttribute, TIMESTAMP_FMT
 from plomtask.exceptions import NotFoundException, HandledException
 
 
-def _within_checked_class(f: Callable[..., None]) -> Callable[..., None]:
-    def wrapper(self: TestCase) -> None:
-        if hasattr(self, 'checked_class'):
-            f(self)
-    return wrapper
+VERSIONED_VALS: dict[str,
+                     list[str] | list[float]] = {'str': ['A', 'B'],
+                                                 'float': [0.3, 1.1]}
+VALID_TRUES = {True, 'True', 'true', '1', 'on'}
 
 
 
 
-class TestCaseSansDB(TestCase):
-    """Tests requiring no DB setup."""
+class TestCaseAugmented(TestCase):
+    """Tester core providing helpful basic internal decorators and methods."""
     checked_class: Any
-    default_init_args: list[Any] = []
-    versioned_defaults_to_test: dict[str, str | float] = {}
-    legal_ids = [1, 5]
-    illegal_ids = [0]
+    default_init_kwargs: dict[str, Any] = {}
+
+    @staticmethod
+    def _run_if_checked_class(f: Callable[..., None]) -> Callable[..., None]:
+        def wrapper(self: TestCase) -> None:
+            if hasattr(self, 'checked_class'):
+                f(self)
+        return wrapper
+
+    @classmethod
+    def _run_on_versioned_attributes(cls,
+                                     f: Callable[..., None]
+                                     ) -> Callable[..., None]:
+        @cls._run_if_checked_class
+        def wrapper(self: TestCase) -> None:
+            assert isinstance(self, TestCaseAugmented)
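+            # run the decorated test once per versioned attribute of the
+            # checked class, handing it a fresh owner instance, the
+            # attribute, its default, and two values to set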
+            for attr_name in self.checked_class.to_save_versioned():
+                default = self.checked_class.versioned_defaults[attr_name]
+                owner = self.checked_class(None, **self.default_init_kwargs)
+                attr = getattr(owner, attr_name)
+                to_set = VERSIONED_VALS[attr.value_type_name]
+                f(self, owner, attr_name, attr, default, to_set)
+        return wrapper
+
+    @classmethod
+    def _make_from_defaults(cls, id_: float | str | None) -> Any:
+        return cls.checked_class(id_, **cls.default_init_kwargs)
+
 
 
-    @_within_checked_class
+class TestCaseSansDB(TestCaseAugmented):
+    """Tests requiring no DB setup."""
+    legal_ids: list[str] | list[int] = [1, 5]
+    illegal_ids: list[str] | list[int] = [0]
+
+    @TestCaseAugmented._run_if_checked_class
     def test_id_validation(self) -> None:
         """Test .id_ validation/setting."""
         for id_ in self.illegal_ids:
             with self.assertRaises(HandledException):
-                self.checked_class(id_, *self.default_init_args)
+                self._make_from_defaults(id_)
         for id_ in self.legal_ids:
-            obj = self.checked_class(id_, *self.default_init_args)
+            obj = self._make_from_defaults(id_)
             self.assertEqual(obj.id_, id_)
 
-    @_within_checked_class
-    def test_versioned_defaults(self) -> None:
-        """Test defaults of VersionedAttributes."""
-        id_ = self.legal_ids[0]
-        obj = self.checked_class(id_, *self.default_init_args)
-        for k, v in self.versioned_defaults_to_test.items():
-            self.assertEqual(getattr(obj, k).newest, v)
+    @TestCaseAugmented._run_on_versioned_attributes
+    def test_versioned_set(self,
+                           _: Any,
+                           __: str,
+                           attr: VersionedAttribute,
+                           default: str | float,
+                           to_set: list[str] | list[float]
+                           ) -> None:
+        """Test VersionedAttribute.set() behaves as expected."""
+        attr.set(default)
+        self.assertEqual(list(attr.history.values()), [default])
+        # check same value does not get set twice in a row,
+        # and that not even its timestamp get updated
+        timestamp = list(attr.history.keys())[0]
+        attr.set(default)
+        self.assertEqual(list(attr.history.values()), [default])
+        self.assertEqual(list(attr.history.keys())[0], timestamp)
+        # check that different value _will_ be set/added
+        attr.set(to_set[0])
+        timesorted_vals = [attr.history[t] for
+                           t in sorted(attr.history.keys())]
+        expected = [default, to_set[0]]
+        self.assertEqual(timesorted_vals, expected)
+        # check that a previously used value can be set if not most recent
+        attr.set(default)
+        timesorted_vals = [attr.history[t] for
+                           t in sorted(attr.history.keys())]
+        expected = [default, to_set[0], default]
+        self.assertEqual(timesorted_vals, expected)
+        # again check for same value not being set twice in a row, even for
+        # later items
+        attr.set(to_set[1])
+        timesorted_vals = [attr.history[t] for
+                           t in sorted(attr.history.keys())]
+        expected = [default, to_set[0], default, to_set[1]]
+        self.assertEqual(timesorted_vals, expected)
+        attr.set(to_set[1])
+        self.assertEqual(timesorted_vals, expected)
+
+    @TestCaseAugmented._run_on_versioned_attributes
+    def test_versioned_newest(self,
+                              _: Any,
+                              __: str,
+                              attr: VersionedAttribute,
+                              default: str | float,
+                              to_set: list[str] | list[float]
+                              ) -> None:
+        """Test VersionedAttribute.newest."""
+        # check .newest on empty history returns .default
+        self.assertEqual(attr.newest, default)
+        # check newest element always returned
+        for v in [to_set[0], to_set[1]]:
+            attr.set(v)
+            self.assertEqual(attr.newest, v)
+        # check newest element returned even if also early value
+        attr.set(default)
+        self.assertEqual(attr.newest, default)
+
+    @TestCaseAugmented._run_on_versioned_attributes
+    def test_versioned_at(self,
+                          _: Any,
+                          __: str,
+                          attr: VersionedAttribute,
+                          default: str | float,
+                          to_set: list[str] | list[float]
+                          ) -> None:
+        """Test .at() returns values nearest to queried time, or default."""
+        # check .at() return default on empty history
+        timestamp_a = datetime.now().strftime(TIMESTAMP_FMT)
+        self.assertEqual(attr.at(timestamp_a), default)
+        # check value exactly at timestamp returned
+        attr.set(to_set[0])
+        timestamp_b = list(attr.history.keys())[0]
+        self.assertEqual(attr.at(timestamp_b), to_set[0])
+        # check earliest value returned if exists, rather than default
+        self.assertEqual(attr.at(timestamp_a), to_set[0])
+        # check reverts to previous value for timestamps not indexed
+        sleep(0.00001)
+        timestamp_between = datetime.now().strftime(TIMESTAMP_FMT)
+        sleep(0.00001)
+        attr.set(to_set[1])
+        timestamp_c = sorted(attr.history.keys())[-1]
+        self.assertEqual(attr.at(timestamp_c), to_set[1])
+        self.assertEqual(attr.at(timestamp_between), to_set[0])
+        sleep(0.00001)
+        timestamp_after_c = datetime.now().strftime(TIMESTAMP_FMT)
+        self.assertEqual(attr.at(timestamp_after_c), to_set[1])
 
 
 
 
-class TestCaseWithDB(TestCase):
+class TestCaseWithDB(TestCaseAugmented):
     """Module tests not requiring DB setup."""
     """Module tests not requiring DB setup."""
-    checked_class: Any
-    default_ids: tuple[int | str, int | str, int | str] = (1, 2, 3)
-    default_init_kwargs: dict[str, Any] = {}
-    test_versioneds: dict[str, type] = {}
+    default_ids: tuple[int, int, int] | tuple[str, str, str] = (1, 2, 3)
 
     def setUp(self) -> None:
         Condition.empty_cache()
@@ -80,7 +191,7 @@ class TestCaseWithDB(TestCase):
         return db_found
 
     def _change_obj(self, obj: object) -> str:
-        attr_name: str = self.checked_class.to_save[-1]
+        attr_name: str = self.checked_class.to_save_simples[-1]
         attr = getattr(obj, attr_name)
         new_attr: str | int | float | bool
         if isinstance(attr, (int, float)):
@@ -106,92 +217,159 @@ class TestCaseWithDB(TestCase):
         hashes_db_found = [hash(x) for x in db_found]
         self.assertEqual(sorted(hashes_content), sorted(hashes_db_found))
 
-    @_within_checked_class
-    def test_saving_versioned(self) -> None:
+    def check_by_date_range_with_limits(self,
+                                        date_col: str,
+                                        set_id_field: bool = True
+                                        ) -> None:
+        """Test .by_date_range_with_limits."""
+        # pylint: disable=too-many-locals
+        f = self.checked_class.by_date_range_with_limits
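+        # (f returns matching items plus the translated start and end dates)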
+        # check illegal ranges
+        legal_range = ('yesterday', 'tomorrow')
+        for i in [0, 1]:
+            for bad_date in ['foo', '2024-02-30', '2024-01-01 12:00:00']:
+                date_range = list(legal_range[:])
+                date_range[i] = bad_date
+                with self.assertRaises(HandledException):
+                    f(self.db_conn, date_range, date_col)
+        # check empty, translation of 'yesterday' and 'tomorrow'
+        items, start, end = f(self.db_conn, legal_range, date_col)
+        self.assertEqual(items, [])
+        yesterday = datetime.now() + timedelta(days=-1)
+        tomorrow = datetime.now() + timedelta(days=+1)
+        self.assertEqual(start, yesterday.strftime(DATE_FORMAT))
+        self.assertEqual(end, tomorrow.strftime(DATE_FORMAT))
+        # prepare dated items for non-empty results
+        kwargs_with_date = self.default_init_kwargs.copy()
+        if set_id_field:
+            kwargs_with_date['id_'] = None
+        objs = []
+        dates = ['2024-01-01', '2024-01-02', '2024-01-04']
+        for date in dates:
+            kwargs_with_date['date'] = date
+            obj = self.checked_class(**kwargs_with_date)
+            objs += [obj]
+        # check ranges still empty before saving
+        date_range = [dates[0], dates[-1]]
+        self.assertEqual(f(self.db_conn, date_range, date_col)[0], [])
+        # check all objs displayed within closed interval
+        for obj in objs:
+            obj.save(self.db_conn)
+        self.assertEqual(f(self.db_conn, date_range, date_col)[0], objs)
+        # check that only displayed what exists within interval
+        date_range = ['2023-12-20', '2024-01-03']
+        expected = [objs[0], objs[1]]
+        self.assertEqual(f(self.db_conn, date_range, date_col)[0], expected)
+        date_range = ['2024-01-03', '2024-01-30']
+        expected = [objs[2]]
+        self.assertEqual(f(self.db_conn, date_range, date_col)[0], expected)
+        # check that inverted interval displays nothing
+        date_range = [dates[-1], dates[0]]
+        self.assertEqual(f(self.db_conn, date_range, date_col)[0], [])
+        # check that "today" is interpreted, and single-element interval
+        today_date = datetime.now().strftime(DATE_FORMAT)
+        kwargs_with_date['date'] = today_date
+        obj_today = self.checked_class(**kwargs_with_date)
+        obj_today.save(self.db_conn)
+        date_range = ['today', 'today']
+        items, start, end = f(self.db_conn, date_range, date_col)
+        self.assertEqual(start, today_date)
+        self.assertEqual(start, end)
+        self.assertEqual(items, [obj_today])
+
+    @TestCaseAugmented._run_on_versioned_attributes
+    def test_saving_versioned_attributes(self,
+                                         owner: Any,
+                                         attr_name: str,
+                                         attr: VersionedAttribute,
+                                         _: str | float,
+                                         to_set: list[str] | list[float]
+                                         ) -> None:
         """Test storage and initialization of versioned attributes."""
         """Test storage and initialization of versioned attributes."""
-        def retrieve_attr_vals() -> list[object]:
+
+        def retrieve_attr_vals(attr: VersionedAttribute) -> list[object]:
             attr_vals_saved: list[object] = []
-            assert hasattr(retrieved, 'id_')
             for row in self.db_conn.row_where(attr.table_name, 'parent',
-                                              retrieved.id_):
+                                              owner.id_):
                 attr_vals_saved += [row[2]]
             return attr_vals_saved
-        for attr_name, type_ in self.test_versioneds.items():
-            # fail saving attributes on non-saved owner
-            owner = self.checked_class(None, **self.default_init_kwargs)
-            vals: list[Any] = ['t1', 't2'] if type_ == str else [0.9, 1.1]
-            attr = getattr(owner, attr_name)
-            attr.set(vals[0])
-            attr.set(vals[1])
-            with self.assertRaises(NotFoundException):
-                attr.save(self.db_conn)
-            owner.save(self.db_conn)
-            # check stored attribute is as expected
-            retrieved = self._load_from_db(owner.id_)[0]
-            attr = getattr(retrieved, attr_name)
-            self.assertEqual(sorted(attr.history.values()), vals)
-            # check owner.save() created entries in attr table
-            attr_vals_saved = retrieve_attr_vals()
-            self.assertEqual(vals, attr_vals_saved)
-            # check setting new val to attr inconsequential to DB without save
-            attr.set(vals[0])
-            attr_vals_saved = retrieve_attr_vals()
-            self.assertEqual(vals, attr_vals_saved)
-            # check save finally adds new val
+
+        attr.set(to_set[0])
+        # check that without attr.save() no rows in DB
+        rows = self.db_conn.row_where(attr.table_name, 'parent', owner.id_)
+        self.assertEqual([], rows)
+        # fail saving attributes on non-saved owner
+        with self.assertRaises(NotFoundException):
             attr.save(self.db_conn)
             attr.save(self.db_conn)
-            attr_vals_saved = retrieve_attr_vals()
-            self.assertEqual(vals + [vals[0]], attr_vals_saved)
+        # check owner.save() created entries as expected in attr table
+        owner.save(self.db_conn)
+        attr_vals_saved = retrieve_attr_vals(attr)
+        self.assertEqual([to_set[0]], attr_vals_saved)
+        # check changing attr val without save affects owner in memory …
+        attr.set(to_set[1])
+        cmp_attr = getattr(owner, attr_name)
+        self.assertEqual(to_set, list(cmp_attr.history.values()))
+        self.assertEqual(cmp_attr.history, attr.history)
+        # … but does not yet affect DB
+        attr_vals_saved = retrieve_attr_vals(attr)
+        self.assertEqual([to_set[0]], attr_vals_saved)
+        # check individual attr.save also stores new val to DB
+        attr.save(self.db_conn)
+        attr_vals_saved = retrieve_attr_vals(attr)
+        self.assertEqual(to_set, attr_vals_saved)
 
 
-    @_within_checked_class
+    @TestCaseAugmented._run_if_checked_class
     def test_saving_and_caching(self) -> None:
         """Test effects of .cache() and .save()."""
         id1 = self.default_ids[0]
         # check failure to cache without ID (if None-ID input possible)
         if isinstance(id1, int):
-            obj0 = self.checked_class(None, **self.default_init_kwargs)
+            obj0 = self._make_from_defaults(None)
             with self.assertRaises(HandledException):
                 obj0.cache()
         # check mere object init itself doesn't even store in cache
-        obj1 = self.checked_class(id1, **self.default_init_kwargs)
+        obj1 = self._make_from_defaults(id1)
         self.assertEqual(self.checked_class.get_cache(), {})
         # check .cache() fills cache, but not DB
         obj1.cache()
         self.assertEqual(self.checked_class.get_cache(), {id1: obj1})
-        db_found = self._load_from_db(id1)
-        self.assertEqual(db_found, [])
+        found_in_db = self._load_from_db(id1)
+        self.assertEqual(found_in_db, [])
         # check .save() sets ID (for int IDs), updates cache, and fills DB
         # (expect ID to be set to id1, despite obj1 already having that as ID:
         # it's generated by cursor.lastrowid on the DB table, and with obj1
         # not written there, obj2 should get it first!)
         id_input = None if isinstance(id1, int) else id1
-        obj2 = self.checked_class(id_input, **self.default_init_kwargs)
+        obj2 = self._make_from_defaults(id_input)
         obj2.save(self.db_conn)
-        obj2_hash = hash(obj2)
         self.assertEqual(self.checked_class.get_cache(), {id1: obj2})
-        db_found += self._load_from_db(id1)
-        self.assertEqual([hash(o) for o in db_found], [obj2_hash])
+        # NB: we'll only compare hashes because obj2 itself disappears on
+        # .from_table_row-triggered database reload
+        obj2_hash = hash(obj2)
+        found_in_db += self._load_from_db(id1)
+        self.assertEqual([hash(o) for o in found_in_db], [obj2_hash])
         # check we cannot overwrite obj2 with obj1 despite its same ID,
         # since it has disappeared now
         with self.assertRaises(HandledException):
             obj1.save(self.db_conn)
 
-    @_within_checked_class
+    @TestCaseAugmented._run_if_checked_class
     def test_by_id(self) -> None:
         """Test .by_id()."""
         id1, id2, _ = self.default_ids
         # check failure if not yet saved
-        obj1 = self.checked_class(id1, **self.default_init_kwargs)
+        obj1 = self._make_from_defaults(id1)
         with self.assertRaises(NotFoundException):
             self.checked_class.by_id(self.db_conn, id1)
         # check identity of cached and retrieved
         obj1.cache()
         self.assertEqual(obj1, self.checked_class.by_id(self.db_conn, id1))
         # check identity of saved and retrieved
-        obj2 = self.checked_class(id2, **self.default_init_kwargs)
+        obj2 = self._make_from_defaults(id2)
         obj2.save(self.db_conn)
         self.assertEqual(obj2, self.checked_class.by_id(self.db_conn, id2))
 
-    @_within_checked_class
+    @TestCaseAugmented._run_if_checked_class
     def test_by_id_or_create(self) -> None:
         """Test .by_id_or_create."""
         # check .by_id_or_create fails if wrong class
@@ -214,17 +392,19 @@ class TestCaseWithDB(TestCase):
             self.checked_class.by_id(self.db_conn, item.id_)
         self.assertEqual(self.checked_class(item.id_), item)
 
-    @_within_checked_class
+    @TestCaseAugmented._run_if_checked_class
     def test_from_table_row(self) -> None:
         """Test .from_table_row() properly reads in class directly from DB."""
         id_ = self.default_ids[0]
-        obj = self.checked_class(id_, **self.default_init_kwargs)
+        obj = self._make_from_defaults(id_)
         obj.save(self.db_conn)
         assert isinstance(obj.id_, type(id_))
         for row in self.db_conn.row_where(self.checked_class.table_name,
                                           'id', obj.id_):
             # check .from_table_row reproduces state saved, no matter if obj
             # later changed (with caching even)
+            # NB: we'll only compare hashes because obj itself disappears on
+            # .from_table_row-triggered database reload
             hash_original = hash(obj)
             attr_name = self._change_obj(obj)
             obj.cache()
@@ -235,27 +415,37 @@ class TestCaseWithDB(TestCase):
             # check cache contains what .from_table_row just produced
             self.assertEqual({retrieved.id_: retrieved},
                              self.checked_class.get_cache())
-        # check .from_table_row also reads versioned attributes from DB
-        for attr_name, type_ in self.test_versioneds.items():
-            owner = self.checked_class(None)
-            vals: list[Any] = ['t1', 't2'] if type_ == str else [0.9, 1.1]
-            attr = getattr(owner, attr_name)
-            attr.set(vals[0])
-            attr.set(vals[1])
-            owner.save(self.db_conn)
-            for row in self.db_conn.row_where(owner.table_name, 'id',
+
+    @TestCaseAugmented._run_on_versioned_attributes
+    def test_versioned_history_from_row(self,
+                                        owner: Any,
+                                        _: str,
+                                        attr: VersionedAttribute,
+                                        default: str | float,
+                                        to_set: list[str] | list[float]
+                                        ) -> None:
+        """"Test VersionedAttribute.history_from_row() knows its DB rows."""
+        attr.set(to_set[0])
+        attr.set(to_set[1])
+        owner.save(self.db_conn)
+        # make empty VersionedAttribute, fill from rows, compare to owner's
+        for row in self.db_conn.row_where(owner.table_name, 'id', owner.id_):
+            loaded_attr = VersionedAttribute(owner, attr.table_name, default)
+            for row in self.db_conn.row_where(attr.table_name, 'parent',
                                               owner.id_):
-                retrieved = owner.__class__.from_table_row(self.db_conn, row)
-                attr = getattr(retrieved, attr_name)
-                self.assertEqual(sorted(attr.history.values()), vals)
+                loaded_attr.history_from_row(row)
+            self.assertEqual(len(attr.history.keys()),
+                             len(loaded_attr.history.keys()))
+            for timestamp, value in attr.history.items():
+                self.assertEqual(value, loaded_attr.history[timestamp])
 
 
-    @_within_checked_class
+    @TestCaseAugmented._run_if_checked_class
     def test_all(self) -> None:
         """Test .all() and its relation to cache and savings."""
-        id_1, id_2, id_3 = self.default_ids
-        item1 = self.checked_class(id_1, **self.default_init_kwargs)
-        item2 = self.checked_class(id_2, **self.default_init_kwargs)
-        item3 = self.checked_class(id_3, **self.default_init_kwargs)
+        id1, id2, id3 = self.default_ids
+        item1 = self._make_from_defaults(id1)
+        item2 = self._make_from_defaults(id2)
+        item3 = self._make_from_defaults(id3)
         # check .all() returns empty list on un-cached items
         self.assertEqual(self.checked_class.all(self.db_conn), [])
         # check that all() shows only cached/saved items
@@ -267,11 +457,11 @@ class TestCaseWithDB(TestCase):
         self.assertEqual(sorted(self.checked_class.all(self.db_conn)),
                          sorted([item1, item2, item3]))
 
-    @_within_checked_class
+    @TestCaseAugmented._run_if_checked_class
     def test_singularity(self) -> None:
         """Test pointers made for single object keep pointing to it."""
         id1 = self.default_ids[0]
-        obj = self.checked_class(id1, **self.default_init_kwargs)
+        obj = self._make_from_defaults(id1)
         obj.save(self.db_conn)
         # change object, expect retrieved through .by_id to carry change
         attr_name = self._change_obj(obj)
@@ -279,23 +469,27 @@ class TestCaseWithDB(TestCase):
         retrieved = self.checked_class.by_id(self.db_conn, id1)
         self.assertEqual(new_attr, getattr(retrieved, attr_name))
 
-    @_within_checked_class
-    def test_versioned_singularity_title(self) -> None:
-        """Test singularity of VersionedAttributes on saving (with .title)."""
-        if 'title' in self.test_versioneds:
-            obj = self.checked_class(None)
-            obj.save(self.db_conn)
-            assert isinstance(obj.id_, int)
-            # change obj, expect retrieved through .by_id to carry change
-            obj.title.set('named')
-            retrieved = self.checked_class.by_id(self.db_conn, obj.id_)
-            self.assertEqual(obj.title.history, retrieved.title.history)
+    @TestCaseAugmented._run_on_versioned_attributes
+    def test_versioned_singularity(self,
+                                   owner: Any,
+                                   attr_name: str,
+                                   attr: VersionedAttribute,
+                                   _: str | float,
+                                   to_set: list[str] | list[float]
+                                   ) -> None:
+        """Test singularity of VersionedAttributes on saving."""
+        owner.save(self.db_conn)
+        # change obj, expect retrieved through .by_id to carry change
+        attr.set(to_set[0])
+        retrieved = self.checked_class.by_id(self.db_conn, owner.id_)
+        attr_retrieved = getattr(retrieved, attr_name)
+        self.assertEqual(attr.history, attr_retrieved.history)
 
 
-    @_within_checked_class
+    @TestCaseAugmented._run_if_checked_class
     def test_remove(self) -> None:
         """Test .remove() effects on DB and cache."""
         id_ = self.default_ids[0]
-        obj = self.checked_class(id_, **self.default_init_kwargs)
+        obj = self._make_from_defaults(id_)
         # check removal only works after saving
         with self.assertRaises(HandledException):
             obj.remove(self.db_conn)
@@ -308,90 +502,425 @@ class TestCaseWithDB(TestCase):
         self.check_identity_with_cache_and_db([])
 
 
-class TestCaseWithServer(TestCaseWithDB):
-    """Module tests against our HTTP server/handler (and database)."""
+class Expected:
+    """Builder of (JSON-like) dict to compare against responses of test server.
 
 
-    def setUp(self) -> None:
-        super().setUp()
-        self.httpd = TaskServer(self.db_file, ('localhost', 0), TaskHandler)
-        self.server_thread = Thread(target=self.httpd.serve_forever)
-        self.server_thread.daemon = True
-        self.server_thread.start()
-        self.conn = HTTPConnection(str(self.httpd.server_address[0]),
-                                   self.httpd.server_address[1])
-        self.httpd.set_json_mode()
+    Collects all items and relations we expect to see expressed in the
+    server's JSON responses and puts them into the proper json.dumps-friendly
+    dict structure, accessible via .as_dict, to compare them in
+    TestsWithServer.check_json_get.
 
 
-    def tearDown(self) -> None:
-        self.httpd.shutdown()
-        self.httpd.server_close()
-        self.server_thread.join()
-        super().tearDown()
+    On its own, its .as_dict output provides only {"_library": …}, initialized
+    by .__init__ and directly manipulated via the .lib* methods.
+    Further structures of the expected response may be added and kept
+    up-to-date by subclassing .__init__, .recalc, and .d.
 
 
-    @staticmethod
-    def as_id_list(items: list[dict[str, object]]) -> list[int | str]:
-        """Return list of only 'id' fields of items."""
-        id_list = []
-        for item in items:
-            assert isinstance(item['id'], (int, str))
-            id_list += [item['id']]
-        return id_list
+    NB: Many expectations about server behavior are made explicit here (or in
+    the subclasses) rather than in the actual TestCase methods' code.
+    """
+    _default_dict: dict[str, Any]
+    _forced: dict[str, Any]
+    _fields: dict[str, Any]
+    _on_empty_make_temp: tuple[str, str]
+
+    def __init__(self,
+                 todos: list[dict[str, Any]] | None = None,
+                 procs: list[dict[str, Any]] | None = None,
+                 procsteps: list[dict[str, Any]] | None = None,
+                 conds: list[dict[str, Any]] | None = None,
+                 days: list[dict[str, Any]] | None = None
+                 ) -> None:
+        # pylint: disable=too-many-arguments
+        for name in ['_default_dict', '_fields', '_forced']:
+            if not hasattr(self, name):
+                setattr(self, name, {})
+        self._lib = {}
+        for title, items in [('Todo', todos),
+                             ('Process', procs),
+                             ('ProcessStep', procsteps),
+                             ('Condition', conds),
+                             ('Day', days)]:
+            if items:
+                self._lib[title] = self._as_refs(items)
+        for k, v in self._default_dict.items():
+            if k not in self._fields:
+                self._fields[k] = v
+
+    def recalc(self) -> None:
+        """Update internal dictionary by subclass-specific rules."""
+        todos = self.lib_all('Todo')
+        for todo in todos:
+            todo['parents'] = []
+        for todo in todos:
+            for child_id in todo['children']:
+                self.lib_get('Todo', child_id)['parents'] += [todo['id']]
+            todo['children'].sort()
+        procsteps = self.lib_all('ProcessStep')
+        procs = self.lib_all('Process')
+        for proc in procs:
+            proc['explicit_steps'] = [s['id'] for s in procsteps
+                                      if s['owner_id'] == proc['id']]
+
+    @property
+    def as_dict(self) -> dict[str, Any]:
+        """Return dict to compare against test server JSON responses."""
+        make_temp = False
+        if hasattr(self, '_on_empty_make_temp'):
+            category, dicter = getattr(self, '_on_empty_make_temp')
+            id_ = self._fields[category.lower()]
+            make_temp = not bool(self.lib_get(category, id_))
+            if make_temp:
+                f = getattr(self, dicter)
+                self.lib_set(category, [f(id_)])
+        self.recalc()
+        d = {'_library': self._lib}
+        for k, v in self._fields.items():
+            # we expect everything sortable to be sorted
+            if isinstance(v, list) and k not in self._forced:
+                # NB: without checking that v is a list, sorted() on an empty
+                # dict would also return an empty list, silently changing type
+                try:
+                    v = sorted(v)
+                except TypeError:
+                    pass
+            d[k] = v
+        for k, v in self._forced.items():
+            d[k] = v
+        if make_temp:
+            json = json_dumps(d)
+            self.lib_del(category, id_)
+            d = json_loads(json)
+        return d
+
+    def lib_get(self, category: str, id_: str | int) -> dict[str, Any]:
+        """From library, return item of category and id_, or empty dict."""
+        str_id = str(id_)
+        if category in self._lib and str_id in self._lib[category]:
+            return self._lib[category][str_id]
+        return {}
+
+    def lib_all(self, category: str) -> list[dict[str, Any]]:
+        """From library, return items of category, or [] if none."""
+        if category in self._lib:
+            return list(self._lib[category].values())
+        return []
+
+    def lib_set(self, category: str, items: list[dict[str, object]]) -> None:
+        """Update library for category with items."""
+        if category not in self._lib:
+            self._lib[category] = {}
+        for k, v in self._as_refs(items).items():
+            self._lib[category][k] = v
+
+    def lib_del(self, category: str, id_: str | int) -> None:
+        """Remove category element of id_ from library."""
+        del self._lib[category][str(id_)]
+        if 0 == len(self._lib[category]):
+            del self._lib[category]
+
+    def lib_wipe(self, category: str) -> None:
+        """Remove category from library."""
+        if category in self._lib:
+            del self._lib[category]
+
+    def set(self, field_name: str, value: object) -> None:
+        """Set top-level .as_dict field."""
+        self._fields[field_name] = value
+
+    def force(self, field_name: str, value: object) -> None:
+        """Set ._forced field to ensure value in .as_dict."""
+        self._forced[field_name] = value
+
+    def unforce(self, field_name: str) -> None:
+        """Unset ._forced field."""
+        del self._forced[field_name]
 
     @staticmethod
-    def as_refs(items: list[dict[str, object]]
-                ) -> dict[str, dict[str, object]]:
+    def _as_refs(items: list[dict[str, object]]
+                 ) -> dict[str, dict[str, object]]:
         """Return dictionary of items by their 'id' fields."""
         refs = {}
         for item in items:
             refs[str(item['id'])] = item
         return refs
 
         """Return dictionary of items by their 'id' fields."""
         refs = {}
         for item in items:
             refs[str(item['id'])] = item
         return refs
 
+    @staticmethod
+    def as_ids(items: list[dict[str, Any]]) -> list[int] | list[str]:
+        """Return list of only 'id' fields of items."""
+        return [item['id'] for item in items]
+
+    @staticmethod
+    def day_as_dict(date: str, comment: str = '') -> dict[str, object]:
+        """Return JSON of Day to expect."""
+        return {'id': date, 'comment': comment, 'todos': []}
+
+    def set_day_from_post(self, date: str, d: dict[str, Any]) -> None:
+        """Set Day of date in library based on POST dict d."""
+        day = self.day_as_dict(date)
+        for k, v in d.items():
+            if 'day_comment' == k:
+                day['comment'] = v
+            elif 'new_todo' == k:
+                next_id = 1
+                for todo in self.lib_all('Todo'):
+                    if next_id <= todo['id']:
+                        next_id = todo['id'] + 1
+                for proc_id in sorted(v):
+                    todo = self.todo_as_dict(next_id, proc_id, date)
+                    self.lib_set('Todo', [todo])
+                    next_id += 1
+            elif 'done' == k:
+                for todo_id in v:
+                    self.lib_get('Todo', todo_id)['is_done'] = True
+            elif 'todo_id' == k:
+                for i, todo_id in enumerate(v):
+                    t = self.lib_get('Todo', todo_id)
+                    if 'comment' in d:
+                        t['comment'] = d['comment'][i]
+                    if 'effort' in d:
+                        effort = d['effort'][i] if d['effort'][i] else None
+                        t['effort'] = effort
+        self.lib_set('Day', [day])
+
     @staticmethod
     def cond_as_dict(id_: int = 1,
                      is_active: bool = False,
-                     titles: None | list[str] = None,
-                     descriptions: None | list[str] = None
+                     title: None | str = None,
+                     description: None | str = None,
                      ) -> dict[str, object]:
         """Return JSON of Condition to expect."""
+        versioned: dict[str, dict[str, object]]
+        versioned = {'title': {}, 'description': {}}
+        if title is not None:
+            versioned['title']['0'] = title
+        if description is not None:
+            versioned['description']['0'] = description
+        return {'id': id_, 'is_active': is_active, '_versioned': versioned}
+
+    def set_cond_from_post(self, id_: int, d: dict[str, Any]) -> None:
+        """Set Condition of id_ in library based on POST dict d."""
+        if d == {'delete': ''}:
+            self.lib_del('Condition', id_)
+            return
+        cond = self.lib_get('Condition', id_)
+        if cond:
+            cond['is_active'] = d['is_active']
+            for category in ['title', 'description']:
+                history = cond['_versioned'][category]
+                if len(history) > 0:
+                    last_i = sorted([int(k) for k in history.keys()])[-1]
+                    if d[category] != history[str(last_i)]:
+                        history[str(last_i + 1)] = d[category]
+                else:
+                    history['0'] = d[category]
+        else:
+            cond = self.cond_as_dict(
+                    id_, d['is_active'], d['title'], d['description'])
+        self.lib_set('Condition', [cond])
+
+    @staticmethod
+    def todo_as_dict(id_: int = 1,
+                     process_id: int = 1,
+                     date: str = '2024-01-01',
+                     conditions: None | list[int] = None,
+                     disables: None | list[int] = None,
+                     blockers: None | list[int] = None,
+                     enables: None | list[int] = None,
+                     calendarize: bool = False,
+                     comment: str = '',
+                     is_done: bool = False,
+                     effort: float | None = None,
+                     children: list[int] | None = None,
+                     parents: list[int] | None = None,
+                     ) -> dict[str, object]:
+        """Return JSON of Todo to expect."""
+        # pylint: disable=too-many-arguments
         d = {'id': id_,
         d = {'id': id_,
-             'is_active': is_active,
-             '_versioned': {
-                 'title': {},
-                 'description': {}}}
-        titles = titles if titles else []
-        descriptions = descriptions if descriptions else []
-        assert isinstance(d['_versioned'], dict)
-        for i, title in enumerate(titles):
-            d['_versioned']['title'][i] = title
-        for i, description in enumerate(descriptions):
-            d['_versioned']['description'][i] = description
+             'date': date,
+             'process_id': process_id,
+             'is_done': is_done,
+             'calendarize': calendarize,
+             'comment': comment,
+             'children': children if children else [],
+             'parents': parents if parents else [],
+             'effort': effort,
+             'conditions': conditions if conditions else [],
+             'disables': disables if disables else [],
+             'blockers': blockers if blockers else [],
+             'enables': enables if enables else []}
         return d
 
+    def set_todo_from_post(self, id_: int, d: dict[str, Any]) -> None:
+        """Set Todo of id_ in library based on POST dict d."""
+        corrected_kwargs: dict[str, Any] = {'children': []}
+        for k, v in d.items():
+            if k.startswith('step_filler_to_'):
+                continue
+            elif 'adopt' == k:
+                new_children = v if isinstance(v, list) else [v]
+                corrected_kwargs['children'] += new_children
+                continue
+            elif k in {'is_done', 'calendarize'}:
+                v = v in VALID_TRUES
+            corrected_kwargs[k] = v
+        todo = self.lib_get('Todo', id_)
+        if todo:
+            for k, v in corrected_kwargs.items():
+                todo[k] = v
+        else:
+            todo = self.todo_as_dict(id_, **corrected_kwargs)
+        self.lib_set('Todo', [todo])
+
+    @staticmethod
+    def procstep_as_dict(id_: int,
+                         owner_id: int,
+                         step_process_id: int,
+                         parent_step_id: int | None = None
+                         ) -> dict[str, object]:
+        """Return JSON of ProcessStep to expect."""
+        return {'id': id_,
+                'owner_id': owner_id,
+                'step_process_id': step_process_id,
+                'parent_step_id': parent_step_id}
+
     @staticmethod
     def proc_as_dict(id_: int = 1,
-                     title: str = 'A',
-                     description: str = '',
-                     effort: float = 1.0,
+                     title: None | str = None,
+                     description: None | str = None,
+                     effort: None | float = None,
                      conditions: None | list[int] = None,
                      disables: None | list[int] = None,
                      blockers: None | list[int] = None,
-                     enables: None | list[int] = None
+                     enables: None | list[int] = None,
+                     explicit_steps: None | list[int] = None
                      ) -> dict[str, object]:
         """Return JSON of Process to expect."""
         # pylint: disable=too-many-arguments
+        versioned: dict[str, dict[str, object]]
+        versioned = {'title': {}, 'description': {}, 'effort': {}}
+        if title is not None:
+            versioned['title']['0'] = title
+        if description is not None:
+            versioned['description']['0'] = description
+        if effort is not None:
+            versioned['effort']['0'] = effort
         d = {'id': id_,
              'calendarize': False,
              'suppressed_steps': [],
-             'explicit_steps': [],
-             '_versioned': {
-                 'title': {0: title},
-                 'description': {0: description},
-                 'effort': {0: effort}},
+             'explicit_steps': explicit_steps if explicit_steps else [],
+             '_versioned': versioned,
              'conditions': conditions if conditions else [],
              'disables': disables if disables else [],
              'enables': enables if enables else [],
              'blockers': blockers if blockers else []}
         return d
 
+    def set_proc_from_post(self, id_: int, d: dict[str, Any]) -> None:
+        """Set Process of id_ in library based on POST dict d."""
+        proc = self.lib_get('Process', id_)
+        if proc:
+            for category in ['title', 'description', 'effort']:
+                history = proc['_versioned'][category]
+                if len(history) > 0:
+                    last_i = sorted([int(k) for k in history.keys()])[-1]
+                    if d[category] != history[str(last_i)]:
+                        history[str(last_i + 1)] = d[category]
+                else:
+                    history['0'] = d[category]
+        else:
+            proc = self.proc_as_dict(id_,
+                                     d['title'], d['description'], d['effort'])
+        ignore = {'title', 'description', 'effort', 'new_top_step', 'step_of',
+                  'kept_steps'}
+        for k, v in d.items():
+            if k in ignore\
+                    or k.startswith('step_') or k.startswith('new_step_to'):
+                continue
+            if k in {'calendarize'}:
+                v = v in VALID_TRUES
+            elif k in {'suppressed_steps', 'explicit_steps', 'conditions',
+                       'disables', 'enables', 'blockers'}:
+                if not isinstance(v, list):
+                    v = [v]
+            proc[k] = v
+        self.lib_set('Process', [proc])
+
+
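A rough usage sketch of the Expected builder above (illustration only, not part of the
commit; the endpoint, field names and values are invented):

    # Build the dict a test would expect a JSON GET to return.
    exp = Expected()
    exp.lib_set('Condition', [Expected.cond_as_dict(1, title='tired')])
    exp.set('pattern', 'tired')      # a plain top-level field of the response
    exp.force('conditions', [1])     # keep this list exactly as given, unsorted
    expected_json = exp.as_dict      # {'_library': {'Condition': {'1': …}}, …}
    # In a TestCaseWithServer-based test, the comparison would then run via:
    #     self.check_json_get('/conditions?pattern=tired', exp)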
+class TestCaseWithServer(TestCaseWithDB):
+    """Module tests against our HTTP server/handler (and database)."""
+
+    def setUp(self) -> None:
+        super().setUp()
+        self.httpd = TaskServer(self.db_file, ('localhost', 0), TaskHandler)
+        self.server_thread = Thread(target=self.httpd.serve_forever)
+        self.server_thread.daemon = True
+        self.server_thread.start()
+        self.conn = HTTPConnection(str(self.httpd.server_address[0]),
+                                   self.httpd.server_address[1])
+        self.httpd.render_mode = 'json'
+
+    def tearDown(self) -> None:
+        self.httpd.shutdown()
+        self.httpd.server_close()
+        self.server_thread.join()
+        super().tearDown()
+
+    def post_exp_cond(self,
+                      exps: list[Expected],
+                      id_: int,
+                      payload: dict[str, object],
+                      path_suffix: str = '',
+                      redir_suffix: str = ''
+                      ) -> None:
+        """POST /condition(s), appropriately update Expecteds."""
+        # pylint: disable=too-many-arguments
+        path = f'/condition{path_suffix}'
+        redir = f'/condition{redir_suffix}'
+        self.check_post(payload, path, redir=redir)
+        for exp in exps:
+            exp.set_cond_from_post(id_, payload)
+
+    def post_exp_day(self,
+                     exps: list[Expected],
+                     payload: dict[str, Any],
+                     date: str = '2024-01-01'
+                     ) -> None:
+        """POST /day, appropriately update Expecteds."""
+        if 'make_type' not in payload:
+            payload['make_type'] = 'empty'
+        if 'day_comment' not in payload:
+            payload['day_comment'] = ''
+        target = f'/day?date={date}'
+        redir_to = f'{target}&make_type={payload["make_type"]}'
+        self.check_post(payload, target, 302, redir_to)
+        for exp in exps:
+            exp.set_day_from_post(date, payload)
+
+    def post_exp_process(self,
+                         exps: list[Expected],
+                         payload: dict[str, Any],
+                         id_: int,
+                         ) -> dict[str, object]:
+        """POST /process, appropriately update Expecteds."""
+        if 'title' not in payload:
+            payload['title'] = 'foo'
+        if 'description' not in payload:
+            payload['description'] = 'foo'
+        if 'effort' not in payload:
+            payload['effort'] = 1.1
+        self.check_post(payload, f'/process?id={id_}',
+                        redir=f'/process?id={id_}')
+        for exp in exps:
+            exp.set_proc_from_post(id_, payload)
+        return payload
+
+    def check_filter(self, exp: Expected, category: str, key: str,
+                     val: str, list_ids: list[int]) -> None:
+        """Check GET /{category}?{key}={val} sorts to list_ids."""
+        # pylint: disable=too-many-arguments
+        exp.set(key, val)
+        exp.force(category, list_ids)
+        self.check_json_get(f'/{category}?{key}={val}', exp)
+
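Schematically, the division of labour between the post_exp_* helpers above and
check_json_get below looks like this (a sketch only, not from this commit; real tests
would use a view-specific Expected subclass and their own payloads):

    # Body of a hypothetical test method in a TestCaseWithServer subclass:
    exp = Expected()
    # POST to /day and mirror the same data into the expectation …
    self.post_exp_day([exp], {'day_comment': 'rainy'}, '2024-01-01')
    # … then demand that the server's JSON for that view equals exp.as_dict:
    self.check_json_get('/day?date=2024-01-01', exp)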
     def check_redirect(self, target: str) -> None:
         """Check that self.conn answers with a 302 redirect to target."""
         response = self.conn.getresponse()
@@ -404,7 +933,7 @@ class TestCaseWithServer(TestCaseWithDB):
         self.assertEqual(self.conn.getresponse().status, expected_code)
 
     def check_post(self, data: Mapping[str, object], target: str,
-                   expected_code: int, redirect_location: str = '') -> None:
+                   expected_code: int = 302, redir: str = '') -> None:
         """Check that POST of data to target yields expected_code."""
         encoded_form_data = urlencode(data, doseq=True).encode('utf-8')
         headers = {'Content-Type': 'application/x-www-form-urlencoded',
         """Check that POST of data to target yields expected_code."""
         encoded_form_data = urlencode(data, doseq=True).encode('utf-8')
         headers = {'Content-Type': 'application/x-www-form-urlencoded',
@@ -412,9 +941,8 @@ class TestCaseWithServer(TestCaseWithDB):
         self.conn.request('POST', target,
                           body=encoded_form_data, headers=headers)
         if 302 == expected_code:
-            if redirect_location == '':
-                redirect_location = target
-            self.check_redirect(redirect_location)
+            redir = target if redir == '' else redir
+            self.check_redirect(redir)
         else:
             self.assertEqual(self.conn.getresponse().status, expected_code)
 
@@ -426,40 +954,72 @@ class TestCaseWithServer(TestCaseWithDB):
         self.check_get(f'/{path}?id=0', 500)
         self.check_get(f'{path}?id=1', 200)
 
-    def post_process(self, id_: int = 1,
-                     form_data: dict[str, Any] | None = None
-                     ) -> dict[str, Any]:
-        """POST basic Process."""
-        if not form_data:
-            form_data = {'title': 'foo', 'description': 'foo', 'effort': 1.1}
-        self.check_post(form_data, f'/process?id={id_}', 302,
-                        f'/process?id={id_}')
-        return form_data
-
-    def check_json_get(self, path: str, expected: dict[str, object]) -> None:
+    def check_json_get(self, path: str, expected: Expected) -> None:
         """Compare JSON on GET path with expected.
 
         To simplify comparison of VersionedAttribute histories, transforms
         """Compare JSON on GET path with expected.
 
         To simplify comparison of VersionedAttribute histories, transforms
-        timestamp keys of VersionedAttribute history keys into integers
-        counting chronologically forward from 0.
+        the timestamp keys of those histories into (strings of)
+        integers counting chronologically forward from 0.
         """
         """
+
         def rewrite_history_keys_in(item: Any) -> Any:
             if isinstance(item, dict):
                 if '_versioned' in item.keys():
-                    for k in item['_versioned']:
-                        vals = item['_versioned'][k].values()
+                    for category in item['_versioned']:
+                        vals = item['_versioned'][category].values()
                         history = {}
                         for i, val in enumerate(vals):
-                            history[i] = val
-                        item['_versioned'][k] = history
-                for k in list(item.keys()):
-                    rewrite_history_keys_in(item[k])
+                            history[str(i)] = val
+                        item['_versioned'][category] = history
+                for category in list(item.keys()):
+                    rewrite_history_keys_in(item[category])
             elif isinstance(item, list):
                 item[:] = [rewrite_history_keys_in(i) for i in item]
             return item
+
+        def walk_diffs(path: str, cmp1: object, cmp2: object) -> None:
+            # pylint: disable=too-many-branches
+            def warn(intro: str, val: object) -> None:
+                if isinstance(val, (str, int, float)):
+                    print(intro, val)
+                else:
+                    print(intro)
+                    pprint(val)
+            if cmp1 != cmp2:
+                if isinstance(cmp1, dict) and isinstance(cmp2, dict):
+                    for k, v in cmp1.items():
+                        if k not in cmp2:
+                            warn(f'DIFF {path}: retrieved lacks {k}', v)
+                        elif v != cmp2[k]:
+                            walk_diffs(f'{path}:{k}', v, cmp2[k])
+                    for k in [k for k in cmp2.keys() if k not in cmp1]:
+                        warn(f'DIFF {path}: expected lacks retrieved\'s {k}',
+                             cmp2[k])
+                elif isinstance(cmp1, list) and isinstance(cmp2, list):
+                    for i, v1 in enumerate(cmp1):
+                        if i >= len(cmp2):
+                            warn(f'DIFF {path}[{i}] retrieved misses:', v1)
+                        elif v1 != cmp2[i]:
+                            walk_diffs(f'{path}[{i}]', v1, cmp2[i])
+                    if len(cmp2) > len(cmp1):
+                        for i, v2 in enumerate(cmp2[len(cmp1):]):
+                            warn(f'DIFF {path}[{len(cmp1)+i}] misses:', v2)
+                else:
+                    warn(f'DIFF {path} – for expected:', cmp1)
+                    warn('… and for retrieved:', cmp2)
+
         self.conn.request('GET', path)
         response = self.conn.getresponse()
         self.assertEqual(response.status, 200)
         retrieved = json_loads(response.read().decode())
         rewrite_history_keys_in(retrieved)
-        self.assertEqual(expected, retrieved)
+        cmp = expected.as_dict
+        try:
+            self.assertEqual(cmp, retrieved)
+        except AssertionError as e:
+            print('EXPECTED:')
+            pprint(cmp)
+            print('RETRIEVED:')
+            pprint(retrieved)
+            walk_diffs('', cmp, retrieved)
+            raise e
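The timestamp rewriting described in check_json_get's docstring boils down to the
following transformation (standalone illustration, not code from the commit; the
timestamp strings are made up):

    # A '_versioned' history as the server sends it, keyed by timestamps …
    item = {'_versioned': {'title': {'2024-01-01 12:00:00.000000': 'A',
                                     '2024-01-02 09:30:00.000000': 'B'}}}
    # … becomes, after rewrite_history_keys_in(item), keyed by string indexes
    # counting chronologically forward from zero:
    #     {'_versioned': {'title': {'0': 'A', '1': 'B'}}}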
diff --git a/tests/versioned_attributes.py b/tests/versioned_attributes.py
deleted file mode 100644 (file)
index a75fc3c..0000000
+++ /dev/null
@@ -1,144 +0,0 @@
-""""Test Versioned Attributes in the abstract."""
-from unittest import TestCase
-from time import sleep
-from datetime import datetime
-from tests.utils import TestCaseWithDB
-from plomtask.versioned_attributes import VersionedAttribute, TIMESTAMP_FMT
-from plomtask.db import BaseModel
-
-SQL_TEST_TABLE_STR = '''
-CREATE TABLE versioned_tests (
-  parent INTEGER NOT NULL,
-  timestamp TEXT NOT NULL,
-  value TEXT NOT NULL,
-  PRIMARY KEY (parent, timestamp)
-);
-'''
-SQL_TEST_TABLE_FLOAT = '''
-CREATE TABLE versioned_tests (
-  parent INTEGER NOT NULL,
-  timestamp TEXT NOT NULL,
-  value REAL NOT NULL,
-  PRIMARY KEY (parent, timestamp)
-);
-'''
-
-
-class TestParentType(BaseModel[int]):
-    """Dummy abstracting whatever may use VersionedAttributes."""
-
-
-class TestsSansDB(TestCase):
-    """Tests not requiring DB setup."""
-
-    def test_VersionedAttribute_set(self) -> None:
-        """Test .set() behaves as expected."""
-        # check value gets set even if already is the default
-        attr = VersionedAttribute(None, '', 'A')
-        attr.set('A')
-        self.assertEqual(list(attr.history.values()), ['A'])
-        # check same value does not get set twice in a row,
-        # and that not even its timestamp get updated
-        timestamp = list(attr.history.keys())[0]
-        attr.set('A')
-        self.assertEqual(list(attr.history.values()), ['A'])
-        self.assertEqual(list(attr.history.keys())[0], timestamp)
-        # check that different value _will_ be set/added
-        attr.set('B')
-        self.assertEqual(sorted(attr.history.values()), ['A', 'B'])
-        # check that a previously used value can be set if not most recent
-        attr.set('A')
-        self.assertEqual(sorted(attr.history.values()), ['A', 'A', 'B'])
-        # again check for same value not being set twice in a row, even for
-        # later items
-        attr.set('D')
-        self.assertEqual(sorted(attr.history.values()), ['A', 'A', 'B', 'D'])
-        attr.set('D')
-        self.assertEqual(sorted(attr.history.values()), ['A', 'A', 'B', 'D'])
-
-    def test_VersionedAttribute_newest(self) -> None:
-        """Test .newest returns newest element, or default on empty."""
-        attr = VersionedAttribute(None, '', 'A')
-        self.assertEqual(attr.newest, 'A')
-        attr.set('B')
-        self.assertEqual(attr.newest, 'B')
-        attr.set('C')
-
-    def test_VersionedAttribute_at(self) -> None:
-        """Test .at() returns values nearest to queried time, or default."""
-        # check .at() return default on empty history
-        attr = VersionedAttribute(None, '', 'A')
-        timestamp_a = datetime.now().strftime(TIMESTAMP_FMT)
-        self.assertEqual(attr.at(timestamp_a), 'A')
-        # check value exactly at timestamp returned
-        attr.set('B')
-        timestamp_b = list(attr.history.keys())[0]
-        self.assertEqual(attr.at(timestamp_b), 'B')
-        # check earliest value returned if exists, rather than default
-        self.assertEqual(attr.at(timestamp_a), 'B')
-        # check reverts to previous value for timestamps not indexed
-        sleep(0.00001)
-        timestamp_between = datetime.now().strftime(TIMESTAMP_FMT)
-        sleep(0.00001)
-        attr.set('C')
-        timestamp_c = sorted(attr.history.keys())[-1]
-        self.assertEqual(attr.at(timestamp_c), 'C')
-        self.assertEqual(attr.at(timestamp_between), 'B')
-        sleep(0.00001)
-        timestamp_after_c = datetime.now().strftime(TIMESTAMP_FMT)
-        self.assertEqual(attr.at(timestamp_after_c), 'C')
-
-
-class TestsWithDBStr(TestCaseWithDB):
-    """Module tests requiring DB setup."""
-    default_vals: list[str | float] = ['A', 'B', 'C']
-    init_sql = SQL_TEST_TABLE_STR
-
-    def setUp(self) -> None:
-        super().setUp()
-        self.db_conn.exec(self.init_sql)
-        self.test_parent = TestParentType(1)
-        self.attr = VersionedAttribute(self.test_parent,
-                                       'versioned_tests', self.default_vals[0])
-
-    def test_VersionedAttribute_save(self) -> None:
-        """Test .save() to write to DB."""
-        # check mere .set() calls do not by themselves reflect in the DB
-        self.attr.set(self.default_vals[1])
-        self.assertEqual([],
-                         self.db_conn.row_where('versioned_tests',
-                                                'parent', 1))
-        # check .save() makes history appear in DB
-        self.attr.save(self.db_conn)
-        vals_found = []
-        for row in self.db_conn.row_where('versioned_tests', 'parent', 1):
-            vals_found += [row[2]]
-        self.assertEqual([self.default_vals[1]], vals_found)
-        # check .save() also updates history in DB
-        self.attr.set(self.default_vals[2])
-        self.attr.save(self.db_conn)
-        vals_found = []
-        for row in self.db_conn.row_where('versioned_tests', 'parent', 1):
-            vals_found += [row[2]]
-        self.assertEqual([self.default_vals[1], self.default_vals[2]],
-                         sorted(vals_found))
-
-    def test_VersionedAttribute_history_from_row(self) -> None:
-        """"Test .history_from_row() properly interprets DB rows."""
-        self.attr.set(self.default_vals[1])
-        self.attr.set(self.default_vals[2])
-        self.attr.save(self.db_conn)
-        loaded_attr = VersionedAttribute(self.test_parent, 'versioned_tests',
-                                         self.default_vals[0])
-        for row in self.db_conn.row_where('versioned_tests', 'parent', 1):
-            loaded_attr.history_from_row(row)
-        for timestamp, value in self.attr.history.items():
-            self.assertEqual(value, loaded_attr.history[timestamp])
-        self.assertEqual(len(self.attr.history.keys()),
-                         len(loaded_attr.history.keys()))
-
-
-class TestsWithDBFloat(TestsWithDBStr):
-    """Module tests requiring DB setup."""
-    default_vals: list[str | float] = [0.9, 1.1, 2]
-    init_sql = SQL_TEST_TABLE_FLOAT