_rewrite_files_last_field_processing_first_field(conn, bytes.fromhex)
 
 
+def _mig_7_resave_files(conn: DbConn) -> None:
+    """Re-init all VideoFiles to calc .duration_ms and save it."""
+    # pylint: disable=import-outside-toplevel
+    from ytplom.misc import VideoFile
+    for row in conn.exec('SELECT * FROM files').fetchall():
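+        # re-building each VideoFile runs __init__, which ffprobes the
+        # duration into .duration_ms; .save then persists the row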
+        # pylint: disable=protected-access
+        file = VideoFile._from_table_row(row)
+        print(f'New .duration_ms for {file.rel_path}: {file.duration_ms}')
+        file.save(conn)
+
+
 MIGRATIONS: set[DbMigration] = {
     DbMigration(0, Path('0_init.sql'), None),
     DbMigration(1, Path('1_add_files_last_updated.sql'), None),
     DbMigration(4, Path('4_add_files_sha512_blob.sql'),
                 _mig_4_convert_digests),
     DbMigration(5, Path('5_files_redo.sql'), None),
-    DbMigration(6, Path('6_add_files_tags.sql'), None)
+    DbMigration(6, Path('6_add_files_tags.sql'), None),
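+    # 7 adds files.duration_ms; _mig_7_resave_files backfills it via ffprobe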
+    DbMigration(7, Path('7_add_files_duration_ms.sql'), _mig_7_resave_files),
 }
 
 from random import shuffle
 from time import sleep
 from datetime import datetime, timedelta
+from decimal import Decimal
 from json import loads as json_loads
 from urllib.request import urlretrieve
 from uuid import uuid4
   FlagName('do not sync'): FlagsInt(1 << 62),
   FlagName('delete'): FlagsInt(-(1 << 63))
 }
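+# seconds-to-millionths factor used for VideoFile.duration_ms arithmetic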
+ONE_MILLION = 1000 * 1000
 
 
 def ensure_expected_dirs(expected_dirs: list[Path]) -> None:
     id_name = 'digest'
     _table_name = 'files'
     _str_field = 'rel_path'
-    _cols = ('digest', 'rel_path', 'flags', 'yt_id', 'last_update', 'tags_str')
+    _cols = ('digest', 'rel_path', 'flags', 'yt_id', 'last_update', 'tags_str',
+             'duration_ms')
     last_update: DatetimeStr
     rel_path: Path
     digest: Hash
                  flags: FlagsInt = FlagsInt(0),
                  yt_id: Optional[YoutubeId] = None,
                  last_update: Optional[DatetimeStr] = None,
-                 tags_str: str = ''
+                 tags_str: str = '',
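+                 # -1 = unknown; __init__ then ffprobes the duration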
+                 duration_ms: int = -1,
                  ) -> None:
         self.rel_path = rel_path
         self.digest = digest if digest else Hash.from_file(self.full_path)
         self.flags = flags
         self.tags = TagSet.from_joined(tags_str)
         self.yt_id = yt_id
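+        # ffprobe reports duration as a decimal string of seconds; Decimal
+        # avoids float rounding before scaling to integer millionths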
+        self.duration_ms = (
+                duration_ms if duration_ms >= 0
+                else int(ONE_MILLION * Decimal(
+                    ffprobe(self.full_path)['format']['duration'])))
         if last_update is None:
             self._renew_last_update()
         else:
 
     def __hash__(self) -> int:
         return hash(f'{self.digest.b64}|{self.rel_path}|{self.flags}|'
-                    f'{self.yt_id}|{self.last_update}|{self.tags_str}')
+                    f'{self.yt_id}|{self.last_update}|{self.tags_str}|'
+                    f'{self.duration_ms}')
 
     def _renew_last_update(self):
         self.last_update = DatetimeStr(datetime.now().strftime(TIMESTAMP_FMT))
 
     @property
     def ffprobed_duration(self) -> str:
-        """Return human-friendly formatting of file duration as per ffprobe."""
-        if not self.full_path.is_file():
+        """Return human-friendly formatting of .duration_ms."""
+        if self.duration_ms < 0:
             return '?'
-        json = ffprobe(self.full_path)
-        duration_str = json['format']['duration']
-        m_seconds_str = duration_str.split('.')[1]
-        duration_float = float(duration_str)
-        seconds = int(duration_float)
-        return f'{_readable_seconds(seconds)}.{m_seconds_str}'
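+        # split into whole seconds and the zero-padded sub-second remainder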
+        frac_str = f'{self.duration_ms % ONE_MILLION:06}'
+        n_seconds = self.duration_ms // ONE_MILLION
+        return f'{_readable_seconds(n_seconds)}.{frac_str}'
 
     @property
     def present(self) -> bool: