
Commit

Fix Liskov substitution violations
aaronkollasch committed Aug 31, 2022
1 parent 4d024d5 commit 922d356
Showing 3 changed files with 21 additions and 24 deletions.
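For context, the violations being fixed are the classic parameter-narrowing case: an override declares a more specific parameter type than the base class, so the subclass can no longer stand in for its parent. A minimal before/after sketch of the pattern — these are simplified stand-ins, not the project's actual classes; only the do_job signatures mirror the diff:

from dataclasses import dataclass, field


@dataclass
class AsyncJob:
    size: int = 0


@dataclass
class FileHasherJob(AsyncJob):
    file_paths: list[bytes] = field(default_factory=list)


class AsyncWorkerQueue:
    async def do_job(self, worker_id: int, job: AsyncJob):
        raise NotImplementedError


# Before: the override narrows the parameter type, so this subclass cannot
# be substituted wherever an AsyncWorkerQueue is expected (mypy reports an
# incompatible override).
class NarrowingQueue(AsyncWorkerQueue):
    async def do_job(self, worker_id: int, job: FileHasherJob):  # type: ignore[override]
        ...


# After: keep the base signature and reject unsupported job types at
# runtime, the same pattern the commit applies in hasher.py and
# pyexiftool_async.py.
class SubstitutableQueue(AsyncWorkerQueue):
    async def do_job(self, worker_id: int, job: AsyncJob):
        if not isinstance(job, FileHasherJob):
            raise NotImplementedError
        ...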
4 changes: 2 additions & 2 deletions src/photomanager/async_base.py
@@ -61,10 +61,10 @@ async def do_job(self, worker_id: int, job: AsyncJob):
         raise NotImplementedError
 
     def make_pbar(self, all_jobs: Collection[AsyncJob]):
-        raise NotImplementedError
+        self.pbar = tqdm(total=sum(job.size for job in all_jobs))
 
     def update_pbar(self, job: AsyncJob):
-        raise NotImplementedError
+        self.pbar.update(n=job.size) if self.pbar else None
 
     def close_pbar(self):
         if self.pbar is not None:
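With this change, AsyncWorkerQueue no longer forces every subclass to override make_pbar and update_pbar: the base class supplies tqdm-based defaults keyed on job.size, and a subclass only has to implement do_job (which is why AsyncPyExifTool can drop its copies further down). A rough, self-contained sketch of just that default behavior — MiniWorkerQueue is a hypothetical stand-in, not the real AsyncWorkerQueue:

from dataclasses import dataclass

from tqdm import tqdm  # same dependency the project already uses


@dataclass
class AsyncJob:
    size: int = 0


class MiniWorkerQueue:
    """Stand-in for AsyncWorkerQueue's progress-bar defaults only."""

    def __init__(self):
        self.pbar = None

    def make_pbar(self, all_jobs):
        # default: total progress is the sum of each job's size
        self.pbar = tqdm(total=sum(job.size for job in all_jobs))

    def update_pbar(self, job):
        # no-op until make_pbar has been called
        self.pbar.update(n=job.size) if self.pbar else None

    def close_pbar(self):
        if self.pbar is not None:
            self.pbar.close()


jobs = [AsyncJob(size=3), AsyncJob(size=5)]
queue = MiniWorkerQueue()
queue.make_pbar(jobs)          # bar with total=8
for job in jobs:
    queue.update_pbar(job)     # advances by 3, then by 5
queue.close_pbar()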
29 changes: 16 additions & 13 deletions src/photomanager/hasher.py
@@ -4,13 +4,13 @@
 import subprocess as subprocess_std
 from asyncio import run
 from asyncio import subprocess as subprocess_async
-from collections.abc import Iterable
+from collections.abc import Callable, Collection, Generator, Iterable
 from dataclasses import dataclass, field
 from enum import Enum
 from io import IOBase
 from os import PathLike, cpu_count, fsencode
 from os.path import getsize
-from typing import Callable, Generator, Optional, TypedDict, TypeVar, Union
+from typing import Optional, TypedDict, TypeVar, Union
 
 from blake3 import blake3
 from tqdm import tqdm
@@ -139,6 +139,7 @@ def check_files(
 @dataclass
 class FileHasherJob(AsyncJob):
     file_paths: list[bytes] = field(default_factory=list)
+    size_mode: str = "B"
     known_total_size: Optional[int] = None
 
     @staticmethod
@@ -151,7 +152,12 @@ def _getsize(path):
     @property
     def size(self) -> int:
         if self.known_total_size is None:
-            self.known_total_size = sum(self._getsize(path) for path in self.file_paths)
+            if self.size_mode == "B":
+                self.known_total_size = sum(
+                    self._getsize(path) for path in self.file_paths
+                )
+            else:
+                self.known_total_size = len(self.file_paths)
         return self.known_total_size
 
 
@@ -190,7 +196,9 @@ def cmd_available(cmd) -> bool:
         except FileNotFoundError:
             return False
 
-    async def do_job(self, worker_id: int, job: FileHasherJob):
+    async def do_job(self, worker_id: int, job: AsyncJob):
+        if not isinstance(job, FileHasherJob):
+            raise NotImplementedError
         stdout = None
         try:
             process = await subprocess_async.create_subprocess_exec(
@@ -208,7 +216,7 @@ async def do_job(self, worker_id: int, job: FileHasherJob):
             print("hasher output:", stdout)
             raise e
 
-    def make_pbar(self, all_jobs: list[FileHasherJob]):
+    def make_pbar(self, all_jobs: Collection[AsyncJob]):
         if self.pbar_unit == "B":
             self.pbar = tqdm(
                 total=sum(job.size for job in all_jobs),
@@ -217,13 +225,7 @@ def make_pbar(self, all_jobs: list[FileHasherJob]):
                 unit_divisor=1024,
             )
         else:
-            self.pbar = tqdm(total=sum(len(job.file_paths) for job in all_jobs))
-
-    def update_pbar(self, job: FileHasherJob):
-        if self.pbar_unit == "B":
-            self.pbar.update(job.size) if self.pbar else None
-        else:
-            self.pbar.update(len(job.file_paths)) if self.pbar else None
+            self.pbar = tqdm(total=sum(job.size for job in all_jobs))
 
     @staticmethod
     def encode(it: Iterable[PathType]) -> Generator[bytes, None, None]:
@@ -250,12 +252,13 @@ def check_files(
         all_paths = list(make_chunks(self.encode(file_paths), self.batch_size))
         all_sizes = (
             list(make_chunks(file_sizes, self.batch_size))
-            if file_sizes is not None
+            if pbar_unit == "B" and file_sizes is not None
             else None
         )
         for i, paths in enumerate(all_paths):
             job = FileHasherJob(
                 file_paths=paths,
+                size_mode=pbar_unit,
                 known_total_size=sum(all_sizes[i]) if all_sizes is not None else None,
             )
             all_jobs.append(job)
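The new size_mode field makes FileHasherJob.size mean "total bytes" when the progress bar counts bytes and "number of files" otherwise, which is what lets make_pbar total job.size in both branches above. A small illustration of that property, using a hypothetical MiniHasherJob that takes its sizes as a list instead of reading them from disk via _getsize; "count" here is just a placeholder for any non-"B" unit:

from dataclasses import dataclass, field
from typing import Optional


@dataclass
class MiniHasherJob:
    file_paths: list[bytes] = field(default_factory=list)
    file_sizes: list[int] = field(default_factory=list)  # stand-in for on-disk sizes
    size_mode: str = "B"
    known_total_size: Optional[int] = None

    @property
    def size(self) -> int:
        if self.known_total_size is None:
            if self.size_mode == "B":
                # bytes mode: progress is measured in total bytes hashed
                self.known_total_size = sum(self.file_sizes)
            else:
                # otherwise: progress is measured in files hashed
                self.known_total_size = len(self.file_paths)
        return self.known_total_size


job_b = MiniHasherJob(file_paths=[b"a", b"b"], file_sizes=[10, 20], size_mode="B")
job_n = MiniHasherJob(file_paths=[b"a", b"b"], file_sizes=[10, 20], size_mode="count")
assert job_b.size == 30  # total bytes
assert job_n.size == 2   # number of files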
12 changes: 3 additions & 9 deletions src/photomanager/pyexiftool/pyexiftool_async.py
@@ -63,7 +63,6 @@
 from typing import Union
 
 import orjson
-from tqdm import tqdm
 
 from photomanager.async_base import AsyncJob, AsyncWorkerQueue, make_chunks
 
@@ -125,10 +124,11 @@ def __init__(
         self.running = False
         self.queue = None
         self.batch_size = batch_size
-        self.pbar = None
         self.processes: dict[int, subprocess.Process] = {}
 
-    async def do_job(self, worker_id: int, job: ExifToolJob):
+    async def do_job(self, worker_id: int, job: AsyncJob):
+        if not isinstance(job, ExifToolJob):
+            raise NotImplementedError
         outputs = [b"None"]
         try:
             if worker_id in self.processes:
@@ -182,12 +182,6 @@ async def close_worker(self, worker_id: int):
         await process.communicate(b"-stay_open\nFalse\n")
         del self.processes[worker_id]
 
-    def make_pbar(self, all_jobs: Collection[ExifToolJob]):
-        self.pbar = tqdm(total=sum(job.size for job in all_jobs))
-
-    def update_pbar(self, job: ExifToolJob):
-        self.pbar.update(n=job.size) if self.pbar else None
-
     def get_metadata_batch(
         self, filenames: Collection[str]
     ) -> dict[str, dict[str, str]]:
