docs: Refine completion wording
janw committed Jan 16, 2024
1 parent b7c11d0 commit 85c4d3d
Showing 2 changed files with 8 additions and 8 deletions.
4 changes: 2 additions & 2 deletions podcast_archiver/enums.py
@@ -1,7 +1,7 @@
 from enum import Enum
 
 
-class QueueCompletionMsg(str, Enum):
-    COMPLETED = "All episodes downloaded."
+class QueueCompletionType(str, Enum):
+    COMPLETED = "Archived all episodes."
     FOUND_EXISTING = "Archive is up to date."
     MAX_EPISODES = "Maximum episode count reached."
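
The renamed enum keeps its str mixin, so each member still carries the user-facing message as its value and compares equal to that plain string. A minimal sketch of that behavior, using the members exactly as defined above (the print calls are illustrative only, not part of the project):

from enum import Enum


class QueueCompletionType(str, Enum):
    # Values double as the messages printed when a feed's queue finishes.
    COMPLETED = "Archived all episodes."
    FOUND_EXISTING = "Archive is up to date."
    MAX_EPISODES = "Maximum episode count reached."


msg = QueueCompletionType.FOUND_EXISTING
print(msg.value)                        # Archive is up to date.
print(msg == "Archive is up to date.")  # True: the str mixin makes members compare as strings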
12 changes: 6 additions & 6 deletions podcast_archiver/processor.py
@@ -11,7 +11,7 @@
 from podcast_archiver.config import Settings
 from podcast_archiver.console import console
 from podcast_archiver.download import DownloadJob
-from podcast_archiver.enums import QueueCompletionMsg
+from podcast_archiver.enums import QueueCompletionType
 from podcast_archiver.logging import logger
 from podcast_archiver.models import Feed
@@ -73,10 +73,10 @@ def process(self, url: AnyHttpUrl) -> ProcessingResult:
         futures, completion_msg = self._process_episodes(feed=feed)
         self._handle_futures(futures, result=result)
 
-        console.print(f"[bar.finished]✔ {completion_msg}[/]")
+        console.print(f"\n[bar.finished]✔ {completion_msg}[/]")
         return result
 
-    def _process_episodes(self, feed: Feed) -> tuple[list[Future[DownloadJob]], QueueCompletionMsg]:
+    def _process_episodes(self, feed: Feed) -> tuple[list[Future[DownloadJob]], QueueCompletionType]:
         futures: list[Future[DownloadJob]] = []
         for idx, episode in enumerate(feed.episode_iter(self.settings.maximum_episode_count), 1):
             download_job = DownloadJob(
@@ -88,15 +88,15 @@ def _process_episodes(self, feed: Feed) -> tuple[list[Future[DownloadJob]], Queu
             )
             if self.settings.update_archive and download_job.target_exists:
                 logger.info("Up to date with %r", episode)
-                return futures, QueueCompletionMsg.FOUND_EXISTING
+                return futures, QueueCompletionType.FOUND_EXISTING
 
             logger.info("Queueing download for %r", episode)
             futures.append(self.pool_executor.submit(download_job))
            if (max_count := self.settings.maximum_episode_count) and idx == max_count:
                 logger.info("Reached requested maximum episode count of %s", max_count)
-                return futures, QueueCompletionMsg.MAX_EPISODES
+                return futures, QueueCompletionType.MAX_EPISODES
 
-        return futures, QueueCompletionMsg.COMPLETED
+        return futures, QueueCompletionType.COMPLETED
 
     def _handle_futures(self, futures: list[Future[DownloadJob]], *, result: ProcessingResult) -> None:
         for future in futures:
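
Downstream, the returned member feeds the Rich console markup shown above; "bar.finished" is one of Rich's built-in styles. A rough, self-contained sketch of the new print call (the standalone Console here stands in for podcast_archiver.console, and .value is spelled out for clarity rather than relying on f-string formatting of the member):

from rich.console import Console

from podcast_archiver.enums import QueueCompletionType

console = Console()
completion_msg = QueueCompletionType.COMPLETED

# The leading "\n" added in this commit separates the summary line from prior progress output;
# "[bar.finished]...[/]" applies Rich's default bar.finished style to the checkmark and message.
console.print(f"\n[bar.finished]✔ {completion_msg.value}[/]")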
