0.5.85 - migration of new version setting, adjusted readme, added require_physical_release schema, included zurg_update.sh example, adjusted collected/wanted items to include physical_release_date, added physical_release_date to db schema, added safe TZ handling to metadata, added physical release date to metadata, added dynamic queue period to checking queue, added stricter media matching to MediaMatcher when using Plex, adjusted unreleased/wanted queues to respect physical release setting if enabled, improved db reset in debug, adjusted stats routes to use safer TZ function
godver3 committed Feb 16, 2025
1 parent 79c5101 commit 67bb9bc
Showing 16 changed files with 258 additions and 90 deletions.
6 changes: 4 additions & 2 deletions README.md
@@ -49,9 +49,11 @@ The core functionality of the software. When started, it:

### dev vs main

-dev is the latest version of cli_debrid, and is built for arm64 and amd64. It is not recommended for production use.
+dev is the latest version of cli_debrid. It is not recommended for production use, unless you are brave.

-main is the stable version of cli_debrid, and is built for amd64. It is recommended for production use.
+main is the stable version of cli_debrid. It is recommended for production use.

+Development generally works on a 2-3 week cycle, with dev being moved to main at the end of each cycle.

### Library Management

6 changes: 3 additions & 3 deletions database/collected_items.py
@@ -219,12 +219,12 @@ def add_collected_items(media_items_batch, recent=False):
if item_type == 'movie':
conn.execute('''
INSERT OR REPLACE INTO media_items
-(imdb_id, tmdb_id, title, year, release_date, state, type, last_updated, metadata_updated, version, collected_at, original_collected_at, genres, filled_by_file, runtime, location_on_disk, upgraded, country, resolution)
-VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
+(imdb_id, tmdb_id, title, year, release_date, state, type, last_updated, metadata_updated, version, collected_at, original_collected_at, genres, filled_by_file, runtime, location_on_disk, upgraded, country, resolution, physical_release_date)
+VALUES (?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?,?)
''', (
imdb_id, tmdb_id, normalized_title, item.get('year'),
item.get('release_date'), 'Collected', 'movie',
-datetime.now(), datetime.now(), version, collected_at, collected_at, genres, filename, item.get('runtime'), location, False, item.get('country', '').lower(), item.get('resolution')
+datetime.now(), datetime.now(), version, collected_at, collected_at, genres, filename, item.get('runtime'), location, False, item.get('country', '').lower(), item.get('resolution'), item.get('physical_release_date')
))
else:
if imdb_id not in airtime_cache:
6 changes: 5 additions & 1 deletion database/schema_management.py
@@ -120,6 +120,9 @@ def migrate_schema():
if 'content_source_detail' not in columns:
conn.execute('ALTER TABLE media_items ADD COLUMN content_source_detail TEXT')
logging.info("Successfully added content_source_detail column to media_items table.")
+if 'physical_release_date' not in columns:
+    conn.execute('ALTER TABLE media_items ADD COLUMN physical_release_date DATE')
+    logging.info("Successfully added physical_release_date column to media_items table.")

# logging.info("Successfully added new columns to media_items table.")

@@ -233,7 +236,8 @@ def create_tables():
resolution TEXT,
imdb_aliases TEXT,
title_aliases TEXT,
-disable_not_wanted_check BOOLEAN DEFAULT FALSE
+disable_not_wanted_check BOOLEAN DEFAULT FALSE,
+physical_release_date DATE
)
''')

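The migration above guards each ALTER TABLE behind a column-existence check so it can run on every startup without erroring. A minimal standalone sketch of that idiom, using a throwaway in-memory database (only the physical_release_date column name comes from this commit):

```python
import sqlite3

# Stand-in for the real media_items table.
conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE media_items (id INTEGER PRIMARY KEY)')

# PRAGMA table_info yields one row per column; index 1 is the column name.
columns = [row[1] for row in conn.execute('PRAGMA table_info(media_items)')]
if 'physical_release_date' not in columns:
    conn.execute('ALTER TABLE media_items ADD COLUMN physical_release_date DATE')

# Running the same block again is a no-op: the column now exists.
```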
12 changes: 6 additions & 6 deletions database/wanted_items.py
@@ -437,11 +437,11 @@ def strip_version(version):
if item_type == 'movie':
conn.execute('''
INSERT INTO media_items
-(imdb_id, tmdb_id, title, year, release_date, state, type, last_updated, version, genres, runtime, country, content_source, content_source_detail)
-VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+(imdb_id, tmdb_id, title, year, release_date, state, type, last_updated, version, genres, runtime, country, content_source, content_source_detail, physical_release_date)
+VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
''', (
item.get('imdb_id'), item.get('tmdb_id'), normalized_title, item.get('year'),
-item.get('release_date'), 'Wanted', 'movie', datetime.now(), version, genres, item.get('runtime'), item.get('country', '').lower(), item.get('content_source'), item.get('content_source_detail')
+item.get('release_date'), 'Wanted', 'movie', datetime.now(), version, genres, item.get('runtime'), item.get('country', '').lower(), item.get('content_source'), item.get('content_source_detail'), item.get('physical_release_date')
))
items_added += 1
else:
@@ -615,7 +615,7 @@ def process_batch(conn, batch_items, versions, processed):
item.get('year'), item.get('release_date'), 'Wanted', 'movie',
datetime.now(), version, genres, item.get('runtime'),
item.get('country', '').lower(), item.get('content_source'),
-item.get('content_source_detail')
+item.get('content_source_detail'), item.get('physical_release_date')
))
else:
for version, enabled in versions.items():
@@ -637,8 +637,8 @@
conn.executemany('''
INSERT INTO media_items
(imdb_id, tmdb_id, title, year, release_date, state, type, last_updated,
-version, genres, runtime, country, content_source, content_source_detail)
-VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+version, genres, runtime, country, content_source, content_source_detail, physical_release_date)
+VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
''', movie_items)
processed['movies'] += len(movie_items)

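One regression these column additions can introduce is a mismatch between SQL placeholders and supplied parameters, which is why every column-list change above is paired with an extra `?`. A quick sketch of the error sqlite3 raises on a mismatch (toy table, not the project schema):

```python
import sqlite3

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE t (a, b, c)')
try:
    # Three placeholders but only two values supplied.
    conn.execute('INSERT INTO t VALUES (?, ?, ?)', ('x', 'y'))
except sqlite3.ProgrammingError as e:
    print(e)  # Incorrect number of bindings supplied...
```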
12 changes: 12 additions & 0 deletions main.py
@@ -978,6 +978,18 @@ def main():
save_config(config)
logging.info("Successfully migrated content sources to include media_type setting")

+# Add require_physical_release to existing versions
+if 'Scraping' in config and 'versions' in config['Scraping']:
+    modified = False
+    for version in config['Scraping']['versions']:
+        if 'require_physical_release' not in config['Scraping']['versions'][version]:
+            config['Scraping']['versions'][version]['require_physical_release'] = False
+            modified = True
+
+    if modified:
+        save_config(config)
+        logging.info("Added require_physical_release setting to existing versions")

# Add migration for notification settings
if 'Notifications' in config:
notifications_updated = False
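As an illustration of what this migration produces, here is a hypothetical before/after for a single entry under config['Scraping']['versions'] (the resolution field is a placeholder; only require_physical_release and its False default come from this commit):

```python
# Hypothetical version entry before the migration runs:
before = {'resolution': '1080p'}

# After: the flag is added, defaulting to False so existing behavior
# is unchanged until a user opts in per version.
after = {'resolution': '1080p', 'require_physical_release': False}
```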
73 changes: 63 additions & 10 deletions metadata/metadata.py
@@ -224,24 +224,52 @@ def create_episode_item(show_item: Dict[str, Any], season_number: int, episode_n
return episode_item

def _get_local_timezone():
"""Get the local timezone in a cross-platform way with fallback to settings."""
"""Get the local timezone in a cross-platform way with multiple fallbacks."""
# Suppress tzlocal debug messages
import logging
logging.getLogger('tzlocal').setLevel(logging.WARNING)

from tzlocal import get_localzone
from settings import get_setting
+from datetime import timezone
+import os

-# Check for override in settings
-#timezone_override = '' # get_setting('Debug', 'timezone_override', '')
-#if timezone_override:
-#    try:
-#        from zoneinfo import ZoneInfo
-#        return ZoneInfo(timezone_override)
-#    except ZoneInfoNotFoundError:
-#        logging.error(f"Invalid timezone override: {timezone_override}, falling back to system timezone")
+# First try: Check for override in settings
+timezone_override = get_setting('Debug', 'timezone_override', '')
+if timezone_override:
+    try:
+        from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
+        return ZoneInfo(timezone_override)
+    except (ImportError, ZoneInfoNotFoundError):
+        logging.error(f"Invalid timezone override: {timezone_override}, falling back to system timezone")

+# Second try: Try getting from environment variable
+tz_env = os.environ.get('TZ')
+if tz_env:
+    try:
+        from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
+        return ZoneInfo(tz_env)
+    except (ImportError, ZoneInfoNotFoundError):
+        logging.error(f"Invalid TZ environment variable: {tz_env}, trying next fallback")

+# Third try: Try tzlocal with exception handling
+try:
+    return get_localzone()
+except Exception as e:
+    logging.error(f"Error getting local timezone from tzlocal: {str(e)}, trying next fallback")

-return get_localzone()
+# Fourth try: Try common timezone files directly
+common_zones = ['America/New_York', 'UTC', 'Etc/UTC']
+for zone in common_zones:
+    try:
+        from zoneinfo import ZoneInfo, ZoneInfoNotFoundError
+        return ZoneInfo(zone)
+    except (ImportError, ZoneInfoNotFoundError):
+        continue

+# Final fallback: Use UTC
+logging.warning("All timezone detection methods failed, falling back to UTC")
+return timezone.utc

def update_existing_episodes_states(conn, tmdb_id: str, all_requested_seasons: set):
"""Update states of existing episodes based on requested seasons."""
@@ -291,6 +319,27 @@ def update_existing_episodes_states(conn, tmdb_id: str, all_requested_seasons: set):
logging.error(f"Error updating existing episodes states: {str(e)}")
conn.rollback()

+def get_physical_release_date(imdb_id: Optional[str] = None) -> Optional[str]:
+    """Get the earliest physical release date for a movie."""
+    if not imdb_id:
+        return None
+
+    release_dates, _ = DirectAPI.get_movie_release_dates(imdb_id)
+    if not release_dates:
+        return None
+
+    physical_releases = []
+    for country, country_releases in release_dates.items():
+        for release in country_releases:
+            if release.get('type', '').lower() == 'physical' and release.get('date'):
+                try:
+                    release_date = datetime.strptime(release.get('date'), "%Y-%m-%d")
+                    physical_releases.append(release_date)
+                except ValueError:
+                    continue
+
+    return min(physical_releases).strftime("%Y-%m-%d") if physical_releases else None

def process_metadata(media_items: List[Dict[str, Any]]) -> Dict[str, List[Dict[str, Any]]]:
from database.database_writing import update_blacklisted_date, update_media_item
from database.core import get_db_connection
@@ -318,6 +367,10 @@ def process_metadata(media_items: List[Dict[str, Any]]) -> Dict[str, List[Dict[str, Any]]]:
continue

if item['media_type'].lower() == 'movie':
+# Get physical release date if it's a movie
+physical_release_date = get_physical_release_date(metadata.get('imdb_id'))
+if physical_release_date:
+    metadata['physical_release_date'] = physical_release_date
processed_items['movies'].append(metadata)
logging.debug(f"Added movie with content_source_detail={metadata.get('content_source_detail')}")
elif item['media_type'].lower() in ['tv', 'show']:
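To make the new helper concrete, the payload shape it consumes can be inferred from its loop: a mapping of country codes to lists of releases, each carrying a type and a date. The values below are invented for illustration; the real DirectAPI.get_movie_release_dates payload may include more fields:

```python
release_dates = {
    'us': [
        {'type': 'Theatrical', 'date': '2024-11-01'},
        {'type': 'Physical', 'date': '2025-02-11'},
    ],
    'gb': [
        {'type': 'Physical', 'date': '2025-02-18'},
    ],
}

# get_physical_release_date keeps only type == 'physical' entries
# (case-insensitive) and returns the earliest, here '2025-02-11'.
```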
48 changes: 44 additions & 4 deletions queues/checking_queue.py
@@ -233,6 +233,46 @@ def remove_item(self, item: Dict[str, Any]):
del self.progress_checks[torrent_id]
logging.debug(f"Cleaned up progress checks for torrent {torrent_id} as it has no more associated items")

+def _calculate_dynamic_queue_period(self, items):
+    """Calculate a dynamic queue period based on the number of items.
+    Base period from settings + 1 minute per item in the checking queue.
+    Only applies dynamic timing when not using Symlinked/Local file management.
+    For items added in batches, considers when each item was added."""
+    base_period = get_setting('Debug', 'checking_queue_period', default=3600)
+
+    # Only use dynamic timing if NOT using Symlinked/Local file management
+    if get_setting('File Management', 'file_collection_management') != 'Symlinked/Local':
+        items_count = len(items)
+        # Get the time each item has been in the queue
+        current_time = time.time()
+        item_times = []
+        for item in items:
+            item_add_time = self.checking_queue_times.get(item['id'], current_time)
+            time_in_queue = current_time - item_add_time
+            item_times.append(time_in_queue)
+
+        # Sort times ascending so the newest item (smallest age) comes first
+        item_times.sort()
+        newest_item_time = item_times[0] if item_times else 0
+
+        # Calculate remaining items that still need processing time
+        # Only count items that have been in queue for less than base_period
+        remaining_items = sum(1 for t in item_times if t < base_period)
+
+        # Add 60 seconds per remaining item, measured from the newest item's add time
+        dynamic_period = base_period + (remaining_items * 60)
+
+        # Adjust the period based on the newest item's time in queue
+        # This ensures newer items get their full processing time
+        if newest_item_time < base_period:
+            dynamic_period = max(dynamic_period - newest_item_time, base_period)
+
+        logging.debug(f"Using dynamic queue period: {dynamic_period}s (base: {base_period}s + {remaining_items} remaining items * 60s, newest item age: {newest_item_time:.1f}s)")
+        return dynamic_period
+    else:
+        logging.debug(f"Using static queue period: {base_period}s (Symlinked/Local file management)")
+        return base_period

def process(self, queue_manager):
if self.items:
item = self.items[0]
@@ -333,16 +373,16 @@ def process(self, queue_manager):
if current_progress == 100:
oldest_item_time = min(self.checking_queue_times.get(item['id'], current_time) for item in items)
time_in_queue = current_time - oldest_item_time
-checking_queue_limit = get_setting('Debug', 'checking_queue_period')
+checking_queue_limit = self._calculate_dynamic_queue_period(items)

logging.info(f"Torrent {torrent_id} has been in checking queue for {time_in_queue:.1f} seconds (limit: {checking_queue_limit} seconds)")
logging.info(f"Torrent {torrent_id} has been in checking queue for {time_in_queue:.1f} seconds (dynamic limit: {checking_queue_limit} seconds for {len(items)} items)")

if time_in_queue > checking_queue_limit:
logging.info(f"Removing torrent {torrent_id} from debrid service as content was not found within {checking_queue_limit} seconds")
logging.info(f"Removing torrent {torrent_id} from debrid service as content was not found within {checking_queue_limit} seconds (dynamic limit for {len(items)} items)")
try:
self.debrid_provider.remove_torrent(
torrent_id,
-removal_reason=f"Content not found in checking queue after {checking_queue_limit} seconds"
+removal_reason=f"Content not found in checking queue after {checking_queue_limit} seconds (dynamic limit for {len(items)} items)"
)
except Exception as e:
logging.error(f"Failed to remove torrent {torrent_id}: {str(e)}")
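A worked example helps sanity-check the dynamic period. This sketch reimplements the arithmetic of _calculate_dynamic_queue_period in isolation; the item ages are invented inputs:

```python
def dynamic_queue_period(item_ages, base_period=3600):
    """Standalone sketch of the calculation above; item_ages are the
    seconds each item has already spent in the checking queue."""
    ages = sorted(item_ages)
    newest = ages[0] if ages else 0
    # Only items younger than the base period still need processing time.
    remaining = sum(1 for t in ages if t < base_period)
    period = base_period + remaining * 60
    if newest < base_period:
        # Credit back the newest item's age, but never drop below the base.
        period = max(period - newest, base_period)
    return period

# Two fresh items (30s and 60s old): 3600 + 2*60 - 30 = 3690 seconds.
assert dynamic_queue_period([30, 60]) == 3690
# One stale item (2h) and one fresh (5min): 3600 + 60 - 300 clamps to 3600.
assert dynamic_queue_period([7200, 300]) == 3600
```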
12 changes: 9 additions & 3 deletions queues/media_matcher.py
@@ -104,9 +104,15 @@ def _match_tv_content(self, files: List[Dict[str, Any]], item: Dict[str, Any]) -
genres = [genres]
is_anime = any('anime' in genre.lower() for genre in genres)

-# Apply relaxed matching for anime or when explicitly enabled
-use_relaxed_matching = is_anime or self.relaxed_matching
-logging.debug(f"Using relaxed matching ({is_anime}/{self.relaxed_matching})")
+# Check if using Plex library management
+from settings import get_setting
+file_collection_management = get_setting('File Management', 'file_collection_management')
+using_plex = file_collection_management == 'Plex'
+
+# Apply relaxed matching only if not using Plex and either it's anime or relaxed matching is enabled
+use_relaxed_matching = not using_plex and (is_anime or self.relaxed_matching)
+#logging.debug(f"Using relaxed matching ({use_relaxed_matching}) - Plex: {using_plex}, Anime: {is_anime}, Relaxed: {self.relaxed_matching}")

if not all([series_title, item_episode is not None]):
logging.debug(f"Missing required TV info for {series_title}")
return []
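The net effect of the change is that Plex-managed libraries always use strict matching, regardless of genre or settings. The gate reduces to a one-line predicate (a sketch, not the project's API):

```python
def use_relaxed_matching(using_plex: bool, is_anime: bool, relaxed_setting: bool) -> bool:
    # Plex file management forces strict matching; otherwise anime or the
    # explicit setting can relax it.
    return not using_plex and (is_anime or relaxed_setting)

assert use_relaxed_matching(True, True, True) is False   # Plex always wins
assert use_relaxed_matching(False, True, False) is True  # anime relaxes
```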
21 changes: 21 additions & 0 deletions queues/unreleased_queue.py
@@ -31,6 +31,7 @@ def process(self, queue_manager):
for item in self.items:
item_identifier = queue_manager.generate_identifier(item)
release_date_str = item.get('release_date')
+version = item.get('version')

if not release_date_str or release_date_str.lower() == 'unknown':
logging.warning(f"Item {item_identifier} has no release date. Keeping in Unreleased queue.")
@@ -41,6 +42,26 @@
release_datetime = datetime.combine(release_date, datetime.min.time())
logging.info(f"Item {item_identifier} release date: {release_datetime}")

+# Check if version requires physical release
+scraping_versions = get_setting('Scraping', 'versions', {})
+version_settings = scraping_versions.get(version, {})
+require_physical = version_settings.get('require_physical_release', False)
+physical_release_date = item.get('physical_release_date')
+
+if require_physical and not physical_release_date:
+    logging.info(f"Item {item_identifier} requires physical release date but none available. Keeping in Unreleased queue.")
+    continue
+
+# If physical release is required, use that date instead
+if require_physical and physical_release_date:
+    try:
+        physical_date = datetime.strptime(physical_release_date, '%Y-%m-%d').date()
+        release_datetime = datetime.combine(physical_date, datetime.min.time())
+        logging.info(f"Item {item_identifier} using physical release date: {release_datetime}")
+    except ValueError:
+        logging.warning(f"Invalid physical release date format for item {item_identifier}: {physical_release_date}")
+        continue

# If it's an early release, move it to Wanted immediately
if item.get('early_release', False):
logging.info(f"Item {item_identifier} is an early release. Moving to Wanted queue immediately.")
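Pulled out of the queue loop, the new gating logic amounts to choosing which date, if any, an item may be evaluated against. A self-contained sketch under the same rules (the function name and signature are illustrative):

```python
from datetime import datetime
from typing import Optional

def effective_release(release_date: Optional[str],
                      physical_release_date: Optional[str],
                      require_physical: bool) -> Optional[datetime]:
    """Return the datetime to compare against now, or None to keep the
    item in the Unreleased queue."""
    if require_physical:
        if not physical_release_date:
            return None  # version demands a physical date that isn't known yet
        release_date = physical_release_date  # physical date takes precedence
    try:
        day = datetime.strptime(release_date, '%Y-%m-%d').date()
    except (TypeError, ValueError):
        return None  # unknown or malformed date: keep waiting
    return datetime.combine(day, datetime.min.time())

# A version requiring a physical release waits until that date is known:
assert effective_release('2025-01-01', None, True) is None
assert effective_release('2025-01-01', '2025-03-04', True) == datetime(2025, 3, 4)
```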