Skip to content

Commit

Permalink
0.5.69 - enabled manual scraper plex refresher (add to checking queue…
Browse files Browse the repository at this point in the history
…), fixed a scraper error related to genres, adjusted watch history to include both server and API watch history, added a debug template at /database/watch_history for review as needed, removed the first-aired filter for the web scraper, implemented a potential fix for the performance dashboard memory chart not updating to the current time (it was showing data from a few hours ago), and now only approved requests are taken from Overseerr (previously all requests were taken without filtering)
  • Loading branch information
godver3 committed Feb 3, 2025
1 parent 9dcbd5b commit cc1e79b
Show file tree
Hide file tree
Showing 10 changed files with 885 additions and 386 deletions.
5 changes: 3 additions & 2 deletions content_checkers/overseerr.py
Original file line number Diff line number Diff line change
Expand Up @@ -62,9 +62,10 @@ def fetch_overseerr_wanted_content(overseerr_url: str, overseerr_api_key: str, t

while True:
try:
#logging.debug(f"Fetching Overseerr requests page {page}")
request_url = get_url(overseerr_url, f"/api/v1/request?take={take}&skip={skip}&filter=approved")
logging.debug(f"Fetching Overseerr requests with URL: {request_url}")
response = api.get(
get_url(overseerr_url, f"/api/v1/request?take={take}&skip={skip}"),
request_url,
headers=headers,
timeout=REQUEST_TIMEOUT
)
Expand Down
2 changes: 1 addition & 1 deletion queues/scraping_queue.py
Original file line number Diff line number Diff line change
Expand Up @@ -196,7 +196,7 @@ def scrape_with_fallback(self, item, is_multi_pack, queue_manager, skip_filter=F
logging.info(f"Filtered out {len(results) - len(filtered_results)} results due to not wanted magnets/URLs")
results = filtered_results

is_anime = True if 'anime' in item['genres'] else False
is_anime = True if item.get('genres') and 'anime' in item['genres'] else False

# For episodes, filter by exact season/episode match
if not is_anime:
Expand Down
109 changes: 108 additions & 1 deletion routes/database_routes.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,8 @@
from .models import admin_required
from utilities.plex_functions import remove_file_from_plex
from database.database_reading import get_media_item_by_id
import os
from datetime import datetime

database_bp = Blueprint('database', __name__)

Expand Down Expand Up @@ -341,4 +343,109 @@ def apply_parsed_versions():
})
except Exception as e:
logging.error(f"Error applying parsed versions: {str(e)}")
return jsonify({'success': False, 'error': str(e)}), 500
return jsonify({'success': False, 'error': str(e)}), 500

@database_bp.route('/watch_history', methods=['GET'])
@admin_required
def watch_history():
    """Render the watch-history debug page.

    Reads the standalone ``watch_history.db`` SQLite database (populated by
    the Plex watch-history sync) and renders its rows, optionally filtered
    by content type and sorted by a whitelisted column.

    Query parameters:
        type:  'movie', 'episode', or 'all' (default 'all')
        sort:  'title' or 'watched_at' (default 'watched_at')
        order: 'asc' or 'desc' (default 'desc')
    """
    try:
        # Locate the standalone watch-history database.
        db_dir = os.environ.get('USER_DB_CONTENT', '/user/db_content')
        db_path = os.path.join(db_dir, 'watch_history.db')

        if not os.path.exists(db_path):
            flash("Watch history database not found. Please sync Plex watch history first.", "warning")
            return render_template('watch_history.html', items=[])

        # Filter/sort parameters from the query string.
        content_type = request.args.get('type', 'all')    # 'movie', 'episode', or 'all'
        sort_by = request.args.get('sort', 'watched_at')  # 'title' or 'watched_at'
        sort_order = request.args.get('order', 'desc')    # 'asc' or 'desc'

        # Whitelist the sort inputs: they are interpolated into the SQL
        # text below, so unchecked values would be an SQL injection vector.
        if sort_by not in ('title', 'watched_at'):
            sort_by = 'watched_at'
        if sort_order.lower() not in ('asc', 'desc'):
            sort_order = 'desc'

        conn = sqlite3.connect(db_path)
        try:
            cursor = conn.cursor()

            # Build query; the type filter uses a bound parameter.
            query = """
                SELECT title, type, watched_at, season, episode, show_title, source
                FROM watch_history
                WHERE 1=1
            """
            params = []

            if content_type != 'all':
                query += " AND type = ?"
                params.append(content_type)

            # Safe: sort_by / sort_order were validated against whitelists above.
            query += f" ORDER BY {sort_by} {sort_order}"

            cursor.execute(query, params)
            items = cursor.fetchall()
        finally:
            # Always release the connection, even if the query fails.
            conn.close()

        # Convert rows to dicts for easier template handling.
        formatted_items = []
        for title, type_, watched_at, season, episode, show_title, source in items:
            # Normalize the stored timestamp for display; narrow except so
            # unrelated errors are not silently swallowed.
            try:
                watched_at = datetime.strptime(watched_at, '%Y-%m-%d %H:%M:%S').strftime('%Y-%m-%d %H:%M')
            except (TypeError, ValueError):
                watched_at = 'Unknown'

            # Episodes display as "Show - SxxEyy - Title". Guard against NULL
            # season/episode values, which would crash the :02d format spec.
            if type_ == 'episode' and show_title and season is not None and episode is not None:
                display_title = f"{show_title} - S{season:02d}E{episode:02d} - {title}"
            else:
                display_title = title

            formatted_items.append({
                'title': display_title,
                'type': type_,
                'watched_at': watched_at,
                'source': source
            })

        return render_template('watch_history.html',
                               items=formatted_items,
                               content_type=content_type,
                               sort_by=sort_by,
                               sort_order=sort_order)

    except Exception as e:
        logging.error(f"Error in watch history route: {str(e)}")
        flash(f"Error retrieving watch history: {str(e)}", "error")
        return render_template('watch_history.html', items=[])

@database_bp.route('/watch_history/clear', methods=['POST'])
@admin_required
def clear_watch_history():
    """Delete all rows from the watch_history table and reset its rowid counter.

    Returns a JSON payload: ``{'success': True}`` on success, or
    ``{'success': False, 'error': <message>}`` on failure.
    """
    try:
        # Locate the standalone watch-history database.
        db_dir = os.environ.get('USER_DB_CONTENT', '/user/db_content')
        db_path = os.path.join(db_dir, 'watch_history.db')

        if not os.path.exists(db_path):
            return jsonify({'success': False, 'error': 'Watch history database not found'})

        conn = sqlite3.connect(db_path)
        try:
            cursor = conn.cursor()

            # Clear the watch history table.
            cursor.execute('DELETE FROM watch_history')

            # Reset the AUTOINCREMENT counter. Parameterized instead of
            # "watch_history" in double quotes, which relied on SQLite's
            # fragile double-quoted-string-literal fallback.
            cursor.execute('DELETE FROM sqlite_sequence WHERE name = ?', ('watch_history',))

            conn.commit()
        finally:
            # Close the connection even if a statement fails, so errors
            # don't leak the handle.
            conn.close()

        logging.info("Watch history cleared successfully")
        return jsonify({'success': True})

    except Exception as e:
        logging.error(f"Error clearing watch history: {str(e)}")
        return jsonify({'success': False, 'error': str(e)})
61 changes: 61 additions & 0 deletions routes/performance_routes.py
Original file line number Diff line number Diff line change
Expand Up @@ -125,3 +125,64 @@ def get_cpu_metrics():

except Exception as e:
return jsonify({'error': str(e)}), 500

@performance_bp.route('/api/performance/memory')
def get_memory_metrics():
    """Get memory performance metrics from the performance log file.

    Query parameters:
        hours: how far back to look (default 1 hour)
        limit: maximum number of entries to return (default 60)

    Returns JSON with ``summary`` statistics (avg/max/min RSS and VMS,
    average system memory used, sample count) and the chronologically
    sorted ``entries`` list.
    """
    log_dir = os.environ.get('USER_LOGS', '/user/logs')
    log_file = os.path.join(log_dir, 'performance_log.json')

    # Get optional time range parameters.
    hours = request.args.get('hours', type=int, default=1)   # Default to last hour
    limit = request.args.get('limit', type=int, default=60)  # Default to 60 entries
    cutoff_time = datetime.now() - timedelta(hours=hours)

    try:
        entries = []
        if os.path.exists(log_file):
            with open(log_file, 'r') as f:
                for line in f:
                    try:
                        entry = json.loads(line.strip())
                        # Only process memory metric entries.
                        if entry.get('type') not in ['basic_metrics', 'detailed_memory']:
                            continue

                        entry_time = datetime.fromisoformat(entry['timestamp'])
                        if entry_time >= cutoff_time:
                            entries.append(entry)
                    except (json.JSONDecodeError, KeyError, ValueError):
                        # Skip malformed lines rather than failing the request.
                        continue

        # Sort chronologically, THEN keep the most recent `limit` entries.
        # The previous version broke out of the file scan as soon as `limit`
        # entries were collected, which (with an append-only log) returned
        # the OLDEST entries in the window and left the dashboard chart
        # stuck showing data from hours ago.
        entries.sort(key=lambda x: x.get('timestamp', ''))
        entries = entries[-limit:]

        # Calculate summary statistics over the returned window.
        summary = {}
        if entries:
            memory_metrics = [e['metrics'] for e in entries if 'metrics' in e]
            if memory_metrics:
                rss_values = [m.get('memory_rss', 0) for m in memory_metrics]
                vms_values = [m.get('memory_vms', 0) for m in memory_metrics]
                system_memory_used = [m.get('system_memory_used', 0) for m in memory_metrics]

                summary = {
                    'avg_rss_mb': sum(rss_values) / len(rss_values),
                    'max_rss_mb': max(rss_values),
                    'min_rss_mb': min(rss_values),
                    'avg_vms_mb': sum(vms_values) / len(vms_values),
                    'max_vms_mb': max(vms_values),
                    'min_vms_mb': min(vms_values),
                    'avg_system_memory_used': sum(system_memory_used) / len(system_memory_used),
                    'samples': len(memory_metrics)
                }

        return jsonify({
            'summary': summary,
            'entries': entries
        })

    except Exception as e:
        return jsonify({'error': str(e)}), 500
2 changes: 1 addition & 1 deletion routes/scraper_routes.py
Original file line number Diff line number Diff line change
Expand Up @@ -236,7 +236,7 @@ def add_torrent_to_debrid():
return jsonify({'error': message}), 400

# Check if symlinking is enabled
if get_setting('File Management', 'file_collection_management') == 'Symlinked/Local':
if get_setting('File Management', 'file_collection_management') == 'Symlinked/Local' or 1==1:
try:
# Convert media type to movie_or_episode format
movie_or_episode = 'episode' if media_type == 'tv' or media_type == 'show' else 'movie'
Expand Down
141 changes: 48 additions & 93 deletions templates/performance/dashboard.html
Original file line number Diff line number Diff line change
Expand Up @@ -168,7 +168,6 @@ <h6>Active Threads <span id="thread-count" class="badge bg-secondary">0</span></
const entries = data.entries;
const basicMetrics = entries.findLast(entry => entry.type === 'basic_metrics');
const detailedMemory = entries.findLast(entry => entry.memory);
const memoryGrowthEntries = entries.filter(entry => entry.type === 'basic_metrics').slice(-10);
const resourceHandles = entries.findLast(entry => entry.type === 'file_descriptors');

if (basicMetrics) {
Expand All @@ -184,105 +183,13 @@ <h6>Active Threads <span id="thread-count" class="badge bg-secondary">0</span></
document.getElementById('cpu-details').innerHTML =
`<strong>System Time:</strong> ${metrics.cpu_system_time.toFixed(2)}s ` +
`<strong>User Time:</strong> ${metrics.cpu_user_time.toFixed(2)}s`;

// Update Memory Usage
const memoryRssMB = metrics.memory_rss || 0;
const memoryVmsMB = metrics.memory_vms || 0;
const memoryPercent = metrics.system_memory_used || 0;
const memoryBar = document.getElementById('memory-progress');
const memoryText = document.getElementById('memory-text');

memoryBar.style.width = `${Math.min(memoryPercent, 100)}%`;
memoryText.textContent = `${memoryPercent.toFixed(1)}%`;

document.getElementById('memory-details').innerHTML =
`<strong>RSS Memory:</strong> ${memoryRssMB.toFixed(2)} MB ` +
`<strong>Virtual Memory:</strong> ${memoryVmsMB.toFixed(2)} MB ` +
`<strong>Swap Used:</strong> ${metrics.swap_used.toFixed(2)} MB`;
}

// Memory Analysis
if (detailedMemory) {
const memoryData = detailedMemory.memory;

// Memory by Type
const anonymousHtml = `
<div class="memory-type">
<h6>Anonymous Memory</h6>
<div class="memory-stats">
<span>Size: ${memoryData.anonymous.formatted_size}</span>
<span>Count: ${memoryData.anonymous.count.toLocaleString()} mappings</span>
</div>
</div>`;

// Enhanced file-backed memory section
const fileBackedHtml = `
<div class="memory-type">
<h6>File-backed Memory</h6>
<div class="memory-stats">
<span>Total Size: ${memoryData.file_backed.formatted_size}</span>
<span>Mappings: ${memoryData.file_backed.count.toLocaleString()}</span>
</div>
<div class="file-details">
<h6 class="mt-3">File Details</h6>
<div class="file-list">
${memoryData.open_files.files.map(file => `
<div class="file-item">
<div class="file-path">${file.path}</div>
<div class="file-size">${formatBytes(file.size)}</div>
</div>
`).join('')}
</div>
</div>
</div>`;

// Network connections section
const networkHtml = `
<div class="memory-type">
<h6>Network Connections</h6>
<div class="memory-stats">
<span>Total: ${memoryData.network.total_connections}</span>
${Object.entries(memoryData.network.states).map(([state, count]) =>
`<span>${state}: ${count}</span>`
).join('')}
</div>
</div>`;

document.getElementById('memory-analysis').innerHTML = anonymousHtml + fileBackedHtml + networkHtml;
}

// Memory Growth over time
if (memoryGrowthEntries.length > 0) {
const growthHtml = memoryGrowthEntries
.slice() // Create a copy of the array
.reverse() // Reverse to get most recent first
.slice(0, 5) // Limit to 5 entries
.map(entry => {
const metrics = entry.metrics;
const timestamp = new Date(entry.timestamp).toLocaleString();
return `
<div class="memory-growth-item">
<span class="timestamp">${timestamp}</span>
<div class="memory-values">
<span class="rss">RSS: ${metrics.memory_rss.toFixed(2)} MB</span>
<span class="vms">VMS: ${metrics.memory_vms.toFixed(2)} MB</span>
<span class="swap">Swap: ${metrics.swap_used.toFixed(2)} MB</span>
</div>
</div>`;
}).join('');

document.getElementById('memory-growth-text').innerHTML = growthHtml;
}

// Update Memory Growth Chart
updateMemoryChart(memoryGrowthEntries);

// Resource Handles
if (resourceHandles) {
const metrics = resourceHandles.metrics;
document.getElementById('open-files-count').textContent = metrics.open_files_count || 0;

// Format file types as "extension: count" pairs
const fileTypesText = metrics.file_types ?
Object.entries(metrics.file_types)
.map(([ext, count]) => `${ext}: ${count}`)
Expand Down Expand Up @@ -341,6 +248,54 @@ <h6>Network Connections</h6>
}
})
.catch(error => console.error('Error fetching CPU data:', error));

// Fetch memory data separately
fetch('/performance/api/performance/memory?hours=1')
.then(response => response.json())
.then(data => {
if (data.summary) {
// Update memory summary statistics
const memoryBar = document.getElementById('memory-progress');
const memoryText = document.getElementById('memory-text');
const systemMemoryUsed = data.summary.avg_system_memory_used || 0;

memoryBar.style.width = `${Math.min(systemMemoryUsed, 100)}%`;
memoryText.textContent = `${systemMemoryUsed.toFixed(1)}%`;

// Get the latest entry
const latestEntry = data.entries[data.entries.length - 1];
if (latestEntry && latestEntry.metrics) {
const metrics = latestEntry.metrics;
document.getElementById('memory-details').innerHTML =
`<strong>RSS Memory:</strong> ${metrics.memory_rss.toFixed(2)} MB ` +
`<strong>Virtual Memory:</strong> ${metrics.memory_vms.toFixed(2)} MB ` +
`<strong>Swap Used:</strong> ${metrics.swap_used.toFixed(2)} MB`;
}

// Update memory growth text with recent entries
const recentEntries = data.entries.slice(-5);
const growthHtml = recentEntries
.map(entry => {
const metrics = entry.metrics;
const timestamp = new Date(entry.timestamp).toLocaleString();
return `
<div class="memory-growth-item">
<span class="timestamp">${timestamp}</span>
<div class="memory-values">
<span class="rss">RSS: ${metrics.memory_rss.toFixed(2)} MB</span>
<span class="vms">VMS: ${metrics.memory_vms.toFixed(2)} MB</span>
<span class="swap">Swap: ${metrics.swap_used.toFixed(2)} MB</span>
</div>
</div>`;
}).join('');

document.getElementById('memory-growth-text').innerHTML = growthHtml;

// Update memory history chart
updateMemoryChart(data.entries);
}
})
.catch(error => console.error('Error fetching memory data:', error));
}

function updateCpuChart(entries) {
Expand Down
Loading

0 comments on commit cc1e79b

Please sign in to comment.