1
0

Added support for Jellyfin
All checks were successful
Test Python / Test (push) Successful in 32s

This commit is contained in:
Jon Michael Aanes 2024-09-08 20:20:09 +02:00
parent 9cc7018698
commit 6641053beb
Signed by: Jmaa
SSH Key Fingerprint: SHA256:Ab0GfHGCblESJx7JRE4fj4bFy/KRpeLhi41y4pF3sNA
4 changed files with 82 additions and 3 deletions

View File

@ -0,0 +1,73 @@
import dataclasses
import datetime
import logging
import re
import bs4
from typing import Any
from collections.abc import Iterator
from jellyfin_apiclient_python import JellyfinClient
from ..data import DeduplicateMode, Scraper
from .. import secrets, parse_util, html_util, _version
logger = logging.getLogger(__name__)
# NOTE(review): the three constants below reference Steam, not Jellyfin —
# they look like copy-paste leftovers from a Steam achievements scraper and
# are unused in this module. Consider removing; TODO confirm no other module
# imports them first.
URL_SITE_ROOT = 'https://steamcommunity.com/'
URL_GAME_ACHIVEMENTS = URL_SITE_ROOT+'id/{username}/stats/appid/{appid}'
# NOTE(review): '%YYYY' is not a valid strftime directive (parses as '%Y'
# followed by literal 'YYY') — presumably '%d/%m/%Y' was intended; unused
# here, verify before changing.
FORMAT_DATE_HEADER = '%d/%m/%YYYY'
def iterate_series(
    client,
    parent_id: str = 'a656b907eb3a73532e40e44b968d0225',
    user_id: str = 'dd95c1085c1b4e83ba8e8853fbc644ab',
) -> Iterator[dict[str, Any]]:
    """Yield all series items from a Jellyfin library.

    Args:
        client: A connected ``JellyfinClient`` instance.
        parent_id: Id of the library folder to list series from. Defaults to
            the previously hard-coded library id for backward compatibility.
        user_id: Jellyfin user whose library view is queried. Defaults to the
            previously hard-coded user id.

    Yields:
        Raw item dicts exactly as returned in the ``Items`` field of the
        Jellyfin ``user_items`` endpoint response.
    """
    result = client.jellyfin.user_items(params={
        'includeItemTypes': 'Series',
        'parentId': parent_id,
        'userId': user_id,
    })
    yield from result['Items']
def iterate_watched_episodes_of_series(
    client,
    series_id: str,
    user_id: str = 'dd95c1085c1b4e83ba8e8853fbc644ab',
) -> Iterator[dict[str, Any]]:
    """Yield every played (watched) episode belonging to a series.

    Args:
        client: A connected ``JellyfinClient`` instance.
        series_id: Jellyfin item id of the series to search under.
        user_id: Jellyfin user whose play state is consulted. Defaults to the
            previously hard-coded user id for backward compatibility.

    Yields:
        Raw episode dicts from the ``Items`` field of the Jellyfin
        ``user_items`` endpoint response; ``AirTime`` is requested as an
        extra field.
    """
    result = client.jellyfin.user_items(params={
        'filters': 'IsPlayed',         # only episodes marked as played
        'recursive': True,             # search seasons below the series
        'includeItemTypes': 'Episode',
        'parentId': series_id,
        'userId': user_id,
        'fields': 'AirTime',
    })
    yield from result['Items']
@dataclasses.dataclass(frozen=True)
class JellyfinWatchHistoryScraper(Scraper):
    """Scrapes the authenticated user's episode watch history from Jellyfin.

    Connects to the Jellyfin server configured in ``secrets`` and emits one
    row per watched episode across all series in the library.
    """

    # Target dataset (CSV) name for the scraped rows.
    dataset_name = 'show_episodes_watched'
    # Re-running the scraper yields overlapping history; full-row
    # deduplication keeps the dataset append-only without duplicates.
    deduplicate_mode = DeduplicateMode.BY_ALL_COLUMNS

    def scrape(self) -> Iterator[dict[str, Any]]:
        """Yield one flat dict per watched episode.

        Performs network I/O: authenticates against the Jellyfin server
        using credentials from ``secrets``, then walks every series and its
        played episodes. Episodes without an ``IndexNumber`` (e.g. specials)
        are skipped.
        """
        client = JellyfinClient()
        client.config.app(
            'personal_data',
            _version.__version__,
            'test_machine',
            'unique_id_1',
        )
        client.config.data['auth.ssl'] = False
        client.auth.connect_to_address(secrets.JELLYFIN_URL)
        client.auth.login(
            secrets.JELLYFIN_URL,
            secrets.JELLYFIN_USERNAME,
            secrets.JELLYFIN_PASSWORD,
        )

        # BUG FIX: the original ended each loop with `del episode_data` /
        # `del series_data, series_id`, which raised NameError whenever the
        # corresponding loop iterated zero times (empty library, or a series
        # with no watched episodes). The `del`s had no other effect, so they
        # are simply removed.
        for series_data in iterate_series(client):
            series_id = series_data['Id']
            for episode_data in iterate_watched_episodes_of_series(
                client, series_id,
            ):
                episode_index = episode_data.get('IndexNumber')
                if episode_index is None:
                    continue

                yield {
                    'series.name': episode_data['SeriesName'],
                    'season.name': episode_data['SeasonName'],
                    'episode.index': int(episode_index),
                    'episode.name': episode_data['Name'],
                    'me.last_played_time': episode_data['UserData']['LastPlayedDate'],
                    # RunTimeTicks are 100-nanosecond units; 10^7 ticks = 1 s.
                    'episode.duration_seconds': episode_data['RunTimeTicks'] / 10000000,
                    'episode.premiere_date': episode_data.get('PremiereDate'),
                }

View File

@ -124,7 +124,7 @@ def main(
OUTPUT_PATH / f'{scraper.dataset_name}.csv', OUTPUT_PATH / f'{scraper.dataset_name}.csv',
result_rows, result_rows,
deduplicate_mode=scraper.deduplicate_mode, deduplicate_mode=scraper.deduplicate_mode,
deduplicate_ignore_columns=scraper.deduplicate_ignore_columns, deduplicate_ignore_columns=scraper.deduplicate_ignore_columns(),
) )
logger.info('Scraper done: %s', scraper.dataset_name) logger.info('Scraper done: %s', scraper.dataset_name)

View File

@ -23,7 +23,7 @@ KUCOIN_KEY = load_secret('KUCOIN_KEY')
KUCOIN_SECRET = load_secret('KUCOIN_SECRET') KUCOIN_SECRET = load_secret('KUCOIN_SECRET')
KUCOIN_PASS = load_secret('KUCOIN_PASS') KUCOIN_PASS = load_secret('KUCOIN_PASS')
# KRAKEN # Kraken
KRAKEN_KEY = load_secret('KRAKEN_KEY') KRAKEN_KEY = load_secret('KRAKEN_KEY')
KRAKEN_SECRET = load_secret('KRAKEN_SECRET') KRAKEN_SECRET = load_secret('KRAKEN_SECRET')
@ -35,3 +35,8 @@ HOME_ASSISTANT_LLAK = load_secret('HOME_ASSISTANT_LLAK')
MAILGUN_API_KEY = load_secret('MAILGUN_API_KEY') MAILGUN_API_KEY = load_secret('MAILGUN_API_KEY')
MAILGUN_DOMAIN = load_secret('MAILGUN_DOMAIN') MAILGUN_DOMAIN = load_secret('MAILGUN_DOMAIN')
MAILGUN_RECIPIENT = load_secret('MAILGUN_RECIPIENT') MAILGUN_RECIPIENT = load_secret('MAILGUN_RECIPIENT')
# Jellyfin
JELLYFIN_URL = load_secret('JELLYFIN_URL')
JELLYFIN_USERNAME = load_secret('JELLYFIN_USERNAME')
JELLYFIN_PASSWORD = load_secret('JELLYFIN_PASSWORD')

View File

@ -89,6 +89,7 @@ def deduplicate_by_ignoring_certain_fields(
Output order is stable. Output order is stable.
""" """
to_remove = set() to_remove = set()
for idx1, first in enumerate(dicts): for idx1, first in enumerate(dicts):
for idx2, second in enumerate(dicts[idx1 + 1 :], idx1 + 1): for idx2, second in enumerate(dicts[idx1 + 1 :], idx1 + 1):
@ -163,7 +164,7 @@ def load_csv_file(csv_file: Path) -> list[frozendict]:
def extend_csv_file( def extend_csv_file(
csv_file: Path, csv_file: Path,
new_dicts: list[dict], new_dicts: list[dict[str,typing.Any]],
deduplicate_mode: data.DeduplicateMode, deduplicate_mode: data.DeduplicateMode,
deduplicate_ignore_columns: list[str], deduplicate_ignore_columns: list[str],
) -> dict: ) -> dict: