diff --git a/obsidian_import/__init__.py b/obsidian_import/__init__.py
index 418f9b2..6ee0e0c 100644
--- a/obsidian_import/__init__.py
+++ b/obsidian_import/__init__.py
@@ -308,7 +308,8 @@ def import_data(obsidian_path: Path, dry_run=True):
             )
             continue
         rows = load_csv_file(
-            import_def['path'], sniff=not import_def.get('standard_variant'),
+            import_def['path'],
+            sniff=not import_def.get('standard_variant'),
         )
         logger.info('Loaded CSV with %d lines', len(rows))
         num_files_updated = import_def['import_rows'](vault, rows)
diff --git a/personal_data/fetchers/wanikani_lessons.py b/personal_data/fetchers/wanikani_lessons.py
index 7d38df0..4c44115 100644
--- a/personal_data/fetchers/wanikani_lessons.py
+++ b/personal_data/fetchers/wanikani_lessons.py
@@ -5,19 +5,16 @@
 from collections.abc import Iterator, Mapping
 
 import requests
 
-# Import the base Scraper class; adjust the import if your code structure differs.
-from personal_data.data import Scraper
+from .. import secrets
+from personal_data.data import DeduplicateMode, Scraper
 
 logger = logging.getLogger(__name__)
 
 
 @dataclasses.dataclass(frozen=True)
 class WaniKaniLessonsFetcher(Scraper):
-    api_token: str
-
-    @staticmethod
-    def dataset_name() -> str:
-        return 'wanikani_lessons'
+    dataset_name = 'wanikani_lessons'
+    deduplicate_mode = DeduplicateMode.BY_ALL_COLUMNS
     def scrape(self) -> Iterator[Mapping[str, object]]:
         """
@@ -26,30 +23,11 @@ class WaniKaniLessonsFetcher(Scraper):
         """
         url = 'https://api.wanikani.com/v2/assignments'
         headers = {
-            'Authorization': f'Bearer {self.api_token}',
+            'Authorization': f'Bearer {secrets.wanikani_api_key()}',
             'Wanikani-Revision': '20170710',
         }
-        response = requests.get(url, headers=headers)
-        if response.status_code != 200:
-            logger.error('Error retrieving assignments: %s', response.text)
-            return
+        response = self.session.get(url, headers=headers)
+        response.raise_for_status()
         data = response.json()
-        # Check that 'data' key exists in the JSON response.
-        assignments = data.get('data', [])
-        for assignment in assignments:
-            assignment_data = assignment.get('data', {})
-            # Only yield if unlocked_at is available.
-            unlocked_at = assignment_data.get('unlocked_at')
-            if unlocked_at:
-                # Convert unlocked_at ISO8601 string (assume 'Z' for UTC) to a datetime object.
-                try:
-                    dt = datetime.datetime.fromisoformat(
-                        unlocked_at.replace('Z', '+00:00'),
-                    )
-                except Exception as e:
-                    logger.error("Error parsing unlocked_at '%s': %s", unlocked_at, e)
-                    continue
-                yield {
-                    'subject_id': assignment_data.get('subject_id'),
-                    'unlocked_at': dt,
-                }
+        for assignment in data.get('data', []):
+            yield assignment['data']
diff --git a/personal_data/main.py b/personal_data/main.py
index 30e916e..871c43f 100644
--- a/personal_data/main.py
+++ b/personal_data/main.py
@@ -97,7 +97,9 @@ def get_cookiejar(use_cookiejar: bool):
         cookiejar = browsercookie.firefox()
         if len(cookiejar) > 10:
             return cookiejar
-        browsercookie.firefox(['/home/jmaa/.cachy/mbui5xg7.default-release/cookies.sqlite'])
+        browsercookie.firefox(
+            ['/home/jmaa/.cachy/mbui5xg7.default-release/cookies.sqlite'],
+        )
         if len(cookiejar) > 10:
             return cookiejar
     logger.warning('No cookiejar is used')
diff --git a/personal_data/secrets.py b/personal_data/secrets.py
index d099911..e8f81a6 100644
--- a/personal_data/secrets.py
+++ b/personal_data/secrets.py
@@ -71,3 +71,7 @@ JELLYFIN_PASSWORD = secrets.load('JELLYFIN_PASSWORD')
 WITHINGS_CLIENTID = secrets.load('WITHINGS_CLIENTID')
 WITHINGS_SECRET = secrets.load('WITHINGS_SECRET')
 WITHINGS_CALLBACK_URI = secrets.load('WITHINGS_CALLBACK_URI')
+
+# Other
+def wanikani_api_key():
+    return secrets.load_or_fail('WANIKANI_API_KEY')