Ruff

parent 6d1f23b1a0
commit a301a37f2b

@@ -4,12 +4,12 @@ Sub-module for importing time-based data into Obsidian.
 """

 import dataclasses
-from zoneinfo import ZoneInfo
 import datetime
-from collections.abc import Iterator, Iterable
+from collections.abc import Iterable, Iterator
 from logging import getLogger
 from pathlib import Path
 from typing import Any
+from zoneinfo import ZoneInfo

 from personal_data.activity import (
     ActivitySample,

@@ -120,6 +120,7 @@ class EventContent:
     subject: str
     comment: str

+
 def import_activity_sample_csv(
     vault: ObsidianVault,
     rows: Rows,

@@ -131,7 +132,9 @@ def import_activity_sample_csv(
     if group_category is not None:
         samples = merge_adjacent_samples(list(samples), group_category)

-    timezone = ZoneInfo('Europe/Copenhagen') # TODO: Parameterize in an intelligent manner
+    timezone = ZoneInfo(
+        'Europe/Copenhagen',
+    )  # TODO: Parameterize in an intelligent manner

     samples_per_date: dict[datetime.date, list[RealizedActivitySample]] = {}
     for sample in samples:

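The hunks above only reorder imports and reflow the hard-coded Europe/Copenhagen zone (still flagged TODO), but the zone is what decides which daily note a sample lands in, since the surrounding code groups samples per calendar date. A minimal sketch of that kind of per-date bucketing; the sample values and the `setdefault`-based grouping are illustrative assumptions, not the module's actual implementation:

```python
import datetime
from zoneinfo import ZoneInfo

timezone = ZoneInfo('Europe/Copenhagen')

# Made-up aware timestamps standing in for activity samples.
samples = [
    datetime.datetime(2024, 6, 30, 22, 30, tzinfo=datetime.UTC),  # 00:30 on 1 July, local time
    datetime.datetime(2024, 6, 30, 12, 0, tzinfo=datetime.UTC),   # 14:00 on 30 June, local time
]

# Bucket each sample under its local calendar date.
samples_per_date: dict[datetime.date, list[datetime.datetime]] = {}
for sample in samples:
    local_date = sample.astimezone(timezone).date()
    samples_per_date.setdefault(local_date, []).append(sample)

print(sorted(samples_per_date))
# [datetime.date(2024, 6, 30), datetime.date(2024, 7, 1)]
```

The same UTC instant can end up in different daily notes depending on the zone, which is why the TODO about parameterizing it matters.
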
@@ -7,8 +7,8 @@ from logging import getLogger
 from pathlib import Path
 from typing import Any
 from zoneinfo import ZoneInfo
-import enforce_typing

+import enforce_typing
 import frontmatter
 import marko
 import marko.md_renderer

@@ -32,6 +32,7 @@ class Event:
         assert ':' not in self.subject
         assert '/' not in self.subject

+
 @dataclasses.dataclass(frozen=True)
 class FileContents:
     frontmatter: dict[str, Any]

@@ -57,6 +58,7 @@ FILE_FORMAT = """
 {blocks_post_events}
 """

+
 class ObsidianVault:
     def __init__(
         self,

@@ -136,7 +138,9 @@ class ObsidianVault:
         return contents.events

     def _load_date_contents(self, date: datetime.date) -> FileContents | None:
-        timezone = ZoneInfo('Europe/Copenhagen') # TODO: Parameterize in an intelligent manner
+        timezone = ZoneInfo(
+            'Europe/Copenhagen',
+        )  # TODO: Parameterize in an intelligent manner

         file_path = self._date_file_path(date)
         text = self._load_file_text(file_path) or self._load_file_text(

@@ -149,9 +153,16 @@ class ObsidianVault:
         ast = MARKDOWN_PARSER.parse(str(file_frontmatter))
         (pre_events, list_block_items, post_events) = find_events_list_block(ast)
         events = frozenset(
-            parse_event_string(list_item, date, timezone) for list_item in list_block_items
+            parse_event_string(list_item, date, timezone)
+            for list_item in list_block_items
         )
-        return FileContents(file_frontmatter.metadata, pre_events, events, post_events, timezone)
+        return FileContents(
+            file_frontmatter.metadata,
+            pre_events,
+            events,
+            post_events,
+            timezone,
+        )

     def _save_date_contents(self, date: datetime.date, contents: FileContents) -> None:
         blocks_pre_events = ''.join(

@@ -168,7 +179,9 @@ class ObsidianVault:
         date_sentinel = datetime.datetime(1900, 1, 1, 1, 1, 1, tzinfo=contents.timezone)
         events.sort(key=lambda x: x.start_time or x.end_time or date_sentinel)

-        formatted_events = ['- ' + format_event_string(e, tz = contents.timezone) for e in events]
+        formatted_events = [
+            '- ' + format_event_string(e, tz=contents.timezone) for e in events
+        ]
         formatted_events = list(dict.fromkeys(formatted_events))
         block_events = '\n'.join(formatted_events)

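The unchanged `formatted_events = list(dict.fromkeys(formatted_events))` line next to this hunk is an order-preserving de-duplication: dicts keep insertion order, and `fromkeys` keeps only the first occurrence of each key. A standalone sketch (the event strings are made up):

```python
# dict.fromkeys drops later duplicates while keeping the original order,
# so repeated event lines collapse to a single bullet without reshuffling.
formatted_events = [
    '- 08:00 - 09:00: Standup',
    '- 09:00 - 10:00: Review',
    '- 08:00 - 09:00: Standup',
]
formatted_events = list(dict.fromkeys(formatted_events))
print(formatted_events)
# ['- 08:00 - 09:00: Standup', '- 09:00 - 10:00: Review']
```
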
@@ -232,7 +245,9 @@ def find_events_list_block(ast) -> tuple[list, list[str], list]:
             isinstance(block, marko.block.Heading)
             and block.children[0].children.lower() == 'events'
         ):
-            events_block = ast.children[block_i + 1] if block_i + 1 < len(ast.children) else None
+            events_block = (
+                ast.children[block_i + 1] if block_i + 1 < len(ast.children) else None
+            )
             if isinstance(events_block, marko.block.List):
                 offset = 2
                 event_texts = [

@@ -278,9 +293,12 @@ RE_LINK_WIKI = r'\[\[(?:[^\]:]*\/)?([^\]:/]*)\]\]'
 RE_TIME_FORMAT = RE_TIME + r'(?:\s*\-\s*' + RE_TIME + r')?'


-def parse_event_string(event_str: str, date: datetime.date, timezone: ZoneInfo) -> Event:
-    """Parses event string for the given date.
-    """
+def parse_event_string(
+    event_str: str,
+    date: datetime.date,
+    timezone: ZoneInfo,
+) -> Event:
+    """Parses event string for the given date."""
     if m := re.match(
         r'^\s*'
         + RE_TIME_FORMAT

@@ -309,7 +327,9 @@ def parse_event_string(event_str: str, date: datetime.date, timezone: ZoneInfo)
         logger.info('Could not parse format: %s', event_str)
         return Event(None, None, None, None, event_str)

-    start = datetime.datetime.combine(date, start_time, timezone).astimezone(datetime.UTC)
+    start = datetime.datetime.combine(date, start_time, timezone).astimezone(
+        datetime.UTC,
+    )
     end = datetime.datetime.combine(date, end_time, timezone).astimezone(datetime.UTC)

     return Event(start, end, m.group(3), m.group(4), m.group(5))

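The last hunk above only rewraps the `datetime.datetime.combine(...).astimezone(datetime.UTC)` call. As context for that call: `combine` attaches a timezone to the naive wall-clock time parsed from an event line, and `astimezone(datetime.UTC)` then normalises it, so DST shifts are handled by `zoneinfo`. A small sketch with made-up values (requires Python 3.11+ for `datetime.UTC`):

```python
import datetime
from zoneinfo import ZoneInfo

date = datetime.date(2024, 3, 31)         # the day Denmark switches to summer time
start_time = datetime.time(3, 30)         # wall-clock time parsed from an event line
timezone = ZoneInfo('Europe/Copenhagen')

# Attach the zone while combining, then normalise to UTC.
start = datetime.datetime.combine(date, start_time, timezone).astimezone(
    datetime.UTC,
)
print(start.isoformat())  # 2024-03-31T01:30:00+00:00 (03:30 CEST is 01:30 UTC)
```
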
@@ -21,7 +21,8 @@ URL_USER_GAME_TROPHIES = URL_API_ROOT + 'trophies/{game_id}/{psn_id}'
 URL_GAMES_OVERVIEW = URL_API_ROOT + '{psn_id}'


-PSN_PROFILES_DEFAULT_TIMEZONE=datetime.UTC
+PSN_PROFILES_DEFAULT_TIMEZONE = datetime.UTC

+
 def game_psnprofiles_id_from_url(relative_url: str) -> int:
     m = re.match(r'/(?:trophy|trophies)/(\d+)\-(?:[\w-]+)(/[\w-]*)?', relative_url)

@@ -197,7 +198,10 @@ class PsnProfiles(Scraper):
             if 'Missing\nTimestamp' in cells[2].get_text().strip():
                 continue
             cells[2].span.span.nobr.sup.extract()
-            gotten_at = parse_util.parse_time(cells[2].get_text(), timezone=PSN_PROFILES_DEFAULT_TIMEZONE)
+            gotten_at = parse_util.parse_time(
+                cells[2].get_text(),
+                timezone=PSN_PROFILES_DEFAULT_TIMEZONE,
+            )

             yield {
                 'game.name': game_name,

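The `cells[2].span.span.nobr.sup.extract()` call in the hunk above uses BeautifulSoup's `extract()`, which removes a tag from the tree so a following `get_text()` no longer includes it. A sketch against invented markup (the real PSNProfiles cell structure is assumed, not reproduced, here):

```python
from bs4 import BeautifulSoup

# Invented stand-in for a trophy timestamp cell with a footnote marker.
cell = BeautifulSoup(
    '<td><span><span><nobr>2nd Mar 2024<sup>*</sup></nobr></span></span></td>',
    'html.parser',
).td

cell.span.span.nobr.sup.extract()  # drop the <sup> footnote marker
print(cell.get_text())             # '2nd Mar 2024'
```
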
@@ -46,7 +46,7 @@ def try_parse(text: str, fmt: str) -> datetime.datetime | None:
     return time


-def parse_time(text: str, timezone = LOCAL_TIMEZONE) -> datetime.datetime:
+def parse_time(text: str, timezone=LOCAL_TIMEZONE) -> datetime.datetime:
     text = text.replace('\n', ' ')
     text = text.strip()

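Only whitespace changes in `parse_time` here, but the hunk header shows the neighbouring `try_parse(text, fmt) -> datetime.datetime | None` helper. Its body is not part of this diff; a hedged sketch of the usual strptime-or-None pattern such a signature suggests, not the repository's actual implementation:

```python
import datetime


def try_parse(text: str, fmt: str) -> datetime.datetime | None:
    # Return the parsed datetime, or None when the format does not match,
    # so a caller can fall through a list of candidate formats.
    try:
        return datetime.datetime.strptime(text, fmt)
    except ValueError:
        return None


print(try_parse('31 Mar 2024 13:37', '%d %b %Y %H:%M'))  # 2024-03-31 13:37:00
print(try_parse('not a timestamp', '%d %b %Y %H:%M'))    # None
```
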
@@ -1,10 +1,8 @@
 import datetime

 import pytest

 from obsidian_import import obsidian
-
-from .test_obsidian_vault import EXAMPLES, EXAMPLE_DATE, EXAMPLE_TIMEZONE
+from .test_obsidian_vault import EXAMPLE_DATE, EXAMPLE_TIMEZONE, EXAMPLES

-
 def test_parse_event_string():

@@ -15,8 +13,10 @@ def test_parse_event_string():
     assert event.subject == 'Azumanga Daioh'
     assert event.start_time is not None

+
 @pytest.mark.parametrize('event', EXAMPLES)
 def test_format_preserves_information(event: obsidian.Event):
     formatted = obsidian.format_event_string(event, EXAMPLE_TIMEZONE)
-    assert obsidian.parse_event_string(formatted, EXAMPLE_DATE,
-                                       EXAMPLE_TIMEZONE) == event
+    assert (
+        obsidian.parse_event_string(formatted, EXAMPLE_DATE, EXAMPLE_TIMEZONE) == event
+    )

@@ -48,4 +48,6 @@ def test_write_internally():
     expected_path = Path('test/daily/2020-01-01.md')
     assert expected_path in vault.internal_file_text_cache

-    assert vault.internal_file_text_cache[expected_path].data.startswith(b'---\naliases:\n')
+    assert vault.internal_file_text_cache[expected_path].data.startswith(
+        b'---\naliases:\n',
+    )