Ruff
parent 6d91e54ced
commit 30bbab2841
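Every hunk below is a mechanical change from the Ruff formatter and linter: over-long calls and signatures are wrapped at the default 88-character limit, binary operators get surrounding spaces, imports are sorted, and blank-line conventions are enforced. A minimal sketch of how a commit like this can be produced, assuming `ruff` is installed and on PATH (the helper name is hypothetical):

import subprocess

def apply_ruff(project_dir: str) -> None:
    # `ruff format` rewraps and normalizes code style;
    # `ruff check --fix` applies safe lint fixes such as import sorting.
    subprocess.run(['ruff', 'format', project_dir], check=True)
    subprocess.run(['ruff', 'check', '--fix', project_dir], check=True)

apply_ruff('.')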
@@ -41,7 +41,9 @@ def iterate_samples_from_rows(rows: Rows) -> Iterator[ActivitySample]:
     for event_data in rows:
         (start_at, end_at) = start_end(event_data, possible_keys)
-        labels = [Label(k, event_data.get(k)) for k in possible_keys.misc if k in event_data]
+        labels = [
+            Label(k, event_data.get(k)) for k in possible_keys.misc if k in event_data
+        ]

         # Create event
         yield ActivitySample(
@@ -246,6 +248,9 @@ def import_data(obsidian_path: Path, dry_run=True):

     num_dirty = len([f for f in vault.internal_file_text_cache.values() if f.is_dirty])
     logger.info('dirty files in cache: %d', num_dirty)
-    logger.info('clean files in cache: %d', len(vault.internal_file_text_cache) - num_dirty)
+    logger.info(
+        'clean files in cache: %d',
+        len(vault.internal_file_text_cache) - num_dirty,
+    )
     if not dry_run:
         vault.flush_cache()
@@ -58,7 +58,12 @@ MIDNIGHT = datetime.time(0, 0, 0)


 class ObsidianVault:
-    def __init__(self, vault_path: Path, read_only: bool = 'silent', allow_invalid_vault=False):
+    def __init__(
+        self,
+        vault_path: Path,
+        read_only: bool = 'silent',
+        allow_invalid_vault=False,
+    ):
         self.vault_path = vault_path
         self.read_only = read_only
         self.internal_file_text_cache: dict[Path, CachedFile] = {}
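The pattern here is the formatter's "magic trailing comma": once a signature exceeds the line limit, each parameter moves to its own line with a trailing comma after the last one, which keeps the list exploded on later runs. The reflow is purely syntactic; the pre-existing `read_only: bool = 'silent'` annotation/default mismatch survives untouched, since formatting never changes semantics. An invented, self-contained illustration of the resulting style:

def make_vault(
    vault_path: str,
    read_only: bool = False,
    allow_invalid_vault: bool = False,
) -> dict:
    # Each parameter sits on its own line; the last keeps a trailing comma.
    return {'path': vault_path, 'read_only': read_only, 'invalid_ok': allow_invalid_vault}

print(make_vault('test'))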
@@ -132,7 +137,9 @@ class ObsidianVault:

     def _load_date_contents(self, date: datetime.date) -> FileContents | None:
         file_path = self._date_file_path(date)
-        text = self._load_file_text(file_path) or self._load_file_text(self._daily_template_path())
+        text = self._load_file_text(file_path) or self._load_file_text(
+            self._daily_template_path(),
+        )
         assert text is not None

         file_frontmatter = frontmatter.loads(text)
@@ -159,13 +166,18 @@ class ObsidianVault:
         block_events = '\n'.join('- ' + format_event_string(e) for e in events)

         post = frontmatter.Post(
             content=FILE_FORMAT.format(
                 blocks_pre_events=blocks_pre_events,
                 blocks_post_events=blocks_post_events,
                 block_events=block_events,
-            ).strip(), metadata=contents.frontmatter)
+            ).strip(),
+            metadata=contents.frontmatter,
+        )

-        self._save_file_text_to_cache(self._date_file_path(date), frontmatter.dumps(post).encode('utf8'))
+        self._save_file_text_to_cache(
+            self._date_file_path(date),
+            frontmatter.dumps(post).encode('utf8'),
+        )

     def _save_file_text_to_cache(self, path: Path, text: bytes) -> None:
         if path not in self.internal_file_text_cache:
@@ -205,6 +217,7 @@ class ObsidianVault:
             f.write(cached_file.data)
             del path, cached_file

+
 def find_events_list_block(ast) -> tuple[list, list[str], list]:
     blocks = ast.children
     for block_i, block in enumerate(blocks):
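The single added line is the blank-line rule at work: PEP 8 (and Ruff's formatter) wants two blank lines before a top-level definition, so a missing one is inserted between the end of the class body and `find_events_list_block`. An invented minimal example of the enforced layout:

class Cache:
    pass


def flush(cache: Cache) -> None:
    # Exactly two blank lines separate the top-level definitions above.
    print('flushing', cache)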
@@ -21,6 +21,7 @@ def get_modules(backend_dir: Path) -> Iterator[str]:
         if name != '__init__':
             yield name


 FETCHER_MODULES_LOADED = False

+
@@ -53,7 +53,7 @@ class PsnProfiles(Scraper):
         logger.info('Found %d games from overview', len(games_rows))

         for idx, (game_id, game_name) in enumerate(reversed(games_ids.items())):
-            cache_duration = datetime.timedelta(days=min(idx+1, 30))
+            cache_duration = datetime.timedelta(days=min(idx + 1, 30))
             yield from self._scrape_game_trophies(game_id, game_name, cache_duration)
             del game_id
             if idx >= MAX_NUMBER_GAMES_TO_PARSE:
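The only change in this hunk is spacing around a binary operator (`idx+1` becomes `idx + 1`); behavior is identical. A trivial check:

# Both spellings compute the same value; only the formatting differs.
for idx in range(3):
    assert min(idx + 1, 30) == min(idx+1, 30)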
@@ -7,7 +7,7 @@ from pathlib import Path
 import requests
 import requests_cache

-from . import data, notification, util, fetchers
+from . import data, fetchers, notification, util

 logger = logging.getLogger(__name__)

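This hunk is Ruff's isort-compatible import sorting (rule family `I`): the names in the relative import are simply alphabetized. The transformation can be reproduced directly:

# Sorting the imported names yields the new import line.
names = ['data', 'notification', 'util', 'fetchers']
print('from . import ' + ', '.join(sorted(names)))  # from . import data, fetchers, notification, util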
@@ -1,3 +1,4 @@
 def test_init():
     import personal_data
+
     assert personal_data.__version__ is not None
@@ -1,7 +1,5 @@
 import datetime
-
 from pathlib import Path
-import pytest

 from obsidian_import import obsidian

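Besides merging the stdlib imports into one block, this hunk drops `import pytest`, presumably via the auto-fixable unused-import rule (F401), since the module never references it. A crude sketch of how such a check can be approximated with the standard library:

import ast

source = 'import pytest\n\nprint("no test helpers needed")\n'
tree = ast.parse(source)
# Names bound by import statements...
imported = {
    alias.asname or alias.name
    for node in ast.walk(tree)
    if isinstance(node, (ast.Import, ast.ImportFrom))
    for alias in node.names
}
# ...versus names actually referenced in the module body.
referenced = {node.id for node in ast.walk(tree) if isinstance(node, ast.Name)}
print(imported - referenced)  # {'pytest'}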
@@ -26,15 +24,21 @@ EXAMPLES = [


 def test_write_internally():
-    vault = obsidian.ObsidianVault(Path('test'), read_only=True, allow_invalid_vault=True)
+    vault = obsidian.ObsidianVault(
+        Path('test'),
+        read_only=True,
+        allow_invalid_vault=True,
+    )
     vault.daily_folder = Path('daily')
     vault.path_format = 'YYYY-MM-DD'
     vault.template_file_path = Path('daily-template-file.md')

-    vault.add_events(datetime.date(2020,1,1), EXAMPLES)
+    vault.add_events(datetime.date(2020, 1, 1), EXAMPLES)
     assert len(vault.internal_file_text_cache) == 2

-    assert vault.internal_file_text_cache[Path('test/daily-template-file.md')].data.startswith(b'---\n')
+    assert vault.internal_file_text_cache[
+        Path('test/daily-template-file.md')
+    ].data.startswith(b'---\n')

     expected_path = Path('test/daily/2020-01-01.md')
     assert expected_path in vault.internal_file_text_cache