1
0

Compare commits

...

2 Commits

Author SHA1 Message Date
c576687122 Stepmania import
All checks were successful
Run Python tests (through Pytest) / Test (push) Successful in 35s
Verify Python project can be installed, loaded and have version checked / Test (push) Successful in 30s
2025-03-02 00:25:15 +01:00
2e41125952 Standardized imports 2025-03-01 23:36:22 +01:00
2 changed files with 79 additions and 50 deletions

View File

@ -27,6 +27,26 @@ logger = getLogger(__name__)
Row = dict[str, Any] Row = dict[str, Any]
Rows = list[Row] Rows = list[Row]
HOUR = datetime.timedelta(hours=1)
MINUTE = datetime.timedelta(minutes=1)
SECOND = datetime.timedelta(seconds=1)
def to_text_duration(duration: datetime.timedelta) -> str:
    """Render *duration* as human-readable text, e.g. '2 hours 5 minutes 3 seconds'.

    Zero-valued components are omitted, so a duration shorter than one
    second yields the empty string.  Sub-second precision is truncated.
    Assumes a non-negative duration — TODO confirm callers never pass
    negative deltas.
    """
    # divmod on whole seconds replaces the original repeated-subtraction
    # decomposition via the HOUR/MINUTE/SECOND constants.
    total_seconds = int(duration.total_seconds())
    hours, remainder = divmod(total_seconds, 3600)
    minutes, seconds = divmod(remainder, 60)
    parts = []
    if hours > 0:
        parts.append(f'{hours} hours')
    if minutes > 0:
        parts.append(f'{minutes} minutes')
    if seconds > 0:
        parts.append(f'{seconds} seconds')
    return ' '.join(parts)
def iterate_samples_from_rows(rows: Rows) -> Iterator[ActivitySample]: def iterate_samples_from_rows(rows: Rows) -> Iterator[ActivitySample]:
assert len(rows) > 0 assert len(rows) > 0
@ -84,7 +104,7 @@ def import_workout_csv(vault: ObsidianVault, rows: Rows) -> int:
def import_step_counts_csv(vault: ObsidianVault, rows: Rows) -> int: def import_step_counts_csv(vault: ObsidianVault, rows: Rows) -> int:
MINIMUM = 300 MINIMUM_STEPS = 300
num_updated = 0 num_updated = 0
@ -100,7 +120,7 @@ def import_step_counts_csv(vault: ObsidianVault, rows: Rows) -> int:
} }
for date, steps in steps_per_date.items(): for date, steps in steps_per_date.items():
if steps < MINIMUM: if steps < MINIMUM_STEPS:
continue continue
was_updated = vault.add_statistic(date, 'Steps', steps) was_updated = vault.add_statistic(date, 'Steps', steps)
if was_updated: if was_updated:
@ -109,6 +129,40 @@ def import_step_counts_csv(vault: ObsidianVault, rows: Rows) -> int:
return num_updated return num_updated
def import_stepmania_steps_csv(vault: ObsidianVault, rows: Rows) -> int:
    """Import StepMania play rows into the vault as daily statistics.

    For each calendar date (taken from each row's 'play.start'), records:
      - 'Stepmania (Steps)': the summed w1-w5 judgment counts for the day.
      - 'Stepmania (Duration)': the summed 'play.duration', as text.

    Returns the number of dates whose statistics were actually updated.
    """
    # Judgment-count columns that together make up the steps hit in a play.
    COLUMNS = ['score.w1', 'score.w2', 'score.w3', 'score.w4', 'score.w5']

    def all_steps(row: dict[str, int]) -> int:
        return sum(row[column] for column in COLUMNS)

    # Group rows by the calendar date of the play session.
    rows_per_date: dict[datetime.date, Rows] = {}
    for row in rows:
        rows_per_date.setdefault(row['play.start'].date(), []).append(row)

    steps_per_date = {
        date: sum(all_steps(row) for row in date_rows)
        for date, date_rows in rows_per_date.items()
    }
    # start= makes the empty-day sum a timedelta instead of int 0.
    duration_per_date = {
        date: sum((row['play.duration'] for row in date_rows), start=datetime.timedelta())
        for date, date_rows in rows_per_date.items()
    }

    num_updated = 0
    for date in steps_per_date:
        was_updated_steps = vault.add_statistic(
            date, 'Stepmania (Steps)', int(steps_per_date[date]),
        )
        was_updated_duration = vault.add_statistic(
            date, 'Stepmania (Duration)', to_text_duration(duration_per_date[date]),
        )
        if was_updated_steps or was_updated_duration:
            num_updated += 1
    return num_updated
def escape_for_obsidian_link(link: str) -> str: def escape_for_obsidian_link(link: str) -> str:
return link.replace(':', ' ').replace('/', ' ').replace(' ', ' ') return link.replace(':', ' ').replace('/', ' ').replace(' ', ' ')
@ -167,18 +221,6 @@ def import_activity_sample_csv(
return num_updated return num_updated
def import_activity_sample_csv_from_file(
    vault: ObsidianVault,
    data_path: Path,
    content_mapper,
    **kwargs,
) -> int:
    """Load a CSV of activity samples from *data_path* and import it.

    Extra keyword arguments are forwarded to import_activity_sample_csv.
    Returns the number of vault files that were updated.
    """
    rows = load_csv_file(data_path)
    logger.info('Loaded CSV with %d lines (%s)', len(rows), data_path)
    num_updated = import_activity_sample_csv(vault, rows, content_mapper, **kwargs)
    logger.info('Updated %d files', num_updated)
    # Bug fix: the function is declared -> int but previously fell through,
    # implicitly returning None.
    return num_updated
def map_watched_series_content(sample: RealizedActivitySample) -> EventContent: def map_watched_series_content(sample: RealizedActivitySample) -> EventContent:
subject = sample.single_label_with_category('series.name') subject = sample.single_label_with_category('series.name')
comment = '{} Episode {}: *{}*'.format( comment = '{} Episode {}: *{}*'.format(
@ -203,49 +245,36 @@ def map_games_played_content(sample: RealizedActivitySample) -> EventContent:
) )
def import_watched_series_csv_from_file(vault: ObsidianVault) -> int: PATH_WATCHED = Path('output/show_episodes_watched.csv')
data_path = Path('output/show_episodes_watched.csv') PATH_PLAYED = Path('output/games_played.csv')
return import_activity_sample_csv_from_file( PATH_WORKOUT = Path('/home/jmaa/Notes/workout.csv')
vault, PATH_STEP_COUNTS = Path(
data_path, '/home/jmaa/personal-archive/misc-data/step_counts_2023-07-26_to_2024-09-21.csv',
map_watched_series_content, )
) PATH_STEPMANIA = Path('output/stepmania.csv')
def import_played_games_csv_from_file(vault: ObsidianVault) -> int: IMPORTERS = [
data_path = Path('output/games_played.csv') {'path': PATH_WORKOUT, 'import_rows': import_workout_csv},
if not data_path.exists(): {'path': PATH_STEP_COUNTS, 'import_rows': import_step_counts_csv},
logger.warning('Skipping import of played games: %s is missing', data_path) {'path': PATH_STEPMANIA, 'import_rows': import_stepmania_steps_csv},
return 0 {'path': PATH_PLAYED, 'import_rows': lambda vault, rows: import_activity_sample_csv(vault, rows, map_games_played_content, group_category='game.name',) },
return import_activity_sample_csv_from_file( {'path': PATH_WATCHED, 'import_rows': lambda vault, rows: import_activity_sample_csv(vault, rows, map_watched_series_content) },
vault, ]
data_path,
map_games_played_content,
group_category='game.name',
)
def import_data(obsidian_path: Path, dry_run=True): def import_data(obsidian_path: Path, dry_run=True):
vault = ObsidianVault(obsidian_path, read_only=dry_run and 'silent' or None) vault = ObsidianVault(obsidian_path, read_only=dry_run and 'silent' or None)
if False: for import_def in IMPORTERS:
data_path = Path('/home/jmaa/Notes/workout.csv') if not import_def['path'].exists():
rows = load_csv_file(data_path) logger.warning('Skipping %s: %s is missing', import_def['import_rows'], import_def['path'])
continue
rows = load_csv_file(import_def['path'])
logger.info('Loaded CSV with %d lines', len(rows)) logger.info('Loaded CSV with %d lines', len(rows))
num_updated = import_workout_csv(vault, rows) num_files_updated = import_def['import_rows'](vault, rows)
logger.info('Updated %d files', num_updated) logger.info('Updated %d files', num_files_updated)
del import_def, rows
if False:
data_path = Path(
'/home/jmaa/personal-archive/misc-data/step_counts_2023-07-26_to_2024-09-21.csv',
)
rows = load_csv_file(data_path)
logger.info('Loaded CSV with %d lines', len(rows))
num_updated = import_step_counts_csv(vault, rows)
logger.info('Updated %d files', num_updated)
import_played_games_csv_from_file(vault)
import_watched_series_csv_from_file(vault)
num_dirty = len([f for f in vault.internal_file_text_cache.values() if f.is_dirty]) num_dirty = len([f for f in vault.internal_file_text_cache.values() if f.is_dirty])
logger.info('dirty files in cache: %d', num_dirty) logger.info('dirty files in cache: %d', num_dirty)

View File

@ -324,7 +324,7 @@ def parse_event_string(
start_time = datetime.time.fromisoformat(m.group(1)) start_time = datetime.time.fromisoformat(m.group(1))
end_time = datetime.time.fromisoformat(m.group(2)) if m.group(2) else start_time end_time = datetime.time.fromisoformat(m.group(2)) if m.group(2) else start_time
else: else:
logger.info('Could not parse format: %s', event_str) logger.debug('Could not parse format: %s', event_str)
return Event(None, None, None, None, event_str) return Event(None, None, None, None, event_str)
start = datetime.datetime.combine(date, start_time, timezone).astimezone( start = datetime.datetime.combine(date, start_time, timezone).astimezone(