Compare commits: beb58b6187...9656b884e0

No commits in common. "beb58b61876a003c1353322c915188a5828fc023" and "9656b884e0c7f6af4dc56df8aab27ceb9ed77f96" have entirely different histories.
@@ -39,7 +39,7 @@ from .data import (
     WorkSample,
 )
 from .format import cli, icalendar
-from .source import git_repo, csv_file
+from .source import git_repo
 
 logger = logging.getLogger(__name__)
 
@@ -94,15 +94,6 @@ def parse_arguments():
         nargs='+',
         type=Path,
         dest='repositories',
-        default=[],
-    )
-    parser.add_argument(
-        '--csv-file',
-        action='extend',
-        nargs='+',
-        type=Path,
-        dest='csv_files',
-        default=[],
     )
     parser.add_argument(
         '--filter',
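The dropped --csv-file option used argparse's 'extend' action together with nargs='+', which flattens the values from every occurrence of the flag into one list on args.csv_files. A minimal, standalone sketch of that pattern (Python 3.8+; the file names are made up for the example):

import argparse
from pathlib import Path

parser = argparse.ArgumentParser()
# Same shape as the removed option: 'extend' + nargs='+' appends every value
# from every occurrence of the flag onto one flat list (default=[]).
parser.add_argument(
    '--csv-file',
    action='extend',
    nargs='+',
    type=Path,
    dest='csv_files',
    default=[],
)

args = parser.parse_args(['--csv-file', 'a.csv', 'b.csv', '--csv-file', 'c.csv'])
print(args.csv_files)  # e.g. [PosixPath('a.csv'), PosixPath('b.csv'), PosixPath('c.csv')]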
@@ -122,52 +113,31 @@ def parse_arguments():
     )
     return parser.parse_args()
 
-def load_samples(args):
-    shared_time_stamps_set: set[WorkSample] = set()
-
-    # Git repositories
-    for repo_path in args.repositories:
-        logger.warning('Determine commits from %s', repo_path)
-        shared_time_stamps_set |= set(
-            git_repo.iterate_samples_from_git_repository(repo_path),
-        )
-    del repo_path
-
-    # CSV Files
-    for csv_path in args.csv_files:
-        logger.warning('Load samples from %s', csv_path)
-        shared_time_stamps_set |= set(
-            csv_file.iterate_samples_from_csv_file(csv_path),
-        )
-    del csv_path
-
-
-    return shared_time_stamps_set
 
 def main():
     logging.basicConfig()
 
     args = parse_arguments()
 
-    # Determine samples
-    shared_time_stamps_set = load_samples(args)
+    shared_time_stamps_set: set[WorkSample] = set()
+    for repo_path in args.repositories:
+        logger.warning('Visit %s', repo_path)
+        shared_time_stamps_set |= set(
+            git_repo.iterate_samples_from_git_repository(repo_path),
+        )
 
-    # Sort samples
     shared_time_stamps = sorted(shared_time_stamps_set, key=lambda s: s.end_at)
     del shared_time_stamps_set
 
-    # Filter samples
     sample_filter = args.sample_filter
     if len(sample_filter) != 0:
         logger.warning('Filtering %s samples', len(shared_time_stamps))
         shared_time_stamps = filter_samples(shared_time_stamps, sample_filter)
         logger.warning('Filtered down to %s samples', len(shared_time_stamps))
 
-    # Heuristic samples
     logger.warning('Realizing %s samples', len(shared_time_stamps))
     shared_time_stamps = list(heuristically_realize_samples(shared_time_stamps))
 
-    # Output format
     if args.format_mode == 'cli_report':
         for t in cli.generate_report(shared_time_stamps):
             sys.stdout.write(t)
@@ -1 +1 @@
-__version__ = '0.1.17'
+__version__ = '0.1.16'
@@ -9,19 +9,15 @@ HOUR = datetime.timedelta(hours=1)
 MINUTE = datetime.timedelta(minutes=1)
 
 
-def create_title(sample: RealizedWorkSample) -> tuple[str,str]:
+def create_title(sample: RealizedWorkSample) -> str:
     ls = []
-    desc = []
     for label_and_type in sample.labels:
         if label_and_type.startswith(HIDDEN_LABEL_PREFIX):
             continue
         if label_and_type.startswith('author:'):
             continue
-        if len(ls) == 0:
-            ls.append(label_and_type.split(':')[1])
-        else:
-            desc.append(label_and_type)
-    return ' '.join(ls), '\n'.join(desc)
+        ls.append(label_and_type)
+    return ' '.join(ls)
 
 
 def generate_calendar(
@@ -34,7 +30,9 @@ def generate_calendar(
     cal.add('version', '2.0')
 
     for sample in samples:
-        title, description = create_title(sample)
+        title = create_title(sample)
+
+        description = ''
 
         # Create event
         event = icalendar.Event()
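Taken together, the last two hunks narrow create_title from returning a (title, description) pair to returning only a title string, and generate_calendar compensates by hard-coding the event description to ''. For illustration, a standalone sketch of both variants; the label values and the HIDDEN_LABEL_PREFIX constant are assumptions made up for this example, not taken from the repository:

HIDDEN_LABEL_PREFIX = '_'  # assumed placeholder; the real constant lives elsewhere in the module

def create_title_old(labels: tuple[str, ...]) -> tuple[str, str]:
    # beb58b6187 behaviour: the first visible label contributes only its value
    # to the title; every later visible label becomes part of the description.
    ls: list[str] = []
    desc: list[str] = []
    for label_and_type in labels:
        if label_and_type.startswith(HIDDEN_LABEL_PREFIX):
            continue
        if label_and_type.startswith('author:'):
            continue
        if len(ls) == 0:
            ls.append(label_and_type.split(':')[1])
        else:
            desc.append(label_and_type)
    return ' '.join(ls), '\n'.join(desc)

def create_title_new(labels: tuple[str, ...]) -> str:
    # 9656b884e0 behaviour: every visible label is kept verbatim in the title.
    ls = [
        label for label in labels
        if not label.startswith(HIDDEN_LABEL_PREFIX)
        and not label.startswith('author:')
    ]
    return ' '.join(ls)

labels = ('project:personal-data-calendar', 'author:someone', 'lines:120')  # hypothetical sample labels
print(create_title_old(labels))  # ('personal-data-calendar', 'lines:120')
print(create_title_new(labels))  # project:personal-data-calendar lines:120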
@@ -1,57 +0,0 @@
-import argparse
-from collections.abc import Iterator
-import datetime
-import urllib.parse
-from pathlib import Path
-
-from personal_data.util import load_csv_file
-
-from ..data import WorkSample
-
-def iterate_samples_from_dicts(rows: list[dict]) -> Iterator[WorkSample]:
-    max_title_parts = 2
-
-    for event_data in rows:
-
-        # Select data
-        possible_time_keys = [
-            k for k, v in event_data.items() if isinstance(v, datetime.date)
-        ]
-        possible_name_keys = [k for k, v in event_data.items() if isinstance(v, str)]
-        possible_image_keys = [
-            k for k, v in event_data.items() if isinstance(v, urllib.parse.ParseResult)
-        ]
-
-        possible_misc_keys = list(event_data.keys())
-        for k in possible_image_keys:
-            if k in possible_misc_keys:
-                possible_misc_keys.remove(k)
-        del k
-        for k in possible_time_keys :
-            if k in possible_misc_keys:
-                possible_misc_keys.remove(k)
-        del k
-
-        date = event_data[possible_time_keys[0]] if possible_time_keys else None
-        image = event_data[possible_image_keys[0]] if possible_image_keys else None
-
-        if date is None:
-            continue
-
-        title = ': '.join(event_data[k] for k in possible_name_keys[:max_title_parts])
-        description = '\n\n'.join(event_data[k] for k in possible_name_keys[max_title_parts:])
-
-        labels = [f'{k}:{event_data[k]}' for k in possible_misc_keys]
-
-        # Create event
-        yield WorkSample(
-            labels=tuple(labels),
-            start_at=None,
-            end_at=date,
-        )
-
-        del event_data
-
-def iterate_samples_from_csv_file(file_path: Path) -> Iterator[WorkSample]:
-    dicts = load_csv_file(file_path)
-    yield from iterate_samples_from_dicts(dicts)
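The removed source module inferred column roles from value types: any datetime.date column could supply the sample's end_at, urllib.parse.ParseResult columns were set aside as images, and every remaining column became a 'key:value' label on the WorkSample (start_at was always None). A small, self-contained sketch of that inference on a hypothetical row; it assumes load_csv_file already coerces cells into dates and URLs, which the isinstance checks above imply but the diff does not show:

import datetime
import urllib.parse

# Hypothetical CSV row after type coercion (assumed behaviour of load_csv_file).
event_data = {
    'project': 'personal-data-calendar',
    'when': datetime.date(2024, 5, 1),
    'note': 'wrote the iCalendar exporter',
}

# Same type-driven column classification as the removed iterate_samples_from_dicts.
possible_time_keys = [k for k, v in event_data.items() if isinstance(v, datetime.date)]
possible_image_keys = [k for k, v in event_data.items() if isinstance(v, urllib.parse.ParseResult)]
possible_misc_keys = [
    k for k in event_data
    if k not in possible_time_keys and k not in possible_image_keys
]

date = event_data[possible_time_keys[0]] if possible_time_keys else None
labels = [f'{k}:{event_data[k]}' for k in possible_misc_keys]

print(date)    # 2024-05-01 -> would become WorkSample.end_at
print(labels)  # ['project:personal-data-calendar', 'note:wrote the iCalendar exporter'] -> WorkSample.labels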
@@ -1,2 +1 @@
 GitPython
-icalendar