From e3b23637a04e9b55f7b5499331dbda40604ed194 Mon Sep 17 00:00:00 2001 From: Jon Michael Aanes Date: Wed, 17 Apr 2024 00:45:15 +0200 Subject: [PATCH] Logging --- personal_data/__main__.py | 3 +++ personal_data/mailgun.py | 7 ++++++- personal_data/main.py | 16 +++++++++------- 3 files changed, 18 insertions(+), 8 deletions(-) diff --git a/personal_data/__main__.py b/personal_data/__main__.py index 6a6f82c..7454c06 100644 --- a/personal_data/__main__.py +++ b/personal_data/__main__.py @@ -1,5 +1,6 @@ import personal_data.main import argparse +import logging def parse_arguments(): parser = argparse.ArgumentParser() @@ -7,6 +8,8 @@ def parse_arguments(): return parser.parse_args() def main(): + logging.basicConfig() + logging.getLogger('personal_data').setLevel('INFO') args = parse_arguments() scraper_filter = frozenset(args.fetchers) personal_data.main.main(scraper_filter) diff --git a/personal_data/mailgun.py b/personal_data/mailgun.py index cb65cd3..8c7b677 100644 --- a/personal_data/mailgun.py +++ b/personal_data/mailgun.py @@ -1,7 +1,9 @@ import requests - import personal_data.secrets as secrets +import logging +logger = logging.getLogger(__name__) + MAILGUN_API_ENDPOINT = 'https://api.mailgun.net/v3/{mailgun_domain}/messages' FROM_MAIL_NAME = 'Personal Scrapers' @@ -14,6 +16,8 @@ def send_email(session: requests.Session, subject: str, text: str): assert subject != '' assert text != '' + logger.info('Sending email using mailgun!') + data = { 'from': f'{FROM_MAIL_NAME} <{FROM_MAIL_USERNAME}@{secrets.MAILGUN_DOMAIN}>', 'to': [secrets.MAILGUN_RECIPIENT], @@ -27,5 +31,6 @@ def send_email(session: requests.Session, subject: str, text: str): data=data, ) response.raise_for_status() + logger.info('Email sent!') return response diff --git a/personal_data/main.py b/personal_data/main.py index 6dc265c..443e570 100644 --- a/personal_data/main.py +++ b/personal_data/main.py @@ -127,7 +127,7 @@ def extend_csv_file( with open(filename, 'w') as csvfile: csvfile.write(output_csv) 
del csvfile - logger.warning( + logger.info( 'Extended CSV "%s" from %d to %d lines', filename, original_num_dicts, @@ -162,19 +162,21 @@ def get_session(cookiejar, *, with_cfscrape: bool) -> requests.Session: return session def send_notification(session: requests.Session, scraper_name: str, latest_dict: frozendict): - maingun.send_email(session, f'Updated {scraper_name}', repr(latest_dict)) - + body = ['A new update has occurred for ', scraper_name, '\n'] + for k, v in latest_dict.items(): + body.append(f'{k}: {v}\n') + mailgun.send_email(session, f'Updated {scraper_name}', ''.join(body)) def main(scraper_filter: frozenset[str]): cookiejar = browsercookie.firefox() - logger.warning('Got cookiejar from firefox: %s cookies', len(cookiejar)) + logger.info('Got cookiejar from firefox: %s cookies', len(cookiejar)) for scraper_cls in personal_data.data.Scraper.__subclasses__(): session = get_session(cookiejar, with_cfscrape=scraper_cls.requires_cfscrape()) scraper = scraper_cls(session) if scraper_cls.__name__ not in scraper_filter: continue - logger.warning( + logger.info( 'Running %s, appending to "%s"', scraper_cls.__name__, scraper.dataset_name, @@ -193,10 +195,10 @@ def main(scraper_filter: frozenset[str]): deduplicate_mode=scraper.deduplicate_mode, deduplicate_ignore_columns=scraper.deduplicate_ignore_columns, ) - logger.warning('Scraper done: %s', scraper.dataset_name) + logger.info('Scraper done: %s', scraper.dataset_name) if status['extended']: print('Extended') - send_notification(scraper_cls.__name__, status['dicts'][-1]) + send_notification(session, scraper_cls.__name__, status['dicts'][-1]) del scraper, session