1
0

Made email conditional

This commit is contained in:
Jon Michael Aanes 2024-04-28 23:48:22 +02:00
parent f32ff7f412
commit 60f9d55729
Signed by: Jmaa
SSH Key Fingerprint: SHA256:Ab0GfHGCblESJx7JRE4fj4bFy/KRpeLhi41y4pF3sNA
2 changed files with 7 additions and 4 deletions

View File

@@ -10,6 +10,8 @@ def parse_arguments():
     parser.add_argument('fetchers', metavar='FETCHER', type=str, nargs='+',
                         choices=available_scraper_names)
     parser.add_argument('--cookiejar', action='store_true')
+    parser.add_argument('--email', action='store_true',
+                        dest='send_email_notification')
     return parser.parse_args()
@@ -18,7 +20,8 @@ def main():
     logging.getLogger('personal_data').setLevel('INFO')
     args = parse_arguments()
     scraper_filter = frozenset(args.fetchers)
-    personal_data.main.main(scraper_filter, use_cookiejar=args.cookiejar)
+    personal_data.main.main(scraper_filter, use_cookiejar=args.cookiejar,
+                            send_email_notification = args.send_email_notification)

 if __name__ == '__main__':

View File

@@ -184,7 +184,8 @@ def available_scraper_names() -> list[str]:
     return [scraper_cls.__name__ for scraper_cls in available_scrapers()]

-def main(scraper_filter: frozenset[str], use_cookiejar: bool) -> None:
+def main(scraper_filter: frozenset[str], *, use_cookiejar: bool,
+         send_email_notification: bool = False) -> None:
     if use_cookiejar:
         cookiejar = browsercookie.firefox()
         logger.info('Got cookiejar from firefox: %s cookies', len(cookiejar))
@@ -218,8 +219,7 @@ def main(scraper_filter: frozenset[str], *, use_cookiejar: bool,
         )
         logger.info('Scraper done: %s', scraper.dataset_name)

-        if status['extended']:
-            print('Extended')
+        if status['extended'] and send_email_notification:
             send_notification(session, scraper_cls.__name__, status['dicts'][-1])

         del scraper, session