1
0

Ruff
All checks were successful
Build container / Package-Python (push) Successful in 25s
Build container / Package-Container (push) Successful in 1m20s

This commit is contained in:
Jon Michael Aanes 2024-05-09 16:59:56 +02:00
parent 8fef2e9ef5
commit 7eb4b0bb09
Signed by: Jmaa
SSH Key Fingerprint: SHA256:Ab0GfHGCblESJx7JRE4fj4bFy/KRpeLhi41y4pF3sNA
4 changed files with 28 additions and 11 deletions

View File

@ -7,11 +7,15 @@ import personal_data.main
def parse_arguments():
    """Parse command-line arguments for the scraper runner.

    Returns:
        argparse.Namespace with:
          fetchers: one or more scraper names, validated against the
              names reported by personal_data.main.available_scraper_names().
          cookiejar: True when cookies should be loaded from the browser.
          send_email_notification: True when a notification e-mail should
              be sent after scraping (set via --email).
    """
    available_scraper_names = personal_data.main.available_scraper_names()
    parser = argparse.ArgumentParser()
    # NOTE: the scraped diff contained both the old single-line and the new
    # multi-line form of these add_argument calls; registering the same
    # positional/option twice raises argparse.ArgumentError, so only the
    # post-change versions are kept.
    parser.add_argument(
        'fetchers',
        metavar='FETCHER',
        type=str,
        nargs='+',
        choices=available_scraper_names,
    )
    parser.add_argument('--cookiejar', action='store_true')
    parser.add_argument('--email', action='store_true', dest='send_email_notification')
    return parser.parse_args()
@ -20,8 +24,11 @@ def main():
logging.getLogger('personal_data').setLevel('INFO')
args = parse_arguments()
scraper_filter = frozenset(args.fetchers)
personal_data.main.main(scraper_filter, use_cookiejar=args.cookiejar,
send_email_notification = args.send_email_notification)
personal_data.main.main(
scraper_filter,
use_cookiejar=args.cookiejar,
send_email_notification=args.send_email_notification,
)
if __name__ == '__main__':

View File

@ -1,5 +1,4 @@
import dataclasses
import datetime
import logging
import re
from collections.abc import Iterator
@ -16,6 +15,7 @@ logger = logging.getLogger(__name__)
URL_PROFILE = 'https://psnprofiles.com/{psn_id}'
URL_USER_GAME_TROPHIES = 'https://psnprofiles.com/trophies/{game_id}/{psn_id}'
def game_psnprofiles_id_from_url(relative_url: str) -> int:
m = re.match(r'/(?:trophy|trophies)/(\d+)\-(?:[\w-]+)(/[\w-]*)?', relative_url)
result = m.group(1)
@ -117,7 +117,9 @@ class PsnProfilesScraper(Scraper):
if len(small_infos) > 2:
time_played_div = small_infos[2]
time_played_div.sup.extract()
time_played = personal_data.parse_util.parse_date(time_played_div.get_text())
time_played = personal_data.parse_util.parse_date(
time_played_div.get_text(),
)
else:
time_played = None

View File

@ -1,5 +1,4 @@
import csv
from collections.abc import Iterator
import datetime
import io
import logging
@ -177,15 +176,21 @@ def send_notification(
body.append(f'{k}: {v}\n')
mailgun.send_email(session, f'Updated {scraper_name}', ''.join(body))
def available_scrapers() -> list[type[personal_data.data.Scraper]]:
    """Return every registered scraper class.

    Registration is implicit: any direct subclass of
    personal_data.data.Scraper is considered available.
    """
    scraper_classes = personal_data.data.Scraper.__subclasses__()
    return scraper_classes
def available_scraper_names() -> list[str]:
    """Return the class names of all available scrapers."""
    return [scraper.__name__ for scraper in available_scrapers()]
def main(scraper_filter: frozenset[str], *, use_cookiejar: bool,
send_email_notification: bool = False) -> None:
def main(
scraper_filter: frozenset[str],
*,
use_cookiejar: bool,
send_email_notification: bool = False,
) -> None:
if use_cookiejar:
cookiejar = browsercookie.firefox()
logger.info('Got cookiejar from firefox: %s cookies', len(cookiejar))

View File

@ -28,14 +28,17 @@ def parse_duration(text: str) -> datetime.timedelta:
unit = DATETIME_UNITS[unit]
return unit * num
def parse_response_datetime(response) -> datetime.datetime:
    """Parse the HTTP ``Date`` header of *response* into a datetime.

    Uses the module-level FORMAT_DATE_HEADER strptime format.
    """
    date_header = response.headers['Date']
    return datetime.datetime.strptime(date_header, FORMAT_DATE_HEADER)
def parse_time(text: str) -> datetime.datetime:
    """Parse a timestamp like ``15 Jan 2024 03:05:07 PM``.

    Embedded newlines are treated as spaces and surrounding whitespace
    is ignored before parsing.
    """
    normalized = text.replace('\n', ' ').strip()
    return datetime.datetime.strptime(normalized, '%d %b %Y %I:%M:%S %p')
def parse_date(text: str) -> datetime.date:
return datetime.datetime.strptime(
text.strip(),