Ruff
commit 7eb4b0bb09 (parent 8fef2e9ef5)
Command-line entry point (imports personal_data.main):

@@ -7,11 +7,15 @@ import personal_data.main
 def parse_arguments():
     available_scraper_names = personal_data.main.available_scraper_names()
     parser = argparse.ArgumentParser()
-    parser.add_argument('fetchers', metavar='FETCHER', type=str, nargs='+',
-                        choices=available_scraper_names)
+    parser.add_argument(
+        'fetchers',
+        metavar='FETCHER',
+        type=str,
+        nargs='+',
+        choices=available_scraper_names,
+    )
     parser.add_argument('--cookiejar', action='store_true')
-    parser.add_argument('--email', action='store_true',
-                        dest='send_email_notification')
+    parser.add_argument('--email', action='store_true', dest='send_email_notification')
     return parser.parse_args()
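For reference, a minimal self-contained sketch of how the reformatted parser behaves; the scraper name and argv below are illustrative assumptions, not taken from this commit:

import argparse

# Standalone stand-in for personal_data.main.available_scraper_names().
available_scraper_names = ['PsnProfilesScraper']

parser = argparse.ArgumentParser()
parser.add_argument(
    'fetchers',
    metavar='FETCHER',
    type=str,
    nargs='+',
    choices=available_scraper_names,
)
parser.add_argument('--cookiejar', action='store_true')
parser.add_argument('--email', action='store_true', dest='send_email_notification')

# Hypothetical invocation: one fetcher plus the --email flag.
args = parser.parse_args(['PsnProfilesScraper', '--email'])
assert args.fetchers == ['PsnProfilesScraper']
assert args.send_email_notification is True
assert args.cookiejar is False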
@@ -20,8 +24,11 @@ def main():
     logging.getLogger('personal_data').setLevel('INFO')
     args = parse_arguments()
     scraper_filter = frozenset(args.fetchers)
-    personal_data.main.main(scraper_filter, use_cookiejar=args.cookiejar,
-            send_email_notification = args.send_email_notification)
+    personal_data.main.main(
+        scraper_filter,
+        use_cookiejar=args.cookiejar,
+        send_email_notification=args.send_email_notification,
+    )


 if __name__ == '__main__':
PSN Profiles scraper (PsnProfilesScraper):

@@ -1,5 +1,4 @@
 import dataclasses
-import datetime
 import logging
 import re
 from collections.abc import Iterator
@@ -16,6 +15,7 @@ logger = logging.getLogger(__name__)
 URL_PROFILE = 'https://psnprofiles.com/{psn_id}'
 URL_USER_GAME_TROPHIES = 'https://psnprofiles.com/trophies/{game_id}/{psn_id}'

+
 def game_psnprofiles_id_from_url(relative_url: str) -> int:
     m = re.match(r'/(?:trophy|trophies)/(\d+)\-(?:[\w-]+)(/[\w-]*)?', relative_url)
     result = m.group(1)
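A small standalone check of the trophy-URL pattern above; the example URL is made up, while the regex is copied verbatim from the hunk:

import re

# group(1) captures the numeric PSN Profiles game id.
m = re.match(
    r'/(?:trophy|trophies)/(\d+)\-(?:[\w-]+)(/[\w-]*)?',
    '/trophies/12345-example-game',
)
assert m is not None
assert m.group(1) == '12345'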
@@ -117,7 +117,9 @@ class PsnProfilesScraper(Scraper):
         if len(small_infos) > 2:
             time_played_div = small_infos[2]
             time_played_div.sup.extract()
-            time_played = personal_data.parse_util.parse_date(time_played_div.get_text())
+            time_played = personal_data.parse_util.parse_date(
+                time_played_div.get_text(),
+            )
         else:
             time_played = None
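The sup.extract() call removes the footnote marker before the text is parsed. A hedged illustration with bs4; the HTML snippet is invented, and parse_date is left out because its full format string is not shown in this hunk:

from bs4 import BeautifulSoup

# Invented markup resembling a "time played" cell with a <sup> footnote.
div = BeautifulSoup('<div>26 May 2024<sup>1</sup></div>', 'html.parser').div
div.sup.extract()  # drop the <sup> marker in place
assert div.get_text() == '26 May 2024'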
personal_data.main:

@@ -1,5 +1,4 @@
 import csv
-from collections.abc import Iterator
 import datetime
 import io
 import logging
@@ -177,15 +176,21 @@ def send_notification(
         body.append(f'{k}: {v}\n')
     mailgun.send_email(session, f'Updated {scraper_name}', ''.join(body))


 def available_scrapers() -> list[type[personal_data.data.Scraper]]:
     return personal_data.data.Scraper.__subclasses__()

+
 def available_scraper_names() -> list[str]:
     return [scraper_cls.__name__ for scraper_cls in available_scrapers()]

-def main(scraper_filter: frozenset[str], *, use_cookiejar: bool,
-        send_email_notification: bool = False) -> None:
+
+def main(
+    scraper_filter: frozenset[str],
+    *,
+    use_cookiejar: bool,
+    send_email_notification: bool = False,
+) -> None:
     if use_cookiejar:
         cookiejar = browsercookie.firefox()
         logger.info('Got cookiejar from firefox: %s cookies', len(cookiejar))
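The bare * in the reformatted signature keeps use_cookiejar and send_email_notification keyword-only; a minimal sketch of the calling convention, with a stub body that is not the project's implementation:

def main(
    scraper_filter: frozenset[str],
    *,
    use_cookiejar: bool,
    send_email_notification: bool = False,
) -> None:
    # Stub body for illustration only.
    print(scraper_filter, use_cookiejar, send_email_notification)


main(frozenset({'PsnProfilesScraper'}), use_cookiejar=False)
# main(frozenset(), False)  # would raise TypeError: use_cookiejar is keyword-only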
personal_data.parse_util:

@@ -28,14 +28,17 @@ def parse_duration(text: str) -> datetime.timedelta:
     unit = DATETIME_UNITS[unit]
     return unit * num

+
 def parse_response_datetime(response) -> datetime.datetime:
     return datetime.datetime.strptime(response.headers['Date'], FORMAT_DATE_HEADER)

+
 def parse_time(text: str) -> datetime.datetime:
     text = text.replace('\n', ' ')
     text = text.strip()
     return datetime.datetime.strptime(text, '%d %b %Y %I:%M:%S %p')

+
 def parse_date(text: str) -> datetime.date:
     return datetime.datetime.strptime(
         text.strip(),
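Worked example of the parse_time format shown above ('%d %b %Y %I:%M:%S %p'); the input string is invented:

import datetime

# 12-hour clock with AM/PM, as in parse_time above.
parsed = datetime.datetime.strptime('04 May 2024 09:15:30 PM', '%d %b %Y %I:%M:%S %p')
assert parsed == datetime.datetime(2024, 5, 4, 21, 15, 30)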