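"""Scraper for PSN Profiles (psnprofiles.com).

Fetches the profile overview (games table and recent trophies) and the
per-game trophy lists for the PSN ID configured in personal_data.secrets.
"""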
import dataclasses
import datetime
import logging
import re
from collections.abc import Iterator

import bs4

import personal_data.html_util
import personal_data.parse_util
from personal_data import secrets
from personal_data.data import DeduplicateMode, Scraper

logger = logging.getLogger(__name__)

URL_PROFILE = 'https://psnprofiles.com/{psn_id}'
URL_USER_GAME_TROPHIES = 'https://psnprofiles.com/trophies/{game_id}/{psn_id}'

FORMAT_DAY_MONTH_YEAR = '%d %B %Y'


def game_psnprofiles_id_from_url(relative_url: str) -> int:
    """Extract the numeric PSN Profiles game id from a /trophy or /trophies URL."""
    m = re.match(r'/(?:trophy|trophies)/(\d+)\-(?:[\w-]+)(/[\w-]*)?', relative_url)
    assert m is not None, relative_url
    return int(m.group(1))


assert game_psnprofiles_id_from_url('/trophies/21045-theatrhythm-final-bar-line')
assert game_psnprofiles_id_from_url('/trophies/21045-theatrhythm-final-bar-line/')
assert game_psnprofiles_id_from_url(
    '/trophies/21045-theatrhythm-final-bar-line/Jmaanes',
)
assert game_psnprofiles_id_from_url(
    '/trophy/21045-theatrhythm-final-bar-line/19-seasoned-hunter',
)


def parse_time(text: str) -> datetime.datetime:
    """Parse a trophy timestamp such as '06 Apr 2024 06:11:42 PM'."""
    text = text.replace('\n', ' ')
    text = text.strip()
    return datetime.datetime.strptime(text, '%d %b %Y %I:%M:%S %p')


assert parse_time('06 Apr 2024 06:11:42 PM')
assert parse_time('26 Mar 2024 7:07:01 PM')

# Cap on how many per-game trophy pages are fetched per scrape run.
MAX_GAME_ITERATIONS = 10


@dataclasses.dataclass(frozen=True)
class PsnProfilesScraper(Scraper):
    dataset_name = 'games_played_playstation'
    deduplicate_mode = DeduplicateMode.BY_ALL_COLUMNS

    @staticmethod
    def requires_cfscrape() -> bool:
        return True

    def scrape(self) -> Iterator[dict]:
        # Emit the overview rows first, then trophies for each game seen there.
        games_rows = list(self.scrape_games_overview())
        games_ids = {row['psnprofiles.game_id']: row['game.name'] for row in games_rows}

        yield from games_rows

        for idx, (game_id, game_name) in enumerate(games_ids.items()):
            if idx >= MAX_GAME_ITERATIONS:
                break
            yield from self.scrape_game_trophies(game_id, game_name)
    def scrape_games_overview(self) -> Iterator[dict]:
        # Request the profile overview page.
        logger.info('Getting Overview')
        url = URL_PROFILE.format(psn_id=secrets.PLAYSTATION_PSN_ID)
        response = self.session.get(url)
        response.raise_for_status()

        now = personal_data.parse_util.response_datetime(response)

        # Parse data
        soup = bs4.BeautifulSoup(response.content, 'lxml')
        soup = personal_data.html_util.normalize_soup_slightly(soup, classes=False)

        # Recent trophies.
        soup_recent_trophies = soup.select('ul#recent-trophies > li')
        assert len(soup_recent_trophies) > 0, url
        for row in soup_recent_trophies:
            cells = row.select_one('.info .box td').find_all('div')
            trophy_name = cells[0].get_text().strip()
            trophy_desc = cells[1].get_text().strip()
            game_name = cells[2].a.extract().get_text().strip()
            psnprofiles_id = game_psnprofiles_id_from_url(cells[0].find('a')['href'])
            trophy_icon = row.find(class_='icon').find('img')['src']

            # The remaining cell text is a relative duration like '2 days ago in'.
            gotten_at = (
                cells[2].get_text().strip().removesuffix(' in').removesuffix(' ago')
            )
            gotten_at = personal_data.parse_util.parse_duration(gotten_at)
            time_acquired = now - gotten_at

            yield {
                'game.name': game_name,
                'me.last_played_time': time_acquired.date(),
                # Trophy Data
                'trophy.name': trophy_name,
                'trophy.desc': trophy_desc,
                'trophy.icon': trophy_icon,
                'psnprofiles.game_id': psnprofiles_id,
            }
            del row, cells, time_acquired

        # Games table
        table_rows = soup.find(id='gamesTable').find_all('tr')
        assert len(table_rows) > 0, url
        for row in table_rows:
            cells = row.find_all('td')

            # Stop at the pagination row ('Show N more games').
            if re.match(
                r'show \d+ more games',
                cells[0].get_text().strip(),
                re.IGNORECASE,
            ):
                break

            psnprofiles_id = game_psnprofiles_id_from_url(cells[0].find('a')['href'])
            game_icon = cells[0].find('img')['src']
            game_name = row.select_one('.title').get_text()
            game_platform = row.select_one('.platform').get_text()

            small_infos = cells[1].find_all('div')
            if len(small_infos) > 2:
                time_played_div = small_infos[2]
                time_played_div.sup.extract()
                time_played = datetime.datetime.strptime(
                    time_played_div.get_text().strip(),
                    FORMAT_DAY_MONTH_YEAR,
                ).date()
            else:
                time_played = None

            d = {
                # Important fields
                'game.name': game_name,
                # Secondary fields
                'game.platform': game_platform,
                'game.icon': game_icon,
                'psnprofiles.game_id': psnprofiles_id,
            }
            if time_played:
                d['me.last_played_time'] = time_played
            yield d

    def scrape_game_trophies(
        self,
        psnprofiles_id: int,
        game_name: str,
    ) -> Iterator[dict]:
        assert isinstance(psnprofiles_id, int), psnprofiles_id
        assert isinstance(game_name, str), game_name

        logger.info('Getting Game Trophies %s', psnprofiles_id)

        url = URL_USER_GAME_TROPHIES.format(
            psn_id=secrets.PLAYSTATION_PSN_ID,
            game_id=psnprofiles_id,
        )
        response = self.session.get(url)
        response.raise_for_status()

        # Parse data
        soup = bs4.BeautifulSoup(response.content, 'lxml')
        soup = personal_data.html_util.normalize_soup_slightly(soup, classes=False)

        # Remove redundant elements (ads and sidebar columns).
        for redundant in soup.select('.wide-ad'):
            redundant.extract()
        for redundant in soup.select('div.col-xs-4'):
            redundant.extract()

        # Completed trophies.
        soup_trophies = soup.select(
            '#content.page > .row > div.col-xs div.box table.zebra tr.completed',
        )
        for row in soup_trophies:
            cells = row.find_all('td')
            trophy_name_a = cells[1].a
            if trophy_name_a is None:
                continue
            trophy_name = trophy_name_a.get_text().strip()
            trophy_name_a.extract()
            trophy_desc = cells[1].get_text().strip()
            trophy_icon = cells[0].img['src']

            cells[2].span.span.nobr.sup.extract()
            gotten_at = parse_time(cells[2].get_text())

            yield {
                'game.name': game_name,
                'me.last_played_time': gotten_at,
                # Trophy Data
                'trophy.name': trophy_name,
                'trophy.desc': trophy_desc,
                'trophy.icon': trophy_icon,
                'psnprofiles.game_id': psnprofiles_id,
            }
            del row, cells
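

# Hedged usage sketch, not part of the original module: the framework that
# drives these scrapers normally constructs them itself (requires_cfscrape()
# above suggests it injects a Cloudflare-capable session). This sketch ASSUMES
# the Scraper dataclass exposes a `session` field accepting any
# requests-compatible session; adjust to however personal_data.data.Scraper
# is actually constructed.
if __name__ == '__main__':
    import requests

    scraper = PsnProfilesScraper(session=requests.Session())  # assumed signature
    for sample_row in scraper.scrape():
        print(sample_row)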