Set cache duration based on index in list.
All checks were successful
Run Python tests (through Pytest) / Test (push) Successful in 34s
Verify Python project can be installed, loaded and have version checked / Test (push) Successful in 30s

Jon Michael Aanes 2024-11-17 12:28:03 +01:00
parent f7ea7c90b5
commit 57918ec9b9
Signed by: Jmaa
SSH Key Fingerprint: SHA256:Ab0GfHGCblESJx7JRE4fj4bFy/KRpeLhi41y4pF3sNA


@@ -32,7 +32,11 @@ MAX_NUMBER_GAMES_TO_PARSE = 10000
 @dataclasses.dataclass(frozen=True)
 class PsnProfiles(Scraper):
-    """Downloads all trophies for the given user."""
+    """Downloads all trophies for the given user.
+
+    Individual game pages are cached for a period between 1 to 30 days,
+    depending upon how recently you played them.
+    """
 
     dataset_name = 'games_played'
     deduplicate_mode = DeduplicateMode.BY_ALL_COLUMNS
@@ -49,7 +53,8 @@ class PsnProfiles(Scraper):
         logger.info('Found %d games from overview', len(games_rows))
 
         for idx, (game_id, game_name) in enumerate(reversed(games_ids.items())):
-            yield from self._scrape_game_trophies(game_id, game_name)
+            cache_duration = datetime.timedelta(days=min(idx+1, 30))
+            yield from self._scrape_game_trophies(game_id, game_name, cache_duration)
             del game_id
             if idx >= MAX_NUMBER_GAMES_TO_PARSE:
                 break
@@ -141,6 +146,7 @@ class PsnProfiles(Scraper):
         self,
         psnprofiles_id: int,
         game_name: str,
+        cache_duration: datetime.timedelta,
     ) -> Iterator[dict]:
         assert isinstance(psnprofiles_id, int), psnprofiles_id
         assert isinstance(game_name, str), game_name
@@ -151,7 +157,7 @@ class PsnProfiles(Scraper):
             psn_id=secrets.PLAYSTATION_PSN_ID,
             game_id=psnprofiles_id,
         )
-        response = self.session.get(url)
+        response = self.session.get(url, expire_after=cache_duration)
         response.raise_for_status()
 
         # Parse data
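
The per-request expiry in this change assumes the scraper's self.session accepts an expire_after argument, which matches the behaviour of requests_cache.CachedSession. Below is a minimal, self-contained sketch of the scheme under that assumption; the cache name, game ids and URL pattern are placeholders, not the project's real values.

import datetime

import requests_cache  # assumption: self.session is (or behaves like) a requests_cache.CachedSession


def cache_duration_for_index(idx: int) -> datetime.timedelta:
    # Formula from the commit: the game at position idx is cached for idx+1 days, capped at 30.
    return datetime.timedelta(days=min(idx + 1, 30))


session = requests_cache.CachedSession('psnprofiles_demo')  # hypothetical cache name

for idx, game_id in enumerate([10042, 10043, 10044]):  # hypothetical game ids
    cache_duration = cache_duration_for_index(idx)
    # A per-request expire_after overrides the session-wide default, so pages early
    # in the iteration are refreshed after a day while later ones can live up to 30 days.
    response = session.get(
        f'https://psnprofiles.com/trophies/{game_id}',  # hypothetical URL pattern
        expire_after=cache_duration,
    )
    response.raise_for_status()

Passing the duration per request keeps the session-wide cache policy untouched, which is why the commit threads cache_duration through _scrape_game_trophies instead of reconfiguring the session.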