
Compare commits


No commits in common. "6749479f383043fc0fcc5a78ba0c2be77dc9b385" and "d2916cbc28ece48db7c53ec6c2b52136699a92b4" have entirely different histories.

2 changed files with 11 additions and 24 deletions

View File

@@ -12,36 +12,31 @@ from personal_data.data import DeduplicateMode, Scraper
 logger = logging.getLogger(__name__)

-@dataclasses.dataclass(frozen=True)
-class MyAnimeListAnime:
-    series_name_eng: str
-    series_name: str
-    series_myanimelist_url: urllib.parse.ParseResult
-    series_icon: urllib.parse.ParseResult
-    me_score: int


 @dataclasses.dataclass(frozen=True)
 class MyAnimeList(Scraper):
     dataset_name = 'myanimelist_anime'
     deduplicate_mode = DeduplicateMode.BY_FIRST_COLUMN

-    def scrape(self) -> Iterator[MyAnimeListAnime]:
+    def scrape(self) -> Iterator[Mapping[str, object]]:
         username = 'WhereTheDogGoin'
         url = f'https://myanimelist.net/animelist/{username}'
         response = self.session.get(url)
         response.raise_for_status()

         soup = bs4.BeautifulSoup(response.text)
         print(soup)
         data_items_soup = soup.select('[data-items]')[0]
         print(data_items_soup)
         data_items = json.loads(data_items_soup.get('data-items'))

         for data_item in data_items:
-            yield MyAnimeListAnime(
-                series_name_eng= data_item.get('anime_title_eng') or data_item.get('anime_title'),
-                series_name= data_item.get('anime_title') or data_item.get('anime_title_eng'),
-                series_myanimelist_url= urllib.parse.urlparse(urllib.parse.urljoin(url, data_item['anime_url'])),
-                series_icon= urllib.parse.urlparse(urllib.parse.urljoin(url, data_item['anime_image_path'])),
-                me_score= data_item.get('score'),
-            )
+            print(data_item)
+            yield {
+                'series.name_eng': data_item.get('anime_title_eng') or data_item.get('anime_title'),
+                'series.name': data_item.get('anime_title') or data_item.get('anime_title_eng'),
+                'series.myanimelist_url': urllib.parse.urlparse(urllib.parse.urljoin(url, data_item['anime_url'])),
+                'series.icon': urllib.parse.urlparse(urllib.parse.urljoin(url, data_item['anime_image_path'])),
+                'me.score': data_item.get('score'),
+            }
+            del data_item
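
Note: both sides of this hunk read the list the same way, from the JSON embedded in the page's data-items attribute; the commit only changes what gets yielded, from a frozen MyAnimeListAnime dataclass to a plain mapping with dotted keys. A minimal standalone sketch of that extraction step, assuming requests and bs4, with an illustrative fetch_anime_list helper that is not part of the repository:

import json

import bs4
import requests


def fetch_anime_list(username: str) -> list[dict[str, object]]:
    # The public animelist page embeds the whole list as JSON in a data-items attribute.
    url = f'https://myanimelist.net/animelist/{username}'
    response = requests.get(url)
    response.raise_for_status()
    soup = bs4.BeautifulSoup(response.text, 'html.parser')
    data_items_soup = soup.select('[data-items]')[0]
    return json.loads(data_items_soup.get('data-items'))

The scraper in the diff does the same through self.session and then builds one record per data_item, as shown above.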

View File

@@ -1,6 +1,5 @@
+import _csv
 import csv
-import dataclasses
 import datetime
 import io
 import logging
@@ -89,14 +88,7 @@ def deduplicate_dicts(
     return dicts, fieldnames


-def dataclass_to_dict(obj) -> dict[str, Any]:
-    d = dataclasses.asdict(obj)
-    return {k.replace('_','.',1):v for k,v in d.items()}
-
-
 def normalize_dict(d: dict[str, Any] | frozendict[str, Any]) -> frozendict[str, Any]:
-    if not isinstance(d, dict) and not isinstance(d, frozendict):
-        d = dataclass_to_dict(d)
     safe_values = [(k, csv_import.csv_str_to_value(csv_import.csv_safe_value(v))) for k, v in d.items() ]
     return frozendict( {k:v for k,v in safe_values if v is not None})
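
Note: removing dataclass_to_dict matches the scraper change in the first file. The deleted helper renamed dataclass fields by replacing only the first underscore, so series_name_eng became series.name_eng and me_score became me.score; the MyAnimeList scraper now yields those dotted keys directly, and normalize_dict only ever receives plain dicts. A small sketch of the removed renaming behaviour, using a stand-in Example dataclass and purely illustrative values:

import dataclasses
from typing import Any


@dataclasses.dataclass(frozen=True)
class Example:
    series_name_eng: str
    me_score: int


def dataclass_to_dict(obj) -> dict[str, Any]:
    # Replace only the first underscore: series_name_eng -> series.name_eng, me_score -> me.score.
    return {k.replace('_', '.', 1): v for k, v in dataclasses.asdict(obj).items()}


print(dataclass_to_dict(Example(series_name_eng='Some Title', me_score=7)))
# Prints: {'series.name_eng': 'Some Title', 'me.score': 7}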