# personal-data/personal_data/__main__.py
# Standard library.
import csv
import datetime
import io
import logging

# Third party.
import browsercookie
import cfscrape
import requests
import requests_cache
from frozendict import frozendict

# Local package; importing the fetcher modules registers each Scraper
# subclass so main() can discover them via __subclasses__().
import personal_data.data
import personal_data.fetchers.crunchyroll
import personal_data.fetchers.ffxiv_lodestone
import personal_data.fetchers.playstation
import personal_data.fetchers.psnprofiles

logger = logging.getLogger(__name__)

# Single CSV dialect shared by every read and write in this module.
CSV_DIALECT = 'one_true_dialect'
csv.register_dialect(CSV_DIALECT, lineterminator='\n', skipinitialspace=True)
def try_value(fn, s: str):
    """Apply parser *fn* to *s*; return None if it raises ValueError.

    Note: the original annotated the return as ``any``, which is the
    builtin function, not a type — the annotation has been dropped.
    """
    try:
        return fn(s)
    except ValueError:
        return None


def to_value(s: str):
    """Parse a CSV cell string into the most specific Python value.

    Tries, in order: int, datetime.date, datetime.datetime, the
    literals false/true/none (case-insensitive); falls back to the
    stripped string itself. Empty or whitespace-only input yields None.
    """
    s = s.strip()
    if len(s) == 0:
        return None
    # Compare against None explicitly: a successfully parsed falsy value
    # such as int('0') == 0 must be returned, not skipped. (The previous
    # truthiness check made to_value('0') fall through and return '0'.)
    if (v := try_value(int, s)) is not None:
        return v
    if (v := try_value(datetime.date.fromisoformat, s)) is not None:
        return v
    if (v := try_value(datetime.datetime.fromisoformat, s)) is not None:
        return v
    if s.lower() == 'false':
        return False
    if s.lower() == 'true':
        return True
    if s.lower() == 'none':
        return None
    return s
def extend_csv_file(
    filename: str,
    new_dicts: list,
    deduplicate_mode: personal_data.data.DeduplicateMode,
):
    """Merge *new_dicts* into the CSV file at *filename* and rewrite it.

    Existing rows are read back and re-typed via to_value (cells that
    parse to None are dropped from their row). When *deduplicate_mode*
    is not NONE, duplicate rows are collapsed. Rows are written back in
    a deterministic sorted order; the output is assembled in memory
    first so the file is rewritten in a single pass.

    filename: path of the CSV file; created if it does not exist.
    new_dicts: iterable of row dicts to append.
    deduplicate_mode: personal_data.data.DeduplicateMode member.
    """
    dicts = []
    try:
        # newline='' is required by the csv module's documentation:
        # it preserves embedded newlines inside quoted fields and
        # prevents \r\r\n mangling on Windows.
        with open(filename, newline='') as csvfile:
            reader = csv.DictReader(csvfile, dialect=CSV_DIALECT)
            for row in reader:
                for k in list(row.keys()):
                    row[k] = to_value(row[k])
                    if row[k] is None:
                        del row[k]
                # frozendict rows are hashable, enabling set-based dedup.
                dicts.append(frozendict(row))
    except FileNotFoundError:
        # Missing file just means we start from an empty dataset.
        logger.info('Creating file: %s', filename)

    original_num_dicts = len(dicts)
    dicts += [frozendict(d) for d in new_dicts]
    del new_dicts

    # Union of all row keys, preserving first-seen order as column order.
    fieldnames = []
    for d in dicts:
        for k in d.keys():
            if k not in fieldnames:
                fieldnames.append(k)

    if deduplicate_mode != personal_data.data.DeduplicateMode.NONE:
        dicts = set(dicts)

    # Sort for stable output regardless of input or set iteration order.
    dicts = sorted(dicts, key=lambda d: tuple(str(d.get(fn, '')) for fn in fieldnames))

    csvfile_in_memory = io.StringIO()
    writer = csv.DictWriter(
        csvfile_in_memory,
        fieldnames=fieldnames,
        dialect=CSV_DIALECT,
    )
    writer.writeheader()
    for d in dicts:
        writer.writerow(d)
    output_csv = csvfile_in_memory.getvalue()
    del writer, csvfile_in_memory

    # newline='' again so the dialect's lineterminator is written as-is.
    with open(filename, 'w', newline='') as csvfile:
        csvfile.write(output_csv)

    logger.warning(
        'Extended CSV "%s" from %d to %d lines',
        filename,
        original_num_dicts,
        len(dicts),
    )
# Request headers mimicking a desktop Firefox browser, so scraped sites
# serve the same responses they would to an ordinary user session.
STANDARD_HEADERS = {
    'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64; rv:122.0) Gecko/20100101 Firefox/122.0',
    # "Accept": "application/json, text/plain, */*",
    'Accept-Language': 'en-US,en;q=0.5',
    'Accept-Encoding': 'gzip, deflate, br',
}
def get_session(with_cfscrape: bool, cookiejar) -> requests.Session:
    """Create an HTTP session preloaded with the given cookies.

    with_cfscrape: when True, build a cfscrape scraper session
        (for sites behind Cloudflare challenges); otherwise build a
        requests_cache CachedSession backed by the 'web_cache' store.
    cookiejar: iterable of cookie objects (e.g. from browsercookie)
        copied into the session's cookie jar.
    """
    assert isinstance(with_cfscrape, bool)
    if with_cfscrape:
        session = cfscrape.create_scraper()
    else:
        # NOTE(review): the jar is passed via the `cookies` kwarg here AND
        # re-added cookie-by-cookie in the loop below — presumably the loop
        # exists to cover the cfscrape branch too; confirm requests_cache
        # actually honors the kwarg, otherwise it is redundant.
        session = requests_cache.CachedSession('web_cache', cookies=cookiejar)
    for cookie in cookiejar:
        session.cookies.set_cookie(cookie)
    return session
def main():
    """Run every registered Scraper subclass and append its rows to CSV.

    Cookies are pulled from the local Firefox profile; each scraper gets
    a fresh session (cfscrape-backed when it requires it) and its results
    are appended to output/<dataset_name> with the scraper's own
    deduplication mode.
    """
    cookiejar = browsercookie.firefox()
    logger.warning('Got cookiejar from firefox: %s cookies', len(cookiejar))

    for scraper_cls in personal_data.data.Scraper.__subclasses__():
        session = get_session(scraper_cls.requires_cfscrape(), cookiejar)
        scraper = scraper_cls(session)
        logger.warning(
            'Running %s, appending to "%s"',
            scraper_cls.__name__,
            scraper.dataset_name,
        )
        del scraper_cls

        # Materialize the generator once so extend_csv_file gets a list
        # (replaces the previous manual append loop).
        result_rows = list(scraper.scrape())
        extend_csv_file(
            'output/' + scraper.dataset_name,
            result_rows,
            deduplicate_mode=scraper.deduplicate_mode,
        )
        logger.warning('Scraper done: %s', scraper.dataset_name)
        del scraper, session
# Script entry point: allows `python -m personal_data`.
if __name__ == '__main__':
    main()