import dataclasses
import logging
import subprocess
from pathlib import Path

import bs4

import personal_data.csv_import
import personal_data.main

logger = logging.getLogger(__name__)


@dataclasses.dataclass
class Result:
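    """A single row from the Zenius-I-vanisher simfile search results."""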
    title: str
    id: int
    levels: str


# Shared HTTP session from the personal_data helper (cfscrape disabled, caching enabled).
SESSION = personal_data.main.get_session([], with_cfscrape=False, ignore_cache=False)


def parse_results(response) -> list[Result]:
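    """Extract search results from the simfile search response.

    Each table row yields the song title, the numeric simfile id taken from
    the link href, and the difficulty levels column. Rows without a simfile
    link are skipped.
    """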
    soup = bs4.BeautifulSoup(response.text, 'lxml')

    results = []
    for tr in soup.select('table tbody tr'):
        cells = tr.select('td')

        title = cells[0].get_text().strip()
        link = cells[0].a
        if link is None:
            continue
        # The simfile id is the numeric suffix of the link href
        # ('viewsimfile.php?simfileid=<id>').
        simfile_id = link['href'].removeprefix('viewsimfile.php?simfileid=')
        levels = cells[1].get_text().strip()
        results.append(Result(title, int(simfile_id), levels))
    return results


def search_for_song(song_data) -> Result | None:
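    """Search Zenius-I-vanisher for a simfile matching the given song.

    Tries title and artist first, then falls back to a title-only search.
    Returns the first result, or None if neither search matches anything.
    """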
    response = SESSION.post(
        'https://zenius-i-vanisher.com/v5.2/simfiles_search_ajax.php',
        data={
            'songtitle': song_data['song.name_eng'],
            'songartist': song_data['song.artist'],
        },
    )
    if results := parse_results(response):
        return results[0]

    response = SESSION.post(
        'https://zenius-i-vanisher.com/v5.2/simfiles_search_ajax.php',
        data={
            'songtitle': song_data['song.name_eng'],
            'songartist': '',
        },
    )
    if results := parse_results(response):
        return results[0]
    logger.warning('No results for %s', song_data['song.name_eng'])
    return None


def download_song(song_data, output_dir: Path):
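    """Download the simfile zip for the given song into output_dir.

    Does nothing if the song cannot be found or the zip already exists.
    """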
    song_result = search_for_song(song_data)
    if song_result is None:
        return

    path_zip = output_dir / f'zenius-{song_result.id}-{song_result.title}.zip'
    if path_zip.exists():
        logger.info('Skipping existing file: %s', path_zip)
        return

    logger.info('Downloading to %s', path_zip)

    url = f'https://zenius-i-vanisher.com/v5.2/download.php?type=ddrsimfile&simfileid={song_result.id}'

    # Delegate the actual download to curl: follow redirects (-L) and treat
    # HTTP errors as failures (--fail).
    cmd = ['curl', '-L', '--fail', url, '-o', path_zip]
    subprocess.run(cmd, check=True, capture_output=True)


def main():
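    """Download a simfile zip for each song listed in the exported CSV."""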
    csv_path = Path('./output/myanimelist_songs.csv')
    output_path = Path('./output/songs')
    output_path.mkdir(exist_ok=True, parents=True)

    songs = personal_data.csv_import.load_csv_file(csv_path)
    for song in songs:
        logger.info('Trying to download %s', song['song.name_eng'])
        download_song(song, output_path)


if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    main()