diff --git a/socials_util/__init__.py b/socials_util/__init__.py
index 4c472fc..593bfcc 100644
--- a/socials_util/__init__.py
+++ b/socials_util/__init__.py
@@ -14,7 +14,14 @@ from enforce_typing import enforce_types
 
 from socials_util._version import __version__
 
-__all__ = ['__version__', 'SocialSiteId', 'SocialLink', 'WikidataInfo', 'AGGERAGOR_SOCIALS', 'determine_social_from_url']
+__all__ = [
+    '__version__',
+    'SocialSiteId',
+    'SocialLink',
+    'WikidataInfo',
+    'AGGERAGOR_SOCIALS',
+    'determine_social_from_url',
+]
 
 
 class SocialSiteId(aenum.Enum):
diff --git a/test/test_data.py b/test/test_data.py
index 922a44d..9a77fd0 100644
--- a/test/test_data.py
+++ b/test/test_data.py
@@ -1,5 +1,6 @@
 import socials_util
 
+
 def test_is_aggregator():
     assert not socials_util.SocialSiteId.PAGE_WATCH.is_aggregator()
     assert not socials_util.SocialSiteId.FALKON_PROFILE_BOOKMARKS.is_aggregator()
diff --git a/test/test_parsing.py b/test/test_parsing.py
index 4fca71c..f5f7feb 100644
--- a/test/test_parsing.py
+++ b/test/test_parsing.py
@@ -1,6 +1,7 @@
+import urllib.parse
+
 import pytest
-import urllib.parse
 
 import socials_util
 from socials_util import SocialLink, SocialSiteId, determine_social_from_url
@@ -188,11 +189,9 @@ PARSABLE_SOCIAL_IDS_COMBINED: list[tuple[str, object, str | None]] = [
     # Cara
     ('https://cara.app/simzart', SocialSiteId.CARA_PROFILE, 'simzart'),
     ('https://cara.app/simzart/all', SocialSiteId.CARA_PROFILE, 'simzart'),
-
     # Mastodon
     ('https://idlethumbs.social/@testtest', SocialSiteId.MASTODON_PAGE, None),
     ('https://mastodon.example.org/testtest', SocialSiteId.MASTODON_PAGE, None),
-
     # Feeds
     ('https://example.org/main.atom', SocialSiteId.RSS_FEED, None),
 ]
@@ -207,10 +206,12 @@ NOT_PARSABLE = [
 
 
 @pytest.mark.parametrize(
-    ('url','expected_social_site_id','expected_social_id'),
+    ('url', 'expected_social_site_id', 'expected_social_id'),
     PARSABLE_SOCIAL_IDS_COMBINED,
 )
-def test_parse_social_ids(url: str, expected_social_site_id: SocialSiteId, expected_social_id: str) -> None:
+def test_parse_social_ids(
+    url: str, expected_social_site_id: SocialSiteId, expected_social_id: str,
+) -> None:
     social_link: SocialLink | None = determine_social_from_url(url)
     assert social_link is not None, url
     assert (social_link.social_id, social_link.social_site_id) == (
@@ -223,14 +224,19 @@ def test_parse_social_ids(url: str, expected_social_site_id: SocialSiteId, expec
 def test_not_parsable(url: str) -> None:
     assert determine_social_from_url(url) is None
 
+
 def test_wrong_parse_type() -> None:
     with pytest.raises(TypeError):
         assert socials_util.to_parse_result(None)
 
+
 def test_from_parse_result() -> None:
-    urlresult = urllib.parse.urlparse('https://old.reddit.com/user/Harpsibored/submitted/')
+    urlresult = urllib.parse.urlparse(
+        'https://old.reddit.com/user/Harpsibored/submitted/',
+    )
     assert socials_util.to_parse_result(urlresult) is urlresult
 
+
 def test_determine_social_from_url_internally() -> None:
     with pytest.raises(TypeError):
         assert socials_util.determine_social_from_url_internally(None)
diff --git a/test/test_wikidata_property.py b/test/test_wikidata_property.py
index 878902d..909a26a 100644
--- a/test/test_wikidata_property.py
+++ b/test/test_wikidata_property.py
@@ -1,9 +1,11 @@
-import pytest
-import socials_util
 import wikidata
 
+import socials_util
+
 
 def test_wikidata_properties():
-    wikidata_client = wikidata.client.Client()
-    wikidata_property = socials_util.SocialSiteId.RSS_FEED.wikidata_property(wikidata_client)
+    wikidata_client = wikidata.client.Client()
+    wikidata_property = socials_util.SocialSiteId.RSS_FEED.wikidata_property(
+        wikidata_client,
+    )
     assert wikidata_property is not None