From a9e2cb3cccb41ded947ef6812429c6159295418e Mon Sep 17 00:00:00 2001 From: Jon Michael Aanes Date: Fri, 1 Nov 2024 21:42:16 +0100 Subject: [PATCH] Wikidata client and property --- requirements_test.txt | 1 + test/test_data.py | 5 +++++ test/test_extension.py | 5 +++++ test/test_parsing.py | 23 ++++++++++++++++++++++- test/test_wikidata_property.py | 9 +++++++++ 5 files changed, 42 insertions(+), 1 deletion(-) create mode 100644 test/test_data.py create mode 100644 test/test_wikidata_property.py diff --git a/requirements_test.txt b/requirements_test.txt index e079f8a..f6cf578 100644 --- a/requirements_test.txt +++ b/requirements_test.txt @@ -1 +1,2 @@ pytest +wikidata diff --git a/test/test_data.py b/test/test_data.py new file mode 100644 index 0000000..922a44d --- /dev/null +++ b/test/test_data.py @@ -0,0 +1,5 @@ +import socials_util + +def test_is_aggregator(): + assert not socials_util.SocialSiteId.PAGE_WATCH.is_aggregator() + assert not socials_util.SocialSiteId.FALKON_PROFILE_BOOKMARKS.is_aggregator() diff --git a/test/test_extension.py b/test/test_extension.py index 7b769d1..cf0e815 100644 --- a/test/test_extension.py +++ b/test/test_extension.py @@ -1,7 +1,12 @@ +import pytest import aenum import socials_util +def test_re_social_path_validation(): + with pytest.raises(ValueError): + socials_util.re_social_path('www.example.org') + def test_extension(): my_secret_site = aenum.extend_enum(socials_util.SocialSiteId, 'MY_SECRET_SITE', 666) diff --git a/test/test_parsing.py b/test/test_parsing.py index 978a367..4fca71c 100644 --- a/test/test_parsing.py +++ b/test/test_parsing.py @@ -1,8 +1,10 @@ import pytest +import urllib.parse +import socials_util from socials_util import SocialLink, SocialSiteId, determine_social_from_url -PARSABLE_SOCIAL_IDS_COMBINED: list[tuple[str, object, str]] = [ +PARSABLE_SOCIAL_IDS_COMBINED: list[tuple[str, object, str | None]] = [ # Tumblr formats ('https://triviallytrue.tumblr.com/', SocialSiteId.TUMBLR, 'triviallytrue'), 
('https://www.triviallytrue.tumblr.com/', SocialSiteId.TUMBLR, 'triviallytrue'), @@ -186,6 +188,13 @@ PARSABLE_SOCIAL_IDS_COMBINED: list[tuple[str, object, str]] = [ # Cara ('https://cara.app/simzart', SocialSiteId.CARA_PROFILE, 'simzart'), ('https://cara.app/simzart/all', SocialSiteId.CARA_PROFILE, 'simzart'), + + # Mastodon + ('https://idlethumbs.social/@testtest', SocialSiteId.MASTODON_PAGE, None), + ('https://mastodon.example.org/testtest', SocialSiteId.MASTODON_PAGE, None), + + # Feeds + ('https://example.org/main.atom', SocialSiteId.RSS_FEED, None), ] NOT_PARSABLE = [ @@ -213,3 +222,15 @@ def test_parse_social_ids(url: str, expected_social_site_id: SocialSiteId, expec @pytest.mark.parametrize('url', NOT_PARSABLE) def test_not_parsable(url: str) -> None: assert determine_social_from_url(url) is None + +def test_wrong_parse_type() -> None: + with pytest.raises(TypeError): + assert socials_util.to_parse_result(None) + +def test_from_parse_result() -> None: + urlresult = urllib.parse.urlparse('https://old.reddit.com/user/Harpsibored/submitted/') + assert socials_util.to_parse_result(urlresult) is urlresult + +def test_determine_social_from_url_internally() -> None: + with pytest.raises(TypeError): + assert socials_util.determine_social_from_url_internally(None) diff --git a/test/test_wikidata_property.py b/test/test_wikidata_property.py new file mode 100644 index 0000000..878902d --- /dev/null +++ b/test/test_wikidata_property.py @@ -0,0 +1,9 @@ +import pytest +import socials_util +import wikidata.client + + +def test_wikidata_properties(): + wikidata_client = wikidata.client.Client() + wikidata_property = socials_util.SocialSiteId.RSS_FEED.wikidata_property(wikidata_client) + assert wikidata_property is not None