1
0

Ruff
All checks were successful
Test Python / Test (push) Successful in 27s

This commit is contained in:
Jon Michael Aanes 2024-06-06 23:50:29 +02:00
parent 25e9e50e7c
commit a6e6c6dfee
Signed by: Jmaa
SSH Key Fingerprint: SHA256:Ab0GfHGCblESJx7JRE4fj4bFy/KRpeLhi41y4pF3sNA
6 changed files with 93 additions and 33 deletions

View File

@@ -18,10 +18,14 @@ def parse_arguments():
parser.add_argument('--cookiejar', action='store_true')
parser.add_argument('--email', action='store_true', dest='send_email_notification')
parser.add_argument(
'--loud-sound', action='store_true', dest='trigger_loud_and_annoying_sound',
'--loud-sound',
action='store_true',
dest='trigger_loud_and_annoying_sound',
)
parser.add_argument(
'--ignore-cache', action='store_true', dest='ignore_cache',
'--ignore-cache',
action='store_true',
dest='ignore_cache',
)
return parser.parse_args()
@@ -43,7 +47,7 @@ def main():
scraper_filter,
use_cookiejar=args.cookiejar,
notification_types=frozenset(notification_types),
ignore_cache = args.ignore_cache,
ignore_cache=args.ignore_cache,
)

View File

@@ -1 +1 @@
__version__ = '0.1.19'
__version__ = '0.1.19'

View File

@@ -1,10 +1,8 @@
import dataclasses
from decimal import Decimal
import datetime
import logging
from collections.abc import Iterator, Mapping
from frozendict import frozendict
from decimal import Decimal
from personal_data.data import DeduplicateMode, Scraper
@@ -13,7 +11,8 @@ from .. import secrets
logger = logging.getLogger(__name__)
HA_ROOT = secrets.HOME_ASSISTANT_ROOT
HA_LLAK= secrets.HOME_ASSISTANT_LLAK
HA_LLAK = secrets.HOME_ASSISTANT_LLAK
@dataclasses.dataclass(frozen=True)
class HomeAssistantScaleWeight(Scraper):
@@ -23,18 +22,18 @@ class HomeAssistantScaleWeight(Scraper):
def scrape(self) -> Iterator[Mapping[str, object]]:
headers = {
'Authorization': 'Bearer '+HA_LLAK,
'Content-Type': 'application/json',
'Authorization': 'Bearer ' + HA_LLAK,
'Content-Type': 'application/json',
}
end_time = datetime.datetime.now()
start_time = end_time - datetime.timedelta(days = 90)
url = '{}/api/history/period/{}'.format(HA_ROOT, start_time)
start_time = end_time - datetime.timedelta(days=90)
url = f'{HA_ROOT}/api/history/period/{start_time}'
print(url)
params = {
'filter_entity_id': 'sensor.bathroom_scale_mass',
'end_time': end_time,
'filter_entity_id': 'sensor.bathroom_scale_mass',
'end_time': end_time,
}
response = self.session.get(url, params = params, headers = headers)
response = self.session.get(url, params=params, headers=headers)
response.raise_for_status()
data = response.json()
@@ -45,8 +44,14 @@ class HomeAssistantScaleWeight(Scraper):
if d['state'] == 'unavailable':
continue
state = Decimal(d['state'])
if state_range_for_consideration[0] <= state <= state_range_for_consideration[1]:
if (
state_range_for_consideration[0]
<= state
<= state_range_for_consideration[1]
):
yield {
'weight.sample_time': datetime.datetime.fromisoformat(d['last_updated']),
'weight.kg': state,
'weight.sample_time': datetime.datetime.fromisoformat(
d['last_updated'],
),
'weight.kg': state,
}

View File

@@ -77,7 +77,9 @@ def equals_without_fields(
return frozendict(a) == frozendict(b)
def deduplicate_by_ignoring_certain_fields(dicts: list[dict],
def deduplicate_by_ignoring_certain_fields(
dicts: list[dict],
deduplicate_ignore_columns: list[str],
) -> list[dict]:
"""Removes duplicates that occur when ignoring certain columns.
@@ -96,6 +98,7 @@ def deduplicate_by_ignoring_certain_fields(dicts: list[dict],
return dicts
def deduplicate_dicts(
dicts: Sequence[dict],
deduplicate_mode: personal_data.data.DeduplicateMode,
@@ -117,8 +120,10 @@ def deduplicate_dicts(
):
del dicts[-1]
elif deduplicate_mode == personal_data.data.DeduplicateMode.BY_ALL_COLUMNS:
dicts = deduplicate_by_ignoring_certain_fields(dicts,
deduplicate_ignore_columns)
dicts = deduplicate_by_ignoring_certain_fields(
dicts,
deduplicate_ignore_columns,
)
elif deduplicate_mode != personal_data.data.DeduplicateMode.NONE:
dicts = set(dicts)
@@ -209,7 +214,12 @@ if cfscrape:
pass
def get_session(cookiejar: Sequence, *, with_cfscrape: bool, ignore_cache: bool) -> requests.Session:
def get_session(
cookiejar: Sequence,
*,
with_cfscrape: bool,
ignore_cache: bool,
) -> requests.Session:
assert isinstance(with_cfscrape, bool)
session_class = requests_cache.CachedSession
if ignore_cache:
@@ -257,9 +267,11 @@ def main(
logger.info('No notifications enabled: Notifications will not be sent!')
for scraper_cls in available_scrapers():
session = get_session(cookiejar,
with_cfscrape=scraper_cls.requires_cfscrape(),
ignore_cache = ignore_cache)
session = get_session(
cookiejar,
with_cfscrape=scraper_cls.requires_cfscrape(),
ignore_cache=ignore_cache,
)
scraper = scraper_cls(session)
if scraper_cls.__name__ not in scraper_filter:
continue
@@ -286,7 +298,10 @@ def main(
if status['extended']:
notification.send_notifications(
session, scraper_cls.__name__, status['dicts'][-1], notification_types,
session,
scraper_cls.__name__,
status['dicts'][-1],
notification_types,
)
del scraper, session

View File

@@ -9,7 +9,8 @@ from . import mailgun
logger = logging.getLogger(__name__)
SOUND_PATH = 'resource/sound/57808__guitarguy1985__carterattack.mp3'
#SOUND_PATH = 'resource/sound/516855__matrixxx__wake-up-01.wav'
# SOUND_PATH = 'resource/sound/516855__matrixxx__wake-up-01.wav'
class NotificationType(enum.Enum):
EMAIL = 1
@@ -33,8 +34,10 @@ def play_loud_and_annoying_sound(
latest_dict: frozendict,
) -> None:
import playsound3
playsound3.playsound(SOUND_PATH, block=False)
NOTIFICATION_TYPE_TO_NOTIFIER = {
NotificationType.EMAIL: send_email_notification,
NotificationType.LOUD_SOUND: play_loud_and_annoying_sound,
@@ -49,5 +52,7 @@ def send_notifications(
) -> None:
for notification_type in notification_types:
NOTIFICATION_TYPE_TO_NOTIFIER[notification_type](
session, scraper_name, latest_dict,
session,
scraper_name,
latest_dict,
)

View File

@@ -1,8 +1,8 @@
from frozendict import frozendict
import datetime
from decimal import Decimal
from frozendict import frozendict
from personal_data.data import DeduplicateMode
from personal_data.main import deduplicate_dicts
@@ -38,6 +38,7 @@ def test_all_fields():
frozendict({'a': 1, 'b': 2, 't': 300}),
]
def test_all_fields():
ls, fields = deduplicate_dicts(LIST + LIST, DeduplicateMode.BY_ALL_COLUMNS, ['t'])
assert fields == ['a', 'b', 't']
@@ -47,18 +48,48 @@ def test_all_fields():
frozendict({'a': 1, 'b': 2, 't': 300}),
]
LIST_2 = [
frozendict({'weight.sample_time': datetime.datetime(2024, 5, 28, 6, 27, 31, 134506, tzinfo=datetime.timezone.utc), 'weight.kg': Decimal('73.6')}),
frozendict({'weight.sample_time': datetime.datetime(2024, 6, 1, 7, 36, 9, 590355, tzinfo=datetime.timezone.utc), 'weight.kg': Decimal('74.7')}),
frozendict(
{
'weight.sample_time': datetime.datetime(
2024,
5,
28,
6,
27,
31,
134506,
tzinfo=datetime.timezone.utc,
),
'weight.kg': Decimal('73.6'),
},
),
frozendict(
{
'weight.sample_time': datetime.datetime(
2024,
6,
1,
7,
36,
9,
590355,
tzinfo=datetime.timezone.utc,
),
'weight.kg': Decimal('74.7'),
},
),
]
def test_deduplicate_weight():
ls, fields = deduplicate_dicts(LIST_2, DeduplicateMode.BY_ALL_COLUMNS, [])
assert fields == ['weight.sample_time', 'weight.kg']
assert ls == LIST_2
def test_deduplicate_weight_2():
ls, fields = deduplicate_dicts(LIST_2 + LIST_2, DeduplicateMode.BY_ALL_COLUMNS, [])
assert fields == ['weight.sample_time', 'weight.kg']
assert ls == LIST_2