From a450fbb617893996a50c13f55b593294c22ec18f Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ond=C5=99ej=20Gajdu=C5=A1ek?=
Date: Tue, 8 Aug 2023 23:07:49 +0200
Subject: [PATCH] Address Ruff's rules, add few enhancements

---
 custom_components/feedparser/sensor.py | 37 +++++++++++++---
 pyproject.toml                         | 42 +++++++++++++-----
 tests/conftest.py                      |  7 ++-
 tests/download.py                      |  9 ++--
 tests/feedsource.py                    | 11 +++--
 tests/generate_ha_config.py            |  2 +-
 tests/test_sensors.py                  | 60 +++++++++++++++-----------
 7 files changed, 111 insertions(+), 57 deletions(-)

diff --git a/custom_components/feedparser/sensor.py b/custom_components/feedparser/sensor.py
index 58cb885..a898875 100644
--- a/custom_components/feedparser/sensor.py
+++ b/custom_components/feedparser/sensor.py
@@ -55,10 +55,10 @@
 
 
 async def async_setup_platform(
-    hass: HomeAssistant,
+    hass: HomeAssistant,  # noqa: ARG001
     config: ConfigType,
     async_add_devices: AddEntitiesCallback,
-    discovery_info: DiscoveryInfoType | None = None,
+    discovery_info: DiscoveryInfoType | None = None,  # noqa: ARG001
 ) -> None:
     """Set up the Feedparser sensor."""
     async_add_devices(
@@ -92,6 +92,7 @@ def __init__(
         scan_interval: timedelta,
         local_time: bool,
     ) -> None:
+        """Initialize the Feedparser sensor."""
         self._feed = feed
         self._attr_name = name
         self._attr_icon = "mdi:rss"
@@ -112,6 +113,7 @@ def update(self: FeedParserSensor) -> None:
             self._attr_native_value = None
             return
 
+        # set the sensor value to the amount of entries
         self._attr_native_value = (
             self._show_topn
             if len(parsed_feed.entries) > self._show_topn
@@ -120,7 +122,8 @@ def update(self: FeedParserSensor) -> None:
         self._entries.extend(self._generate_entries(parsed_feed))
 
     def _generate_entries(
-        self: FeedParserSensor, parsed_feed: FeedParserDict
+        self: FeedParserSensor,
+        parsed_feed: FeedParserDict,
     ) -> list[dict[str, str]]:
         return [
             self._generate_sensor_entry(feed_entry)
@@ -130,7 +133,8 @@ def _generate_entries(
         ]
 
     def _generate_sensor_entry(
-        self: FeedParserSensor, feed_entry: FeedParserDict
+        self: FeedParserSensor,
+        feed_entry: FeedParserDict,
     ) -> dict[str, str]:
         sensor_entry = {}
         for key, value in feed_entry.items():
@@ -166,14 +170,16 @@ def _parse_date(self: FeedParserSensor, date: str) -> datetime:
         if not parsed_time.tzname():
             # replace tzinfo with UTC offset if tzinfo does not contain a TZ name
             parsed_time = parsed_time.replace(
-                tzinfo=timezone(parsed_time.utcoffset())  # type: ignore[arg-type]
+                tzinfo=timezone(parsed_time.utcoffset()),  # type: ignore[arg-type]
             )
         if self._local_time:
             parsed_time = dt.as_local(parsed_time)
         return parsed_time
 
     def _process_image(
-        self: FeedParserSensor, feed_entry: FeedParserDict, sensor_entry: dict[str, str]
+        self: FeedParserSensor,
+        feed_entry: FeedParserDict,
+        sensor_entry: dict[str, str],
     ) -> None:
         if "image" in self._inclusions and "image" not in sensor_entry.keys():
             if "enclosures" in feed_entry:
@@ -191,7 +197,24 @@ def _process_image(
                     "image"
                 ] = DEFAULT_THUMBNAIL  # use default image if no image found
 
+    @property
+    def feed_entries(self: FeedParserSensor) -> list[dict[str, str]]:
+        """Return feed entries."""
+        if hasattr(self, "_entries"):
+            return self._entries
+        return []
+
+    @property
+    def local_time(self: FeedParserSensor) -> bool:
+        """Return local_time."""
+        return self._local_time
+
+    @local_time.setter
+    def local_time(self: FeedParserSensor, value: bool) -> None:
+        """Set local_time."""
+        self._local_time = value
+
     @property
     def extra_state_attributes(self: FeedParserSensor) -> dict[str, list]:
         """Return entity specific state attributes."""
- return {"entries": self._entries} + return {"entries": self.feed_entries} diff --git a/pyproject.toml b/pyproject.toml index a0d12a0..bac2809 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -35,7 +35,7 @@ dev = [ "types-python-dateutil", "types-PyYAML", "voluptuous-stubs", - "pyyaml" + "pyyaml", ] [project.urls] @@ -67,26 +67,41 @@ exclude = ''' # Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or # McCabe complexity (`C901`) by default. select = [ - "E", - "F", + "ANN", + "ARG", + "ARG", "B", - "RET", - "PL", + "C", + "COM", "D", - "ANN", + "D", + "DTZ", + "E", + "EM", + "F", + "FBT", "FBT", - "ARG", "I", - "UP", "PGH", + "PL", + "PLR", + "PT", + "RET", + "RUF", + "SIM", + "SLF", + "TCH", + "TRY", + "UP", ] + +# Q000,ANN,PT009,D,E501, ignore = [ "D107", # Missing docstring in __init__ "FBT001", # Boolean positional arg in function definition - "PLR0913", # Too many arguments to function call - "ARG001", # Unused function argument "D203", # 1 blank line required before class docstring "D213", # Multi-line docstring summary should start at the first line + "FBT001" # Boolean positional argument in function definition ] # Allow autofix for all enabled rules (when `--fix`) is provided. @@ -113,7 +128,6 @@ exclude = [ "dist", "venv", ] -per-file-ignores = {} # Same as Black. line-length = 88 @@ -124,6 +138,12 @@ dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" # Assume Python 3.11. target-version = "py311" +[tool.ruff.per-file-ignores] +"tests/**" = ["S101"] + +[tool.ruff.pylint] +max-args = 9 + [[tool.mypy.overrides]] module = "feedparser.*" # Workaround till https://github.com/kurtmckee/feedparser/pull/282 gets merged to the main branch diff --git a/tests/conftest.py b/tests/conftest.py index b4c835b..f69e332 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -3,7 +3,6 @@ import pytest from constants import TEST_FEEDS from feedsource import FeedSource -from pytest import FixtureRequest from custom_components.feedparser.sensor import FeedParserSensor @@ -20,13 +19,13 @@ def pytest_generate_tests(metafunc: pytest.Metafunc) -> None: metafunc.parametrize("feed", feeds, ids=[f.name for f in feeds], indirect=True) -@pytest.fixture -def feed(request: FixtureRequest) -> FeedSource: +@pytest.fixture() +def feed(request: pytest.FixtureRequest) -> FeedSource: """Return feed file source.""" return request.param -@pytest.fixture +@pytest.fixture() def feed_sensor(feed: FeedSource) -> FeedParserSensor: """Return feed sensor initialized with the local RSS feed.""" return FeedParserSensor(**feed.sensor_config_local_feed) diff --git a/tests/download.py b/tests/download.py index c07cf9e..a8937fc 100644 --- a/tests/download.py +++ b/tests/download.py @@ -1,20 +1,19 @@ """Download RSS feeds for testing.""" import asyncio +import datetime import json -from datetime import datetime import aiohttp from constants import TEST_FEEDS from feedsource import FeedSource -base_date = datetime.now() +base_date = datetime.datetime.now(datetime.UTC) async def run_feed(feed: FeedSource) -> None: """Download feed and store its metadata and content.""" - async with aiohttp.ClientSession() as session: - async with session.get(feed.url) as response: - text = await response.text() + async with aiohttp.ClientSession() as session, session.get(feed.url) as response: + text = await response.text() metadata = feed.raw metadata["download_date"] = base_date.isoformat() feed.metadata_path.write_text(json.dumps(metadata, indent=4) + "\n") diff --git a/tests/feedsource.py b/tests/feedsource.py index 
--- a/tests/feedsource.py
+++ b/tests/feedsource.py
@@ -71,9 +71,12 @@ def download_date(self: "FeedSource") -> datetime:
         try:
             return datetime.fromisoformat(self.metadata["download_date"])
         except KeyError as ke:
-            raise KeyError(
+            msg = (
                 f"download_date not found in {self.metadata_path}. "
                 "Is feed metadata downloaded?"
+            )
+            raise KeyError(
+                msg,
             ) from ke
 
     @property
@@ -121,14 +124,16 @@ def ha_config_entry(self: "FeedSource") -> dict[str, Any]:
 
     @classmethod
     def gen_ha_sensors_yml_config(
-        cls: type["FeedSource"], sensors: list["FeedSource"]
+        cls: type["FeedSource"],
+        sensors: list["FeedSource"],
     ) -> str:
         """Generate HA "sensors" config."""
         return yaml.dump([s.ha_config_entry for s in sensors])
 
     @classmethod
     def create_ha_sensors_config_file(
-        cls: type["FeedSource"], sensors: list["FeedSource"]
+        cls: type["FeedSource"],
+        sensors: list["FeedSource"],
     ) -> None:
         """Create HA "sensors" config file."""
         sensors_yml = TEST_HASS_PATH / "sensors.yaml"
diff --git a/tests/generate_ha_config.py b/tests/generate_ha_config.py
index 8a8dadc..875a725 100644
--- a/tests/generate_ha_config.py
+++ b/tests/generate_ha_config.py
@@ -3,4 +3,4 @@
 from feedsource import FeedSource
 
 fsources = [FeedSource(fs) for fs in TEST_FEEDS]
-FeedSource.create_ha_sensors_config_file([fs for fs in fsources])
+FeedSource.create_ha_sensors_config_file(fsources)
diff --git a/tests/test_sensors.py b/tests/test_sensors.py
index 1395c45..6b43dfd 100644
--- a/tests/test_sensors.py
+++ b/tests/test_sensors.py
@@ -1,8 +1,8 @@
 """"Tests the feedparser sensor."""
 
 
-import time
 from datetime import UTC, datetime
+from typing import TYPE_CHECKING
 
 import feedparser
 import pytest
@@ -15,11 +15,14 @@
     FeedParserSensor,
 )
 
+if TYPE_CHECKING:
+    import time
+
 
 def test_simple(feed_sensor: FeedParserSensor) -> None:
     """Test simple."""
     feed_sensor.update()
-    assert feed_sensor._entries
+    assert feed_sensor.feed_entries
 
 
 def test_update_sensor(feed: FeedSource) -> None:
@@ -35,45 +38,45 @@ def test_update_sensor(feed: FeedSource) -> None:
         scan_interval=DEFAULT_SCAN_INTERVAL,
     )
     feed_sensor.update()
-    assert feed_sensor._entries
+    assert feed_sensor.feed_entries
 
     # assert that the sensor value is equal to the number of entries
-    assert feed_sensor._attr_native_value == len(feed_sensor._entries)
+    assert feed_sensor.native_value == len(feed_sensor.feed_entries)
 
     # assert that all entries have a title
-    assert all(e["title"] for e in feed_sensor._entries)
+    assert all(e["title"] for e in feed_sensor.feed_entries)
 
     # assert that all entries have a link
-    assert all(e["link"] for e in feed_sensor._entries)
+    assert all(e["link"] for e in feed_sensor.feed_entries)
 
     # assert that all entries have a published date
-    assert all(e["published"] for e in feed_sensor._entries)
+    assert all(e["published"] for e in feed_sensor.feed_entries)
 
     # assert that all entries have non-default image
     if feed.has_images:
-        assert all(e["image"] != DEFAULT_THUMBNAIL for e in feed_sensor._entries)
+        assert all(e["image"] != DEFAULT_THUMBNAIL for e in feed_sensor.feed_entries)
     else:
-        assert all(e["image"] == DEFAULT_THUMBNAIL for e in feed_sensor._entries)
+        assert all(e["image"] == DEFAULT_THUMBNAIL for e in feed_sensor.feed_entries)
 
     # assert that all entries have a unique link
-    assert len({e["link"] for e in feed_sensor._entries}) == len(
-        feed_sensor._entries
+    assert len({e["link"] for e in feed_sensor.feed_entries}) == len(
+        feed_sensor.feed_entries,
     ), "Duplicate links found"
 
     # assert that all entries have a unique title
-    assert len({e["title"] for e in feed_sensor._entries}) == len(
len({e["title"] for e in feed_sensor._entries}) == len( - feed_sensor._entries + assert len({e["title"] for e in feed_sensor.feed_entries}) == len( + feed_sensor.feed_entries, ), "Duplicate titles found" # assert that all entries have a unique published date - assert len({e["published"] for e in feed_sensor._entries}) == len( - feed_sensor._entries + assert len({e["published"] for e in feed_sensor.feed_entries}) == len( + feed_sensor.feed_entries, ), "Duplicate published dates found" # assert that all entries have a unique image if feed.has_images: - assert len({e["image"] for e in feed_sensor._entries}) == len( - feed_sensor._entries + assert len({e["image"] for e in feed_sensor.feed_entries}) == len( + feed_sensor.feed_entries, ), "Duplicate images found" @@ -91,27 +94,31 @@ def test_update_sensor_with_topn(feed: FeedSource) -> None: scan_interval=DEFAULT_SCAN_INTERVAL, ) feed_sensor.update() - assert feed_sensor._entries + assert feed_sensor.feed_entries # assert that the sensor value is equal to the number of # entries and that only top N entries are stored - assert feed_sensor._attr_native_value == show_topn == len(feed_sensor._entries) + assert feed_sensor.native_value == show_topn == len(feed_sensor.feed_entries) @pytest.mark.parametrize( - "local_time", [True, False], ids=["local_time", "default_time"] + "local_time", + [True, False], + ids=["local_time", "default_time"], ) def test_update_sensor_entries_time( - feed: FeedSource, feed_sensor: FeedParserSensor, local_time: bool + feed: FeedSource, + feed_sensor: FeedParserSensor, + local_time: bool, ) -> None: """Test that the sensor converts the published date to local time.""" - feed_sensor._local_time = local_time + feed_sensor.local_time = local_time feed_sensor.update() - assert feed_sensor._entries + assert feed_sensor.feed_entries # load the feed with feedparser parsed_feed: feedparser.FeedParserDict = feedparser.parse( - feed.path.absolute().as_uri() + feed.path.absolute().as_uri(), ) # get the first entry @@ -122,8 +129,9 @@ def test_update_sensor_entries_time( first_entry_time: datetime = datetime(*first_entry_struct_time[:6], tzinfo=UTC) # get the time of the first entry in the sensor - first_sensor_entry_time: datetime = datetime.strptime( - feed_sensor._entries[0]["published"], feed.sensor_config.date_format + first_sensor_entry_time: datetime = datetime.strptime( # noqa: DTZ007 + feed_sensor.feed_entries[0]["published"], + feed.sensor_config.date_format, ) # assert that the time of the first entry in the sensor is equal to