From 893c99897d27fa5864f924c5cbc3308bbc93c7c9 Mon Sep 17 00:00:00 2001 From: James McKinney <26463+jpmckinney@users.noreply.github.com> Date: Wed, 4 Sep 2024 13:30:25 -0400 Subject: [PATCH] chore: Constrain ignore to D1, not D --- generic_scrapy/base_spiders/base_spider.py | 7 +++++-- generic_scrapy/base_spiders/export_file_spider.py | 3 +-- generic_scrapy/util.py | 2 +- pyproject.toml | 2 +- 4 files changed, 8 insertions(+), 6 deletions(-) diff --git a/generic_scrapy/base_spiders/base_spider.py b/generic_scrapy/base_spiders/base_spider.py index 3d450ff..404cf9d 100644 --- a/generic_scrapy/base_spiders/base_spider.py +++ b/generic_scrapy/base_spiders/base_spider.py @@ -6,6 +6,8 @@ class BaseSpider(scrapy.Spider): """ + The base class for this project's spiders. + With respect to the data's source: - If the source can support ``from_date`` and ``until_date`` spider arguments: @@ -46,11 +48,12 @@ def __init__( **kwargs, ): """ + Initialize the spider. + :param from_date: the date from which to download data (see :ref:`spider-arguments`) :param until_date: the date until which to download data (see :ref:`spider-arguments`) :param crawl_time: override the crawl's start time """ - super().__init__(*args, **kwargs) # https://docs.scrapy.org/en/latest/topics/spiders.html#spider-arguments @@ -111,6 +114,6 @@ def from_crawler(cls, crawler, *args, **kwargs): def parse_date_argument(self, date): """ - Returns the date argument as a datetime object. + Return the date argument as a datetime object. 
""" return datetime.datetime.strptime(date, self.date_format).replace(tzinfo=datetime.timezone.utc) diff --git a/generic_scrapy/base_spiders/export_file_spider.py b/generic_scrapy/base_spiders/export_file_spider.py index 6eb0dce..2036279 100644 --- a/generic_scrapy/base_spiders/export_file_spider.py +++ b/generic_scrapy/base_spiders/export_file_spider.py @@ -6,8 +6,7 @@ class ExportFileSpider(BaseSpider): """ - This class makes it easy to store the results from a spider into an individual CSV, JSON or both format files using - Scrapy's CSV and JSON Item Exporters. + Store items in a CSV file, JSON file or both, using Scrapy's CSV and JSON Item Exporters. #. Inherit from ``ExportFileSpider`` #. Define a ``export_outputs`` dict, with the following structure: diff --git a/generic_scrapy/util.py b/generic_scrapy/util.py index 36fd228..9fa9f67 100644 --- a/generic_scrapy/util.py +++ b/generic_scrapy/util.py @@ -3,7 +3,7 @@ def replace_parameters(url, **kwargs): """ - Returns a URL after updating the query string parameters' values. + Return a URL after updating the query string parameters' values. """ parsed = urlsplit(url) query = parse_qs(parsed.query) diff --git a/pyproject.toml b/pyproject.toml index b48c17b..d338c1c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -21,7 +21,7 @@ ignore = [ "D203", "D212", # ignore incompatible rules "D200", # documentation preferences "C901", "PLR0912", # complexity preferences - "D", # docstrings + "D1", # docstrings "PTH", # pathlib "ARG002", # Scrapy keyword arguments "ERA001", # commented Scrapy settings