Skip to content

Commit

Permalink
chore: Constrain ignore to D1, not D
Browse files Browse the repository at this point in the history
  • Loading branch information
jpmckinney committed Sep 4, 2024
1 parent 7379f7b commit 893c998
Show file tree
Hide file tree
Showing 4 changed files with 8 additions and 6 deletions.
7 changes: 5 additions & 2 deletions generic_scrapy/base_spiders/base_spider.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,8 @@

class BaseSpider(scrapy.Spider):
"""
The base class for this project's spiders.
With respect to the data's source:
- If the source can support ``from_date`` and ``until_date`` spider arguments:
Expand Down Expand Up @@ -46,11 +48,12 @@ def __init__(
**kwargs,
):
"""
Initialize the spider.
:param from_date: the date from which to download data (see :ref:`spider-arguments`)
:param until_date: the date until which to download data (see :ref:`spider-arguments`)
:param crawl_time: override the crawl's start time
"""

super().__init__(*args, **kwargs)

# https://docs.scrapy.org/en/latest/topics/spiders.html#spider-arguments
Expand Down Expand Up @@ -111,6 +114,6 @@ def from_crawler(cls, crawler, *args, **kwargs):

def parse_date_argument(self, date):
"""
Returns the date argument as a datetime object.
Return the date argument as a datetime object.
"""
return datetime.datetime.strptime(date, self.date_format).replace(tzinfo=datetime.timezone.utc)
3 changes: 1 addition & 2 deletions generic_scrapy/base_spiders/export_file_spider.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,8 +6,7 @@

class ExportFileSpider(BaseSpider):
"""
This class makes it easy to store the results from a spider into an individual CSV, JSON or both format files using
Scrapy's CSV and JSON Item Exporters.
Store items in a CSV file, JSON file or both, using Scrapy's CSV and JSON Item Exporters.
#. Inherit from ``ExportFileSpider``
#. Define a ``export_outputs`` dict, with the following structure:
Expand Down
2 changes: 1 addition & 1 deletion generic_scrapy/util.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@

def replace_parameters(url, **kwargs):
"""
Returns a URL after updating the query string parameters' values.
Return a URL after updating the query string parameters' values.
"""
parsed = urlsplit(url)
query = parse_qs(parsed.query)
Expand Down
2 changes: 1 addition & 1 deletion pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ ignore = [
"D203", "D212", # ignore incompatible rules
"D200", # documentation preferences
"C901", "PLR0912", # complexity preferences
"D", # docstrings
"D1", # docstrings
"PTH", # pathlib
"ARG002", # Scrapy keyword arguments
"ERA001", # commented Scrapy settings
Expand Down

0 comments on commit 893c998

Please sign in to comment.