Commit: Update CI
Gallaecio committed Feb 25, 2025
1 parent 4b1dd41 commit 13fd8ae
Showing 7 changed files with 204 additions and 81 deletions.
42 changes: 16 additions & 26 deletions .github/workflows/main.yml
@@ -1,47 +1,37 @@
 name: CI

-on:
-  push:
-    branches: [ master ]
-  pull_request:
-    branches: [ master ]
-
+on: [push, pull_request]
 jobs:
-  build:
-
+  test:
     runs-on: ubuntu-latest
     strategy:
       fail-fast: false
       matrix:
         include:
           - python-version: "3.9"
             toxenv: min
           - python-version: "3.9"
           - python-version: "3.10"
           - python-version: "3.11"
           - python-version: "3.12"
           - python-version: "3.13"
           - python-version: "3.13"
             toxenv: pre-commit
           - python-version: "3.13"
             toxenv: mypy
           - python-version: "3.13"
             toxenv: pylint
           - python-version: "3.13"
             toxenv: twinecheck
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v4
+      - name: libddb
+        run: |
+          sudo apt-get install libdb-dev
       - name: Set up Python ${{ matrix.python-version }}
         uses: actions/setup-python@v5
         with:
           python-version: ${{ matrix.python-version }}
-      - name: Cache pip
-        uses: actions/cache@v2
-        with:
-          path: ~/.cache/pip
-          key: ${{ runner.os }}-pip-${{ hashFiles('tests/requirements-test.txt') }}
-          restore-keys: |
-            ${{ runner.os }}-pip-
-            ${{ runner.os }}-
-      - name: Install dependencies
-        run: |
-          python -m pip install --upgrade pip
-          pip install -r tests/requirements-test.txt
-      - name: Test with pytest
+      - name: Run
         run: |
-          pytest
+          pip install -U tox
+          tox -e ${{ matrix.toxenv }}
74 changes: 74 additions & 0 deletions pyproject.toml
@@ -1,3 +1,74 @@
[tool.coverage.run]
branch = true
include = ["scrapy_deltafetch/*"]
omit = ["tests/*"]
disable_warnings = ["include-ignored"]

[tool.coverage.paths]
source = [
"scrapy_deltafetch",
".tox/**/site-packages/scrapy-deltafetch"
]

[tool.coverage.report]
# https://github.com/nedbat/coveragepy/issues/831#issuecomment-517778185
exclude_lines = ["pragma: no cover", "if TYPE_CHECKING:"]



[tool.pylint.MASTER]
persistent = "no"
jobs = 1 # >1 hides results

[tool.pylint."MESSAGES CONTROL"]
enable = [
"useless-suppression",
]
disable = [
# Ones we want to ignore
"attribute-defined-outside-init",
"broad-exception-caught",
"consider-using-with",
"cyclic-import",
"disallowed-name",
"duplicate-code", # https://github.com/pylint-dev/pylint/issues/214
"fixme",
"import-outside-toplevel",
"inherit-non-class", # false positives with create_deprecated_class()
"invalid-name",
"invalid-overridden-method",
"isinstance-second-argument-not-valid-type", # false positives with create_deprecated_class()
"line-too-long",
"logging-format-interpolation",
"logging-fstring-interpolation",
"logging-not-lazy",
"missing-docstring",
"no-member",
"no-name-in-module", # caught by mypy already
"no-value-for-parameter", # https://github.com/pylint-dev/pylint/issues/3268
"not-callable",
"protected-access",
"redefined-builtin",
"redefined-outer-name",
"too-few-public-methods",
"too-many-ancestors",
"too-many-arguments",
"too-many-branches",
"too-many-function-args",
"too-many-instance-attributes",
"too-many-lines",
"too-many-locals",
"too-many-positional-arguments",
"too-many-public-methods",
"too-many-return-statements",
"unused-argument",
"unused-import",
"unused-variable",
"useless-import-alias", # used as a hint to mypy
"useless-return", # https://github.com/pylint-dev/pylint/issues/6530
"wrong-import-position",
]

[tool.ruff.lint]
extend-select = [
# flake8-bugbear
@@ -103,3 +174,6 @@ ignore = [
[tool.ruff.lint.per-file-ignores]
# D102: Missing docstring in public method
"tests/**" = ["D102"]

[tool.ruff.lint.pydocstyle]
convention = "pep257"
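
The exclude_lines patterns in the coverage configuration above strip lines that can never execute at runtime from the report. A minimal illustration of the two cases (the imported name is illustrative, not from this repository):

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Only evaluated by type checkers such as mypy, never at runtime,
        # so coverage would otherwise flag this import as a missed line.
        from scrapy.crawler import Crawler


    def unfinished() -> None:  # pragma: no cover
        # Explicitly excluded from the coverage report by the pragma.
        raise NotImplementedError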
1 change: 0 additions & 1 deletion requirements.txt

This file was deleted.

9 changes: 5 additions & 4 deletions scrapy_deltafetch/middleware.py
@@ -6,7 +6,6 @@
 from scrapy import signals
 from scrapy.exceptions import NotConfigured
 from scrapy.http import Request
-from scrapy.item import Item
 from scrapy.utils.project import data_path
 from scrapy.utils.python import to_bytes

@@ -36,6 +35,8 @@ def from_crawler(cls, crawler):  # noqa: D102
         dir = data_path(s.get("DELTAFETCH_DIR", "deltafetch"))
         reset = s.getbool("DELTAFETCH_RESET")
         o = cls(dir, reset, crawler.stats)
+        if o.stats is None:
+            o.stats = crawler.stats
         crawler.signals.connect(o.spider_opened, signal=signals.spider_opened)
         crawler.signals.connect(o.spider_closed, signal=signals.spider_closed)

@@ -56,14 +57,14 @@ def spider_opened(self, spider):  # noqa: D102
         reset = self.reset or getattr(spider, "deltafetch_reset", False)
         flag = "n" if reset else "c"
         try:
-            self.db = dbm.open(dbpath, flag=flag)  # noqa: SIM115
+            self.db = dbm.open(str(dbpath), flag=flag)  # noqa: SIM115
         except Exception:
             logger.warning(
                 f"Failed to open DeltaFetch database at {dbpath}, trying to recreate it"
             )
             if dbpath.exists():
                 dbpath.unlink()
-            self.db = dbm.open(dbpath, "c")  # noqa: SIM115
+            self.db = dbm.open(str(dbpath), "c")  # noqa: SIM115

     def spider_closed(self, spider):  # noqa: D102
         self.db.close()
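
The str(dbpath) wrapping added above is presumably needed because dbm.open() only accepts path-like objects from Python 3.11 onward, while the CI matrix above still covers Python 3.9. A minimal sketch of the pattern, with a hypothetical path:

    import dbm
    from pathlib import Path

    dbpath = Path(".scrapy") / "deltafetch" / "example.db"  # hypothetical path
    dbpath.parent.mkdir(parents=True, exist_ok=True)

    # On Python < 3.11, dbm.open() rejects Path objects, so stringify first.
    # Flag "c" opens read/write and creates the database if missing; "n"
    # always starts from an empty database (used when DELTAFETCH_RESET is set).
    db = dbm.open(str(dbpath), "c")
    db[b"request-fingerprint"] = b"1700000000.0"  # keys and values are bytes
    db.close()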
@@ -77,7 +78,7 @@ def process_spider_output(self, response, result, spider):  # noqa: D102
                 if self.stats:
                     self.stats.inc_value("deltafetch/skipped", spider=spider)
                 continue
-            elif isinstance(r, (Item, dict)):
+            else:
                 key = self._get_key(response.request)
                 self.db[key] = str(time.time())
                 if self.stats:
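
With the bare else: branch above, anything a spider yields that is not a Request is now recorded as a seen item, covering dataclass and attrs items rather than only Item and dict instances. A simplified, hedged sketch of the resulting control flow (not the middleware's verbatim code; _get_key is reduced to a URL lookup here):

    import time

    from scrapy.http import Request


    class DeltaFetchSketch:
        """Hedged sketch of the middleware's output handling."""

        def __init__(self):
            self.db = {}  # stands in for the dbm database

        def _get_key(self, request):
            # The real middleware derives this from request metadata or a
            # request fingerprint; a plain URL is used here for brevity.
            return request.url.encode()

        def process_spider_output(self, response, result, spider):
            for r in result:
                if isinstance(r, Request):
                    if self._get_key(r) in self.db:
                        continue  # request already processed in a past run
                else:
                    # Not a Request, so treat it as an item: dicts, Item
                    # subclasses, dataclass and attrs items all land here.
                    self.db[self._get_key(response.request)] = str(time.time())
                yield r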
5 changes: 0 additions & 5 deletions tests/requirements-test.txt

This file was deleted.

(The diffs for the remaining 2 changed files did not load.)
