Consolidate linting configuration #662

Merged (6 commits, Oct 20, 2022)
5 changes: 5 additions & 0 deletions .pre-commit-config.yaml
@@ -20,3 +20,8 @@ repos:
rev: 22.3.0
hooks:
- id: black
+ - repo: https://github.com/pre-commit/mirrors-mypy
+ rev: v0.982
+ hooks:
+ - id: mypy
+ additional_dependencies: [types-beautifulsoup4, types-requests]
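
The mirrors-mypy hook runs mypy in an isolated environment, which is why the needed stub packages (types-beautifulsoup4, types-requests) are declared as additional_dependencies. A minimal sketch of how a contributor might exercise the new hook locally, assuming pre-commit itself is installed on the machine:

    pre-commit install               # register the git hook so the checks run on each commit
    pre-commit run mypy --all-files  # run only the mypy hook against the whole repository
    pre-commit run --all-files       # run every configured hook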
1 change: 0 additions & 1 deletion README.rst
@@ -342,7 +342,6 @@ Assuming you have ``>=python3.7`` installed, navigate to the directory where you
python3 -m venv .venv &&
source .venv/bin/activate &&
pip install -r requirements-dev.txt &&
- pre-commit install &&
python -m unittest

In case you want to run a single unittest for a newly developed scraper
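The truncated sentence above introduces running a single scraper's tests; the exact command is in the lines collapsed here, but a plausible sketch of the standard unittest invocation, using the FoodNetwork test module touched later in this PR as a stand-in target, would be:

    python -m unittest tests.test_foodnetwork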
2 changes: 2 additions & 0 deletions generate.py
@@ -1,3 +1,5 @@
+ # mypy: allow-untyped-defs

# generate generates a new recipe scraper.
import ast
import sys
2 changes: 1 addition & 1 deletion mypy.ini
Expand Up @@ -11,7 +11,7 @@ disallow_any_expr=False
disallow_any_decorated=False
disallow_any_explicit=False
disallow_any_generics=True
- disallow_subclassing_any=True
+ disallow_subclassing_any=False # note: currently only required for templates/scraper.py

disallow_untyped_calls=False
disallow_untyped_defs=True
3 changes: 2 additions & 1 deletion recipe_scrapers/_utils.py
@@ -1,10 +1,11 @@
# mypy: disallow_untyped_defs=False

import html
- import isodate
import math
import re

+ import isodate

from ._exceptions import ElementNotFoundInHtml

FRACTIONS = {
5 changes: 3 additions & 2 deletions recipe_scrapers/bettybossi.py
@@ -1,8 +1,9 @@
# mypy: disallow_untyped_defs=False
- from typing import Optional, Union, Tuple, Dict
+ from typing import Dict, Optional, Tuple, Union

from requests import Session

- from ._abstract import AbstractScraper, HEADERS
+ from ._abstract import HEADERS, AbstractScraper


class BettyBossi(AbstractScraper):
2 changes: 1 addition & 1 deletion recipe_scrapers/goustojson.py
@@ -1,7 +1,7 @@
# mypy: disallow_untyped_defs=False
import requests

- from ._abstract import AbstractScraper, HEADERS
+ from ._abstract import HEADERS, AbstractScraper
from ._utils import get_minutes, get_yields, normalize_string, url_path_to_dict


3 changes: 2 additions & 1 deletion recipe_scrapers/marleyspoon.py
@@ -1,9 +1,10 @@
# mypy: disallow_untyped_defs=False
import json
import re

import requests

- from ._abstract import AbstractScraper, HEADERS
+ from ._abstract import HEADERS, AbstractScraper
from ._exceptions import ElementNotFoundInHtml
from ._utils import normalize_string

2 changes: 1 addition & 1 deletion recipe_scrapers/woolworths.py
@@ -1,7 +1,7 @@
# mypy: disallow_untyped_defs=False
import requests

- from ._abstract import AbstractScraper, HEADERS
+ from ._abstract import HEADERS, AbstractScraper
from ._schemaorg import SchemaOrg
from ._utils import url_path_to_dict

5 changes: 0 additions & 5 deletions requirements-dev.txt
@@ -1,10 +1,5 @@
-e .
- black>=22.3.0
coverage>=4.5.1
- flake8>=3.8.3
- flake8-printf-formatting>=1.1.0
- pre-commit>=2.6.0
responses>=0.21.0
- mypy>=0.971
# language-tags>=1.0.0
# tld>=0.12.3
2 changes: 1 addition & 1 deletion tests/__init__.py
@@ -1,6 +1,6 @@
import os
- from typing import Any, Iterator, Optional, Tuple
import unittest
+ from typing import Any, Iterator, Optional, Tuple

import responses

1 change: 0 additions & 1 deletion tests/test_foodnetwork.py
@@ -1,7 +1,6 @@
from responses import GET

from recipe_scrapers.foodnetwork import FoodNetwork

from tests import ScraperTest


1 change: 0 additions & 1 deletion tests/test_gousto.py
@@ -1,7 +1,6 @@
from responses import GET

from recipe_scrapers.gousto import Gousto

from tests import ScraperTest


1 change: 1 addition & 0 deletions tests/test_goustojson.py
@@ -1,4 +1,5 @@
from responses import GET

from recipe_scrapers.goustojson import GoustoJson
from tests import ScraperTest

1 change: 0 additions & 1 deletion tests/test_marleyspoon.py
@@ -1,7 +1,6 @@
import responses

from recipe_scrapers.marleyspoon import MarleySpoon

from tests import ScraperTest


1 change: 1 addition & 0 deletions tests/test_woolworths.py
@@ -1,4 +1,5 @@
from responses import GET

from recipe_scrapers.woolworths import Woolworths
from tests import ScraperTest

20 changes: 4 additions & 16 deletions tox.ini
@@ -1,9 +1,6 @@
[testenv]
- deps =
- coverage >= 4.5.1
- responses >= 0.21.0
- commands =
- coverage run -m unittest
+ deps = -r{toxinidir}/requirements-dev.txt
+ commands = coverage run -m unittest

# The system-provided libxml2 on MacOS is typically outdated and this can lead to lxml parsing issues
# Using PyPi-provided binary wheels instead resolves this
@@ -15,14 +12,5 @@ install_command =

[testenv:lint]
skip_install = true
- deps =
- black >= 22.3.0
- flake8 >= 3.8.3
- flake8-printf-formatting >= 1.1.0
- mypy >= 0.971
- types-beautifulsoup4 >= 4.11.6
- types-requests >= 2.28.10
- commands =
- black --check .
- flake8 --count .
- mypy recipe_scrapers tests
+ deps = pre-commit >= 2.20.0
+ commands = pre-commit run --all-files
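
With the lint environment reduced to a single pre-commit invocation, both jobs would typically be driven through tox; a sketch assuming a conventional envlist:

    tox          # default test environment: coverage run -m unittest
    tox -e lint  # installs pre-commit and runs every configured hook across the repository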