Skip to content

Commit

Permalink
Migrate default project template to static project metadata
Browse files Browse the repository at this point in the history
Uses `requirements.txt` for dev requirements in project template.

Fix gh-2280.
Fix gh-2519.

Signed-off-by: Juan Luis Cano Rodríguez <juan_luis_cano@mckinsey.com>
  • Loading branch information
astrojuanlu committed Jun 21, 2023
1 parent eb75ba6 commit a450719
Show file tree
Hide file tree
Showing 14 changed files with 108 additions and 121 deletions.
13 changes: 9 additions & 4 deletions features/environment.py
Expand Up @@ -116,13 +116,18 @@ def _setup_minimal_env(context):

def _install_project_requirements(context):
    """Install the default project template's dev requirements into the test env.

    Reads the template's top-level ``requirements.txt``, filters out lines that
    cannot be pip-installed verbatim from this checkout, then installs the
    remainder together with the ``pandas.CSVDataSet`` extra of the local kedro
    package.

    Args:
        context: behave test context carrying ``pip`` (path to the pip
            executable of the test venv) and ``env`` (environment mapping for
            the subprocess call).

    Returns:
        The same ``context``, for chaining in ``environment.py`` setup.
    """
    install_reqs = (
        Path("kedro/templates/project/{{ cookiecutter.repo_name }}/requirements.txt")
        .read_text(encoding="utf-8")
        .splitlines()
    )
    # Drop lines that only make sense inside a rendered project:
    # blanks, cookiecutter/Jinja placeholders ("{"), editable installs
    # ("-e file:." targets the generated project, not this repo checkout),
    # and comment lines.
    install_reqs = [
        req
        for req in install_reqs
        if req.strip()
        and "{" not in req
        and not req.startswith("-e")
        and not req.startswith("#")
    ]
    install_reqs.append(".[pandas.CSVDataSet]")
    call([context.pip, "install", *install_reqs], env=context.env)
    return context
10 changes: 5 additions & 5 deletions features/steps/cli_steps.py
Expand Up @@ -162,7 +162,7 @@ def create_config_file(context):
@given("I have installed the project dependencies")
def pip_install_dependencies(context):
"""Install project dependencies using pip."""
reqs_path = "src/requirements.txt"
reqs_path = "requirements.txt"
res = run(
[context.pip, "install", "-r", reqs_path],
env=context.env,
Expand Down Expand Up @@ -410,7 +410,7 @@ def update_kedro_req(context: behave.runner.Context):
"""Replace kedro as a standalone requirement with a line
that includes all of kedro's dependencies (-r kedro/requirements.txt)
"""
reqs_path = context.root_project_dir / "src" / "requirements.txt"
reqs_path = context.root_project_dir / "requirements.txt"
kedro_reqs = f"-r {context.requirements_path.as_posix()}"

if reqs_path.is_file():
Expand All @@ -428,7 +428,7 @@ def update_kedro_req(context: behave.runner.Context):

@when("I add {dependency} to the requirements")
def add_req(context: behave.runner.Context, dependency: str):
reqs_path = context.root_project_dir / "src" / "requirements.txt"
reqs_path = context.root_project_dir / "requirements.txt"
if reqs_path.is_file():
reqs_path.write_text(reqs_path.read_text() + "\n" + str(dependency) + "\n")

Expand Down Expand Up @@ -610,14 +610,14 @@ def check_docs_generated(context: behave.runner.Context):
@then("requirements should be generated")
def check_reqs_generated(context: behave.runner.Context):
"""Check that new project requirements are generated."""
reqs_path = context.root_project_dir / "src" / "requirements.lock"
reqs_path = context.root_project_dir / "requirements.lock"
assert reqs_path.is_file()
assert "This file is autogenerated by pip-compile" in reqs_path.read_text()


@then("{dependency} should be in the requirements")
def check_dependency_in_reqs(context: behave.runner.Context, dependency: str):
reqs_path = context.root_project_dir / "src" / "requirements.txt"
reqs_path = context.root_project_dir / "requirements.txt"
assert dependency in reqs_path.read_text()


Expand Down
4 changes: 2 additions & 2 deletions kedro/framework/cli/micropkg.py
Expand Up @@ -238,7 +238,7 @@ def _pull_package(
package_reqs = _get_all_library_reqs(library_meta)

if package_reqs:
requirements_txt = metadata.source_dir / "requirements.txt"
requirements_txt = metadata.project_path / "requirements.txt"
_append_package_reqs(requirements_txt, package_reqs, package_name)

_clean_pycache(temp_dir_path)
Expand Down Expand Up @@ -839,7 +839,7 @@ def _generate_sdist_file(
# Build a setup.py on the fly
try:
install_requires = _make_install_requires(
package_source / "requirements.txt" # type: ignore
metadata.project_path / "requirements.txt" # type: ignore
)
except Exception as exc:
click.secho("FAILED", fg="red")
Expand Down
29 changes: 16 additions & 13 deletions kedro/framework/cli/project.py
Expand Up @@ -88,9 +88,9 @@ def test(metadata: ProjectMetadata, args, **kwargs): # pylint: disable=unused-a
try:
_check_module_importable("pytest")
except KedroCliError as exc:
source_path = metadata.source_dir
project_path = metadata.project_path
raise KedroCliError(
NO_DEPENDENCY_MESSAGE.format(module="pytest", src=str(source_path))
NO_DEPENDENCY_MESSAGE.format(module="pytest", src=str(project_path))
) from exc
python_call("pytest", args)

Expand All @@ -110,20 +110,22 @@ def lint(
click.secho(deprecation_message, fg="red")

source_path = metadata.source_dir
project_path = metadata.project_path
package_name = metadata.package_name
files = files or (str(source_path / "tests"), str(source_path / package_name))
files = files or (str(project_path / "tests"), str(source_path / package_name))

if "PYTHONPATH" not in os.environ:
# isort needs the source path to be in the 'PYTHONPATH' environment
# variable to treat it as a first-party import location
# NOTE: Actually, `pip install [-e] .` achieves the same
os.environ["PYTHONPATH"] = str(source_path) # pragma: no cover

for module_name in ("flake8", "isort", "black"):
try:
_check_module_importable(module_name)
except KedroCliError as exc:
raise KedroCliError(
NO_DEPENDENCY_MESSAGE.format(module=module_name, src=str(source_path))
NO_DEPENDENCY_MESSAGE.format(module=module_name, src=str(project_path))
) from exc

python_call("black", ("--check",) + files if check_only else files)
Expand All @@ -149,7 +151,7 @@ def ipython(
@click.pass_obj # this will pass the metadata as first argument
def package(metadata: ProjectMetadata):
"""Package the project as a Python wheel."""
source_path = metadata.source_dir
project_path = metadata.project_path
call(
[
sys.executable,
Expand All @@ -159,7 +161,7 @@ def package(metadata: ProjectMetadata):
"--outdir",
"../dist",
],
cwd=str(source_path),
cwd=str(project_path),
)

directory = (
Expand Down Expand Up @@ -199,10 +201,11 @@ def build_docs(metadata: ProjectMetadata, open_docs):
click.secho(deprecation_message, fg="red")

source_path = metadata.source_dir
project_path = metadata.project_path
package_name = metadata.package_name

python_call("pip", ["install", str(source_path / "[docs]")])
python_call("pip", ["install", "-r", str(source_path / "requirements.txt")])
python_call("pip", ["install", str(project_path / "[docs]")])
python_call("pip", ["install", "-r", str(project_path / "requirements.txt")])
python_call("ipykernel", ["install", "--user", f"--name={package_name}"])
shutil.rmtree("docs/build", ignore_errors=True)
call(
Expand Down Expand Up @@ -249,9 +252,9 @@ def build_reqs(
)
click.secho(deprecation_message, fg="red")

source_path = metadata.source_dir
input_file = Path(input_file or source_path / "requirements.txt")
output_file = Path(output_file or source_path / "requirements.lock")
project_path = metadata.project_path
input_file = Path(input_file or project_path / "requirements.txt")
output_file = Path(output_file or project_path / "requirements.lock")

if input_file.is_file():
python_call(
Expand Down Expand Up @@ -291,7 +294,7 @@ def activate_nbstripout(
)
click.secho(deprecation_message, fg="red")

source_path = metadata.source_dir
project_path = metadata.source_dir
click.secho(
(
"Notebook output cells will be automatically cleared before committing"
Expand All @@ -304,7 +307,7 @@ def activate_nbstripout(
_check_module_importable("nbstripout")
except KedroCliError as exc:
raise KedroCliError(
NO_DEPENDENCY_MESSAGE.format(module="nbstripout", src=str(source_path))
NO_DEPENDENCY_MESSAGE.format(module="nbstripout", src=str(project_path))
) from exc

try:
Expand Down
12 changes: 0 additions & 12 deletions kedro/templates/project/{{ cookiecutter.repo_name }}/README.md
Expand Up @@ -64,25 +64,13 @@ After this, if you'd like to update your project requirements, please update `re
> Jupyter, JupyterLab, and IPython are already included in the project requirements by default, so once you have run `pip install -r requirements.txt` you will not need to take any extra steps before you use them.
### Jupyter
To use Jupyter notebooks in your Kedro project, you need to install Jupyter:

```
pip install jupyter
```

After installing Jupyter, you can start a local notebook server:

```
kedro jupyter notebook
```

### JupyterLab
To use JupyterLab, you need to install it:

```
pip install jupyterlab
```

You can also start JupyterLab:

```
Expand Down
@@ -1,3 +1,38 @@
[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"

[project]
name = "{{ cookiecutter.python_package }}"
dependencies = [
"kedro~={{ cookiecutter.kedro_version }}",
]
dynamic = ["version"]

[project.scripts]
{{ cookiecutter.repo_name }} = "{{ cookiecutter.python_package }}.__main__:main"

[project.optional-dependencies]
docs = [
"docutils<0.18.0",
"sphinx~=3.4.3",
"sphinx_rtd_theme==0.5.1",
"nbsphinx==0.8.1",
"nbstripout~=0.4",
"sphinx-autodoc-typehints==1.11.1",
"sphinx_copybutton==0.3.1",
"ipykernel>=5.3, <7.0",
"Jinja2<3.1.0",
"myst-parser~=0.17.2",
]

[tool.setuptools.dynamic]
version = {attr = "{{ cookiecutter.python_package }}.__version__"}

[tool.setuptools.packages.find]
where = ["src"]
namespaces = false

[tool.kedro]
package_name = "{{ cookiecutter.python_package }}"
project_name = "{{ cookiecutter.project_name }}"
Expand Down
@@ -1,3 +1,7 @@
# Install library code
-e file:.

# Development dependencies
black~=22.0
flake8>=3.7.9, <5.0
ipython>=7.31.1, <8.0; python_version < '3.8'
Expand All @@ -6,8 +10,6 @@ isort~=5.0
jupyter~=1.0
jupyterlab_server>=2.11.1, <2.16.0
jupyterlab~=3.0, <3.6.0
kedro~={{ cookiecutter.kedro_version }}
kedro-telemetry~=0.2.0
nbstripout~=0.4
pytest-cov~=3.0
pytest-mock>=1.7.1, <2.0
Expand Down
39 changes: 0 additions & 39 deletions kedro/templates/project/{{ cookiecutter.repo_name }}/setup.py

This file was deleted.

6 changes: 3 additions & 3 deletions tests/framework/cli/micropkg/conftest.py
Expand Up @@ -26,7 +26,7 @@ def cleanup_micropackages(fake_repo_path, fake_package_path):
if each.is_file():
each.unlink()

tests = fake_repo_path / "src" / "tests" / micropackage
tests = fake_repo_path / "tests" / micropackage
if tests.is_dir():
shutil.rmtree(str(tests))

Expand All @@ -35,7 +35,7 @@ def cleanup_micropackages(fake_repo_path, fake_package_path):
def cleanup_pipelines(fake_repo_path, fake_package_path):
pipes_path = fake_package_path / "pipelines"
old_pipelines = {p.name for p in pipes_path.iterdir() if p.is_dir()}
requirements_txt = fake_repo_path / "src" / "requirements.txt"
requirements_txt = fake_repo_path / "requirements.txt"
requirements = requirements_txt.read_text()
yield

Expand All @@ -53,7 +53,7 @@ def cleanup_pipelines(fake_repo_path, fake_package_path):
if each.is_file():
each.unlink()

tests = fake_repo_path / "src" / "tests" / "pipelines" / pipeline
tests = fake_repo_path / "tests" / "pipelines" / pipeline
if tests.is_dir():
shutil.rmtree(str(tests))

Expand Down
14 changes: 7 additions & 7 deletions tests/framework/cli/micropkg/test_micropkg_requirements.py
Expand Up @@ -81,7 +81,7 @@ def test_existing_complex_project_requirements_txt(
self, fake_project_cli, fake_metadata, fake_package_path, fake_repo_path
):
"""Pipeline requirements.txt and project requirements.txt."""
project_requirements_txt = fake_repo_path / "src" / "requirements.txt"
project_requirements_txt = fake_repo_path / "requirements.txt"
with open(project_requirements_txt, "a", encoding="utf-8") as file:
file.write(COMPLEX_REQUIREMENTS)
existing_requirements = _safe_parse_requirements(
Expand Down Expand Up @@ -112,7 +112,7 @@ def test_existing_project_requirements_txt(
self, fake_project_cli, fake_metadata, fake_package_path, fake_repo_path
):
"""Pipeline requirements.txt and project requirements.txt."""
project_requirements_txt = fake_repo_path / "src" / "requirements.txt"
project_requirements_txt = fake_repo_path / "requirements.txt"
existing_requirements = _safe_parse_requirements(
project_requirements_txt.read_text()
)
Expand Down Expand Up @@ -146,7 +146,7 @@ def test_missing_project_requirements_txt(
project level."""

# Remove project requirements.txt
project_requirements_txt = fake_repo_path / "src" / "requirements.txt"
project_requirements_txt = fake_repo_path / "requirements.txt"
project_requirements_txt.unlink()

self.call_pipeline_create(fake_project_cli, fake_metadata)
Expand Down Expand Up @@ -176,7 +176,7 @@ def test_no_requirements(
"""No pipeline requirements.txt, and also no requirements.txt
at project level."""
# Remove project requirements.txt
project_requirements_txt = fake_repo_path / "src" / "requirements.txt"
project_requirements_txt = fake_repo_path / "requirements.txt"
project_requirements_txt.unlink()

self.call_pipeline_create(fake_project_cli, fake_metadata)
Expand All @@ -195,7 +195,7 @@ def test_all_requirements_already_covered(
pipeline_requirements_txt = (
fake_package_path / "pipelines" / PIPELINE_NAME / "requirements.txt"
)
project_requirements_txt = fake_repo_path / "src" / "requirements.txt"
project_requirements_txt = fake_repo_path / "requirements.txt"
pipeline_requirements_txt.write_text(SIMPLE_REQUIREMENTS)
project_requirements_txt.write_text(SIMPLE_REQUIREMENTS)

Expand All @@ -214,7 +214,7 @@ def test_no_pipeline_requirements_txt(
create project requirements.txt."""

# Remove project requirements.txt
project_requirements_txt = fake_repo_path / "src" / "requirements.txt"
project_requirements_txt = fake_repo_path / "requirements.txt"
project_requirements_txt.unlink()

self.call_pipeline_create(fake_project_cli, fake_metadata)
Expand All @@ -231,7 +231,7 @@ def test_empty_pipeline_requirements_txt(
create project requirements.txt."""

# Remove project requirements.txt
project_requirements_txt = fake_repo_path / "src" / "requirements.txt"
project_requirements_txt = fake_repo_path / "requirements.txt"
project_requirements_txt.unlink()

self.call_pipeline_create(fake_project_cli, fake_metadata)
Expand Down

0 comments on commit a450719

Please sign in to comment.