From 3ad071244332cc78e4be597a05ad5703d17787bd Mon Sep 17 00:00:00 2001
From: Chris Sewell
Date: Thu, 12 Aug 2021 15:58:44 +0200
Subject: [PATCH] =?UTF-8?q?=E2=99=BB=EF=B8=8F=20REFACTOR:=20Remove=20`reen?=
 =?UTF-8?q?try`=20requirement=20(#5058)?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

This commit replaces the use of `reentry`, for entry point loading, with
`importlib_metadata` and, in turn, removes the requirement for users to run
`reentry scan` after installations.

aiida-core makes heavy use of entry points to define plugins. The `reentry`
package was introduced to load these plugins since, at the time, the de facto
`pkg_resources` method for using entry points was too slow, in particular for
responsive CLI usage. This, however, came with the drawback that users must
perform an extra step to register the plugins before aiida-core can be used,
and again whenever new plugins are installed.

In recent years `importlib.metadata` and its backport `importlib_metadata`
have replaced `pkg_resources`, and as of
https://github.com/python/importlib_metadata/pull/317 are now on a par with
`reentry` for performance. For now, we use `importlib_metadata` for all python
versions, rather than the built-in (as of python 3.8) `importlib.metadata`,
so that we can use the new python 3.10 API and performance boosts.
---
 .docker/opt/configure-aiida.sh               |   3 -
 .github/system_tests/test_verdi_load_time.sh |   4 +-
 .github/workflows/benchmark.yml              |   1 -
 .github/workflows/ci-code.yml                |  12 +-
 .github/workflows/rabbitmq.yml               |   1 -
 .github/workflows/release.yml                |   1 -
 .github/workflows/test-install.yml           |   1 -
 .github/workflows/verdi.sh                   |   4 +-
 .molecule/default/setup_aiida.yml            |   4 -
 .pre-commit-config.yaml                      |  16 +--
 aiida/cmdline/commands/cmd_computer.py       |   6 +-
 aiida/manage/database/integrity/plugins.py   |   7 +-
 aiida/orm/nodes/node.py                      |   4 +-
 .../orm/nodes/process/calculation/calcjob.py |   2 +-
 aiida/plugins/__init__.py                    |   1 +
 aiida/plugins/entry_point.py                 | 133 ++++++++----
 docs/source/howto/faq.rst                    |   3 +-
 docs/source/howto/installation.rst           |   4 +-
 docs/source/howto/plugin_codes.rst           |   1 -
 docs/source/howto/plugins_develop.rst        |   5 -
 docs/source/howto/plugins_install.rst        |  18 ---
 docs/source/internals/plugin_system.rst      |   4 +-
 docs/source/intro/install_conda.rst          |   1 -
 docs/source/intro/install_system.rst         |   3 -
 docs/source/nitpick-exceptions               |   2 +
 environment.yml                              |   2 +-
 pyproject.toml                               |   3 +-
 requirements/requirements-py-3.7.txt         |   5 +-
 requirements/requirements-py-3.8.txt         |   2 +-
 requirements/requirements-py-3.9.txt         |   2 +-
 setup.json                                   |   6 +-
 utils/dependency_management.py               |  72 +---------
 utils/requirements.txt                       |   1 -
 utils/validate_consistency.py                |   1 -
 34 files changed, 99 insertions(+), 236 deletions(-)

diff --git a/.docker/opt/configure-aiida.sh b/.docker/opt/configure-aiida.sh
index c728cc64d8..a970a434e1 100755
--- a/.docker/opt/configure-aiida.sh
+++ b/.docker/opt/configure-aiida.sh
@@ -8,9 +8,6 @@ set -x
 # Environment.
 export SHELL=/bin/bash
 
-# Update the list of installed aiida plugins.
-reentry scan
-
 # Setup AiiDA autocompletion.
grep _VERDI_COMPLETE /home/${SYSTEM_USER}/.bashrc &> /dev/null || echo 'eval "$(_VERDI_COMPLETE=source verdi)"' >> /home/${SYSTEM_USER}/.bashrc diff --git a/.github/system_tests/test_verdi_load_time.sh b/.github/system_tests/test_verdi_load_time.sh index 2c8b4a39f4..07e8772ebe 100755 --- a/.github/system_tests/test_verdi_load_time.sh +++ b/.github/system_tests/test_verdi_load_time.sh @@ -21,10 +21,10 @@ while true; do load_time=$(/usr/bin/time -q -f "%e" $VERDI 2>&1 > /dev/null) if (( $(echo "$load_time < $LOAD_LIMIT" | bc -l) )); then - echo "SUCCESS: loading time $load_time at iteration $iteration below $load_limit" + echo "SUCCESS: loading time $load_time at iteration $iteration below $LOAD_LIMIT" break else - echo "WARNING: loading time $load_time at iteration $iteration above $load_limit" + echo "WARNING: loading time $load_time at iteration $iteration above $LOAD_LIMIT" if [ $iteration -eq $MAX_NUMBER_ATTEMPTS ]; then echo "ERROR: loading time exceeded the load limit $iteration consecutive times." diff --git a/.github/workflows/benchmark.yml b/.github/workflows/benchmark.yml index 5f33585d3a..87d8fbd159 100644 --- a/.github/workflows/benchmark.yml +++ b/.github/workflows/benchmark.yml @@ -54,7 +54,6 @@ jobs: python -m pip install --upgrade pip pip install -r requirements/requirements-py-3.8.txt pip install --no-deps -e . - reentry scan pip freeze - name: Run benchmarks diff --git a/.github/workflows/ci-code.yml b/.github/workflows/ci-code.yml index 457a2a669b..41db12b40d 100644 --- a/.github/workflows/ci-code.yml +++ b/.github/workflows/ci-code.yml @@ -98,7 +98,6 @@ jobs: run: | pip install --use-feature=2020-resolver -r requirements/requirements-py-${{ matrix.python-version }}.txt pip install --use-feature=2020-resolver --no-deps -e . - reentry scan pip freeze - name: Setup environment @@ -125,15 +124,20 @@ jobs: verdi: runs-on: ubuntu-latest - timeout-minutes: 30 + timeout-minutes: 15 + + strategy: + fail-fast: false + matrix: + python-version: [3.8, 3.9] steps: - uses: actions/checkout@v2 - - name: Set up Python 3.8 + - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v2 with: - python-version: 3.8 + python-version: ${{ matrix.python-version }} - name: Install python dependencies run: pip install -e . diff --git a/.github/workflows/rabbitmq.yml b/.github/workflows/rabbitmq.yml index d113f09cc1..425fb5e58a 100644 --- a/.github/workflows/rabbitmq.yml +++ b/.github/workflows/rabbitmq.yml @@ -60,7 +60,6 @@ jobs: run: | pip install -r requirements/requirements-py-3.8.txt pip install --no-deps -e . - reentry scan pip freeze - name: Run tests diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index ac35b8a9d2..7a09b867be 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -85,7 +85,6 @@ jobs: pip install --upgrade pip setuptools pip install -r requirements/requirements-py-3.8.txt pip install --no-deps -e . 
- reentry scan - name: Run sub-set of test suite run: pytest -sv -k 'requires_rmq' diff --git a/.github/workflows/test-install.yml b/.github/workflows/test-install.yml index 4f390fe7ae..c50127a502 100644 --- a/.github/workflows/test-install.yml +++ b/.github/workflows/test-install.yml @@ -160,7 +160,6 @@ jobs: - name: Install aiida-core run: | pip install -e .[atomic_tools,docs,notebook,rest,tests] - reentry scan - run: pip freeze diff --git a/.github/workflows/verdi.sh b/.github/workflows/verdi.sh index 103c1f54e5..34f49e5c72 100755 --- a/.github/workflows/verdi.sh +++ b/.github/workflows/verdi.sh @@ -20,10 +20,10 @@ while true; do load_time=$(/usr/bin/time -q -f "%e" $VERDI 2>&1 > /dev/null) if (( $(echo "$load_time < $LOAD_LIMIT" | bc -l) )); then - echo "SUCCESS: loading time $load_time at iteration $iteration below $load_limit" + echo "SUCCESS: loading time $load_time at iteration $iteration below $LOAD_LIMIT" break else - echo "WARNING: loading time $load_time at iteration $iteration above $load_limit" + echo "WARNING: loading time $load_time at iteration $iteration above $LOAD_LIMIT" if [ $iteration -eq $MAX_NUMBER_ATTEMPTS ]; then echo "ERROR: loading time exceeded the load limit $iteration consecutive times." diff --git a/.molecule/default/setup_aiida.yml b/.molecule/default/setup_aiida.yml index 5faca0f399..c0bc828482 100644 --- a/.molecule/default/setup_aiida.yml +++ b/.molecule/default/setup_aiida.yml @@ -12,10 +12,6 @@ tasks: - - name: reentry scan - command: "{{ venv_bin }}/reentry scan" - changed_when: false - - name: Create a new database with name "{{ aiida_backend }}" postgresql_db: name: "{{ aiida_backend }}" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 07ea0ad2f4..751bf2a231 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,7 +1,7 @@ ci: autoupdate_schedule: monthly autofix_prs: true - skip: [mypy, pylint, dm-generate-all, pyproject, dependencies, verdi-autodocs, version-number] + skip: [mypy, pylint, dm-generate-all, dependencies, verdi-autodocs, version-number] repos: - repo: https://github.com/pre-commit/pre-commit-hooks @@ -70,6 +70,7 @@ repos: aiida/manage/database/delete/nodes.py| aiida/orm/nodes/node.py| aiida/orm/nodes/process/.*py| + aiida/plugins/entry_point.py| aiida/repository/.*py| aiida/tools/graph/graph_traversers.py| aiida/tools/groups/paths.py| @@ -97,19 +98,6 @@ repos: utils/dependency_management.py )$ - - id: pyproject - name: Validate pyproject.toml - entry: python ./utils/dependency_management.py validate-pyproject-toml - language: system - pass_filenames: false - files: >- - (?x)^( - setup.json| - setup.py| - utils/dependency_management.py| - pyproject.toml - )$ - - id: dependencies name: Validate environment.yml entry: python ./utils/dependency_management.py validate-environment-yml diff --git a/aiida/cmdline/commands/cmd_computer.py b/aiida/cmdline/commands/cmd_computer.py index 968a1574c2..9d6e770c17 100644 --- a/aiida/cmdline/commands/cmd_computer.py +++ b/aiida/cmdline/commands/cmd_computer.py @@ -20,7 +20,7 @@ from aiida.cmdline.utils import echo from aiida.cmdline.utils.decorators import with_dbenv from aiida.common.exceptions import ValidationError -from aiida.plugins.entry_point import get_entry_points +from aiida.plugins.entry_point import get_entry_point_names from aiida.transports import cli as transport_cli @@ -597,5 +597,5 @@ def computer_config_show(computer, user, defaults, as_option_string): echo.echo(tabulate.tabulate(table, tablefmt='plain')) -for ep in 
get_entry_points('aiida.transports'): - computer_configure.add_command(transport_cli.create_configure_cmd(ep.name)) +for ep_name in get_entry_point_names('aiida.transports'): + computer_configure.add_command(transport_cli.create_configure_cmd(ep_name)) diff --git a/aiida/manage/database/integrity/plugins.py b/aiida/manage/database/integrity/plugins.py index 764a287e73..4fc48c8ccd 100644 --- a/aiida/manage/database/integrity/plugins.py +++ b/aiida/manage/database/integrity/plugins.py @@ -96,8 +96,7 @@ class of `JobCalculation`, would get `calculation.job.quantumespresso.pw.PwCalcu :param type_strings: a set of type strings whose entry point is to be inferred :return: a mapping of current node type string to the inferred entry point name """ - from reentry.entrypoint import EntryPoint - from aiida.plugins.entry_point import get_entry_points + from aiida.plugins.entry_point import get_entry_points, parse_entry_point prefix_calc_job = 'calculation.job.' entry_point_group = 'aiida.calculations' @@ -109,7 +108,9 @@ class of `JobCalculation`, would get `calculation.job.quantumespresso.pw.PwCalcu # from the aiida-registry. Note that if entry points with the same name are found in both sets, the entry point # from the local environment is kept as leading. entry_points_local = get_entry_points(group=entry_point_group) - entry_points_registry = [EntryPoint.parse(entry_point) for entry_point in registered_calculation_entry_points] + entry_points_registry = [ + parse_entry_point(entry_point_group, entry_point) for entry_point in registered_calculation_entry_points + ] entry_points = entry_points_local entry_point_names = [entry_point.name for entry_point in entry_points] diff --git a/aiida/orm/nodes/node.py b/aiida/orm/nodes/node.py index 709f7acf4f..0d9cce79e0 100644 --- a/aiida/orm/nodes/node.py +++ b/aiida/orm/nodes/node.py @@ -199,8 +199,8 @@ def validate_storability(self) -> None: if not is_registered_entry_point(self.__module__, self.__class__.__name__, groups=('aiida.node', 'aiida.data')): raise exceptions.StoringNotAllowed( f'class `{self.__module__}:{self.__class__.__name__}` does not have a registered entry point. ' - 'Consider running `reentry scan`. If the issue persists, check that the corresponding plugin is ' - 'installed and that the entry point shows up in `verdi plugin list`.' + 'Check that the corresponding plugin is installed ' + 'and that the entry point shows up in `verdi plugin list`.' 
) @classproperty diff --git a/aiida/orm/nodes/process/calculation/calcjob.py b/aiida/orm/nodes/process/calculation/calcjob.py index 18a989d90d..eebeb9034e 100644 --- a/aiida/orm/nodes/process/calculation/calcjob.py +++ b/aiida/orm/nodes/process/calculation/calcjob.py @@ -67,7 +67,7 @@ def tools(self) -> 'CalculationTools': if self._tools is None: entry_point_string = self.process_type - if is_valid_entry_point_string(entry_point_string): + if entry_point_string and is_valid_entry_point_string(entry_point_string): entry_point = get_entry_point_from_string(entry_point_string) try: diff --git a/aiida/plugins/__init__.py b/aiida/plugins/__init__.py index 14a89108c0..63c4419cd1 100644 --- a/aiida/plugins/__init__.py +++ b/aiida/plugins/__init__.py @@ -32,6 +32,7 @@ 'WorkflowFactory', 'load_entry_point', 'load_entry_point_from_string', + 'parse_entry_point', ) # yapf: enable diff --git a/aiida/plugins/entry_point.py b/aiida/plugins/entry_point.py index e8c5814cc9..7591531b4d 100644 --- a/aiida/plugins/entry_point.py +++ b/aiida/plugins/entry_point.py @@ -9,25 +9,29 @@ ########################################################################### """Module to manage loading entrypoints.""" import enum -import traceback import functools +import traceback +from typing import Any, Optional, List, Sequence, Set, Tuple -try: - from reentry.default_manager import PluginManager - # I don't use the default manager as it has scan_for_not_found=True - # by default, which re-runs scan if no entrypoints are found - ENTRYPOINT_MANAGER = PluginManager(scan_for_not_found=False) -except ImportError: - import pkg_resources as ENTRYPOINT_MANAGER +# importlib.metadata was introduced into the standard library in python 3.8, +# but was then updated in python 3.10 to use an improved API. +# So for now we use the backport importlib_metadata package. +from importlib_metadata import EntryPoint, EntryPoints +from importlib_metadata import entry_points as _eps from aiida.common.exceptions import MissingEntryPointError, MultipleEntryPointError, LoadingEntryPointError -__all__ = ('load_entry_point', 'load_entry_point_from_string') +__all__ = ('load_entry_point', 'load_entry_point_from_string', 'parse_entry_point') ENTRY_POINT_GROUP_PREFIX = 'aiida.' ENTRY_POINT_STRING_SEPARATOR = ':' +@functools.lru_cache(maxsize=1) +def eps(): + return _eps() + + class EntryPointFormat(enum.Enum): """ Enum to distinguish between the various possible entry point string formats. An entry point string @@ -69,7 +73,13 @@ class EntryPointFormat(enum.Enum): } -def validate_registered_entry_points(): # pylint: disable=invalid-name +def parse_entry_point(group: str, spec: str) -> EntryPoint: + """Return an entry point, given its group and spec (as formatted in the setup)""" + name, value = spec.split('=', maxsplit=1) + return EntryPoint(group=group, name=name.strip(), value=value.strip()) + + +def validate_registered_entry_points() -> None: # pylint: disable=invalid-name """Validate all registered entry points by loading them with the corresponding factory. :raises EntryPointError: if any of the registered entry points cannot be loaded. 
This can happen if: @@ -98,7 +108,7 @@ def validate_registered_entry_points(): # pylint: disable=invalid-name factory(entry_point.name) -def format_entry_point_string(group, name, fmt=EntryPointFormat.FULL): +def format_entry_point_string(group: str, name: str, fmt: EntryPointFormat = EntryPointFormat.FULL) -> str: """ Format an entry point string for a given entry point group and name, based on the specified format @@ -120,7 +130,7 @@ def format_entry_point_string(group, name, fmt=EntryPointFormat.FULL): raise ValueError('invalid EntryPointFormat') -def parse_entry_point_string(entry_point_string): +def parse_entry_point_string(entry_point_string: str) -> Tuple[str, str]: """ Validate the entry point string and attempt to parse the entry point group and name @@ -140,14 +150,13 @@ def parse_entry_point_string(entry_point_string): return group, name -def get_entry_point_string_format(entry_point_string): +def get_entry_point_string_format(entry_point_string: str) -> EntryPointFormat: """ Determine the format of an entry point string. Note that it does not validate the actual entry point string and it may not correspond to any actual entry point. This will only assess the string format :param entry_point_string: the entry point string :returns: the entry point type - :rtype: EntryPointFormat """ try: group, _ = entry_point_string.split(ENTRY_POINT_STRING_SEPARATOR) @@ -159,7 +168,7 @@ def get_entry_point_string_format(entry_point_string): return EntryPointFormat.PARTIAL -def get_entry_point_from_string(entry_point_string): +def get_entry_point_from_string(entry_point_string: str) -> EntryPoint: """ Return an entry point for the given entry point string @@ -174,7 +183,7 @@ def get_entry_point_from_string(entry_point_string): return get_entry_point(group, name) -def load_entry_point_from_string(entry_point_string): +def load_entry_point_from_string(entry_point_string: str) -> Any: """ Load the class registered for a given entry point string that determines group and name @@ -190,7 +199,7 @@ def load_entry_point_from_string(entry_point_string): return load_entry_point(group, name) -def load_entry_point(group, name): +def load_entry_point(group: str, name: str) -> Any: """ Load the class registered under the entry point for a given name and group @@ -213,44 +222,35 @@ def load_entry_point(group, name): return loaded_entry_point -def get_entry_point_groups(): +def get_entry_point_groups() -> Set[str]: """ Return a list of all the recognized entry point groups :return: a list of valid entry point groups """ - return ENTRY_POINT_GROUP_TO_MODULE_PATH_MAP.keys() - - -def get_entry_point_names(group, sort=True): - """ - Return a list of all the entry point names within a specific group + return eps().groups - :param group: the entry point group - :param sort: if True, the returned list will be sorted alphabetically - :return: a list of entry point names - """ - entry_point_names = [ep.name for ep in get_entry_points(group)] - - if sort is True: - entry_point_names.sort() - return entry_point_names +def get_entry_point_names(group: str, sort: bool = True) -> List[str]: + """Return the entry points within a group.""" + all_eps = eps() + group_names = list(all_eps.select(group=group).names) + if sort: + return sorted(group_names) + return group_names -@functools.lru_cache(maxsize=None) -def get_entry_points(group): +def get_entry_points(group: str) -> EntryPoints: """ Return a list of all the entry points within a specific group :param group: the entry point group :return: a list of entry points """ - 
return list(ENTRYPOINT_MANAGER.iter_entry_points(group=group)) + return eps().select(group=group) -@functools.lru_cache(maxsize=None) -def get_entry_point(group, name): +def get_entry_point(group: str, name: str) -> EntryPoint: """ Return an entry point with a given name within a specific group @@ -258,26 +258,18 @@ def get_entry_point(group, name): :param name: the name of the entry point :return: the entry point if it exists else None :raises aiida.common.MissingEntryPointError: entry point was not registered - :raises aiida.common.MultipleEntryPointError: entry point could not be uniquely resolved - """ - entry_points = [ep for ep in get_entry_points(group) if ep.name == name] - if not entry_points: - raise MissingEntryPointError( - "Entry point '{}' not found in group '{}'. Try running `reentry scan` to update " - 'the entry point cache.'.format(name, group) - ) - - if len(entry_points) > 1: - raise MultipleEntryPointError( - "Multiple entry points '{}' found in group '{}'.Try running `reentry scan` to " - 'repopulate the entry point cache.'.format(name, group) - ) - - return entry_points[0] + """ + found = eps().select(group=group, name=name) + if name not in found.names: + raise MissingEntryPointError(f"Entry point '{name}' not found in group '{group}'") + if len(found.names) > 1: + raise MultipleEntryPointError(f"Multiple entry points '{name}' found in group '{group}'.") + return found[name] -def get_entry_point_from_class(class_module, class_name): +@functools.lru_cache(maxsize=100) +def get_entry_point_from_class(class_module: str, class_name: str) -> Tuple[Optional[str], Optional[EntryPoint]]: """ Given the module and name of a class, attempt to obtain the corresponding entry point if it exists @@ -285,20 +277,19 @@ def get_entry_point_from_class(class_module, class_name): :param class_name: name of the class :return: a tuple of the corresponding group and entry point or None if not found """ - for group in ENTRYPOINT_MANAGER.get_entry_map().keys(): - for entry_point in ENTRYPOINT_MANAGER.iter_entry_points(group): + for group in get_entry_point_groups(): + for entry_point in get_entry_points(group): - if entry_point.module_name != class_module: + if entry_point.module != class_module: continue - for entry_point_class_name in entry_point.attrs: - if entry_point_class_name == class_name: - return group, entry_point + if entry_point.attr == class_name: + return group, entry_point return None, None -def get_entry_point_string_from_class(class_module, class_name): # pylint: disable=invalid-name +def get_entry_point_string_from_class(class_module: str, class_name: str) -> Optional[str]: # pylint: disable=invalid-name """ Given the module and name of a class, attempt to obtain the corresponding entry point if it exists and return the entry point string which will be the entry point group and entry point @@ -314,7 +305,6 @@ def get_entry_point_string_from_class(class_module, class_name): # pylint: disa :param class_module: module of the class :param class_name: name of the class :return: the corresponding entry point string or None - :rtype: str """ group, entry_point = get_entry_point_from_class(class_module, class_name) @@ -323,7 +313,7 @@ def get_entry_point_string_from_class(class_module, class_name): # pylint: disa return None -def is_valid_entry_point_string(entry_point_string): +def is_valid_entry_point_string(entry_point_string: str) -> bool: """ Verify whether the given entry point string is a valid one. 
For the string to be valid means that it is composed of two strings, the entry point group and name, concatenated by the entry point string separator. If that is the @@ -342,8 +332,8 @@ def is_valid_entry_point_string(entry_point_string): return group in ENTRY_POINT_GROUP_TO_MODULE_PATH_MAP -@functools.lru_cache(maxsize=None) -def is_registered_entry_point(class_module, class_name, groups=None): +@functools.lru_cache(maxsize=100) +def is_registered_entry_point(class_module: str, class_name: str, groups: Optional[Sequence[str]] = None) -> bool: """Verify whether the class with the given module and class name is a registered entry point. .. note:: this function only checks whether the class has a registered entry point. It does explicitly not verify @@ -352,13 +342,10 @@ def is_registered_entry_point(class_module, class_name, groups=None): :param class_module: the module of the class :param class_name: the name of the class :param groups: optionally consider only these entry point groups to look for the class - :return: boolean, True if the class is a registered entry point, False otherwise. + :return: True if the class is a registered entry point, False otherwise. """ - if groups is None: - groups = list(ENTRY_POINT_GROUP_TO_MODULE_PATH_MAP.keys()) - - for group in groups: - for entry_point in ENTRYPOINT_MANAGER.iter_entry_points(group): - if class_module == entry_point.module_name and [class_name] == entry_point.attrs: + for group in get_entry_point_groups() if groups is None else groups: + for entry_point in get_entry_points(group): + if class_module == entry_point.module and class_name == entry_point.attr: return True return False diff --git a/docs/source/howto/faq.rst b/docs/source/howto/faq.rst index 1481a2e26c..41b8803ec3 100644 --- a/docs/source/howto/faq.rst +++ b/docs/source/howto/faq.rst @@ -13,14 +13,13 @@ First, make sure that your daemon is not running. You can check this with ``verdi daemon status``. If you find that your daemon was actually still running, that is likely the problem, so stop it first using ``verdi daemon stop``. It is very important that each time you want to :ref:`update your AiiDA installation`, you should *always* first finish all running processes and stop the daemon before doing so. -After you have stopped the daemon, make sure to run ``reentry scan`` before you restart the daemon with ``verdi daemon start``. +Restart the daemon with ``verdi daemon start``. I get a :py:class:`~aiida.common.exceptions.MissingEntryPointError` or :py:class:`~aiida.common.exceptions.MultipleEntryPointError` exception, saying that a particular entry point cannot be found. How can I fix this? ======================================================================================================================================================================================================================== Often this is caused by an outdated entry point cache. This can happen for example when you have updated your AiiDA installation or installed a new plugin using ``pip install``. -In both cases, you can fix the problem by running ``reentry scan``. Make sure to also restart all daemons, to ensure that the changes are picked up by the daemons as well. 
diff --git a/docs/source/howto/installation.rst b/docs/source/howto/installation.rst index 4e37832400..459f092c3e 100644 --- a/docs/source/howto/installation.rst +++ b/docs/source/howto/installation.rst @@ -409,9 +409,7 @@ Failing to do so, may leave your installation in a broken state, or worse may ev * If you have installed AiiDA through ``pip`` simply run: ``pip install --upgrade aiida-core``. * If you have installed from the git repository using ``pip install -e .``, first delete all the ``.pyc`` files (``find . -name "*.pyc" -delete``) before updating your branch with ``git pull``. - 6. Run `reentry scan` to update the cache of registered entry points. - - 7. Migrate your database with ``verdi -p database migrate``. + 6. Migrate your database with ``verdi -p database migrate``. Depending on the size of your database and the number of migrations to perform, data migration can take time, so please be patient. After the database migration finishes, you will be able to continue working with your existing data. diff --git a/docs/source/howto/plugin_codes.rst b/docs/source/howto/plugin_codes.rst index c94a0e4729..adfb249c84 100644 --- a/docs/source/howto/plugin_codes.rst +++ b/docs/source/howto/plugin_codes.rst @@ -339,7 +339,6 @@ With your ``calculations.py`` and ``parsers.py`` files at hand, let's register e .. code-block:: console $ pip install -e . # install package in "editable mode" - $ reentry scan See the :ref:`how-to:plugins-install` section for details. diff --git a/docs/source/howto/plugins_develop.rst b/docs/source/howto/plugins_develop.rst index 0b5bd9dc37..e439c98d3d 100644 --- a/docs/source/howto/plugins_develop.rst +++ b/docs/source/howto/plugins_develop.rst @@ -121,11 +121,6 @@ Adding a new entry point consists of the following steps: } ... - #. Let setuptools and reentry know about your entry point by installing your plugin again:: - - pip install -e . - reentry scan - Your new entry point should now show up in ``verdi plugin list aiida.calculations``. .. note:: diff --git a/docs/source/howto/plugins_install.rst b/docs/source/howto/plugins_install.rst index c242cbeca4..6f76b74d06 100644 --- a/docs/source/howto/plugins_install.rst +++ b/docs/source/howto/plugins_install.rst @@ -25,22 +25,6 @@ For example, if the code is available through a Git repository: $ cd aiida-diff $ pip install . -.. important:: - - Each time when you install a new plugin package you should make sure to run the following command to let AiiDA know about the new plugins that come with it: - - .. code-block:: console - - $ reentry scan - - If you forget to run this command, AiiDA will not be able to find the plugins. - The reentry cache can also be updated from python when access to the commandline is not available (e.g. in Jupyter notebooks). - - .. code-block:: python - - from reentry import manager - manager.scan(group_re='aiida') - .. warning:: If your daemon was running when installing or updating a plugin package, make sure to restart it with the ``--reset`` flag for changes to take effect: @@ -49,8 +33,6 @@ For example, if the code is available through a Git repository: $ verdi daemon restart --reset - This needs to be done *after* the command ``reentry scan`` is called. - To verify which plugins are currently installed, use the command: .. 
code-block:: console diff --git a/docs/source/internals/plugin_system.rst b/docs/source/internals/plugin_system.rst index 354b130a68..6e0d7a08f1 100644 --- a/docs/source/internals/plugin_system.rst +++ b/docs/source/internals/plugin_system.rst @@ -45,9 +45,7 @@ Interfaces Pluginloader ^^^^^^^^^^^^ -The plugin loading functionality is defined in :py:mod:`aiida.plugins.entry_point` and relies on the `reentry package `_ to find and load entry points. -``reentry`` is about 10x faster than the equivalent functionality in ``pkg_resources`` from ``setuptools``, leading to significant speedup of tab-autocompletion in the ``verdi`` cli. -If, for some reason, ``reentry`` is not found, the plugin system falls back on ``pkg_resources``. +The plugin loading functionality is defined in :py:mod:`aiida.plugins.entry_point`. Registry Tools ^^^^^^^^^^^^^^ diff --git a/docs/source/intro/install_conda.rst b/docs/source/intro/install_conda.rst index f559777efb..dc919a8022 100644 --- a/docs/source/intro/install_conda.rst +++ b/docs/source/intro/install_conda.rst @@ -23,7 +23,6 @@ If you want to install AiiDA onto you own personal workstation/laptop, it is rec $ conda create -n aiida -c conda-forge aiida-core aiida-core.services $ conda activate aiida - (aiida) $ reentry scan --- diff --git a/docs/source/intro/install_system.rst b/docs/source/intro/install_system.rst index 52c0997bf2..b254f317e5 100644 --- a/docs/source/intro/install_system.rst +++ b/docs/source/intro/install_system.rst @@ -151,7 +151,6 @@ This is the *recommended* installation method to setup AiiDA on a personal lapto $ python -m venv ~/envs/aiida $ source ~/envs/aiida/bin/activate (aiida) $ pip install aiida-core - (aiida) $ reentry scan .. tip:: @@ -197,7 +196,6 @@ This is the *recommended* installation method to setup AiiDA on a personal lapto $ conda create -n aiida -c conda-forge aiida-core $ conda activate aiida - (aiida) $ reentry scan .. tabbed:: From source @@ -212,7 +210,6 @@ This is the *recommended* installation method to setup AiiDA on a personal lapto $ python -m pip venv ~/envs/aiida $ source ~/envs/aiida/bin/activate (aiida) $ pip install . 
- (aiida) $ reentry scan --- diff --git a/docs/source/nitpick-exceptions b/docs/source/nitpick-exceptions index 3dae986e7d..45e463df6a 100644 --- a/docs/source/nitpick-exceptions +++ b/docs/source/nitpick-exceptions @@ -161,3 +161,5 @@ py:class pgsu.PGSU py:meth pgsu.PGSU.__init__ py:class jsonschema.exceptions._Error + +py:class importlib_metadata.EntryPoint diff --git a/environment.yml b/environment.yml index c6886f7a6d..01447410cd 100644 --- a/environment.yml +++ b/environment.yml @@ -22,6 +22,7 @@ dependencies: - jinja2~=3.0 - jsonschema~=3.0 - kiwipy[rmq]~=0.7.4 +- importlib-metadata~=4.3 - numpy~=1.17 - pamqp~=2.3 - paramiko>=2.7.2,~=2.7 @@ -32,7 +33,6 @@ dependencies: - python-dateutil~=2.8 - pytz~=2021.1 - pyyaml~=5.4 -- reentry~=1.3 - simplejson~=3.16 - sqlalchemy-utils~=0.37.2 - sqlalchemy~=1.3.10 diff --git a/pyproject.toml b/pyproject.toml index e4af438140..2484f89491 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools>=40.8.0", "wheel", "reentry~=1.3", "fastentrypoints~=0.12"] +requires = ["setuptools>=40.8.0", "wheel", "fastentrypoints~=0.12"] build-backend = "setuptools.build_meta" [tool.pylint.master] @@ -61,7 +61,6 @@ filterwarnings = [ "ignore::DeprecationWarning:yaml:", "ignore::DeprecationWarning:pymatgen:", "ignore::DeprecationWarning:jsonbackend:", - "ignore::DeprecationWarning:reentry:", "ignore::DeprecationWarning:pkg_resources:", "ignore::pytest.PytestCollectionWarning", "default::ResourceWarning", diff --git a/requirements/requirements-py-3.7.txt b/requirements/requirements-py-3.7.txt index 387d5b399b..6578b3b70e 100644 --- a/requirements/requirements-py-3.7.txt +++ b/requirements/requirements-py-3.7.txt @@ -43,9 +43,9 @@ future==0.18.2 graphviz==0.17 idna==3.2 imagesize==1.2.0 -importlib-metadata==3.10.1 +importlib-metadata==4.6.3 iniconfig==1.1.1 -ipykernel==6.0.3 +ipykernel==5.5.5 ipython==7.26.0 ipython-genutils==0.2.0 ipywidgets==7.6.3 @@ -125,7 +125,6 @@ PyYAML==5.4.1 pyzmq==22.2.1 qtconsole==5.1.1 QtPy==1.9.0 -reentry==1.3.2 requests==2.26.0 ruamel.yaml==0.17.10 ruamel.yaml.clib==0.2.6 diff --git a/requirements/requirements-py-3.8.txt b/requirements/requirements-py-3.8.txt index 54ee5a9d35..ee29f047fb 100644 --- a/requirements/requirements-py-3.8.txt +++ b/requirements/requirements-py-3.8.txt @@ -43,6 +43,7 @@ future==0.18.2 graphviz==0.17 idna==3.2 imagesize==1.2.0 +importlib-metadata==4.6.3 iniconfig==1.1.1 ipykernel==6.0.3 ipython==7.26.0 @@ -124,7 +125,6 @@ PyYAML==5.4.1 pyzmq==22.2.1 qtconsole==5.1.1 QtPy==1.9.0 -reentry==1.3.2 requests==2.26.0 ruamel.yaml==0.17.10 ruamel.yaml.clib==0.2.6 diff --git a/requirements/requirements-py-3.9.txt b/requirements/requirements-py-3.9.txt index 69c99c6fd6..66a23c6c36 100644 --- a/requirements/requirements-py-3.9.txt +++ b/requirements/requirements-py-3.9.txt @@ -43,6 +43,7 @@ future==0.18.2 graphviz==0.17 idna==3.2 imagesize==1.2.0 +importlib-metadata==4.6.3 iniconfig==1.1.1 ipykernel==6.0.3 ipython==7.26.0 @@ -124,7 +125,6 @@ PyYAML==5.4.1 pyzmq==22.1.1 qtconsole==5.1.1 QtPy==1.9.0 -reentry==1.3.2 requests==2.26.0 ruamel.yaml==0.17.10 ruamel.yaml.clib==0.2.6 diff --git a/setup.json b/setup.json index f7915cc8ba..2e5534c651 100644 --- a/setup.json +++ b/setup.json @@ -36,6 +36,7 @@ "jinja2~=3.0", "jsonschema~=3.0", "kiwipy[rmq]~=0.7.4", + "importlib-metadata~=4.3", "numpy~=1.17", "pamqp~=2.3", "paramiko~=2.7,>=2.7.2", @@ -46,7 +47,6 @@ "python-dateutil~=2.8", "pytz~=2021.1", "pyyaml~=5.4", - "reentry~=1.3", "simplejson~=3.16", "sqlalchemy-utils~=0.37.2", 
"sqlalchemy~=1.3.10", @@ -99,8 +99,7 @@ "packaging==20.3", "pre-commit~=2.2", "pylint~=2.5.0", - "pylint-django>=2.0,<2.4.0", - "tomlkit~=0.7.0" + "pylint-django>=2.0,<2.4.0" ], "tests": [ "aiida-export-migration-tests==0.9.0", @@ -122,7 +121,6 @@ "bpython~=0.18.0" ] }, - "reentry_register": true, "entry_points": { "console_scripts": [ "verdi=aiida.cmdline.commands.cmd_verdi:verdi", diff --git a/utils/dependency_management.py b/utils/dependency_management.py index 6c3c854f11..fbbab14aef 100755 --- a/utils/dependency_management.py +++ b/utils/dependency_management.py @@ -23,7 +23,6 @@ import click import requests import yaml -import tomlkit as toml ROOT = Path(__file__).resolve().parent.parent # repository root @@ -161,50 +160,11 @@ def generate_environment_yml(): ) -@cli.command() -def update_pyproject_toml(): - """Generate a 'pyproject.toml' file, or update an existing one. - - This function generates/updates the ``build-system`` section, - to be consistent with the 'setup.json' file. - """ - - # read the current file - toml_path = ROOT / 'pyproject.toml' - if toml_path.exists(): - pyproject = toml.loads(toml_path.read_text(encoding='utf8')) - else: - pyproject = {} - - # Read the requirements from 'setup.json' - setup_cfg = _load_setup_cfg() - install_requirements = [Requirement.parse(r) for r in setup_cfg['install_requires']] - for requirement in install_requirements: - if requirement.name == 'reentry': - reentry_requirement = requirement - break - else: - raise DependencySpecificationError("Failed to find reentry requirement in 'setup.json'.") - - # update the build-system key - pyproject.setdefault('build-system', {}) - pyproject['build-system'].update({ - 'requires': ['setuptools>=40.8.0', 'wheel', - str(reentry_requirement), 'fastentrypoints~=0.12'], - 'build-backend': - 'setuptools.build_meta', - }) - - # write the new file - toml_path.write_text(toml.dumps(pyproject), encoding='utf8') - - @cli.command() @click.pass_context def generate_all(ctx): """Generate all dependent requirement files.""" ctx.invoke(generate_environment_yml) - ctx.invoke(update_pyproject_toml) @cli.command('validate-environment-yml', help="Validate 'environment.yml'.") @@ -278,34 +238,6 @@ def validate_environment_yml(): # pylint: disable=too-many-branches click.secho('Conda dependency specification is consistent.', fg='green') -@cli.command('validate-pyproject-toml', help="Validate 'pyproject.toml'.") -def validate_pyproject_toml(): - """Validate that 'pyproject.toml' is consistent with 'setup.json'.""" - - # Read the requirements from 'setup.json' - setup_cfg = _load_setup_cfg() - install_requirements = [Requirement.parse(r) for r in setup_cfg['install_requires']] - - for requirement in install_requirements: - if requirement.name == 'reentry': - reentry_requirement = requirement - break - else: - raise DependencySpecificationError("Failed to find reentry requirement in 'setup.json'.") - - pyproject_file = ROOT / 'pyproject.toml' - if not pyproject_file.exists(): - raise DependencySpecificationError("The 'pyproject.toml' file is missing!") - - pyproject = toml.loads(pyproject_file.read_text(encoding='utf8')) - pyproject_requires = [Requirement.parse(r) for r in pyproject['build-system']['requires']] - - if reentry_requirement not in pyproject_requires: - raise DependencySpecificationError(f"Missing requirement '{reentry_requirement}' in 'pyproject.toml'.") - - click.secho('Pyproject.toml dependency specification is consistent.', fg='green') - - @cli.command('validate-all', help='Validate consistency of all 
requirements.') @click.pass_context def validate_all(ctx): @@ -314,14 +246,12 @@ def validate_all(ctx): Validates that the specification of requirements/dependencies is consistent across the following files: - - setup.py - setup.json - environment.yml - - pyproject.toml + """ ctx.invoke(validate_environment_yml) - ctx.invoke(validate_pyproject_toml) @cli.command() diff --git a/utils/requirements.txt b/utils/requirements.txt index b24c23534a..45058c6135 100644 --- a/utils/requirements.txt +++ b/utils/requirements.txt @@ -2,4 +2,3 @@ click==7.1.2 packaging==20.3 pyyaml==5.4.1 requests==2.25.1 -tomlkit==0.7.2 diff --git a/utils/validate_consistency.py b/utils/validate_consistency.py index b69f6fedda..f4e3a87d5e 100644 --- a/utils/validate_consistency.py +++ b/utils/validate_consistency.py @@ -14,7 +14,6 @@ * environment.yml * version in aiida/__init__.py - * reentry dependency in pyproject.toml """ import collections
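
For reference, the heart of the change is swapping `reentry`'s pre-built entry point cache for `importlib_metadata`'s selectable `EntryPoints` API, with a single cached scan per process. The snippet below is a minimal, self-contained sketch of that lookup pattern, mirroring the shape of (but simplifying) the new `aiida/plugins/entry_point.py`; it is illustrative only, and the `console_scripts` / `aiida.calculations` groups used at the bottom are assumptions about what happens to be installed, not part of this patch.

# sketch.py -- illustrative only; assumes importlib-metadata~=4.3 is installed
import functools

from importlib_metadata import EntryPoint, EntryPoints
from importlib_metadata import entry_points as _entry_points


@functools.lru_cache(maxsize=1)
def eps() -> EntryPoints:
    """Scan the environment once per process and cache the result (no `reentry scan` needed)."""
    return _entry_points()


def get_entry_point_names(group: str) -> list:
    """Return the sorted names of the entry points registered in ``group``."""
    return sorted(eps().select(group=group).names)


def load_entry_point(group: str, name: str):
    """Load the object registered under ``group`` and ``name``."""
    found = eps().select(group=group, name=name)
    if name not in found.names:
        raise LookupError(f"entry point '{name}' not found in group '{group}'")
    return found[name].load()


def parse_entry_point(group: str, spec: str) -> EntryPoint:
    """Build an EntryPoint from a ``name = module:attr`` spec string, as written in setup.json."""
    name, value = spec.split('=', maxsplit=1)
    return EntryPoint(group=group, name=name.strip(), value=value.strip())


if __name__ == '__main__':
    # 'console_scripts' exists in any environment that installs console entry points;
    # 'aiida.calculations' additionally requires aiida-core (and its plugins) to be installed.
    print(get_entry_point_names('console_scripts')[:5])

Because the full scan is performed lazily and memoised with `functools.lru_cache`, only the first lookup pays the `importlib_metadata` scan cost and subsequent lookups are in-memory selections, which is what the patch relies on to keep `verdi` start-up and tab-completion responsive without a separately maintained entry point cache.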