From 6f8cf758afd9fcb8d08f6127acca9d948749b8ed Mon Sep 17 00:00:00 2001
From: David Runge
Date: Wed, 7 Apr 2021 21:32:40 +0200
Subject: [PATCH] Change repo_management to be async

repo_management/*:
Remove the obsolete `convert._transform_package_desc_to_output_package()`
(the functionality is now covered by pydantic models directly).
Change all relevant methods to be async.
Change file open commands to make use of aiofiles.

tests/*:
Remove tests for the obsolete
`convert._transform_package_desc_to_output_package()`.
Change all tests for async methods to test using pytest-asyncio.
---
 repo_management/cli.py        | 19 +++++----
 repo_management/convert.py    | 51 +++++------------------
 repo_management/files.py      | 33 +++++++--------
 repo_management/models.py     |  2 +-
 repo_management/operations.py | 33 ++++++++-------
 tests/test_commands.py        |  1 +
 tests/test_convert.py         | 77 ++++++-----------------------------
 tests/test_files.py           | 56 ++++++++++++++-----------
 tests/test_models.py          |  5 ++-
 tests/test_operations.py      | 17 ++++----
 10 files changed, 118 insertions(+), 176 deletions(-)

diff --git a/repo_management/cli.py b/repo_management/cli.py
index 3fb107f..0e65bcf 100644
--- a/repo_management/cli.py
+++ b/repo_management/cli.py
@@ -1,3 +1,4 @@
+import asyncio
 from argparse import ArgumentTypeError
 from sys import exit

@@ -13,9 +14,11 @@ def db2json() -> None:

     try:
         args = argparse.ArgParseFactory.db2json().parse_args()
-        operations.dump_db_to_json_files(
-            input_path=args.db_file,
-            output_path=args.output_dir,
+        asyncio.run(
+            operations.dump_db_to_json_files(
+                input_path=args.db_file,
+                output_path=args.output_dir,
+            )
         )
     except (errors.RepoManagementError, ArgumentTypeError) as e:
         print(e)
@@ -31,10 +34,12 @@ def json2db() -> None:

     try:
         args = argparse.ArgParseFactory.json2db().parse_args()
-        operations.create_db_from_json_files(
-            input_path=args.input_dir,
-            output_path=args.db_file,
-            db_type=defaults.RepoDbType.FILES if args.files else defaults.RepoDbType.DEFAULT,
+        asyncio.run(
+            operations.create_db_from_json_files(
+                input_path=args.input_dir,
+                output_path=args.db_file,
+                db_type=defaults.RepoDbType.FILES if args.files else defaults.RepoDbType.DEFAULT,
+            )
         )
     except (errors.RepoManagementError, ArgumentTypeError) as e:
         print(e)
diff --git a/repo_management/convert.py b/repo_management/convert.py
index a1c9eff..e1401ac 100644
--- a/repo_management/convert.py
+++ b/repo_management/convert.py
@@ -1,5 +1,5 @@
 import io
-from typing import Dict, List, Optional, Union
+from typing import Dict, List, Union

 from jinja2 import Environment, PackageLoader
 from pydantic.error_wrappers import ValidationError
@@ -7,7 +7,7 @@
 from repo_management import defaults, errors, models


-def _files_data_to_model(data: io.StringIO) -> models.Files:
+async def _files_data_to_model(data: io.StringIO) -> models.Files:
     """Read the contents of a 'files' file (represented as an instance of io.StringIO) and convert it to a pydantic
     model

@@ -46,7 +46,7 @@ def _files_data_to_model(data: io.StringIO) -> models.Files:
     return models.Files(**output)


-def _desc_data_line_to_dicts(
+async def _desc_data_line_to_dicts(
     current_header: str,
     current_type: defaults.FieldType,
     line: str,
@@ -88,7 +88,7 @@ def _desc_data_line_to_dicts(
     int_types[current_header] = int(line)


-def _desc_data_to_model(data: io.StringIO) -> models.PackageDesc:
+async def _desc_data_to_model(data: io.StringIO) -> models.PackageDesc:
     """Read the contents of a 'desc' file (represented as an instance of io.StringIO) and convert it to a pydantic model

     Parameters
@@ -126,7 +126,7 @@ def _desc_data_to_model(data: io.StringIO) -> models.PackageDesc:

         if current_header:
             try:
-                _desc_data_line_to_dicts(
+                await _desc_data_line_to_dicts(
                     current_header=current_header,
                     current_type=current_type,
                     line=line,
@@ -148,37 +148,6 @@ def _desc_data_to_model(data: io.StringIO) -> models.PackageDesc:
     )


-def _transform_package_desc_to_output_package(
-    desc: models.PackageDesc, files: Optional[models.Files]
-) -> models.OutputPackage:
-    """Transform a PackageDesc model and an accompanying Files model to an OutputPackage model
-
-    Parameters
-    ----------
-    desc: models.PackageDesc
-        A pydantic model, that has all required attributes (apart from the list of files) to create an OutputPackage
-        model
-    files: models.Files:
-        A pydantic model, that represents the list of files, that belong to the package described by desc
-
-    Returns
-    -------
-    models.OutputPackage
-        A pydantic model, that describes a package and its list of files
-    """
-
-    desc_dict = desc.dict()
-    # remove attributes, that are represented on the pkgbase level
-    for name in ["base", "makedepends", "packager", "version"]:
-        if desc_dict.get(name):
-            del desc_dict[name]
-
-    if files:
-        return models.OutputPackage(**desc_dict, **files.dict())
-    else:
-        return models.OutputPackage(**desc_dict)
-
-
 class RepoDbFile:
     """A class for handling templates for files used in repository database files (such as 'desc' or 'files')

@@ -189,7 +158,7 @@ class RepoDbFile:

     """

-    def __init__(self, enable_async: bool = False) -> None:
+    def __init__(self, enable_async: bool = True) -> None:
         """Initialize an instance of RepDbFile

         Parameters
@@ -205,7 +174,7 @@ def __init__(self, enable_async: bool = False) -> None:
             enable_async=enable_async,
         )

-    def render_desc_template(self, model: models.PackageDesc, output: io.StringIO) -> None:
+    async def render_desc_template(self, model: models.PackageDesc, output: io.StringIO) -> None:
        """Use the 'desc' template to write a string to an output stream based on a model

         Parameters
@@ -217,9 +186,9 @@ def render_desc_template(self, model: models.PackageDesc, output: io.StringIO) -
         """

         template = self.env.get_template("desc.j2")
-        output.write(template.render(model.dict()))
+        output.write(await template.render_async(model.dict()))

-    def render_files_template(self, model: models.Files, output: io.StringIO) -> None:
+    async def render_files_template(self, model: models.Files, output: io.StringIO) -> None:
         """Use the 'files' template to write a string to an output stream based on a model

         Parameters
@@ -231,4 +200,4 @@ def render_files_template(self, model: models.Files, output: io.StringIO) -> Non
         """

         template = self.env.get_template("files.j2")
-        output.write(template.render(model.dict()))
+        output.write(await template.render_async(model.dict()))
diff --git a/repo_management/files.py b/repo_management/files.py
index f53d8be..7a81b44 100644
--- a/repo_management/files.py
+++ b/repo_management/files.py
@@ -3,15 +3,16 @@
 import tarfile
 import time
 from pathlib import Path
-from typing import Iterator
+from typing import AsyncIterator

+import aiofiles
 import orjson
 from pydantic.error_wrappers import ValidationError

 from repo_management import convert, defaults, errors, models


-def _read_db_file(db_path: Path, compression: str = "gz") -> tarfile.TarFile:
+async def _read_db_file(db_path: Path, compression: str = "gz") -> tarfile.TarFile:
     """Read a repository database file

     Parameters
@@ -39,7 +40,7 @@ def _read_db_file(db_path: Path, compression: str = "gz") -> tarfile.TarFile:
     return tarfile.open(name=db_path, mode=f"r:{compression}")


-def _extract_db_member_package_name(name: str) -> str:
+async def _extract_db_member_package_name(name: str) -> str:
     """Extract and return the package name from a repository database member name

     Parameters
@@ -55,9 +56,9 @@ def _extract_db_member_package_name(name: str) -> str:
     return "".join(re.split("(-)", re.sub("(/desc|/files)$", "", name))[:-4])


-def _db_file_member_as_model(
+async def _db_file_member_as_model(
     db_file: tarfile.TarFile, regex: str = "(/desc|/files)$"
-) -> Iterator[models.RepoDbMemberData]:
+) -> AsyncIterator[models.RepoDbMemberData]:
     """Iterate over the members of a database file, represented by an instance of tarfile.TarFile and yield the
     members as instances of models.RepoDbMemberData

@@ -82,7 +83,7 @@ def _db_file_member_as_model(

         yield models.RepoDbMemberData(
             member_type=file_type,
-            name=_extract_db_member_package_name(name=name),
+            name=await _extract_db_member_package_name(name=name),
             data=io.StringIO(
                 io.BytesIO(
                     db_file.extractfile(name).read(),  # type: ignore
@@ -93,7 +94,7 @@ def _db_file_member_as_model(
         )


-def _json_files_in_directory(path: Path) -> Iterator[Path]:
+async def _json_files_in_directory(path: Path) -> AsyncIterator[Path]:
     """Yield JSON files found in a directory

     Parameters
@@ -108,7 +109,7 @@ def _json_files_in_directory(path: Path) -> Iterator[Path]:

     Returns
     -------
-    Iterator[Path]
+    AsyncIterator[Path]
         An iterator over the files found in the directory defined by path
     """

@@ -120,7 +121,7 @@ def _json_files_in_directory(path: Path) -> Iterator[Path]:
        yield json_file


-def _read_pkgbase_json_file(path: Path) -> models.OutputPackageBase:
+async def _read_pkgbase_json_file(path: Path) -> models.OutputPackageBase:
     """Read a JSON file that represents a pkgbase and return it as models.OutputPackageBase

     Parameters
@@ -141,16 +142,16 @@ def _read_pkgbase_json_file(path: Path) -> models.OutputPackageBase:
         A pydantic model representing a pkgbase
     """

-    with open(path, "r") as input_file:
+    async with aiofiles.open(path, "r") as input_file:
         try:
-            return models.OutputPackageBase(**orjson.loads(input_file.read()))
+            return models.OutputPackageBase(**orjson.loads(await input_file.read()))
         except orjson.JSONDecodeError as e:
             raise errors.RepoManagementFileError(f"The JSON file '{path}' could not be decoded!\n{e}")
         except ValidationError as e:
             raise errors.RepoManagementValidationError(f"The JSON file '{path}' could not be validated!\n{e}")


-def _write_db_file(path: Path, compression: str = "gz") -> tarfile.TarFile:
+async def _write_db_file(path: Path, compression: str = "gz") -> tarfile.TarFile:
     """Open a repository database file for writing

     Parameters
@@ -178,7 +179,7 @@ def _write_db_file(path: Path, compression: str = "gz") -> tarfile.TarFile:
     return tarfile.open(name=path, mode=f"w:{compression}")


-def _stream_package_base_to_db(
+async def _stream_package_base_to_db(
     db: tarfile.TarFile,
     model: models.OutputPackageBase,
     repodbfile: convert.RepoDbFile,
@@ -198,7 +199,7 @@ def _stream_package_base_to_db(
         The type of database to stream to
     """

-    for (desc_model, files_model) in model.get_packages_as_models():
+    for (desc_model, files_model) in await model.get_packages_as_models():
         dirname = f"{desc_model.name}-{model.version}"
         directory = tarfile.TarInfo(dirname)
         directory.type = tarfile.DIRTYPE
@@ -209,7 +210,7 @@ def _stream_package_base_to_db(
         db.addfile(directory)

         desc_content = io.StringIO()
-        repodbfile.render_desc_template(model=desc_model, output=desc_content)
+        await repodbfile.render_desc_template(model=desc_model, output=desc_content)
         desc_file = tarfile.TarInfo(f"{dirname}/desc")
         desc_file.size = len(desc_content.getvalue().encode())
         desc_file.mtime = int(time.time())
@@ -219,7 +220,7 @@ def _stream_package_base_to_db(
         db.addfile(desc_file, io.BytesIO(desc_content.getvalue().encode()))
         if db_type == defaults.RepoDbType.FILES:
             files_content = io.StringIO()
-            repodbfile.render_files_template(model=files_model, output=files_content)
+            await repodbfile.render_files_template(model=files_model, output=files_content)
             files_file = tarfile.TarInfo(f"{dirname}/files")
             files_file.size = len(files_content.getvalue().encode())
             files_file.mtime = int(time.time())
diff --git a/repo_management/models.py b/repo_management/models.py
index 2add599..faf761e 100644
--- a/repo_management/models.py
+++ b/repo_management/models.py
@@ -717,7 +717,7 @@ class OutputPackageBase(

     packages: List[OutputPackage]

-    def get_packages_as_models(self) -> List[Tuple[PackageDesc, Files]]:
+    async def get_packages_as_models(self) -> List[Tuple[PackageDesc, Files]]:
         """Return the list of packages as tuples of PackageDesc and Files models

         Returns
diff --git a/repo_management/operations.py b/repo_management/operations.py
index 4f63cc0..372fca3 100644
--- a/repo_management/operations.py
+++ b/repo_management/operations.py
@@ -1,13 +1,16 @@
 from os.path import join
 from pathlib import Path
-from typing import Dict, Iterator, Tuple
+from typing import AsyncIterator, Dict, Tuple

+import aiofiles
 import orjson

 from repo_management import convert, defaults, files, models


-def db_file_as_models(db_path: Path, compression: str = "gz") -> Iterator[Tuple[str, models.OutputPackageBase]]:
+async def db_file_as_models(
+    db_path: Path, compression: str = "gz"
+) -> AsyncIterator[Tuple[str, models.OutputPackageBase]]:
     """Read a repository database and yield the name of each pkgbase and the respective data (represented as an
     instance of models.OutputPackageBase) in a Tuple.
@@ -28,11 +31,13 @@ def db_file_as_models(db_path: Path, compression: str = "gz") -> Iterator[Tuple[
     packages: Dict[str, models.OutputPackageBase] = {}
     package_descs: Dict[str, models.PackageDesc] = {}
     package_files: Dict[str, models.Files] = {}
-    for member in files._db_file_member_as_model(db_file=files._read_db_file(db_path=db_path, compression=compression)):
+    async for member in files._db_file_member_as_model(
+        db_file=await files._read_db_file(db_path=db_path, compression=compression)
+    ):
         if member.member_type == defaults.RepoDbMemberType.DESC:
-            package_descs.update({member.name: convert._desc_data_to_model(member.data)})
+            package_descs.update({member.name: await convert._desc_data_to_model(member.data)})
         if member.member_type == defaults.RepoDbMemberType.FILES:
-            package_files.update({member.name: convert._files_data_to_model(member.data)})
+            package_files.update({member.name: await convert._files_data_to_model(member.data)})

     for (name, package_desc) in package_descs.items():
         if packages.get(package_desc.base):
@@ -54,7 +59,7 @@ def db_file_as_models(db_path: Path, compression: str = "gz") -> Iterator[Tuple[
         yield (name, package)


-def dump_db_to_json_files(input_path: Path, output_path: Path) -> None:
+async def dump_db_to_json_files(input_path: Path, output_path: Path) -> None:
     """Read a repository database file and dump each pkgbase contained in it to a separate JSON file below a defined
     output directory

@@ -66,16 +71,16 @@ def dump_db_to_json_files(input_path: Path, output_path: Path) -> None:
         A directory in which to
     """

-    for name, model in db_file_as_models(db_path=input_path):
-        with open(join(output_path, f"{name}.json"), "wb") as output_file:
-            output_file.write(
+    async for name, model in db_file_as_models(db_path=input_path):
+        async with aiofiles.open(join(output_path, f"{name}.json"), "wb") as output_file:
+            await output_file.write(
                 orjson.dumps(
                     model.dict(), option=orjson.OPT_INDENT_2 | orjson.OPT_APPEND_NEWLINE | orjson.OPT_SORT_KEYS
                 )
             )


-def create_db_from_json_files(
+async def create_db_from_json_files(
     input_path: Path, output_path: Path, db_type: defaults.RepoDbType = defaults.RepoDbType.DEFAULT
 ) -> None:
     """Create a repository database from a list of JSON files found in a directory
@@ -93,10 +98,10 @@ def create_db_from_json_files(
     """

     repodbfile = convert.RepoDbFile()
-    database = files._write_db_file(path=output_path)
-    for path in files._json_files_in_directory(path=input_path):
-        model = files._read_pkgbase_json_file(path)
-        files._stream_package_base_to_db(
+    database = await files._write_db_file(path=output_path)
+    async for path in files._json_files_in_directory(path=input_path):
+        model = await files._read_pkgbase_json_file(path)
+        await files._stream_package_base_to_db(
             db=database,
             model=model,
             repodbfile=repodbfile,
diff --git a/tests/test_commands.py b/tests/test_commands.py
index b28558d..eb9d993 100644
--- a/tests/test_commands.py
+++ b/tests/test_commands.py
@@ -21,6 +21,7 @@ def test__print_env(env: Optional[Dict[str, str]]) -> None:
         (["cd", "-f"], {"FOO": "BAR"}, True, False, False, True, None, raises(CalledProcessError)),
     ],
 )
+@mark.asyncio
 def test_run_command(
     cmd: Union[str, List[str]],
     env: Optional[Dict[str, str]],
diff --git a/tests/test_convert.py b/tests/test_convert.py
index 6f5ed25..78e249a 100644
--- a/tests/test_convert.py
+++ b/tests/test_convert.py
@@ -20,12 +20,13 @@
         ("usr/%FILES%\nusr/lib/\n", raises(RuntimeError)),
     ],
 )
-def test__files_data_to_dict(
+@mark.asyncio
+async def test__files_data_to_model(
     file_data: str,
     expectation: ContextManager[str],
 ) -> None:
     with expectation:
-        assert convert._files_data_to_model(data=io.StringIO(file_data))
+        assert await convert._files_data_to_model(data=io.StringIO(file_data))


 @mark.parametrize(
@@ -99,81 +100,26 @@ def test__files_data_to_dict(
         ),
     ],
 )
-def test__desc_data_to_dict(
+@mark.asyncio
+async def test__desc_data_to_model(
     file_data: str,
     expectation: ContextManager[str],
 ) -> None:
     with expectation:
-        assert convert._desc_data_to_model(data=io.StringIO(file_data))
-
-
-@mark.parametrize(
-    "desc, files",
-    [
-        (
-            models.PackageDesc(
-                arch="foo",
-                base="foo",
-                builddate=1,
-                csize=1,
-                desc="foo",
-                filename="foo",
-                isize=1,
-                license=["foo"],
-                md5sum="foo",
-                name="foo",
-                packager="foo",
-                pgpsig="foo",
-                sha256sum="foo",
-                url="foo",
-                version="foo",
-            ),
-            models.Files(files=["foo", "bar"]),
-        ),
-        (
-            models.PackageDesc(
-                arch="foo",
-                base="foo",
-                builddate=1,
-                csize=1,
-                desc="foo",
-                filename="foo",
-                isize=1,
-                license=["foo"],
-                md5sum="foo",
-                name="foo",
-                packager="foo",
-                pgpsig="foo",
-                sha256sum="foo",
-                url="foo",
-                version="foo",
-            ),
-            None,
-        ),
-    ],
-)
-def test__transform_package_desc_to_output_package(
-    desc: models.PackageDesc,
-    files: models.Files,
-) -> None:
-    output = convert._transform_package_desc_to_output_package(desc=desc, files=files)
-    assert isinstance(output, models.OutputPackage)
-    if files:
-        assert output.files
-    else:
-        assert not output.files
+        assert await convert._desc_data_to_model(data=io.StringIO(file_data))


 def test_repodbfile__init() -> None:
     assert convert.RepoDbFile()


-def test_repodbfile_render_desc_template() -> None:
+@mark.asyncio
+async def test_repodbfile_render_desc_template() -> None:
     repodbfile = convert.RepoDbFile()
     assert repodbfile
     output = io.StringIO()
     assert not output.getvalue()
-    repodbfile.render_desc_template(
+    await repodbfile.render_desc_template(
         model=models.PackageDesc(
             arch="foo",
             base="foo",
@@ -196,12 +142,13 @@ def test_repodbfile_render_desc_template() -> None:
     assert output.getvalue()


-def test_repodbfile_render_files_template() -> None:
+@mark.asyncio
+async def test_repodbfile_render_files_template() -> None:
     repodbfile = convert.RepoDbFile()
     assert repodbfile
     output = io.StringIO()
     assert not output.getvalue()
-    repodbfile.render_files_template(
+    await repodbfile.render_files_template(
         model=models.Files(files=["foo", "bar"]),
         output=output,
     )
diff --git a/tests/test_files.py b/tests/test_files.py
index 6a6c5ec..332b588 100644
--- a/tests/test_files.py
+++ b/tests/test_files.py
@@ -68,28 +68,33 @@ def invalid_json_file() -> Iterator[Path]:
         yield Path(json_file)


-def test__read_db_file(create_gz_db_file: Path) -> None:
+@mark.asyncio
+async def test__read_db_file(create_gz_db_file: Path) -> None:
     with does_not_raise():
-        assert files._read_db_file(create_gz_db_file)
+        assert await files._read_db_file(create_gz_db_file)


-def test__read_db_file_wrong_compression(create_gz_db_file: Path) -> None:
+@mark.asyncio
+async def test__read_db_file_wrong_compression(create_gz_db_file: Path) -> None:
     with raises(tarfile.CompressionError):
-        assert files._read_db_file(create_gz_db_file, compression="foo")
+        assert await files._read_db_file(create_gz_db_file, compression="foo")


-def test__read_db_file_does_not_exist(create_null_db_file: Path) -> None:
+@mark.asyncio
+async def test__read_db_file_does_not_exist(create_null_db_file: Path) -> None:
     with raises(FileNotFoundError):
-        assert files._read_db_file(create_null_db_file)
+        assert await files._read_db_file(create_null_db_file)


-def test__read_db_file_wrong_db_compression(create_bzip_db_file: Path) -> None:
+@mark.asyncio
+async def test__read_db_file_wrong_db_compression(create_bzip_db_file: Path) -> None:
     with raises(tarfile.ReadError):
-        assert files._read_db_file(create_bzip_db_file)
+        assert await files._read_db_file(create_bzip_db_file)


-def test__read_db_file_member_as_model(create_gz_db_file: Path) -> None:
-    for member in files._db_file_member_as_model(db_file=files._read_db_file(create_gz_db_file)):
+@mark.asyncio
+async def test__read_db_file_member_as_model(create_gz_db_file: Path) -> None:
+    async for member in files._db_file_member_as_model(db_file=await files._read_db_file(create_gz_db_file)):
         assert isinstance(member, models.RepoDbMemberData)


@@ -104,31 +109,35 @@ def test__read_db_file_member_as_model(create_gz_db_file: Path) -> None:
         ("foobar-1.0.0-42/files", "foobar"),
     ],
 )
-def test__extract_db_member_package_name(
+@mark.asyncio
+async def test__extract_db_member_package_name(
     member_name: str,
     result: str,
 ) -> None:
-    assert files._extract_db_member_package_name(name=member_name) == result
+    assert await files._extract_db_member_package_name(name=member_name) == result


-def test__json_files_in_directory(empty_json_files_in_dir: Path, empty_dir: Path) -> None:
-    for json_file in files._json_files_in_directory(path=empty_json_files_in_dir):
+@mark.asyncio
+async def test__json_files_in_directory(empty_json_files_in_dir: Path, empty_dir: Path) -> None:
+    async for json_file in files._json_files_in_directory(path=empty_json_files_in_dir):
         assert isinstance(json_file, Path)

     with raises(errors.RepoManagementFileNotFoundError):
-        for json_file in files._json_files_in_directory(path=empty_dir):
+        async for json_file in files._json_files_in_directory(path=empty_dir):
             assert isinstance(json_file, Path)


-def test__read_pkgbase_json_file(broken_json_file: Path, invalid_json_file: Path) -> None:
+@mark.asyncio
+async def test__read_pkgbase_json_file(broken_json_file: Path, invalid_json_file: Path) -> None:
     with raises(errors.RepoManagementFileError):
-        files._read_pkgbase_json_file(path=broken_json_file)
+        await files._read_pkgbase_json_file(path=broken_json_file)
     with raises(errors.RepoManagementValidationError):
-        files._read_pkgbase_json_file(path=invalid_json_file)
+        await files._read_pkgbase_json_file(path=invalid_json_file)


-def test__write_db_file(empty_dir: Path) -> None:
-    assert isinstance(files._write_db_file(empty_dir / Path("foo.db")), tarfile.TarFile)
+@mark.asyncio
+async def test__write_db_file(empty_dir: Path) -> None:
+    assert isinstance(await files._write_db_file(empty_dir / Path("foo.db")), tarfile.TarFile)


 @mark.parametrize(
@@ -186,13 +195,14 @@ def test__write_db_file(empty_dir: Path) -> None:
         ),
     ],
 )
-def test__stream_package_base_to_db(
+@mark.asyncio
+async def test__stream_package_base_to_db(
     model: models.OutputPackageBase,
     db_type: defaults.RepoDbType,
     empty_file: Path,
 ) -> None:
-    files._stream_package_base_to_db(
-        db=files._write_db_file(path=empty_file),
+    await files._stream_package_base_to_db(
+        db=await files._write_db_file(path=empty_file),
         model=model,
         repodbfile=convert.RepoDbFile(),
         db_type=db_type,
diff --git a/tests/test_models.py b/tests/test_models.py
index 9514a9c..2283ee6 100644
--- a/tests/test_models.py
+++ b/tests/test_models.py
@@ -139,11 +139,12 @@ def test_package_desc_get_output_package(
         ),
     ],
 )
-def test_output_package_base_get_packages_as_models(
+@mark.asyncio
+async def test_output_package_base_get_packages_as_models(
     models_list: List[Tuple[models.PackageDesc, models.Files]],
     output_package_base: models.OutputPackageBase,
 ) -> None:
-    assert models_list == output_package_base.get_packages_as_models()
+    assert models_list == await output_package_base.get_packages_as_models()


 @mark.parametrize(
diff --git a/tests/test_operations.py b/tests/test_operations.py
index 6e68474..1c4683e 100644
--- a/tests/test_operations.py
+++ b/tests/test_operations.py
@@ -4,7 +4,7 @@
 from pathlib import Path
 from typing import Iterator

-from pytest import fixture
+from pytest import fixture, mark

 from repo_management import models, operations

@@ -38,18 +38,21 @@ def empty_file() -> Iterator[Path]:
         yield Path(file_name)


-def test_db_file_as_models(create_gz_db_file: Path) -> None:
-    for (name, model) in operations.db_file_as_models(db_path=create_gz_db_file):
+@mark.asyncio
+async def test_db_file_as_models(create_gz_db_file: Path) -> None:
+    async for (name, model) in operations.db_file_as_models(db_path=create_gz_db_file):
         assert isinstance(name, str)
         assert isinstance(model, models.OutputPackageBase)


-def test_dump_db_to_json_files(
+@mark.asyncio
+async def test_dump_db_to_json_files(
     create_gz_db_file: Path,
     create_dir_path: Path,
 ) -> None:
-    operations.dump_db_to_json_files(input_path=create_gz_db_file, output_path=create_dir_path)
+    await operations.dump_db_to_json_files(input_path=create_gz_db_file, output_path=create_dir_path)


-def test_create_db_from_json_files(dummy_json_files_in_dir: Path, empty_file: Path) -> None:
-    operations.create_db_from_json_files(input_path=dummy_json_files_in_dir, output_path=empty_file)
+@mark.asyncio
+async def test_create_db_from_json_files(dummy_json_files_in_dir: Path, empty_file: Path) -> None:
+    await operations.create_db_from_json_files(input_path=dummy_json_files_in_dir, output_path=empty_file)
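
Usage sketch (illustrative only, not part of the diff above): with this change the operations API is coroutine-based, so synchronous callers wrap it in asyncio.run() — exactly what the reworked CLI entry points do — and tests opt into pytest-asyncio via the asyncio marker. The snippet below assumes the repo_management package from this patch is importable; the function name export_db, the database file core.db.tar.gz, and the output directory are placeholders.

    import asyncio
    from pathlib import Path

    from pytest import mark

    from repo_management import operations


    def export_db(db_file: Path, output_dir: Path) -> None:
        # Synchronous code drives the async API via asyncio.run(), mirroring cli.db2json().
        asyncio.run(operations.dump_db_to_json_files(input_path=db_file, output_path=output_dir))


    @mark.asyncio
    async def test_dump_db_to_json_files(tmp_path: Path) -> None:
        # Placeholder input: assumes a repository database named core.db.tar.gz exists locally.
        await operations.dump_db_to_json_files(input_path=Path("core.db.tar.gz"), output_path=tmp_path)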