From 3ec2a112a0f6550303093d01e6e99104ca55f67f Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 17 Jan 2022 11:58:22 +0000 Subject: [PATCH 01/12] Bump black from 21.9b0 to 21.12b0 Bumps [black](https://github.com/psf/black) from 21.9b0 to 21.12b0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/commits) --- updated-dependencies: - dependency-name: black dependency-type: direct:development ... Signed-off-by: dependabot[bot] --- requirements.dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.dev.txt b/requirements.dev.txt index 61ac1626a..7ebabbf2e 100644 --- a/requirements.dev.txt +++ b/requirements.dev.txt @@ -1,4 +1,4 @@ -black==21.9b0 +black==21.12b0 coverage doctest-ignore-unicode==0.1.2 flake8 From 9cd7f4be49441df1e8855aa32d8cb6a960d4ad1b Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Mon, 31 Jan 2022 18:15:40 +0000 Subject: [PATCH 02/12] Bump black from 21.12b0 to 22.1.0 Bumps [black](https://github.com/psf/black) from 21.12b0 to 22.1.0. - [Release notes](https://github.com/psf/black/releases) - [Changelog](https://github.com/psf/black/blob/main/CHANGES.md) - [Commits](https://github.com/psf/black/commits/22.1.0) --- updated-dependencies: - dependency-name: black dependency-type: direct:development ... Signed-off-by: dependabot[bot] --- requirements.dev.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/requirements.dev.txt b/requirements.dev.txt index ceed31c44..dcca8d3d3 100644 --- a/requirements.dev.txt +++ b/requirements.dev.txt @@ -1,4 +1,4 @@ -black==21.12b0 +black==22.1.0 coverage doctest-ignore-unicode==0.1.2 flake8 From 11bc6d1fbd4831e8b911ab2aeca914bd6bb4242b Mon Sep 17 00:00:00 2001 From: Graham Higgins Date: Thu, 17 Mar 2022 18:12:11 +0000 Subject: [PATCH 03/12] rename store tests to test_store_* for consistency --- test/{test_auditable.py => test_store_auditable.py} | 0 test/{test_memory_store.py => test_store_memorystore.py} | 0 test/{test_sparqlstore.py => test_store_sparqlstore.py} | 0 ...{test_sparqlupdatestore.py => test_store_sparqlupdatestore.py} | 0 ...qlupdatestore_mock.py => test_store_sparqlupdatestore_mock.py} | 0 test/{test_triple_store.py => test_store_triple_store.py} | 0 6 files changed, 0 insertions(+), 0 deletions(-) rename test/{test_auditable.py => test_store_auditable.py} (100%) rename test/{test_memory_store.py => test_store_memorystore.py} (100%) rename test/{test_sparqlstore.py => test_store_sparqlstore.py} (100%) rename test/{test_sparqlupdatestore.py => test_store_sparqlupdatestore.py} (100%) rename test/{test_sparqlupdatestore_mock.py => test_store_sparqlupdatestore_mock.py} (100%) rename test/{test_triple_store.py => test_store_triple_store.py} (100%) diff --git a/test/test_auditable.py b/test/test_store_auditable.py similarity index 100% rename from test/test_auditable.py rename to test/test_store_auditable.py diff --git a/test/test_memory_store.py b/test/test_store_memorystore.py similarity index 100% rename from test/test_memory_store.py rename to test/test_store_memorystore.py diff --git a/test/test_sparqlstore.py b/test/test_store_sparqlstore.py similarity index 100% rename from test/test_sparqlstore.py rename to test/test_store_sparqlstore.py diff --git a/test/test_sparqlupdatestore.py b/test/test_store_sparqlupdatestore.py similarity index 100% rename 
from test/test_sparqlupdatestore.py rename to test/test_store_sparqlupdatestore.py diff --git a/test/test_sparqlupdatestore_mock.py b/test/test_store_sparqlupdatestore_mock.py similarity index 100% rename from test/test_sparqlupdatestore_mock.py rename to test/test_store_sparqlupdatestore_mock.py diff --git a/test/test_triple_store.py b/test/test_store_triple_store.py similarity index 100% rename from test/test_triple_store.py rename to test/test_store_triple_store.py From e9129a16f1dead9a1af08af906eda9e25fdbddad Mon Sep 17 00:00:00 2001 From: Graham Higgins Date: Fri, 18 Mar 2022 10:38:22 +0000 Subject: [PATCH 04/12] extend test suite re-org --- .../test_parser_hext_multigraph.ndjson | 0 .../test_parser_hext_singlegraph.ndjson | 0 test/data.py | 16 + .../test_conjunctive_graph.py | 0 .../test_conjunctivegraph_generators.py | 25 +- ..._conjunctivegraph_operator_combinations.py | 22 +- test/test_dataset.py | 239 --------- test/test_dataset/test_dataset.py | 257 ++++++++++ .../test_dataset_generators.py | 21 +- test/{ => test_graph}/test_graph.py | 0 test/{ => test_graph}/test_graph_cbd.py | 0 test/test_graph/test_graph_context.py | 381 ++++++++++++++ test/{ => test_graph}/test_graph_formula.py | 0 .../{ => test_graph}/test_graph_generators.py | 19 +- test/{ => test_graph}/test_graph_http.py | 2 +- test/{ => test_graph}/test_graph_items.py | 0 test/{ => test_graph}/test_graph_operator.py | 0 test/test_graph_context.py | 389 -------------- test/{ => test_issues}/test_issue084.py | 0 test/{ => test_issues}/test_issue1003.py | 0 test/{ => test_issues}/test_issue1043.py | 0 test/{ => test_issues}/test_issue1141.py | 0 test/{ => test_issues}/test_issue1160.py | 0 test/{ => test_issues}/test_issue1404.py | 0 test/{ => test_issues}/test_issue1484.py | 0 test/{ => test_issues}/test_issue160.py | 0 test/{ => test_issues}/test_issue161.py | 0 test/{ => test_issues}/test_issue184.py | 0 test/{ => test_issues}/test_issue190.py | 0 test/{ => test_issues}/test_issue200.py | 0 test/{ => test_issues}/test_issue209.py | 0 test/{ => test_issues}/test_issue223.py | 0 test/{ => test_issues}/test_issue247.py | 0 test/{ => test_issues}/test_issue248.py | 0 test/{ => test_issues}/test_issue274.py | 2 +- test/{ => test_issues}/test_issue363.py | 0 test/{ => test_issues}/test_issue379.py | 0 test/{ => test_issues}/test_issue381.py | 0 test/{ => test_issues}/test_issue432.py | 0 test/{ => test_issues}/test_issue446.py | 0 test/{ => test_issues}/test_issue492.py | 0 test/{ => test_issues}/test_issue523.py | 0 test/{ => test_issues}/test_issue532.py | 0 test/{ => test_issues}/test_issue535.py | 0 test/{ => test_issues}/test_issue545.py | 0 test/{ => test_issues}/test_issue554.py | 0 test/{ => test_issues}/test_issue563.py | 0 test/{ => test_issues}/test_issue579.py | 0 test/{ => test_issues}/test_issue604.py | 0 test/{ => test_issues}/test_issue655.py | 0 test/{ => test_issues}/test_issue715.py | 0 test/{ => test_issues}/test_issue733.py | 0 test/{ => test_issues}/test_issue801.py | 0 test/{ => test_issues}/test_issue893.py | 0 test/{ => test_issues}/test_issue910.py | 0 test/{ => test_issues}/test_issue920.py | 0 test/{ => test_issues}/test_issue923.py | 0 test/{ => test_issues}/test_issue953.py | 0 test/{ => test_issues}/test_issue977.py | 0 test/{ => test_issues}/test_issue_git_200.py | 0 test/{ => test_issues}/test_issue_git_336.py | 0 test/test_nquads.py | 340 ++++++++----- test/{ => test_parsers}/test_nquads_w3c.py | 0 test/{ => test_parsers}/test_nt_w3c.py | 0 test/{ => test_parsers}/test_parser.py | 0 .../{ => 
test_parsers}/test_parser_helpers.py | 0 test/{ => test_parsers}/test_parser_hext.py | 6 +- .../test_parser_reads_from_pathlike_object.py | 0 .../test_parser_structure.py | 0 test/{ => test_parsers}/test_swap_n3.py | 0 test/{ => test_parsers}/test_trix_parse.py | 0 test/{ => test_parsers}/test_turtle_w3c.py | 2 +- test/{ => test_serializers}/test_prettyxml.py | 0 .../{ => test_serializers}/test_serializer.py | 2 +- .../test_serializer_hext.py | 6 +- .../test_serializer_longturtle.py | 0 .../test_serializer_trix.py | 0 .../test_serializer_turtle.py | 0 .../test_serializer_xml.py | 0 test/{ => test_store}/test_store.py | 0 test/test_store/test_store_auditable.py | 473 ++++++++++++++++++ test/test_store/test_store_berkeleydb.py | 145 ++++++ test/test_store/test_store_memorystore.py | 31 ++ .../test_store_sparqlstore.py | 21 +- .../test_store_sparqlupdatestore.py | 365 ++++++++++++++ .../test_store_sparqlupdatestore_mock.py | 8 +- test/test_store/test_store_triple_store.py | 36 ++ test/test_store_auditable.py | 386 -------------- test/test_store_berkeleydb.py | 130 ----- test/test_store_memorystore.py | 59 --- test/test_store_sparqlupdatestore.py | 362 -------------- test/test_store_triple_store.py | 35 -- 92 files changed, 1975 insertions(+), 1805 deletions(-) rename test/{ => consistent_test_data}/test_parser_hext_multigraph.ndjson (100%) rename test/{ => consistent_test_data}/test_parser_hext_singlegraph.ndjson (100%) rename test/{ => test_conjunctivegraph}/test_conjunctive_graph.py (100%) rename test/{ => test_conjunctivegraph}/test_conjunctivegraph_generators.py (86%) rename test/{ => test_conjunctivegraph}/test_conjunctivegraph_operator_combinations.py (86%) delete mode 100644 test/test_dataset.py create mode 100644 test/test_dataset/test_dataset.py rename test/{ => test_dataset}/test_dataset_generators.py (90%) rename test/{ => test_graph}/test_graph.py (100%) rename test/{ => test_graph}/test_graph_cbd.py (100%) create mode 100644 test/test_graph/test_graph_context.py rename test/{ => test_graph}/test_graph_formula.py (100%) rename test/{ => test_graph}/test_graph_generators.py (87%) rename test/{ => test_graph}/test_graph_http.py (98%) rename test/{ => test_graph}/test_graph_items.py (100%) rename test/{ => test_graph}/test_graph_operator.py (100%) delete mode 100644 test/test_graph_context.py rename test/{ => test_issues}/test_issue084.py (100%) rename test/{ => test_issues}/test_issue1003.py (100%) rename test/{ => test_issues}/test_issue1043.py (100%) rename test/{ => test_issues}/test_issue1141.py (100%) rename test/{ => test_issues}/test_issue1160.py (100%) rename test/{ => test_issues}/test_issue1404.py (100%) rename test/{ => test_issues}/test_issue1484.py (100%) rename test/{ => test_issues}/test_issue160.py (100%) rename test/{ => test_issues}/test_issue161.py (100%) rename test/{ => test_issues}/test_issue184.py (100%) rename test/{ => test_issues}/test_issue190.py (100%) rename test/{ => test_issues}/test_issue200.py (100%) rename test/{ => test_issues}/test_issue209.py (100%) rename test/{ => test_issues}/test_issue223.py (100%) rename test/{ => test_issues}/test_issue247.py (100%) rename test/{ => test_issues}/test_issue248.py (100%) rename test/{ => test_issues}/test_issue274.py (99%) rename test/{ => test_issues}/test_issue363.py (100%) rename test/{ => test_issues}/test_issue379.py (100%) rename test/{ => test_issues}/test_issue381.py (100%) rename test/{ => test_issues}/test_issue432.py (100%) rename test/{ => test_issues}/test_issue446.py (100%) rename test/{ => 
test_issues}/test_issue492.py (100%) rename test/{ => test_issues}/test_issue523.py (100%) rename test/{ => test_issues}/test_issue532.py (100%) rename test/{ => test_issues}/test_issue535.py (100%) rename test/{ => test_issues}/test_issue545.py (100%) rename test/{ => test_issues}/test_issue554.py (100%) rename test/{ => test_issues}/test_issue563.py (100%) rename test/{ => test_issues}/test_issue579.py (100%) rename test/{ => test_issues}/test_issue604.py (100%) rename test/{ => test_issues}/test_issue655.py (100%) rename test/{ => test_issues}/test_issue715.py (100%) rename test/{ => test_issues}/test_issue733.py (100%) rename test/{ => test_issues}/test_issue801.py (100%) rename test/{ => test_issues}/test_issue893.py (100%) rename test/{ => test_issues}/test_issue910.py (100%) rename test/{ => test_issues}/test_issue920.py (100%) rename test/{ => test_issues}/test_issue923.py (100%) rename test/{ => test_issues}/test_issue953.py (100%) rename test/{ => test_issues}/test_issue977.py (100%) rename test/{ => test_issues}/test_issue_git_200.py (100%) rename test/{ => test_issues}/test_issue_git_336.py (100%) rename test/{ => test_parsers}/test_nquads_w3c.py (100%) rename test/{ => test_parsers}/test_nt_w3c.py (100%) rename test/{ => test_parsers}/test_parser.py (100%) rename test/{ => test_parsers}/test_parser_helpers.py (100%) rename test/{ => test_parsers}/test_parser_hext.py (95%) rename test/{ => test_parsers}/test_parser_reads_from_pathlike_object.py (100%) rename test/{ => test_parsers}/test_parser_structure.py (100%) rename test/{ => test_parsers}/test_swap_n3.py (100%) rename test/{ => test_parsers}/test_trix_parse.py (100%) rename test/{ => test_parsers}/test_turtle_w3c.py (98%) rename test/{ => test_serializers}/test_prettyxml.py (100%) rename test/{ => test_serializers}/test_serializer.py (98%) rename test/{ => test_serializers}/test_serializer_hext.py (97%) rename test/{ => test_serializers}/test_serializer_longturtle.py (100%) rename test/{ => test_serializers}/test_serializer_trix.py (100%) rename test/{ => test_serializers}/test_serializer_turtle.py (100%) rename test/{ => test_serializers}/test_serializer_xml.py (100%) rename test/{ => test_store}/test_store.py (100%) create mode 100644 test/test_store/test_store_auditable.py create mode 100644 test/test_store/test_store_berkeleydb.py create mode 100644 test/test_store/test_store_memorystore.py rename test/{ => test_store}/test_store_sparqlstore.py (98%) create mode 100644 test/test_store/test_store_sparqlupdatestore.py rename test/{ => test_store}/test_store_sparqlupdatestore_mock.py (96%) create mode 100644 test/test_store/test_store_triple_store.py delete mode 100644 test/test_store_auditable.py delete mode 100644 test/test_store_berkeleydb.py delete mode 100644 test/test_store_memorystore.py delete mode 100644 test/test_store_sparqlupdatestore.py delete mode 100644 test/test_store_triple_store.py diff --git a/test/test_parser_hext_multigraph.ndjson b/test/consistent_test_data/test_parser_hext_multigraph.ndjson similarity index 100% rename from test/test_parser_hext_multigraph.ndjson rename to test/consistent_test_data/test_parser_hext_multigraph.ndjson diff --git a/test/test_parser_hext_singlegraph.ndjson b/test/consistent_test_data/test_parser_hext_singlegraph.ndjson similarity index 100% rename from test/test_parser_hext_singlegraph.ndjson rename to test/consistent_test_data/test_parser_hext_singlegraph.ndjson diff --git a/test/data.py b/test/data.py index 613df944f..db184fb0d 100644 --- a/test/data.py +++ 
b/test/data.py @@ -1,4 +1,20 @@ +from rdflib import URIRef from pathlib import Path TEST_DIR = Path(__file__).parent CONSISTENT_DATA_DIR = TEST_DIR / "consistent_test_data" + +alice_uri = URIRef("http://example.org/alice") +bob_uri = URIRef("http://example.org/bob") + +michel = URIRef("urn:example:michel") +tarek = URIRef("urn:example:tarek") +bob = URIRef("urn:example:bob") +likes = URIRef("urn:example:likes") +hates = URIRef("urn:example:hates") +pizza = URIRef("urn:example:pizza") +cheese = URIRef("urn:example:cheese") + +context0 = URIRef("urn:example:context-0") +context1 = URIRef("urn:example:context-1") +context2 = URIRef("urn:example:context-2") diff --git a/test/test_conjunctive_graph.py b/test/test_conjunctivegraph/test_conjunctive_graph.py similarity index 100% rename from test/test_conjunctive_graph.py rename to test/test_conjunctivegraph/test_conjunctive_graph.py diff --git a/test/test_conjunctivegraph_generators.py b/test/test_conjunctivegraph/test_conjunctivegraph_generators.py similarity index 86% rename from test/test_conjunctivegraph_generators.py rename to test/test_conjunctivegraph/test_conjunctivegraph_generators.py index 07e88a4f7..9c134977a 100644 --- a/test/test_conjunctivegraph_generators.py +++ b/test/test_conjunctivegraph/test_conjunctivegraph_generators.py @@ -1,21 +1,22 @@ import os from rdflib import ConjunctiveGraph, URIRef - +from test.data import ( + CONSISTENT_DATA_DIR, + michel, + tarek, + bob, + likes, + hates, + pizza, + cheese, + context1, +) timblcardn3 = open( - os.path.join(os.path.dirname(__file__), "consistent_test_data", "timbl-card.n3") + os.path.join(CONSISTENT_DATA_DIR, "timbl-card.n3") ).read() -michel = URIRef("urn:example:michel") -tarek = URIRef("urn:example:tarek") -bob = URIRef("urn:example:bob") -likes = URIRef("urn:example:likes") -hates = URIRef("urn:example:hates") -pizza = URIRef("urn:example:pizza") -cheese = URIRef("urn:example:cheese") - - def add_stuff(graph): graph.add((tarek, likes, pizza)) graph.add((tarek, likes, cheese)) @@ -88,7 +89,7 @@ def test_parse_berners_lee_card_into_conjunctivegraph_default(): def test_parse_berners_lee_card_into_named_graph(): - graph = ConjunctiveGraph(identifier=URIRef("context-1")) + graph = ConjunctiveGraph(identifier=context1) graph.parse(data=timblcardn3, format="n3") assert len(list(graph.subjects())) == no_of_statements_in_card assert len(list(graph.subjects(unique=True))) == no_of_unique_subjects diff --git a/test/test_conjunctivegraph_operator_combinations.py b/test/test_conjunctivegraph/test_conjunctivegraph_operator_combinations.py similarity index 86% rename from test/test_conjunctivegraph_operator_combinations.py rename to test/test_conjunctivegraph/test_conjunctivegraph_operator_combinations.py index c639ceadb..0726790d5 100644 --- a/test/test_conjunctivegraph_operator_combinations.py +++ b/test/test_conjunctivegraph/test_conjunctivegraph_operator_combinations.py @@ -4,21 +4,17 @@ ConjunctiveGraph, URIRef, ) - - -michel = URIRef("urn:example:michel") -tarek = URIRef("urn:example:tarek") -bob = URIRef("urn:example:bob") -likes = URIRef("urn:example:likes") -hates = URIRef("urn:example:hates") -pizza = URIRef("urn:example:pizza") -cheese = URIRef("urn:example:cheese") - -c1 = URIRef("urn:example:context-1") -c2 = URIRef("urn:example:context-2") +from test.data import ( + CONSISTENT_DATA_DIR, + michel, + tarek, + likes, + pizza, + cheese, +) sportquadstrig = open( - os.path.join(os.path.dirname(__file__), "consistent_test_data", "sportquads.trig") + os.path.join(CONSISTENT_DATA_DIR, 
"sportquads.trig") ).read() diff --git a/test/test_dataset.py b/test/test_dataset.py deleted file mode 100644 index c08162ded..000000000 --- a/test/test_dataset.py +++ /dev/null @@ -1,239 +0,0 @@ -import sys -import os -from typing import Optional -import unittest - -from tempfile import mkdtemp, mkstemp -import shutil - -import pytest -from rdflib import Dataset, URIRef, plugin -from rdflib.graph import DATASET_DEFAULT_GRAPH_ID - -# Will also run SPARQLUpdateStore tests against local SPARQL1.1 endpoint if -# available. This assumes SPARQL1.1 query/update endpoints running locally at -# http://localhost:3030/db/ -# -# Testing SPARQLUpdateStore Dataset behavior needs a different endpoint behavior -# than our ConjunctiveGraph tests in test_sparqlupdatestore.py! -# -# For the tests here to run, you can for example start fuseki with: -# ./fuseki-server --mem --update /db - -# THIS WILL DELETE ALL DATA IN THE /db dataset - -HOST = "http://localhost:3030" -DB = "/db/" - - -class DatasetTestCase(unittest.TestCase): - store = "default" - skip_reason = None - slow = True - tmppath = None - - def setUp(self): - if self.skip_reason is not None: - self.skipTest(skip_reason) - try: - self.graph = Dataset(store=self.store) - except ImportError: - pytest.skip("Dependencies for store '%s' not available!" % self.store) - if self.store == "SQLite": - _, self.tmppath = mkstemp(prefix="test", dir="/tmp", suffix=".sqlite") - elif self.store == "SPARQLUpdateStore": - root = HOST + DB - self.graph.open((root + "sparql", root + "update")) - else: - self.tmppath = mkdtemp() - - if self.store != "SPARQLUpdateStore": - self.graph.open(self.tmppath, create=True) - self.michel = URIRef("urn:example:michel") - self.tarek = URIRef("urn:example:tarek") - self.bob = URIRef("urn:example:bob") - self.likes = URIRef("urn:example:likes") - self.hates = URIRef("urn:example:hates") - self.pizza = URIRef("urn:example:pizza") - self.cheese = URIRef("urn:cheese") - - # Use regular URIs because SPARQL endpoints like Fuseki alter short names - self.c1 = URIRef("urn:example:context-1") - self.c2 = URIRef("urn:example:context-2") - - # delete the graph for each test! - self.graph.remove((None, None, None)) - for c in self.graph.contexts(): - c.remove((None, None, None)) - assert len(c) == 0 - self.graph.remove_graph(c) - - def tearDown(self): - self.graph.close() - if self.store == "SPARQLUpdateStore": - pass - else: - if os.path.isdir(self.tmppath): - shutil.rmtree(self.tmppath) - else: - os.remove(self.tmppath) - - def testGraphAware(self): - - if not self.graph.store.graph_aware: - return - - g = self.graph - g1 = g.graph(self.c1) - - # Some SPARQL endpoint backends (e.g. TDB) do not consider - # empty named graphs - if self.store != "SPARQLUpdateStore": - # added graph exists - self.assertEqual( - set(x.identifier for x in self.graph.contexts()), - set([self.c1, DATASET_DEFAULT_GRAPH_ID]), - ) - - # added graph is empty - self.assertEqual(len(g1), 0) - - g1.add((self.tarek, self.likes, self.pizza)) - - # added graph still exists - self.assertEqual( - set(x.identifier for x in self.graph.contexts()), - set([self.c1, DATASET_DEFAULT_GRAPH_ID]), - ) - - # added graph contains one triple - self.assertEqual(len(g1), 1) - - g1.remove((self.tarek, self.likes, self.pizza)) - - # added graph is empty - self.assertEqual(len(g1), 0) - - # Some SPARQL endpoint backends (e.g. 
TDB) do not consider - # empty named graphs - if self.store != "SPARQLUpdateStore": - # graph still exists, although empty - self.assertEqual( - set(x.identifier for x in self.graph.contexts()), - set([self.c1, DATASET_DEFAULT_GRAPH_ID]), - ) - - g.remove_graph(self.c1) - - # graph is gone - self.assertEqual( - set(x.identifier for x in self.graph.contexts()), - set([DATASET_DEFAULT_GRAPH_ID]), - ) - - def testDefaultGraph(self): - # Something the default graph is read-only (e.g. TDB in union mode) - if self.store == "SPARQLUpdateStore": - print( - "Please make sure updating the default graph " - "is supported by your SPARQL endpoint" - ) - - self.graph.add((self.tarek, self.likes, self.pizza)) - self.assertEqual(len(self.graph), 1) - # only default exists - self.assertEqual( - set(x.identifier for x in self.graph.contexts()), - set([DATASET_DEFAULT_GRAPH_ID]), - ) - - # removing default graph removes triples but not actual graph - self.graph.remove_graph(DATASET_DEFAULT_GRAPH_ID) - - self.assertEqual(len(self.graph), 0) - # default still exists - self.assertEqual( - set(x.identifier for x in self.graph.contexts()), - set([DATASET_DEFAULT_GRAPH_ID]), - ) - - def testNotUnion(self): - # Union depends on the SPARQL endpoint configuration - if self.store == "SPARQLUpdateStore": - print( - "Please make sure your SPARQL endpoint has not configured " - "its default graph as the union of the named graphs" - ) - g1 = self.graph.graph(self.c1) - g1.add((self.tarek, self.likes, self.pizza)) - - self.assertEqual(list(self.graph.objects(self.tarek, None)), []) - self.assertEqual(list(g1.objects(self.tarek, None)), [self.pizza]) - - def testIter(self): - """PR 1382: adds __iter__ to Dataset""" - d = Dataset() - uri_a = URIRef("https://example.com/a") - uri_b = URIRef("https://example.com/b") - uri_c = URIRef("https://example.com/c") - uri_d = URIRef("https://example.com/d") - - d.add_graph(URIRef("https://example.com/g1")) - d.add((uri_a, uri_b, uri_c, URIRef("https://example.com/g1"))) - d.add( - (uri_a, uri_b, uri_c, URIRef("https://example.com/g1")) - ) # pointless addition: duplicates above - - d.add_graph(URIRef("https://example.com/g2")) - d.add((uri_a, uri_b, uri_c, URIRef("https://example.com/g2"))) - d.add((uri_a, uri_b, uri_d, URIRef("https://example.com/g1"))) # new, uri_d - - # traditional iterator - i_trad = 0 - for t in d.quads((None, None, None)): - i_trad += 1 - - # new Dataset.__iter__ iterator - i_new = 0 - for t in d: - i_new += 1 - - self.assertEqual(i_new, i_trad) # both should be 3 - - -# dynamically create classes for each registered Store - -pluginname = None -if __name__ == "__main__": - if len(sys.argv) > 1: - pluginname = sys.argv[1] - -tests = 0 - -for s in plugin.plugins(pluginname, plugin.Store): - skip_reason: Optional[str] = None - if s.name in ("default", "Memory", "Auditable", "Concurrent", "SPARQLStore"): - continue # these are tested by default - - if not s.getClass().graph_aware: - continue - - if s.name == "SPARQLUpdateStore": - from urllib.request import urlopen - - try: - assert len(urlopen(HOST).read()) > 0 - except BaseException: - skip_reason = "No SPARQL endpoint for %s (tests skipped)\n" % s.name - sys.stderr.write(skip_reason) - - locals()["t%d" % tests] = type( - "%sContextTestCase" % s.name, - (DatasetTestCase,), - {"store": s.name, "skip_reason": skip_reason}, - ) - tests += 1 - - -if __name__ == "__main__": - unittest.main() diff --git a/test/test_dataset/test_dataset.py b/test/test_dataset/test_dataset.py new file mode 100644 index 000000000..3af172f6c 
--- /dev/null +++ b/test/test_dataset/test_dataset.py @@ -0,0 +1,257 @@ +# -*- coding: utf-8 -*- +import os +import shutil +import tempfile +from typing import Optional + +import pytest + +from rdflib import FOAF, XSD, BNode, Literal, URIRef, plugin +from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, Dataset, Graph +from rdflib.plugins.stores.sparqlstore import SPARQLUpdateStore +from test.data import ( + CONSISTENT_DATA_DIR, + michel, + tarek, + bob, + likes, + hates, + pizza, + cheese, + context1, +) + +# Will also run SPARQLUpdateStore tests against local SPARQL1.1 endpoint if +# available. This assumes SPARQL1.1 query/update endpoints running locally at +# http://localhost:3030/db/ +# +# Testing SPARQLUpdateStore Dataset behavior needs a different endpoint behavior +# than our ConjunctiveGraph tests in test_sparqlupdatestore.py! +# +# For the tests here to run, you can for example start fuseki with: +# ./fuseki-server --mem --update /db + +# THIS WILL DELETE ALL DATA IN THE /db dataset + +HOST = "http://localhost:3030" +DB = "/db/" + +dgb = URIRef("http://rdflib/net/") + +timblcardn3 = open( + os.path.join(CONSISTENT_DATA_DIR, "timbl-card.n3") +).read() + +timblcardnquads = open( + os.path.join(CONSISTENT_DATA_DIR, "timbl-card.nquads") +).read() + +no_of_statements_in_card = 86 +no_of_unique_subjects = 20 +no_of_unique_predicates = 58 +no_of_unique_objects = 62 + + +pluginstores = [] + +for s in plugin.plugins(None, plugin.Store): + if s.name in ("default", "Memory", "Auditable", "Concurrent", "SPARQLStore"): + continue # these are tested by default + + if not s.getClass().graph_aware: + continue + + if s.name == "SPARQLUpdateStore": + from urllib.request import urlopen + + try: + assert len(urlopen(HOST).read()) > 0 + except Exception: + continue + + pluginstores.append(s.name) + + +@pytest.fixture( + scope="function", + params=pluginstores, +) +def get_dataset(request): + store = request.param + + try: + dataset = Dataset(store=store) + except ImportError: + pytest.skip("Dependencies for store '%s' not available!" % store) + + graph = Dataset(store=store) + + if not graph.store.graph_aware: + return + + if store == "SPARQLUpdateStore": + root = HOST + DB + path = root + "sparql", root + "update" + else: + path = tempfile.mkdtemp() + + graph.open(path, create=True if store != "SPARQLUpdateStore" else False) + + if store == "SPARQLUpdateStore": + try: + graph.store.update("CLEAR ALL") + except Exception as e: + if "SPARQLStore does not support BNodes! " in str(e): + pass + else: + raise + + yield store, graph + + if store == "SPARQLUpdateStore": + try: + graph.store.update("CLEAR ALL") + except Exception as e: + if "SPARQLStore does not support BNodes! " in str(e): + pass + else: + raise + graph.close() + else: + graph.close() + graph.destroy(path) + if os.path.isdir(path): + shutil.rmtree(path) + else: + try: + os.remove(path) + except OSError: + pass + + +def test_graph_aware(get_dataset): + + store, graph = get_dataset + + if not graph.store.graph_aware: + return + + g = graph + g1 = g.graph(context1) + + # Some SPARQL endpoint backends (e.g. 
TDB) do not consider + # empty named graphs + if store != "SPARQLUpdateStore": + # added graph exists + assert set(x.identifier for x in graph.contexts()) == set( + [context1, DATASET_DEFAULT_GRAPH_ID] + ) + + # added graph is empty + assert len(g1) == 0 + + g1.add((tarek, likes, pizza)) + + # added graph still exists + assert set(x.identifier for x in graph.contexts()) == set( + [context1, DATASET_DEFAULT_GRAPH_ID] + ) + + # added graph contains one triple + assert len(g1) == 1 + + g1.remove((tarek, likes, pizza)) + + # added graph is empty + assert len(g1) == 0 + + # Some SPARQL endpoint backends (e.g. TDB) do not consider + # empty named graphs + if store != "SPARQLUpdateStore": + # graph still exists, although empty + assert set(x.identifier for x in graph.contexts()) == set( + [context1, DATASET_DEFAULT_GRAPH_ID] + ) + + g.remove_graph(context1) + + # graph is gone + assert set(x.identifier for x in graph.contexts()) == set( + [DATASET_DEFAULT_GRAPH_ID] + ) + + +def test_default_graph(get_dataset): + # Sometimes the default graph is read-only (e.g. TDB in union mode) + + store, graph = get_dataset + if store == "SPARQLUpdateStore": + print( + "Please make sure updating the default graph " + "is supported by your SPARQL endpoint" + ) + + graph.add((tarek, likes, pizza)) + assert len(graph) == 1 + # only default exists + assert set(x.identifier for x in graph.contexts()) == set( + [DATASET_DEFAULT_GRAPH_ID] + ) + + # removing default graph removes triples but not actual graph + graph.remove_graph(DATASET_DEFAULT_GRAPH_ID) + + assert len(graph) == 0 + # default still exists + assert set(x.identifier for x in graph.contexts()) == set( + [DATASET_DEFAULT_GRAPH_ID] + ) + + +def test_not_union(get_dataset): + + store, graph = get_dataset + # Union depends on the SPARQL endpoint configuration + if store == "SPARQLUpdateStore": + print( + "Please make sure your SPARQL endpoint has not configured " + "its default graph as the union of the named graphs" + ) + g1 = graph.graph(context1) + g1.add((tarek, likes, pizza)) + + assert list(graph.objects(tarek, None)) == [] + assert list(g1.objects(tarek, None)) == [pizza] + + +def test_iter(get_dataset): + """PR 1382: adds __iter__ to Dataset""" + + store, d = get_dataset + + uri_a = URIRef("https://example.com/a") + uri_b = URIRef("https://example.com/b") + uri_c = URIRef("https://example.com/c") + uri_d = URIRef("https://example.com/d") + + d.add_graph(URIRef("https://example.com/g1")) + d.add((uri_a, uri_b, uri_c, URIRef("https://example.com/g1"))) + d.add( + (uri_a, uri_b, uri_c, URIRef("https://example.com/g1")) + ) # pointless addition: duplicates above + + d.add_graph(URIRef("https://example.com/g2")) + d.add((uri_a, uri_b, uri_c, URIRef("https://example.com/g2"))) + d.add((uri_a, uri_b, uri_d, URIRef("https://example.com/g1"))) # new, uri_d + + # traditional iterator + i_trad = 0 + for t in d.quads((None, None, None)): + i_trad += 1 + + # new Dataset.__iter__ iterator + i_new = 0 + for t in d: + i_new += 1 + + assert i_new == i_trad # both should be 3 diff --git a/test/test_dataset_generators.py b/test/test_dataset/test_dataset_generators.py similarity index 90% rename from test/test_dataset_generators.py rename to test/test_dataset/test_dataset_generators.py index 874e48554..f6d6d5223 100644 --- a/test/test_dataset_generators.py +++ b/test/test_dataset/test_dataset_generators.py @@ -1,20 +1,21 @@ import os from rdflib import Dataset, URIRef +from test.data import ( + CONSISTENT_DATA_DIR, + michel, + tarek, + bob, + likes, + 
hates, + pizza, + cheese, +) timblcardn3 = open( - os.path.join(os.path.dirname(__file__), "consistent_test_data", "timbl-card.n3") + os.path.join(CONSISTENT_DATA_DIR, "timbl-card.n3") ).read() -michel = URIRef("urn:example:michel") -tarek = URIRef("urn:example:tarek") -bob = URIRef("urn:example:bob") -likes = URIRef("urn:example:likes") -hates = URIRef("urn:example:hates") -pizza = URIRef("urn:example:pizza") -cheese = URIRef("urn:example:cheese") - - def add_stuff(graph): graph.add((tarek, likes, pizza)) graph.add((tarek, likes, cheese)) diff --git a/test/test_graph.py b/test/test_graph/test_graph.py similarity index 100% rename from test/test_graph.py rename to test/test_graph/test_graph.py diff --git a/test/test_graph_cbd.py b/test/test_graph/test_graph_cbd.py similarity index 100% rename from test/test_graph_cbd.py rename to test/test_graph/test_graph_cbd.py diff --git a/test/test_graph/test_graph_context.py b/test/test_graph/test_graph_context.py new file mode 100644 index 000000000..ac4d332bb --- /dev/null +++ b/test/test_graph/test_graph_context.py @@ -0,0 +1,381 @@ +import os +import shutil +import tempfile + +import pytest + +from rdflib import BNode, ConjunctiveGraph, Graph, URIRef, plugin +from rdflib.store import VALID_STORE + +michel = URIRef("urn:example:michel") +tarek = URIRef("urn:example:tarek") +bob = URIRef("urn:example:bob") +likes = URIRef("urn:example:likes") +hates = URIRef("urn:example:hates") +pizza = URIRef("urn:example:pizza") +cheese = URIRef("urn:example:cheese") + +c1 = URIRef("urn:example:context-1") +c2 = URIRef("urn:example:context-2") + + +pluginstores = [] + +for s in plugin.plugins(None, plugin.Store): + if s.name in ( + "default", + "Memory", + "Auditable", + "Concurrent", + "SimpleMemory", + "SPARQLStore", + "SPARQLUpdateStore", + ): + continue # inappropriate for these tests + + pluginstores.append(s.name) + + +@pytest.fixture( + scope="function", + params=pluginstores, +) +def get_graph(request): + store = request.param + path = tempfile.mktemp() + try: + shutil.rmtree(path) + except Exception: + pass + + try: + graph = ConjunctiveGraph(store=store) + except ImportError: + pytest.skip("Dependencies for store '%s' not available!" % store) + + if store != "default": + rt = graph.open(configuration=path, create=True) + assert rt == VALID_STORE, "The underlying store is corrupt" + + assert ( + len(graph) == 0 + ), "There must be zero triples in the graph just after store (file) creation" + + yield graph + + graph.close() + graph.store.destroy(path) + + if os.path.isdir(path): + shutil.rmtree(path) + elif os.path.exists(path): + os.remove(path) + + +def populate_c1(graph): + context1 = Graph(graph.store, c1) + + context1.add((tarek, likes, pizza)) + context1.add((tarek, likes, cheese)) + context1.add((michel, likes, pizza)) + context1.add((michel, likes, cheese)) + context1.add((bob, likes, cheese)) + context1.add((bob, hates, pizza)) + context1.add((bob, hates, michel)) # gasp! + + +def depopulate_c1(graph): + context1 = Graph(graph.store, c1) + + context1.remove((tarek, likes, pizza)) + context1.remove((tarek, likes, cheese)) + context1.remove((michel, likes, pizza)) + context1.remove((michel, likes, cheese)) + context1.remove((bob, likes, cheese)) + context1.remove((bob, hates, pizza)) + context1.remove((bob, hates, michel)) # gasp! + + +def add_triple_to_default_context_context1_and_context2(graph): + triple = (pizza, hates, tarek) # revenge! 
+ + # add to default context + graph.add(triple) + + # add to context 1 + context1 = Graph(graph.store, c1) + context1.add(triple) + + # add to context 2 + context2 = Graph(graph.store, c2) + context2.add(triple) + + +def test_conjunction(get_graph): + graph = get_graph + + if type(graph.store).__name__ == "Shelf": + pytest.skip("Skipping known issue with __len__") + + add_triple_to_default_context_context1_and_context2(graph) + triple = (pizza, likes, pizza) + + # add to context 1 + context1 = Graph(graph.store, c1) + context1.add(triple) + assert len(context1) == len(graph) + + +def test_add(get_graph): + graph = get_graph + + populate_c1(graph) + + +def test_remove(get_graph): + graph = get_graph + + populate_c1(graph) + depopulate_c1(graph) + + +def test_len_in_one_context(get_graph): + graph = get_graph + # make sure context is empty + + graph.remove_context(graph.get_context(c1)) + context1 = Graph(graph.store, c1) + oldLen = len(graph) + + for i in range(0, 10): + context1.add((BNode(), hates, hates)) + assert len(context1) == oldLen + 10 + + assert len(graph.get_context(c1)) == oldLen + 10 + + graph.remove_context(graph.get_context(c1)) + + assert len(graph) == oldLen + assert len(context1) == 0 + + +def test_len_in_multiple_contexts(get_graph): + graph = get_graph + + if type(graph.store).__name__ == "Shelf": + pytest.skip("Skipping known issue with __len__") + + oldLen = len(graph) + add_triple_to_default_context_context1_and_context2(graph) + + # add_triple_to_default_context_context1_and_context2 is adding the same triple to + # three different contexts. So it's only + 1 + assert len(graph) == oldLen + 1 + + context1 = Graph(graph.store, c1) + assert len(context1) == oldLen + 1 + + +def test_remove_in_multiple_contexts(get_graph): + graph = get_graph + + triple = (pizza, hates, tarek) # revenge! + + add_triple_to_default_context_context1_and_context2(graph) + + # triple should be still in store after removing it from c1 + c2 + assert triple in graph + context1 = Graph(graph.store, c1) + context1.remove(triple) + + assert triple in graph + context2 = Graph(graph.store, c2) + context2.remove(triple) + assert triple in graph + graph.remove(triple) + # now gone! + assert triple not in graph + + # add again and see if remove without context removes all triples! + add_triple_to_default_context_context1_and_context2(graph) + graph.remove(triple) + assert triple not in graph + + +def test_contexts(get_graph): + graph = get_graph + triple = (pizza, hates, tarek) # revenge!
+ + add_triple_to_default_context_context1_and_context2(graph) + + def cid(c): + return c.identifier + + assert c1 in map(cid, graph.contexts()) + assert c2 in map(cid, graph.contexts()) + + contextList = list(map(cid, list(graph.contexts(triple)))) + assert c1 in contextList, (c1, contextList) + assert c2 in contextList, (c2, contextList) + + +def test_remove_context(get_graph): + graph = get_graph + + add_triple_to_default_context_context1_and_context2(graph) + + assert len(Graph(graph.store, c1)) == 1 + assert len(graph.get_context(c1)) == 1 + + graph.remove_context(graph.get_context(c1)) + assert c1 not in graph.contexts() + + +def test_remove_any(get_graph): + graph = get_graph + Any = None + add_triple_to_default_context_context1_and_context2(graph) + graph.remove((Any, Any, Any)) + assert len(graph) == 0 + + +def test_triples(get_graph): + graph = get_graph + + triples = graph.triples + Any = None + populate_c1(graph) + + context1 = Graph(graph.store, c1) + context1triples = context1.triples + + # unbound subjects with context + assert len(list(context1triples((Any, likes, pizza)))) == 2, graph.store + assert len(list(context1triples((Any, hates, pizza)))) == 1 + assert len(list(context1triples((Any, likes, cheese)))) == 3 + assert len(list(context1triples((Any, hates, cheese)))) == 0 + + # unbound subjects without context, same results! + assert len(list(triples((Any, likes, pizza)))) == 2 + assert len(list(triples((Any, hates, pizza)))) == 1 + assert len(list(triples((Any, likes, cheese)))) == 3 + assert len(list(triples((Any, hates, cheese)))) == 0 + + # unbound objects with context + assert len(list(context1triples((michel, likes, Any)))) == 2 + assert len(list(context1triples((tarek, likes, Any)))) == 2 + assert len(list(context1triples((bob, hates, Any)))) == 2 + assert len(list(context1triples((bob, likes, Any)))) == 1 + + # unbound objects without context, same results! + assert len(list(triples((michel, likes, Any)))) == 2 + assert len(list(triples((tarek, likes, Any)))) == 2 + assert len(list(triples((bob, hates, Any)))) == 2 + assert len(list(triples((bob, likes, Any)))) == 1 + + # unbound predicates with context + assert len(list(context1triples((michel, Any, cheese)))) == 1 + assert len(list(context1triples((tarek, Any, cheese)))) == 1 + assert len(list(context1triples((bob, Any, pizza)))) == 1 + assert len(list(context1triples((bob, Any, michel)))) == 1 + + # unbound predicates without context, same results! + assert len(list(triples((michel, Any, cheese)))) == 1 + assert len(list(triples((tarek, Any, cheese)))) == 1 + assert len(list(triples((bob, Any, pizza)))) == 1 + assert len(list(triples((bob, Any, michel)))) == 1 + + # unbound subject, objects with context + assert len(list(context1triples((Any, hates, Any)))) == 2 + assert len(list(context1triples((Any, likes, Any)))) == 5 + + # unbound subject, objects without context, same results! + assert len(list(triples((Any, hates, Any)))) == 2 + assert len(list(triples((Any, likes, Any)))) == 5 + + # unbound predicates, objects with context + assert len(list(context1triples((michel, Any, Any)))) == 2 + assert len(list(context1triples((bob, Any, Any)))) == 3 + assert len(list(context1triples((tarek, Any, Any)))) == 2 + + # unbound predicates, objects without context, same results! 
+ assert len(list(triples((michel, Any, Any)))) == 2 + assert len(list(triples((bob, Any, Any)))) == 3 + assert len(list(triples((tarek, Any, Any)))) == 2 + + # unbound subjects, predicates with context + assert len(list(context1triples((Any, Any, pizza)))) == 3 + assert len(list(context1triples((Any, Any, cheese)))) == 3 + assert len(list(context1triples((Any, Any, michel)))) == 1 + + # unbound subjects, predicates without context, same results! + assert len(list(triples((Any, Any, pizza)))) == 3 + assert len(list(triples((Any, Any, cheese)))) == 3 + assert len(list(triples((Any, Any, michel)))) == 1 + + # all unbound with context + assert len(list(context1triples((Any, Any, Any)))) == 7 + # all unbound without context, same result! + assert len(list(triples((Any, Any, Any)))) == 7 + + for c in [graph, graph.get_context(c1)]: + # unbound subjects + assert set(c.subjects(likes, pizza)) == set((michel, tarek)) + assert set(c.subjects(hates, pizza)) == set((bob,)) + assert set(c.subjects(likes, cheese)) == set([tarek, bob, michel]) + assert set(c.subjects(hates, cheese)) == set() + + # unbound objects + assert set(c.objects(michel, likes)) == set([cheese, pizza]) + assert set(c.objects(tarek, likes)) == set([cheese, pizza]) + assert set(c.objects(bob, hates)) == set([michel, pizza]) + assert set(c.objects(bob, likes)) == set([cheese]) + + # unbound predicates + assert set(c.predicates(michel, cheese)) == set([likes]) + assert set(c.predicates(tarek, cheese)) == set([likes]) + assert set(c.predicates(bob, pizza)) == set([hates]) + assert set(c.predicates(bob, michel)) == set([hates]) + + assert set(c.subject_objects(hates)) == set([(bob, pizza), (bob, michel)]) + assert set(c.subject_objects(likes)) == set( + [ + (tarek, cheese), + (michel, cheese), + (michel, pizza), + (bob, cheese), + (tarek, pizza), + ] + ) + + assert set(c.predicate_objects(michel)) == set( + [(likes, cheese), (likes, pizza)] + ) + assert set(c.predicate_objects(bob)) == set( + [(likes, cheese), (hates, pizza), (hates, michel)] + ) + assert set(c.predicate_objects(tarek)) == set([(likes, cheese), (likes, pizza)]) + + assert set(c.subject_predicates(pizza)) == set( + [(bob, hates), (tarek, likes), (michel, likes)] + ) + assert set(c.subject_predicates(cheese)) == set( + [(bob, likes), (tarek, likes), (michel, likes)] + ) + assert set(c.subject_predicates(michel)) == set([(bob, hates)]) + + assert set(c) == set( + [ + (bob, hates, michel), + (bob, likes, cheese), + (tarek, likes, pizza), + (michel, likes, pizza), + (michel, likes, cheese), + (bob, hates, pizza), + (tarek, likes, cheese), + ] + ) + # remove stuff and make sure the graph is empty again + depopulate_c1(graph) + assert len(list(context1triples((Any, Any, Any)))) == 0 + assert len(list(triples((Any, Any, Any)))) == 0 diff --git a/test/test_graph_formula.py b/test/test_graph/test_graph_formula.py similarity index 100% rename from test/test_graph_formula.py rename to test/test_graph/test_graph_formula.py diff --git a/test/test_graph_generators.py b/test/test_graph/test_graph_generators.py similarity index 87% rename from test/test_graph_generators.py rename to test/test_graph/test_graph_generators.py index d013aaec8..b0b0077d7 100644 --- a/test/test_graph_generators.py +++ b/test/test_graph/test_graph_generators.py @@ -1,16 +1,19 @@ import os from rdflib import Graph, URIRef +from test.data import ( + CONSISTENT_DATA_DIR, + michel, + tarek, + bob, + likes, + hates, + pizza, + cheese, +) -michel = URIRef("urn:example:michel") -tarek = URIRef("urn:example:tarek") 
-bob = URIRef("urn:example:bob") -likes = URIRef("urn:example:likes") -hates = URIRef("urn:example:hates") -pizza = URIRef("urn:example:pizza") -cheese = URIRef("urn:example:cheese") timblcardn3 = open( - os.path.join(os.path.dirname(__file__), "consistent_test_data", "timbl-card.n3") + os.path.join(CONSISTENT_DATA_DIR, "timbl-card.n3") ).read() diff --git a/test/test_graph_http.py b/test/test_graph/test_graph_http.py similarity index 98% rename from test/test_graph_http.py rename to test/test_graph/test_graph_http.py index 8ffb358f0..f9e1b1701 100644 --- a/test/test_graph_http.py +++ b/test/test_graph/test_graph_http.py @@ -2,7 +2,7 @@ from http.server import BaseHTTPRequestHandler from urllib.error import HTTPError -from .testutils import SimpleHTTPMock, MockHTTPResponse, ctx_http_server, GraphHelper +from test.testutils import SimpleHTTPMock, MockHTTPResponse, ctx_http_server, GraphHelper import unittest diff --git a/test/test_graph_items.py b/test/test_graph/test_graph_items.py similarity index 100% rename from test/test_graph_items.py rename to test/test_graph/test_graph_items.py diff --git a/test/test_graph_operator.py b/test/test_graph/test_graph_operator.py similarity index 100% rename from test/test_graph_operator.py rename to test/test_graph/test_graph_operator.py diff --git a/test/test_graph_context.py b/test/test_graph_context.py deleted file mode 100644 index ab4df544f..000000000 --- a/test/test_graph_context.py +++ /dev/null @@ -1,389 +0,0 @@ -import sys -import os -import unittest - -from tempfile import mkdtemp, mkstemp -import shutil - -import pytest -from rdflib import Graph, ConjunctiveGraph, URIRef, BNode, plugin - -class ContextTestCase(unittest.TestCase): - store = "default" - slow = True - tmppath = None - - def setUp(self): - try: - self.graph = ConjunctiveGraph(store=self.store) - except ImportError: - pytest.skip("Dependencies for store '%s' not available!" % self.store) - if self.store == "SQLite": - _, self.tmppath = mkstemp(prefix="test", dir="/tmp", suffix=".sqlite") - else: - self.tmppath = mkdtemp() - self.graph.open(self.tmppath, create=True) - self.michel = URIRef("michel") - self.tarek = URIRef("tarek") - self.bob = URIRef("bob") - self.likes = URIRef("likes") - self.hates = URIRef("hates") - self.pizza = URIRef("pizza") - self.cheese = URIRef("cheese") - - self.c1 = URIRef("context-1") - self.c2 = URIRef("context-2") - - # delete the graph for each test! - self.graph.remove((None, None, None)) - - def tearDown(self): - self.graph.close() - if os.path.isdir(self.tmppath): - shutil.rmtree(self.tmppath) - else: - os.remove(self.tmppath) - - def addStuff(self): - tarek = self.tarek - michel = self.michel - bob = self.bob - likes = self.likes - hates = self.hates - pizza = self.pizza - cheese = self.cheese - c1 = self.c1 - graph = Graph(self.graph.store, c1) - - graph.add((tarek, likes, pizza)) - graph.add((tarek, likes, cheese)) - graph.add((michel, likes, pizza)) - graph.add((michel, likes, cheese)) - graph.add((bob, likes, cheese)) - graph.add((bob, hates, pizza)) - graph.add((bob, hates, michel)) # gasp! 
- - def removeStuff(self): - tarek = self.tarek - michel = self.michel - bob = self.bob - likes = self.likes - hates = self.hates - pizza = self.pizza - cheese = self.cheese - c1 = self.c1 - graph = Graph(self.graph.store, c1) - - graph.remove((tarek, likes, pizza)) - graph.remove((tarek, likes, cheese)) - graph.remove((michel, likes, pizza)) - graph.remove((michel, likes, cheese)) - graph.remove((bob, likes, cheese)) - graph.remove((bob, hates, pizza)) - graph.remove((bob, hates, michel)) # gasp! - - def addStuffInMultipleContexts(self): - c1 = self.c1 - c2 = self.c2 - triple = (self.pizza, self.hates, self.tarek) # revenge! - - # add to default context - self.graph.add(triple) - # add to context 1 - graph = Graph(self.graph.store, c1) - graph.add(triple) - # add to context 2 - graph = Graph(self.graph.store, c2) - graph.add(triple) - - def testConjunction(self): - if self.store == "SQLite": - pytest.skip("Skipping known issue with __len__") - self.addStuffInMultipleContexts() - triple = (self.pizza, self.likes, self.pizza) - # add to context 1 - graph = Graph(self.graph.store, self.c1) - graph.add(triple) - self.assertEqual(len(self.graph), len(graph)) - - def testAdd(self): - self.addStuff() - - def testRemove(self): - self.addStuff() - self.removeStuff() - - def testLenInOneContext(self): - c1 = self.c1 - # make sure context is empty - - self.graph.remove_context(self.graph.get_context(c1)) - graph = Graph(self.graph.store, c1) - oldLen = len(self.graph) - - for i in range(0, 10): - graph.add((BNode(), self.hates, self.hates)) - self.assertEqual(len(graph), oldLen + 10) - self.assertEqual(len(self.graph.get_context(c1)), oldLen + 10) - self.graph.remove_context(self.graph.get_context(c1)) - self.assertEqual(len(self.graph), oldLen) - self.assertEqual(len(graph), 0) - - def testLenInMultipleContexts(self): - if self.store == "SQLite": - pytest.skip("Skipping known issue with __len__") - oldLen = len(self.graph) - self.addStuffInMultipleContexts() - - # addStuffInMultipleContexts is adding the same triple to - # three different contexts. So it's only + 1 - self.assertEqual(len(self.graph), oldLen + 1) - - graph = Graph(self.graph.store, self.c1) - self.assertEqual(len(graph), oldLen + 1) - - def testRemoveInMultipleContexts(self): - c1 = self.c1 - c2 = self.c2 - triple = (self.pizza, self.hates, self.tarek) # revenge! - - self.addStuffInMultipleContexts() - - # triple should be still in store after removing it from c1 + c2 - self.assertTrue(triple in self.graph) - graph = Graph(self.graph.store, c1) - graph.remove(triple) - self.assertTrue(triple in self.graph) - graph = Graph(self.graph.store, c2) - graph.remove(triple) - self.assertTrue(triple in self.graph) - self.graph.remove(triple) - # now gone! - self.assertTrue(triple not in self.graph) - - # add again and see if remove without context removes all triples! - self.addStuffInMultipleContexts() - self.graph.remove(triple) - self.assertTrue(triple not in self.graph) - - def testContexts(self): - triple = (self.pizza, self.hates, self.tarek) # revenge! 
- - self.addStuffInMultipleContexts() - - def cid(c): - return c.identifier - - self.assertTrue(self.c1 in map(cid, self.graph.contexts())) - self.assertTrue(self.c2 in map(cid, self.graph.contexts())) - - contextList = list(map(cid, list(self.graph.contexts(triple)))) - self.assertTrue(self.c1 in contextList, (self.c1, contextList)) - self.assertTrue(self.c2 in contextList, (self.c2, contextList)) - - def testRemoveContext(self): - c1 = self.c1 - - self.addStuffInMultipleContexts() - self.assertEqual(len(Graph(self.graph.store, c1)), 1) - self.assertEqual(len(self.graph.get_context(c1)), 1) - - self.graph.remove_context(self.graph.get_context(c1)) - self.assertTrue(self.c1 not in self.graph.contexts()) - - def testRemoveAny(self): - Any = None - self.addStuffInMultipleContexts() - self.graph.remove((Any, Any, Any)) - self.assertEqual(len(self.graph), 0) - - def testTriples(self): - tarek = self.tarek - michel = self.michel - bob = self.bob - likes = self.likes - hates = self.hates - pizza = self.pizza - cheese = self.cheese - c1 = self.c1 - asserte = self.assertEqual - triples = self.graph.triples - graph = self.graph - c1graph = Graph(self.graph.store, c1) - c1triples = c1graph.triples - Any = None - - self.addStuff() - - # unbound subjects with context - asserte(len(list(c1triples((Any, likes, pizza)))), 2) - asserte(len(list(c1triples((Any, hates, pizza)))), 1) - asserte(len(list(c1triples((Any, likes, cheese)))), 3) - asserte(len(list(c1triples((Any, hates, cheese)))), 0) - - # unbound subjects without context, same results! - asserte(len(list(triples((Any, likes, pizza)))), 2) - asserte(len(list(triples((Any, hates, pizza)))), 1) - asserte(len(list(triples((Any, likes, cheese)))), 3) - asserte(len(list(triples((Any, hates, cheese)))), 0) - - # unbound objects with context - asserte(len(list(c1triples((michel, likes, Any)))), 2) - asserte(len(list(c1triples((tarek, likes, Any)))), 2) - asserte(len(list(c1triples((bob, hates, Any)))), 2) - asserte(len(list(c1triples((bob, likes, Any)))), 1) - - # unbound objects without context, same results! - asserte(len(list(triples((michel, likes, Any)))), 2) - asserte(len(list(triples((tarek, likes, Any)))), 2) - asserte(len(list(triples((bob, hates, Any)))), 2) - asserte(len(list(triples((bob, likes, Any)))), 1) - - # unbound predicates with context - asserte(len(list(c1triples((michel, Any, cheese)))), 1) - asserte(len(list(c1triples((tarek, Any, cheese)))), 1) - asserte(len(list(c1triples((bob, Any, pizza)))), 1) - asserte(len(list(c1triples((bob, Any, michel)))), 1) - - # unbound predicates without context, same results! - asserte(len(list(triples((michel, Any, cheese)))), 1) - asserte(len(list(triples((tarek, Any, cheese)))), 1) - asserte(len(list(triples((bob, Any, pizza)))), 1) - asserte(len(list(triples((bob, Any, michel)))), 1) - - # unbound subject, objects with context - asserte(len(list(c1triples((Any, hates, Any)))), 2) - asserte(len(list(c1triples((Any, likes, Any)))), 5) - - # unbound subject, objects without context, same results! - asserte(len(list(triples((Any, hates, Any)))), 2) - asserte(len(list(triples((Any, likes, Any)))), 5) - - # unbound predicates, objects with context - asserte(len(list(c1triples((michel, Any, Any)))), 2) - asserte(len(list(c1triples((bob, Any, Any)))), 3) - asserte(len(list(c1triples((tarek, Any, Any)))), 2) - - # unbound predicates, objects without context, same results! 
- asserte(len(list(triples((michel, Any, Any)))), 2) - asserte(len(list(triples((bob, Any, Any)))), 3) - asserte(len(list(triples((tarek, Any, Any)))), 2) - - # unbound subjects, predicates with context - asserte(len(list(c1triples((Any, Any, pizza)))), 3) - asserte(len(list(c1triples((Any, Any, cheese)))), 3) - asserte(len(list(c1triples((Any, Any, michel)))), 1) - - # unbound subjects, predicates without context, same results! - asserte(len(list(triples((Any, Any, pizza)))), 3) - asserte(len(list(triples((Any, Any, cheese)))), 3) - asserte(len(list(triples((Any, Any, michel)))), 1) - - # all unbound with context - asserte(len(list(c1triples((Any, Any, Any)))), 7) - # all unbound without context, same result! - asserte(len(list(triples((Any, Any, Any)))), 7) - - for c in [graph, self.graph.get_context(c1)]: - # unbound subjects - asserte(set(c.subjects(likes, pizza)), set((michel, tarek))) - asserte(set(c.subjects(hates, pizza)), set((bob,))) - asserte(set(c.subjects(likes, cheese)), set([tarek, bob, michel])) - asserte(set(c.subjects(hates, cheese)), set()) - - # unbound objects - asserte(set(c.objects(michel, likes)), set([cheese, pizza])) - asserte(set(c.objects(tarek, likes)), set([cheese, pizza])) - asserte(set(c.objects(bob, hates)), set([michel, pizza])) - asserte(set(c.objects(bob, likes)), set([cheese])) - - # unbound predicates - asserte(set(c.predicates(michel, cheese)), set([likes])) - asserte(set(c.predicates(tarek, cheese)), set([likes])) - asserte(set(c.predicates(bob, pizza)), set([hates])) - asserte(set(c.predicates(bob, michel)), set([hates])) - - asserte(set(c.subject_objects(hates)), set([(bob, pizza), (bob, michel)])) - asserte( - set(c.subject_objects(likes)), - set( - [ - (tarek, cheese), - (michel, cheese), - (michel, pizza), - (bob, cheese), - (tarek, pizza), - ] - ), - ) - - asserte( - set(c.predicate_objects(michel)), set([(likes, cheese), (likes, pizza)]) - ) - asserte( - set(c.predicate_objects(bob)), - set([(likes, cheese), (hates, pizza), (hates, michel)]), - ) - asserte( - set(c.predicate_objects(tarek)), set([(likes, cheese), (likes, pizza)]) - ) - - asserte( - set(c.subject_predicates(pizza)), - set([(bob, hates), (tarek, likes), (michel, likes)]), - ) - asserte( - set(c.subject_predicates(cheese)), - set([(bob, likes), (tarek, likes), (michel, likes)]), - ) - asserte(set(c.subject_predicates(michel)), set([(bob, hates)])) - - asserte( - set(c), - set( - [ - (bob, hates, michel), - (bob, likes, cheese), - (tarek, likes, pizza), - (michel, likes, pizza), - (michel, likes, cheese), - (bob, hates, pizza), - (tarek, likes, cheese), - ] - ), - ) - - # remove stuff and make sure the graph is empty again - self.removeStuff() - asserte(len(list(c1triples((Any, Any, Any)))), 0) - asserte(len(list(triples((Any, Any, Any)))), 0) - - -# dynamically create classes for each registered Store -pluginname = None -if __name__ == "__main__": - if len(sys.argv) > 1: - pluginname = sys.argv[1] - -tests = 0 -for s in plugin.plugins(pluginname, plugin.Store): - if s.name in ( - "default", - "Memory", - "Auditable", - "Concurrent", - "SPARQLStore", - "SPARQLUpdateStore", - ): - continue # these are tested by default - if not s.getClass().context_aware: - continue - - locals()["t%d" % tests] = type( - "%sContextTestCase" % s.name, (ContextTestCase,), {"store": s.name} - ) - tests += 1 - - -if __name__ == "__main__": - unittest.main() diff --git a/test/test_issue084.py b/test/test_issues/test_issue084.py similarity index 100% rename from test/test_issue084.py rename to 
test/test_issues/test_issue084.py diff --git a/test/test_issue1003.py b/test/test_issues/test_issue1003.py similarity index 100% rename from test/test_issue1003.py rename to test/test_issues/test_issue1003.py diff --git a/test/test_issue1043.py b/test/test_issues/test_issue1043.py similarity index 100% rename from test/test_issue1043.py rename to test/test_issues/test_issue1043.py diff --git a/test/test_issue1141.py b/test/test_issues/test_issue1141.py similarity index 100% rename from test/test_issue1141.py rename to test/test_issues/test_issue1141.py diff --git a/test/test_issue1160.py b/test/test_issues/test_issue1160.py similarity index 100% rename from test/test_issue1160.py rename to test/test_issues/test_issue1160.py diff --git a/test/test_issue1404.py b/test/test_issues/test_issue1404.py similarity index 100% rename from test/test_issue1404.py rename to test/test_issues/test_issue1404.py diff --git a/test/test_issue1484.py b/test/test_issues/test_issue1484.py similarity index 100% rename from test/test_issue1484.py rename to test/test_issues/test_issue1484.py diff --git a/test/test_issue160.py b/test/test_issues/test_issue160.py similarity index 100% rename from test/test_issue160.py rename to test/test_issues/test_issue160.py diff --git a/test/test_issue161.py b/test/test_issues/test_issue161.py similarity index 100% rename from test/test_issue161.py rename to test/test_issues/test_issue161.py diff --git a/test/test_issue184.py b/test/test_issues/test_issue184.py similarity index 100% rename from test/test_issue184.py rename to test/test_issues/test_issue184.py diff --git a/test/test_issue190.py b/test/test_issues/test_issue190.py similarity index 100% rename from test/test_issue190.py rename to test/test_issues/test_issue190.py diff --git a/test/test_issue200.py b/test/test_issues/test_issue200.py similarity index 100% rename from test/test_issue200.py rename to test/test_issues/test_issue200.py diff --git a/test/test_issue209.py b/test/test_issues/test_issue209.py similarity index 100% rename from test/test_issue209.py rename to test/test_issues/test_issue209.py diff --git a/test/test_issue223.py b/test/test_issues/test_issue223.py similarity index 100% rename from test/test_issue223.py rename to test/test_issues/test_issue223.py diff --git a/test/test_issue247.py b/test/test_issues/test_issue247.py similarity index 100% rename from test/test_issue247.py rename to test/test_issues/test_issue247.py diff --git a/test/test_issue248.py b/test/test_issues/test_issue248.py similarity index 100% rename from test/test_issue248.py rename to test/test_issues/test_issue248.py diff --git a/test/test_issue274.py b/test/test_issues/test_issue274.py similarity index 99% rename from test/test_issue274.py rename to test/test_issues/test_issue274.py index 6a139735e..291b0f150 100644 --- a/test/test_issue274.py +++ b/test/test_issues/test_issue274.py @@ -1,4 +1,4 @@ -from .testutils import eq_ +from test.testutils import eq_ from unittest import TestCase import pytest diff --git a/test/test_issue363.py b/test/test_issues/test_issue363.py similarity index 100% rename from test/test_issue363.py rename to test/test_issues/test_issue363.py diff --git a/test/test_issue379.py b/test/test_issues/test_issue379.py similarity index 100% rename from test/test_issue379.py rename to test/test_issues/test_issue379.py diff --git a/test/test_issue381.py b/test/test_issues/test_issue381.py similarity index 100% rename from test/test_issue381.py rename to test/test_issues/test_issue381.py diff --git 
a/test/test_issue432.py b/test/test_issues/test_issue432.py similarity index 100% rename from test/test_issue432.py rename to test/test_issues/test_issue432.py diff --git a/test/test_issue446.py b/test/test_issues/test_issue446.py similarity index 100% rename from test/test_issue446.py rename to test/test_issues/test_issue446.py diff --git a/test/test_issue492.py b/test/test_issues/test_issue492.py similarity index 100% rename from test/test_issue492.py rename to test/test_issues/test_issue492.py diff --git a/test/test_issue523.py b/test/test_issues/test_issue523.py similarity index 100% rename from test/test_issue523.py rename to test/test_issues/test_issue523.py diff --git a/test/test_issue532.py b/test/test_issues/test_issue532.py similarity index 100% rename from test/test_issue532.py rename to test/test_issues/test_issue532.py diff --git a/test/test_issue535.py b/test/test_issues/test_issue535.py similarity index 100% rename from test/test_issue535.py rename to test/test_issues/test_issue535.py diff --git a/test/test_issue545.py b/test/test_issues/test_issue545.py similarity index 100% rename from test/test_issue545.py rename to test/test_issues/test_issue545.py diff --git a/test/test_issue554.py b/test/test_issues/test_issue554.py similarity index 100% rename from test/test_issue554.py rename to test/test_issues/test_issue554.py diff --git a/test/test_issue563.py b/test/test_issues/test_issue563.py similarity index 100% rename from test/test_issue563.py rename to test/test_issues/test_issue563.py diff --git a/test/test_issue579.py b/test/test_issues/test_issue579.py similarity index 100% rename from test/test_issue579.py rename to test/test_issues/test_issue579.py diff --git a/test/test_issue604.py b/test/test_issues/test_issue604.py similarity index 100% rename from test/test_issue604.py rename to test/test_issues/test_issue604.py diff --git a/test/test_issue655.py b/test/test_issues/test_issue655.py similarity index 100% rename from test/test_issue655.py rename to test/test_issues/test_issue655.py diff --git a/test/test_issue715.py b/test/test_issues/test_issue715.py similarity index 100% rename from test/test_issue715.py rename to test/test_issues/test_issue715.py diff --git a/test/test_issue733.py b/test/test_issues/test_issue733.py similarity index 100% rename from test/test_issue733.py rename to test/test_issues/test_issue733.py diff --git a/test/test_issue801.py b/test/test_issues/test_issue801.py similarity index 100% rename from test/test_issue801.py rename to test/test_issues/test_issue801.py diff --git a/test/test_issue893.py b/test/test_issues/test_issue893.py similarity index 100% rename from test/test_issue893.py rename to test/test_issues/test_issue893.py diff --git a/test/test_issue910.py b/test/test_issues/test_issue910.py similarity index 100% rename from test/test_issue910.py rename to test/test_issues/test_issue910.py diff --git a/test/test_issue920.py b/test/test_issues/test_issue920.py similarity index 100% rename from test/test_issue920.py rename to test/test_issues/test_issue920.py diff --git a/test/test_issue923.py b/test/test_issues/test_issue923.py similarity index 100% rename from test/test_issue923.py rename to test/test_issues/test_issue923.py diff --git a/test/test_issue953.py b/test/test_issues/test_issue953.py similarity index 100% rename from test/test_issue953.py rename to test/test_issues/test_issue953.py diff --git a/test/test_issue977.py b/test/test_issues/test_issue977.py similarity index 100% rename from test/test_issue977.py rename to 
test/test_issues/test_issue977.py diff --git a/test/test_issue_git_200.py b/test/test_issues/test_issue_git_200.py similarity index 100% rename from test/test_issue_git_200.py rename to test/test_issues/test_issue_git_200.py diff --git a/test/test_issue_git_336.py b/test/test_issues/test_issue_git_336.py similarity index 100% rename from test/test_issue_git_336.py rename to test/test_issues/test_issue_git_336.py diff --git a/test/test_nquads.py b/test/test_nquads.py index ac5a6dcd1..bb1da983d 100644 --- a/test/test_nquads.py +++ b/test/test_nquads.py @@ -1,143 +1,207 @@ import os -import unittest -from rdflib import ConjunctiveGraph, URIRef, Namespace -from test import TEST_DIR +import shutil +import tempfile +import pytest + +from rdflib import ConjunctiveGraph, Namespace, URIRef, plugin +from rdflib.store import VALID_STORE + +TEST_DIR = os.path.abspath(os.path.dirname(__file__)) TEST_BASE = "test/nquads.rdflib" -class NQuadsParserTest(unittest.TestCase): - def _load_example(self): - g = ConjunctiveGraph() - nq_path = os.path.relpath( - os.path.join(TEST_DIR, "nquads.rdflib/example.nquads"), os.curdir - ) - with open(nq_path, "rb") as data: - g.parse(data, format="nquads") - return g - - def test_01_simple_open(self): - g = self._load_example() - assert len(g.store) == 449 - - def test_02_contexts(self): - # There should be 16 separate contexts - g = self._load_example() - assert len([x for x in g.store.contexts()]) == 16 - - def test_03_get_value(self): - # is the name of entity E10009 "Arco Publications"? - # (in graph http://bibliographica.org/entity/E10009) - # Looking for: - # - # - # "Arco Publications" - # - - g = self._load_example() - s = URIRef("http://bibliographica.org/entity/E10009") - FOAF = Namespace("http://xmlns.com/foaf/0.1/") - self.assertTrue(g.value(s, FOAF.name).eq("Arco Publications")) - - def test_context_is_optional(self): - g = ConjunctiveGraph() - nq_path = os.path.relpath( - os.path.join(TEST_DIR, "nquads.rdflib/test6.nq"), os.curdir - ) - with open(nq_path, "rb") as data: - g.parse(data, format="nquads") - assert len(g) > 0 - - def test_serialize(self): - g = ConjunctiveGraph() - uri1 = URIRef("http://example.org/mygraph1") - uri2 = URIRef("http://example.org/mygraph2") - - bob = URIRef("urn:example:bob") - likes = URIRef("urn:example:likes") - pizza = URIRef("urn:example:pizza") - - g.get_context(uri1).add((bob, likes, pizza)) - g.get_context(uri2).add((bob, likes, pizza)) - - s = g.serialize(format="nquads", encoding="utf-8") - self.assertEqual(len([x for x in s.split(b"\n") if x.strip()]), 2) - - g2 = ConjunctiveGraph() - g2.parse(data=s, format="nquads") - - self.assertEqual(len(g), len(g2)) - self.assertEqual( - sorted(x.identifier for x in g.contexts()), - sorted(x.identifier for x in g2.contexts()), - ) - - -class BnodeContextTest(unittest.TestCase): - def setUp(self): - self.data = open("test/nquads.rdflib/bnode_context.nquads", "rb") - self.data_obnodes = open( - "test/nquads.rdflib/bnode_context_obj_bnodes.nquads", "rb" - ) - - def tearDown(self): - self.data.close() - - def test_parse_shared_bnode_context(self): - bnode_ctx = dict() - g = ConjunctiveGraph() - h = ConjunctiveGraph() - g.parse(self.data, format="nquads", bnode_context=bnode_ctx) - self.data.seek(0) - h.parse(self.data, format="nquads", bnode_context=bnode_ctx) - self.assertEqual(set(h.subjects()), set(g.subjects())) - - def test_parse_shared_bnode_context_same_graph(self): - bnode_ctx = dict() - g = ConjunctiveGraph() - g.parse(self.data_obnodes, format="nquads", 
bnode_context=bnode_ctx) - o1 = set(g.objects()) - self.data_obnodes.seek(0) - g.parse(self.data_obnodes, format="nquads", bnode_context=bnode_ctx) - o2 = set(g.objects()) - self.assertEqual(o1, o2) - - def test_parse_distinct_bnode_context(self): - g = ConjunctiveGraph() - g.parse(self.data, format="nquads", bnode_context=dict()) - s1 = set(g.subjects()) - self.data.seek(0) - g.parse(self.data, format="nquads", bnode_context=dict()) - s2 = set(g.subjects()) - self.assertNotEqual(set(), s2 - s1) - - def test_parse_distinct_bnode_contexts_between_graphs(self): - g = ConjunctiveGraph() - h = ConjunctiveGraph() - g.parse(self.data, format="nquads") - s1 = set(g.subjects()) - self.data.seek(0) - h.parse(self.data, format="nquads") - s2 = set(h.subjects()) - self.assertNotEqual(s1, s2) - - def test_parse_distinct_bnode_contexts_named_graphs(self): - g = ConjunctiveGraph() - h = ConjunctiveGraph() - g.parse(self.data, format="nquads") - self.data.seek(0) - h.parse(self.data, format="nquads") - self.assertNotEqual(set(h.contexts()), set(g.contexts())) - - def test_parse_shared_bnode_contexts_named_graphs(self): - bnode_ctx = dict() - g = ConjunctiveGraph() - h = ConjunctiveGraph() - g.parse(self.data, format="nquads", bnode_context=bnode_ctx) - self.data.seek(0) - h.parse(self.data, format="nquads", bnode_context=bnode_ctx) - self.assertEqual(set(h.contexts()), set(g.contexts())) - - -if __name__ == "__main__": - unittest.main() +pluginstores = [] + +for s in plugin.plugins(None, plugin.Store): + if s.name in ( + "default", + "Memory", + "Auditable", + "Concurrent", + "SimpleMemory", + "SPARQLStore", + "SPARQLUpdateStore", + ): + continue # inappropriate for these tests + + pluginstores.append(s.name) + + +@pytest.fixture( + scope="function", + params=pluginstores, +) +def get_graph(request): + store = request.param + path = tempfile.mktemp() + try: + shutil.rmtree(path) + except Exception: + pass + + try: + graph = ConjunctiveGraph(store=store) + except ImportError: + pytest.skip("Dependencies for store '%s' not available!" % store) + + if store != "default": + rt = graph.open(configuration=path, create=True) + assert rt == VALID_STORE, "The underlying store is corrupt" + + assert ( + len(graph) == 0 + ), "There must be zero triples in the graph just after store (file) creation" + + nq_path = os.path.relpath( + os.path.join(TEST_DIR, "nquads.rdflib/example.nquads"), os.curdir + ) + with open(nq_path, "rb") as data: + graph.parse(data, format="nquads") + + yield graph + + graph.close() + graph.destroy(path) + try: + shutil.rmtree(path) + except Exception: + pass + + +def test_01_simple_open(get_graph): + graph = get_graph + assert len(graph.store) == 449 + + +def test_02_contexts(get_graph): + # There should be 16 separate contexts + graph = get_graph + assert len([x for x in graph.store.contexts()]) == 16 + + +def test_03_get_value(get_graph): + # is the name of entity E10009 "Arco Publications"? 
+    # (in graph http://bibliographica.org/entity/E10009)
+    # Looking for:
+    #
+    # <http://bibliographica.org/entity/E10009>
+    # <http://xmlns.com/foaf/0.1/name> "Arco Publications"
+    #
+
+    graph = get_graph
+    s = URIRef("http://bibliographica.org/entity/E10009")
+    FOAF = Namespace("http://xmlns.com/foaf/0.1/")
+
+    assert graph.value(subject=s, predicate=FOAF.name).eq("Arco Publications")
+
+
+def test_context_is_optional(get_graph):
+    graph = get_graph
+    nq_path = os.path.relpath(
+        os.path.join(TEST_DIR, "nquads.rdflib/test6.nq"), os.curdir
+    )
+    with open(nq_path, "rb") as data:
+        graph.parse(data, format="nquads")
+    assert len(graph) > 0
+
+
+def test_serialize():
+    g = ConjunctiveGraph()
+    uri1 = URIRef("http://example.org/mygraph1")
+    uri2 = URIRef("http://example.org/mygraph2")
+
+    bob = URIRef("urn:example:bob")
+    likes = URIRef("urn:example:likes")
+    pizza = URIRef("urn:example:pizza")
+
+    g.get_context(uri1).add((bob, likes, pizza))
+    g.get_context(uri2).add((bob, likes, pizza))
+
+    s = g.serialize(format="nquads", encoding="utf-8")
+    assert len([x for x in s.split(b"\n") if x.strip()]) == 2
+
+    g2 = ConjunctiveGraph()
+    g2.parse(data=s, format="nquads")
+
+    assert len(g) == len(g2)
+    assert sorted(x.identifier for x in g.contexts()) == sorted(
+        x.identifier for x in g2.contexts()
+    )
+
+
+@pytest.fixture
+def get_data():
+    data = open("test/nquads.rdflib/bnode_context.nquads", "rb")
+    data_obnodes = open("test/nquads.rdflib/bnode_context_obj_bnodes.nquads", "rb")
+    yield data, data_obnodes
+
+    data.close()
+
+
+def test_parse_shared_bnode_context(get_data):
+    data, data_obnodes = get_data
+    bnode_ctx = dict()
+    g = ConjunctiveGraph()
+    h = ConjunctiveGraph()
+    g.parse(data, format="nquads", bnode_context=bnode_ctx)
+    data.seek(0)
+    h.parse(data, format="nquads", bnode_context=bnode_ctx)
+    assert set(h.subjects()) == set(g.subjects())
+
+
+def test_parse_shared_bnode_context_same_graph(get_data):
+    data, data_obnodes = get_data
+
+    bnode_ctx = dict()
+    g = ConjunctiveGraph()
+    g.parse(data_obnodes, format="nquads", bnode_context=bnode_ctx)
+    o1 = set(g.objects())
+    data_obnodes.seek(0)
+    g.parse(data_obnodes, format="nquads", bnode_context=bnode_ctx)
+    o2 = set(g.objects())
+    assert o1 == o2
+
+
+def test_parse_distinct_bnode_context(get_data):
+    data, data_obnodes = get_data
+    g = ConjunctiveGraph()
+    g.parse(data, format="nquads", bnode_context=dict())
+    s1 = set(g.subjects())
+    data.seek(0)
+    g.parse(data, format="nquads", bnode_context=dict())
+    s2 = set(g.subjects())
+    assert set() != s2 - s1
+
+
+def test_parse_distinct_bnode_contexts_between_graphs(get_data):
+    data, data_obnodes = get_data
+    g = ConjunctiveGraph()
+    h = ConjunctiveGraph()
+    g.parse(data, format="nquads")
+    s1 = set(g.subjects())
+    data.seek(0)
+    h.parse(data, format="nquads")
+    s2 = set(h.subjects())
+    assert s1 != s2
+
+
+def test_parse_distinct_bnode_contexts_named_graphs(get_data):
+    data, data_obnodes = get_data
+    g = ConjunctiveGraph()
+    h = ConjunctiveGraph()
+    g.parse(data, format="nquads")
+    data.seek(0)
+    h.parse(data, format="nquads")
+    assert set(h.contexts()) != set(g.contexts())
+
+
+def test_parse_shared_bnode_contexts_named_graphs(get_data):
+    data, data_obnodes = get_data
+    bnode_ctx = dict()
+    g = ConjunctiveGraph()
+    h = ConjunctiveGraph()
+    g.parse(data, format="nquads", bnode_context=bnode_ctx)
+    data.seek(0)
+    h.parse(data, format="nquads", bnode_context=bnode_ctx)
+    assert set(h.contexts()) == set(g.contexts())
diff --git a/test/test_nquads_w3c.py b/test/test_parsers/test_nquads_w3c.py
similarity index 100%
rename from test/test_nquads_w3c.py
rename to 
test/test_parsers/test_nquads_w3c.py diff --git a/test/test_nt_w3c.py b/test/test_parsers/test_nt_w3c.py similarity index 100% rename from test/test_nt_w3c.py rename to test/test_parsers/test_nt_w3c.py diff --git a/test/test_parser.py b/test/test_parsers/test_parser.py similarity index 100% rename from test/test_parser.py rename to test/test_parsers/test_parser.py diff --git a/test/test_parser_helpers.py b/test/test_parsers/test_parser_helpers.py similarity index 100% rename from test/test_parser_helpers.py rename to test/test_parsers/test_parser_helpers.py diff --git a/test/test_parser_hext.py b/test/test_parsers/test_parser_hext.py similarity index 95% rename from test/test_parser_hext.py rename to test/test_parsers/test_parser_hext.py index fdf419118..d4b595c10 100644 --- a/test/test_parser_hext.py +++ b/test/test_parsers/test_parser_hext.py @@ -40,7 +40,7 @@ def test_small_string_cg(): def test_small_file_singlegraph(): - d = Dataset().parse(Path(__file__).parent / "test_parser_hext_singlegraph.ndjson", format="hext") + d = Dataset().parse(Path(__file__).parent.parent / "consistent_test_data/test_parser_hext_singlegraph.ndjson", format="hext") assert len(d) == 10 @@ -48,7 +48,7 @@ def test_small_file_multigraph(): d = Dataset() assert len(d) == 0 d.parse( - Path(__file__).parent / "test_parser_hext_multigraph.ndjson", + Path(__file__).parent.parent / "consistent_test_data/test_parser_hext_multigraph.ndjson", format="hext", publicID=d.default_context.identifier ) @@ -68,7 +68,7 @@ def test_small_file_multigraph_cg(): d = ConjunctiveGraph() assert len(d) == 0 d.parse( - Path(__file__).parent / "test_parser_hext_multigraph.ndjson", + Path(__file__).parent.parent / "consistent_test_data/test_parser_hext_multigraph.ndjson", format="hext", publicID=d.default_context.identifier ) diff --git a/test/test_parser_reads_from_pathlike_object.py b/test/test_parsers/test_parser_reads_from_pathlike_object.py similarity index 100% rename from test/test_parser_reads_from_pathlike_object.py rename to test/test_parsers/test_parser_reads_from_pathlike_object.py diff --git a/test/test_parser_structure.py b/test/test_parsers/test_parser_structure.py similarity index 100% rename from test/test_parser_structure.py rename to test/test_parsers/test_parser_structure.py diff --git a/test/test_swap_n3.py b/test/test_parsers/test_swap_n3.py similarity index 100% rename from test/test_swap_n3.py rename to test/test_parsers/test_swap_n3.py diff --git a/test/test_trix_parse.py b/test/test_parsers/test_trix_parse.py similarity index 100% rename from test/test_trix_parse.py rename to test/test_parsers/test_trix_parse.py diff --git a/test/test_turtle_w3c.py b/test/test_parsers/test_turtle_w3c.py similarity index 98% rename from test/test_turtle_w3c.py rename to test/test_parsers/test_turtle_w3c.py index 0112fa101..e5f52a755 100644 --- a/test/test_turtle_w3c.py +++ b/test/test_parsers/test_turtle_w3c.py @@ -11,7 +11,7 @@ from test.manifest import RDFT, RDFTest, read_manifest import pytest -from .testutils import file_uri_to_path +from test.testutils import file_uri_to_path verbose = False diff --git a/test/test_prettyxml.py b/test/test_serializers/test_prettyxml.py similarity index 100% rename from test/test_prettyxml.py rename to test/test_serializers/test_prettyxml.py diff --git a/test/test_serializer.py b/test/test_serializers/test_serializer.py similarity index 98% rename from test/test_serializer.py rename to test/test_serializers/test_serializer.py index fb9cdc115..5f99bb12f 100644 --- a/test/test_serializer.py +++ 
b/test/test_serializers/test_serializer.py @@ -11,7 +11,7 @@ from rdflib.graph import ConjunctiveGraph -from .testutils import GraphHelper +from test.testutils import GraphHelper @pytest.mark.parametrize( diff --git a/test/test_serializer_hext.py b/test/test_serializers/test_serializer_hext.py similarity index 97% rename from test/test_serializer_hext.py rename to test/test_serializers/test_serializer_hext.py index 7231338f6..dfba25332 100644 --- a/test/test_serializer_hext.py +++ b/test/test_serializers/test_serializer_hext.py @@ -234,7 +234,7 @@ def test_hext_dataset_linecount(): d = Dataset() assert len(d) == 0 d.parse( - Path(__file__).parent / "test_parser_hext_multigraph.ndjson", + Path(__file__).parent.parent / "consistent_test_data/test_parser_hext_multigraph.ndjson", format="hext", publicID=d.default_context.identifier ) @@ -253,12 +253,12 @@ def test_hext_dataset_linecount(): def test_roundtrip(): d = Dataset() d.parse( - Path(__file__).parent / "test_parser_hext_multigraph.ndjson", + Path(__file__).parent.parent / "consistent_test_data/test_parser_hext_multigraph.ndjson", format="hext", publicID=d.default_context.identifier ) d.default_union = True - with open(str(Path(__file__).parent / "test_parser_hext_multigraph.ndjson")) as i: + with open(str(Path(__file__).parent.parent / "consistent_test_data/test_parser_hext_multigraph.ndjson")) as i: ordered_input = "".join(sorted(i.readlines())).strip() ordered_output = "\n".join(sorted(d.serialize(format="hext").split("\n"))).strip() diff --git a/test/test_serializer_longturtle.py b/test/test_serializers/test_serializer_longturtle.py similarity index 100% rename from test/test_serializer_longturtle.py rename to test/test_serializers/test_serializer_longturtle.py diff --git a/test/test_serializer_trix.py b/test/test_serializers/test_serializer_trix.py similarity index 100% rename from test/test_serializer_trix.py rename to test/test_serializers/test_serializer_trix.py diff --git a/test/test_serializer_turtle.py b/test/test_serializers/test_serializer_turtle.py similarity index 100% rename from test/test_serializer_turtle.py rename to test/test_serializers/test_serializer_turtle.py diff --git a/test/test_serializer_xml.py b/test/test_serializers/test_serializer_xml.py similarity index 100% rename from test/test_serializer_xml.py rename to test/test_serializers/test_serializer_xml.py diff --git a/test/test_store.py b/test/test_store/test_store.py similarity index 100% rename from test/test_store.py rename to test/test_store/test_store.py diff --git a/test/test_store/test_store_auditable.py b/test/test_store/test_store_auditable.py new file mode 100644 index 000000000..6ef581a39 --- /dev/null +++ b/test/test_store/test_store_auditable.py @@ -0,0 +1,473 @@ +# -*- coding=utf8 -*- +import os +import shutil +import tempfile + +import pytest + +from rdflib import Graph, Namespace, plugin +from rdflib.plugins.stores.auditable import AuditableStore +from rdflib.store import VALID_STORE + +EX = Namespace("http://example.org/") + + +def get_plugin_stores(): + pluginstores = [] + + for s in plugin.plugins(None, plugin.Store): + if s.name in ( + "default", + "Memory", + "Auditable", + "Concurrent", + "SimpleMemory", + "SPARQLStore", + "SPARQLUpdateStore", + ): + continue # excluded from these tests + + try: + graph = Graph(store=s.name) + pluginstores.append(s.name) + except ImportError: + pass + return pluginstores + + +@pytest.fixture( + scope="function", + params=get_plugin_stores(), +) +def get_graph(request): + storename = request.param + + g = 
Graph(store=storename) + + path = tempfile.mktemp() + + try: + shutil.rmtree(path) + except Exception: + pass + + rt = g.open(configuration=path, create=True) + assert rt == VALID_STORE, "The underlying store is corrupt" + + g.add((EX.s0, EX.p0, EX.o0)) + g.add((EX.s0, EX.p0, EX.o0bis)) + + t = Graph(AuditableStore(g.store), g.identifier) + + yield g, t + + g.close() + + g.destroy(configuration=path) + + try: + shutil.rmtree(path) + except Exception: + pass + + +def test_add_commit(get_graph): + g, t = get_graph + t.add((EX.s1, EX.p1, EX.o1)) + assert set(t) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + (EX.s1, EX.p1, EX.o1), + ] + ) + + t.commit() + assert set(g) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + (EX.s1, EX.p1, EX.o1), + ] + ) + + +def test_remove_commit(get_graph): + g, t = get_graph + t.remove((EX.s0, EX.p0, EX.o0)) + assert set(t) == set( + [ + (EX.s0, EX.p0, EX.o0bis), + ] + ) + t.commit() + assert set(g) == set( + [ + (EX.s0, EX.p0, EX.o0bis), + ] + ) + + +def test_multiple_remove_commit(get_graph): + g, t = get_graph + t.remove((EX.s0, EX.p0, None)) + assert set(t) == set([]) + t.commit() + assert set(g) == set([]) + + +def test_noop_add_commit(get_graph): + g, t = get_graph + t.add((EX.s0, EX.p0, EX.o0)) + assert set(t) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ] + ) + t.commit() + assert set(g) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ] + ) + + +def test_noop_remove_commit(get_graph): + g, t = get_graph + t.add((EX.s0, EX.p0, EX.o0)) + assert set(t) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ] + ) + + t.commit() + assert set(g) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ] + ) + + +def test_add_remove_commit(get_graph): + g, t = get_graph + t.add((EX.s1, EX.p1, EX.o1)) + t.remove((EX.s1, EX.p1, EX.o1)) + assert set(t) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ] + ) + t.commit() + assert set(g) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ] + ) + + +def test_remove_add_commit(get_graph): + g, t = get_graph + t.remove((EX.s1, EX.p1, EX.o1)) + t.add((EX.s1, EX.p1, EX.o1)) + assert set(t) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + (EX.s1, EX.p1, EX.o1), + ] + ) + t.commit() + assert set(g) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + (EX.s1, EX.p1, EX.o1), + ] + ) + + +def test_add_rollback(get_graph): + g, t = get_graph + t.add((EX.s1, EX.p1, EX.o1)) + t.rollback() + assert set(g) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ] + ) + + +def test_remove_rollback(get_graph): + g, t = get_graph + t.remove((EX.s0, EX.p0, EX.o0)) + t.rollback() + assert set(g) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ] + ) + + +def test_multiple_remove_rollback(get_graph): + g, t = get_graph + t.remove((EX.s0, EX.p0, None)) + t.rollback() + assert set(g) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ] + ) + + +def test_noop_add_rollback(get_graph): + g, t = get_graph + t.add((EX.s0, EX.p0, EX.o0)) + t.rollback() + assert set(g) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ] + ) + + +def test_noop_remove_rollback(get_graph): + g, t = get_graph + t.add((EX.s0, EX.p0, EX.o0)) + t.rollback() + assert set(g) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ] + ) + + +def test_add_remove_rollback(get_graph): + g, t = get_graph + t.add((EX.s1, EX.p1, EX.o1)) + 
t.remove((EX.s1, EX.p1, EX.o1)) + t.rollback() + assert set(g) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ] + ) + + +def test_remove_add_rollback(get_graph): + g, t = get_graph + t.remove((EX.s1, EX.p1, EX.o1)) + t.add((EX.s1, EX.p1, EX.o1)) + t.rollback() + assert set(g) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ] + ) + + +@pytest.fixture( + scope="function", + params=get_plugin_stores(), +) +def get_empty_graph(request): + storename = request.param + + path = tempfile.mktemp() + + try: + shutil.rmtree(path) + except Exception: + pass + + g = Graph(store=storename) + rt = g.open(configuration=path, create=True) + assert rt == VALID_STORE, "The underlying store is corrupt" + + t = Graph(AuditableStore(g.store), g.identifier) + + yield g, t + g.close() + g.destroy(configuration=path) + + +def test_add_commit_empty(get_empty_graph): + g, t = get_empty_graph + t.add((EX.s1, EX.p1, EX.o1)) + assert set(t) == set( + [ + (EX.s1, EX.p1, EX.o1), + ] + ) + t.commit() + assert set(g) == set( + [ + (EX.s1, EX.p1, EX.o1), + ] + ) + + +def test_add_rollback_empty(get_empty_graph): + g, t = get_empty_graph + t.add((EX.s1, EX.p1, EX.o1)) + t.rollback() + assert set(g) == set([]) + + +@pytest.fixture +def get_concurrent_graph(): + g = Graph() + g.add((EX.s0, EX.p0, EX.o0)) + g.add((EX.s0, EX.p0, EX.o0bis)) + t1 = Graph(AuditableStore(g.store), g.identifier) + t2 = Graph(AuditableStore(g.store), g.identifier) + t1.add((EX.s1, EX.p1, EX.o1)) + t2.add((EX.s2, EX.p2, EX.o2)) + t1.remove((EX.s0, EX.p0, EX.o0)) + t2.remove((EX.s0, EX.p0, EX.o0bis)) + + yield g, t1, t2 + + +def test_commit_commit(get_concurrent_graph): + g, t1, t2 = get_concurrent_graph + t1.commit() + t2.commit() + assert set(g) == set( + [ + (EX.s1, EX.p1, EX.o1), + (EX.s2, EX.p2, EX.o2), + ] + ) + + +def test_commit_rollback(get_concurrent_graph): + g, t1, t2 = get_concurrent_graph + t1.commit() + t2.rollback() + assert set(g) == set( + [ + (EX.s1, EX.p1, EX.o1), + (EX.s0, EX.p0, EX.o0bis), + ] + ) + + +def test_rollback_commit(get_concurrent_graph): + g, t1, t2 = get_concurrent_graph + t1.rollback() + t2.commit() + assert set(g) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s2, EX.p2, EX.o2), + ] + ) + + +def test_rollback_rollback(get_concurrent_graph): + g, t1, t2 = get_concurrent_graph + t1.rollback() + t2.rollback() + assert set(g) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ] + ) + + +@pytest.fixture +def get_embedded_graph(): + g = Graph() + g.add((EX.s0, EX.p0, EX.o0)) + g.add((EX.s0, EX.p0, EX.o0bis)) + + t1 = Graph(AuditableStore(g.store), g.identifier) + t1.add((EX.s1, EX.p1, EX.o1)) + t1.remove((EX.s0, EX.p0, EX.o0bis)) + + t2 = Graph(AuditableStore(t1.store), t1.identifier) + t2.add((EX.s2, EX.p2, EX.o2)) + t2.remove((EX.s1, EX.p1, EX.o1)) + + yield g, t1, t2 + + +def test_commit_commit_embedded(get_embedded_graph): + g, t1, t2 = get_embedded_graph + assert set(t2) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s2, EX.p2, EX.o2), + ] + ) + t2.commit() + assert set(t1) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s2, EX.p2, EX.o2), + ] + ) + t1.commit() + assert set(g) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s2, EX.p2, EX.o2), + ] + ) + + +def test_commit_rollback_embedded(get_embedded_graph): + g, t1, t2 = get_embedded_graph + t2.commit() + t1.rollback() + assert set(g) == set( + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ] + ) + + +def test_rollback_commit_embedded(get_embedded_graph): + g, t1, t2 = get_embedded_graph + t2.rollback() + assert set(t1) 
== set(
+        [
+            (EX.s0, EX.p0, EX.o0),
+            (EX.s1, EX.p1, EX.o1),
+        ]
+    )
+    t1.commit()
+    assert set(g) == set(
+        [
+            (EX.s0, EX.p0, EX.o0),
+            (EX.s1, EX.p1, EX.o1),
+        ]
+    )
+
+
+def test_rollback_rollback_embedded(get_embedded_graph):
+    g, t1, t2 = get_embedded_graph
+    t2.rollback()
+    t1.rollback()
+    assert set(g) == set(
+        [
+            (EX.s0, EX.p0, EX.o0),
+            (EX.s0, EX.p0, EX.o0bis),
+        ]
+    )
diff --git a/test/test_store/test_store_berkeleydb.py b/test/test_store/test_store_berkeleydb.py
new file mode 100644
index 000000000..9b68605e4
--- /dev/null
+++ b/test/test_store/test_store_berkeleydb.py
@@ -0,0 +1,145 @@
+import pytest
+from tempfile import mktemp
+from rdflib import Dataset, URIRef
+from rdflib.store import VALID_STORE
+from rdflib.plugins.stores.berkeleydb import has_bsddb
+
+pytestmark = pytest.mark.skipif(
+    not has_bsddb, reason="skipping berkeleydb tests, module not available"
+)
+
+
+@pytest.fixture
+def get_dataset():
+    path = mktemp()
+    dataset = Dataset("BerkeleyDB")
+    rt = dataset.open(path, create=True)
+    assert rt == VALID_STORE, "The underlying store is corrupt"
+    assert (
+        len(dataset) == 0
+    ), "There must be zero triples in the graph just after store (file) creation"
+    data = """
+        PREFIX : <https://example.org/>
+
+        :a :b :c .
+        :d :e :f .
+        :d :g :h .
+        """
+    dataset.default_graph.parse(data=data, format="ttl")
+
+    yield path, dataset
+
+    dataset.close()
+    dataset.destroy(path)
+
+
+def test_write(get_dataset):
+    path, dataset = get_dataset
+    assert (
+        len(dataset.default_graph) == 3
+    ), "There must be three triples in the graph after the first data chunk parse"
+    data2 = """
+        PREFIX : <https://example.org/>
+
+        :d :i :j .
+        """
+    dataset.parse(data=data2, format="ttl")
+    assert (
+        len(dataset) == 4
+    ), "There must be four triples in the graph after the second data chunk parse"
+    data3 = """
+        PREFIX : <https://example.org/>
+
+        :d :i :j .
+        """
+    dataset.parse(data=data3, format="ttl")
+    assert (
+        len(dataset) == 4
+    ), "There must still be four triples in the graph after the third data chunk parse"
+
+
+def test_read(get_dataset):
+    path, dataset = get_dataset
+    sx = None
+    for s in dataset.subjects(
+        predicate=URIRef("https://example.org/e"),
+        object=URIRef("https://example.org/f"),
+    ):
+        sx = s
+    assert sx == URIRef("https://example.org/d")
+
+
+def test_sparql_query(get_dataset):
+    path, dataset = get_dataset
+    q = """
+        PREFIX : <https://example.org/>
+
+        SELECT (COUNT(*) AS ?c)
+        WHERE {
+            :d ?p ?o .
+        }"""
+
+    c = 0
+    for row in dataset.query(q):
+        c = int(row.c)
+    assert c == 2, "SPARQL COUNT must return 2"
+
+
+def test_sparql_insert(get_dataset):
+    path, dataset = get_dataset
+    q = """
+        PREFIX : <https://example.org/>
+
+        INSERT DATA {
+            :x :y :z .
+        }"""
+
+    dataset.update(q)
+    assert len(dataset) == 4, "After extra triple insert, length must be 4"
+
+
+def test_multigraph(get_dataset):
+    path, dataset = get_dataset
+    q = """
+        PREFIX : <https://example.org/>
+
+        INSERT DATA {
+            GRAPH :m {
+                :x :y :z .
+            }
+            GRAPH :n {
+                :x :y :z .
+ } + }""" + + dataset.update(q) + + q = """ + SELECT (COUNT(?g) AS ?c) + WHERE { + SELECT DISTINCT ?g + WHERE { + GRAPH ?g { + ?s ?p ?o + } + } + } + """ + c = 0 + for row in dataset.query(q): + c = int(row.c) + assert c == 2, "SPARQL COUNT must return 2 (:m & :n)" + + +def test_open_shut(get_dataset): + path, dataset = get_dataset + assert len(dataset) == 3, "Initially we must have 3 triples from setUp" + dataset.close() + dataset = None + + # reopen the graph + dataset = Dataset("BerkeleyDB") + dataset.open(path, create=False) + assert ( + len(dataset) == 3 + ), "After close and reopen, we should still have the 3 originally added triples" diff --git a/test/test_store/test_store_memorystore.py b/test/test_store/test_store_memorystore.py new file mode 100644 index 000000000..905dc58b6 --- /dev/null +++ b/test/test_store/test_store_memorystore.py @@ -0,0 +1,31 @@ +import pytest + +import rdflib + + +@pytest.fixture(scope="function", params=["SimpleMemory", "Memory"]) +def get_graph(request): + g = rdflib.Graph(request.param) + yield g + + +def test_memory_store(get_graph): + g = get_graph + subj1 = rdflib.URIRef("http://example.org/foo#bar1") + pred1 = rdflib.URIRef("http://example.org/foo#bar2") + obj1 = rdflib.URIRef("http://example.org/foo#bar3") + triple1 = (subj1, pred1, obj1) + triple2 = ( + subj1, + rdflib.URIRef("http://example.org/foo#bar4"), + rdflib.URIRef("http://example.org/foo#bar5"), + ) + g.add(triple1) + assert len(g) == 1 + g.add(triple2) + assert len(list(g.triples((subj1, None, None)))) == 2 + assert len(list(g.triples((None, pred1, None)))) == 1 + assert len(list(g.triples((None, None, obj1)))) == 1 + g.remove(triple1) + assert len(g) == 1 + assert len(g.serialize()) > 0 diff --git a/test/test_store_sparqlstore.py b/test/test_store/test_store_sparqlstore.py similarity index 98% rename from test/test_store_sparqlstore.py rename to test/test_store/test_store_sparqlstore.py index e7d7b4dac..b504e2e39 100644 --- a/test/test_store_sparqlstore.py +++ b/test/test_store/test_store_sparqlstore.py @@ -1,20 +1,19 @@ -from rdflib import Graph, URIRef, Literal +import re +import socket import unittest from http.server import BaseHTTPRequestHandler, HTTPServer -import socket from threading import Thread +from typing import Callable, ClassVar, Type from unittest.mock import patch -from rdflib.namespace import RDF, XSD, XMLNS, FOAF, RDFS -from rdflib.plugins.stores.sparqlstore import SPARQLConnector -from typing import ClassVar, Callable, Type + import pytest -import re -from . 
import helper
-from .testutils import (
-    MockHTTPResponse,
-    ServedSimpleHTTPMock,
-)
+from rdflib import Graph, Literal, URIRef
+from rdflib.namespace import FOAF, RDF, RDFS, XMLNS, XSD
+from rdflib.plugins.stores.sparqlstore import SPARQLConnector
+
+from test import helper
+from test.testutils import MockHTTPResponse, ServedSimpleHTTPMock
 
 
 class TestSPARQLStoreGraph:
diff --git a/test/test_store/test_store_sparqlupdatestore.py b/test/test_store/test_store_sparqlupdatestore.py
new file mode 100644
index 000000000..e47eaacf2
--- /dev/null
+++ b/test/test_store/test_store_sparqlupdatestore.py
@@ -0,0 +1,365 @@
+# -*- coding: utf-8 -*-
+
+import re
+import unittest
+from urllib.request import urlopen
+import pytest
+
+from rdflib import BNode, ConjunctiveGraph, Graph, Literal, URIRef
+from test.data import (
+    tarek,
+    bob,
+    michel,
+    likes,
+    hates,
+    cheese,
+    pizza,
+)
+
+HOST = "http://localhost:3031"
+DB = "/db/"
+
+# this assumes SPARQL1.1 query/update endpoints running locally at
+# http://localhost:3031/db/
+#
+# The ConjunctiveGraph tests below require that the SPARQL endpoint renders its
+# default graph as the union of all known graphs! This is incompatible with the
+# endpoint behavior required by our Dataset tests in test_dataset.py, so you
+# need to run a second SPARQL endpoint on a non-standard port,
+# e.g. fuseki started with:
+# ./fuseki-server --port 3031 --memTDB --update --set tdb:unionDefaultGraph=true /db
+
+# THIS WILL DELETE ALL DATA IN THE /db dataset
+
+graphuri = URIRef("urn:example:graph")
+othergraphuri = URIRef("urn:example:othergraph")
+
+try:
+    assert len(urlopen(HOST).read()) > 0
+except:
+    pytest.skip(f"skipping because {HOST} is unavailable", allow_module_level=True)
+
+
+@pytest.fixture
+def get_graph():
+
+    longMessage = True
+    graph = ConjunctiveGraph("SPARQLUpdateStore")
+
+    root = HOST + DB
+    graph.open((root + "sparql", root + "update"))
+
+    # clean out the store
+    for c in graph.contexts():
+        c.remove((None, None, None))
+        assert len(c) == 0
+
+    yield graph
+
+    graph.close()
+
+
+def test_simple_graph(get_graph):
+    graph = get_graph
+    g = graph.get_context(graphuri)
+    g.add((tarek, likes, pizza))
+    g.add((bob, likes, pizza))
+    g.add((bob, likes, cheese))
+
+    g2 = graph.get_context(othergraphuri)
+    g2.add((michel, likes, pizza))
+
+    assert 3 == len(g), "graph contains 3 triples"
+    assert 1 == len(g2), "other graph contains 1 triple"
+
+    r = g.query("SELECT * WHERE { ?s <urn:example:likes> <urn:example:pizza> . }")
+    assert 2 == len(list(r)), "two people like pizza"
+
+    r = g.triples((None, likes, pizza))
+    assert 2 == len(list(r)), "two people like pizza"
+
+    # Test initBindings
+    r = g.query(
+        "SELECT * WHERE { ?s <urn:example:likes> <urn:example:pizza> . }",
+        initBindings={"s": tarek},
+    )
+    assert 1 == len(list(r)), "i was asking only about tarek"
+
+    r = g.triples((tarek, likes, pizza))
+    assert 1 == len(list(r)), "i was asking only about tarek"
+
+    r = g.triples((tarek, likes, cheese))
+    assert 0 == len(list(r)), "tarek doesn't like cheese"
+
+    g2.add((tarek, likes, pizza))
+    g.remove((tarek, likes, pizza))
+    r = g.query("SELECT * WHERE { ?s <urn:example:likes> <urn:example:pizza> . }")
+    assert 1 == len(list(r)), "only bob likes pizza"
+
+
+def test_conjunctive_default(get_graph):
+    graph = get_graph
+    g = graph.get_context(graphuri)
+    g.add((tarek, likes, pizza))
+    g2 = graph.get_context(othergraphuri)
+    g2.add((bob, likes, pizza))
+    g.add((tarek, hates, cheese))
+
+    assert 2 == len(g), "graph contains 2 triples"
+
+    # the following are actually bad tests as they depend on your endpoint,
+    # as pointed out in the sparqlstore.py code:
+    #
+    # For ConjunctiveGraphs, reading is done from the "default graph". Exactly
+    # what this means depends on your endpoint, because SPARQL does not offer a
+    # simple way to query the union of all graphs as it would be expected for a
+    # ConjunctiveGraph.
+    ##
+    # Fuseki/TDB has a flag for specifying that the default graph
+    # is the union of all graphs (tdb:unionDefaultGraph in the Fuseki config).
+    assert (
+        len(graph) == 3
+    ), f"default union graph should contain three triples but contains:\n{list(graph)}"
+
+    r = graph.query("SELECT * WHERE { ?s <urn:example:likes> <urn:example:pizza> . }")
+    assert 2 == len(list(r)), "two people like pizza"
+
+    r = graph.query(
+        "SELECT * WHERE { ?s <urn:example:likes> <urn:example:pizza> . }",
+        initBindings={"s": tarek},
+    )
+    assert 1 == len(list(r)), "i was asking only about tarek"
+
+    r = graph.triples((tarek, likes, pizza))
+    assert 1 == len(list(r)), "i was asking only about tarek"
+
+    r = graph.triples((tarek, likes, cheese))
+    assert 0 == len(list(r)), "tarek doesn't like cheese"
+
+    g2.remove((bob, likes, pizza))
+
+    r = graph.query("SELECT * WHERE { ?s <urn:example:likes> <urn:example:pizza> . }")
+    assert 1 == len(list(r)), "only tarek likes pizza"
+
+
+def test_update(get_graph):
+    graph = get_graph
+    graph.update(
+        "INSERT DATA { GRAPH <urn:example:graph> { <urn:example:michel> <urn:example:likes> <urn:example:pizza> . } }"
+    )
+
+    g = graph.get_context(graphuri)
+    assert 1 == len(g), "graph contains 1 triple"
+
+
+def test_update_with_initns(get_graph):
+    graph = get_graph
+    graph.update(
+        "INSERT DATA { GRAPH ns:graph { ns:michel ns:likes ns:pizza . } }",
+        initNs={"ns": URIRef("urn:example:")},
+    )
+
+    g = graph.get_context(graphuri)
+    assert set(g.triples((None, None, None))) == set(
+        [(michel, likes, pizza)]
+    ), "only michel likes pizza"
+
+
+def test_update_with_init_bindings(get_graph):
+    graph = get_graph
+    graph.update(
+        "INSERT { GRAPH <urn:example:graph> { ?a ?b ?c . } } WherE { }",
+        initBindings={
+            "a": URIRef("urn:example:michel"),
+            "b": URIRef("urn:example:likes"),
+            "c": URIRef("urn:example:pizza"),
+        },
+    )
+
+    g = graph.get_context(graphuri)
+    assert set(g.triples((None, None, None))) == set(
+        [(michel, likes, pizza)]
+    ), "only michel likes pizza"
+
+
+def test_update_with_blank_node(get_graph):
+    graph = get_graph
+    graph.update(
+        "INSERT DATA { GRAPH <urn:example:graph> { _:blankA <urn:example:type> <urn:example:blank> } }"
+    )
+    g = graph.get_context(graphuri)
+    for t in g.triples((None, None, None)):
+        assert isinstance(t[0], BNode)
+        assert t[1].n3() == "<urn:example:type>"
+        assert t[2].n3() == "<urn:example:blank>"
+
+
+def test_update_with_blank_node_serialize_and_parse(get_graph):
+    graph = get_graph
+    graph.update(
+        "INSERT DATA { GRAPH <urn:example:graph> { _:blankA <urn:example:type> <urn:example:blank> } }"
+    )
+    g = graph.get_context(graphuri)
+    string = g.serialize(format="ntriples")
+    raised = False
+    try:
+        Graph().parse(data=string, format="ntriples")
+    except Exception as e:
+        raised = True
+    assert raised is False, "Exception raised when parsing: " + string
+
+
+def test_multiple_update_with_init_bindings(get_graph):
+    graph = get_graph
+    graph.update(
+        "INSERT { GRAPH <urn:example:graph> { ?a ?b ?c . } } WHERE { };"
+        "INSERT { GRAPH <urn:example:graph> { ?d ?b ?c . 
} } WHERE { }", + initBindings={ + "a": URIRef("urn:example:michel"), + "b": URIRef("urn:example:likes"), + "c": URIRef("urn:example:pizza"), + "d": URIRef("urn:example:bob"), + }, + ) + + g = graph.get_context(graphuri) + assert set(g.triples((None, None, None))) == set( + [(michel, likes, pizza), (bob, likes, pizza)] + ), "michel and bob like pizza" + + +def test_named_graph_update(get_graph): + graph = get_graph + g = graph.get_context(graphuri) + r1 = "INSERT DATA { }" + g.update(r1) + assert set(g.triples((None, None, None))) == set( + [(michel, likes, pizza)] + ), "only michel likes pizza" + + r2 = ( + "DELETE { } " + + "INSERT { } WHERE {}" + ) + g.update(r2) + assert set(g.triples((None, None, None))) == set( + [(bob, likes, pizza)] + ), "only bob likes pizza" + + says = URIRef("urn:says") + + # Strings with unbalanced curly braces + tricky_strs = ["With an unbalanced curly brace %s " % brace for brace in ["{", "}"]] + for tricky_str in tricky_strs: + r3 = ( + """INSERT { ?b "%s" } + WHERE { ?b } """ + % tricky_str + ) + g.update(r3) + + values = set() + for v in g.objects(bob, says): + values.add(str(v)) + assert values == set(tricky_strs) + + # Complicated Strings + r4strings = [] + r4strings.append(r'''"1: adfk { ' \\\" \" { "''') + r4strings.append(r'''"2: adfk } #éï \\"''') + + r4strings.append(r"""'3: adfk { " \\\' \' { '""") + r4strings.append(r"""'4: adfk } #éï \\'""") + + r4strings.append(r'''"""5: adfk { ' \\\" \" { """''') + r4strings.append(r'''"""6: adfk } #éï \\"""''') + r4strings.append('"""7: ad adsfj \n { \n sadfj"""') + + r4strings.append(r"""'''8: adfk { " \\\' \' { '''""") + r4strings.append(r"""'''9: adfk } #éï \\'''""") + r4strings.append("'''10: ad adsfj \n { \n sadfj'''") + + r4 = "\n".join( + ["INSERT DATA { %s } ;" % s for s in r4strings] + ) + g.update(r4) + values = set() + for v in g.objects(michel, says): + values.add(str(v)) + assert values == set( + [ + re.sub( + r"\\(.)", + r"\1", + re.sub(r"^'''|'''$|^'|'$|" + r'^"""|"""$|^"|"$', r"", s), + ) + for s in r4strings + ] + ) + + # IRI Containing ' or # + # The fragment identifier must not be misinterpreted as a comment + # (commenting out the end of the block). + # The ' must not be interpreted as the start of a string, causing the } + # in the literal to be identified as the end of the block. + r5 = """INSERT DATA { , "'}" }""" + + g.update(r5) + values = set() + for v in g.objects(michel, hates): + values.add(str(v)) + assert values == set(["urn:example:foo'bar?baz;a=1&b=2#fragment", "'}"]) + + # Comments + r6 = """ + INSERT DATA { + . # No closing brace: } + . 
+ } + #Final { } comment""" + + g.update(r6) + values = set() + for v in g.objects(bob, hates): + values.add(v) + assert values == set([bob, michel]) + + +def test_named_graph_update_with_init_bindings(get_graph): + graph = get_graph + g = graph.get_context(graphuri) + r = "INSERT { ?a ?b ?c } WHERE {}" + g.update(r, initBindings={"a": michel, "b": likes, "c": pizza}) + assert set(g.triples((None, None, None))) == set( + [(michel, likes, pizza)] + ), "only michel likes pizza" + + +def test_empty_named_graph(get_graph): + graph = get_graph + empty_graph_iri = "urn:empty-graph-1" + graph.update("CREATE GRAPH <%s>" % empty_graph_iri) + named_graphs = [ + str(r[0]) for r in graph.query("SELECT ?name WHERE { GRAPH ?name {} }") + ] + # Some SPARQL endpoint backends (like TDB) are not able to find empty named graphs + # (at least with this query) + if empty_graph_iri in named_graphs: + assert empty_graph_iri in [str(g.identifier) for g in graph.contexts()] + + +def test_empty_literal(get_graph): + graph = get_graph + # test for https://github.com/RDFLib/rdflib/issues/457 + # also see test_issue457.py which is sparql store independent! + g = graph.get_context(graphuri) + g.add( + ( + URIRef("http://example.com/s"), + URIRef("http://example.com/p"), + Literal(""), + ) + ) + + o = tuple(g)[0][2] + assert o == Literal(""), repr(o) diff --git a/test/test_store_sparqlupdatestore_mock.py b/test/test_store/test_store_sparqlupdatestore_mock.py similarity index 96% rename from test/test_store_sparqlupdatestore_mock.py rename to test/test_store/test_store_sparqlupdatestore_mock.py index f812c3deb..c3246e78e 100644 --- a/test/test_store_sparqlupdatestore_mock.py +++ b/test/test_store/test_store_sparqlupdatestore_mock.py @@ -1,8 +1,10 @@ -from rdflib.graph import ConjunctiveGraph +import unittest from typing import ClassVar + from rdflib import Namespace -from .testutils import MockHTTPResponse, ServedSimpleHTTPMock -import unittest +from rdflib.graph import ConjunctiveGraph + +from test.testutils import MockHTTPResponse, ServedSimpleHTTPMock EG = Namespace("http://example.org/") diff --git a/test/test_store/test_store_triple_store.py b/test/test_store/test_store_triple_store.py new file mode 100644 index 000000000..e37530647 --- /dev/null +++ b/test/test_store/test_store_triple_store.py @@ -0,0 +1,36 @@ +import pytest + +from rdflib.graph import Graph +from rdflib.namespace import RDFS +from rdflib.term import BNode, Literal + +remove_me = (BNode(), RDFS.label, Literal("remove_me")) + + +@pytest.fixture(scope="function") +def get_store(request): + store = Graph(store="default") + store.open("store") + store.add(remove_me) + + yield store + + store.close() + + +def test_add(get_store): + store = get_store + subject = BNode() + store.add((subject, RDFS.label, Literal("foo"))) + + +def test_remove(get_store): + store = get_store + store.remove(remove_me) + store.remove((None, None, None)) + + +def test_triples(get_store): + store = get_store + for s, p, o in store: + pass diff --git a/test/test_store_auditable.py b/test/test_store_auditable.py deleted file mode 100644 index f5c1188c9..000000000 --- a/test/test_store_auditable.py +++ /dev/null @@ -1,386 +0,0 @@ -# -*- coding=utf8 -*- -import unittest - -from rdflib import Graph, Namespace -from rdflib.plugins.stores.auditable import AuditableStore - -EX = Namespace("http://example.org/") - - -class BaseTestAuditableStore(unittest.TestCase): - def assert_graph_equal(self, g1, g2): - try: - return self.assertSetEqual(set(g1), set(g2)) - except AttributeError: - # 
python2.6 does not have assertSetEqual - assert set(g1) == set(g2) - - -class TestAuditableStore(BaseTestAuditableStore): - def setUp(self): - self.g = Graph() - self.g.add((EX.s0, EX.p0, EX.o0)) - self.g.add((EX.s0, EX.p0, EX.o0bis)) - - self.t = Graph(AuditableStore(self.g.store), self.g.identifier) - - def test_add_commit(self): - self.t.add((EX.s1, EX.p1, EX.o1)) - self.assert_graph_equal( - self.t, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - (EX.s1, EX.p1, EX.o1), - ], - ) - self.t.commit() - self.assert_graph_equal( - self.g, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - (EX.s1, EX.p1, EX.o1), - ], - ) - - def test_remove_commit(self): - self.t.remove((EX.s0, EX.p0, EX.o0)) - self.assert_graph_equal( - self.t, - [ - (EX.s0, EX.p0, EX.o0bis), - ], - ) - self.t.commit() - self.assert_graph_equal( - self.g, - [ - (EX.s0, EX.p0, EX.o0bis), - ], - ) - - def test_multiple_remove_commit(self): - self.t.remove((EX.s0, EX.p0, None)) - self.assert_graph_equal(self.t, []) - self.t.commit() - self.assert_graph_equal(self.g, []) - - def test_noop_add_commit(self): - self.t.add((EX.s0, EX.p0, EX.o0)) - self.assert_graph_equal( - self.t, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ], - ) - self.t.commit() - self.assert_graph_equal( - self.g, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ], - ) - - def test_noop_remove_commit(self): - self.t.add((EX.s0, EX.p0, EX.o0)) - self.assert_graph_equal( - self.t, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ], - ) - self.t.commit() - self.assert_graph_equal( - self.g, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ], - ) - - def test_add_remove_commit(self): - self.t.add((EX.s1, EX.p1, EX.o1)) - self.t.remove((EX.s1, EX.p1, EX.o1)) - self.assert_graph_equal( - self.t, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ], - ) - self.t.commit() - self.assert_graph_equal( - self.g, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ], - ) - - def test_remove_add_commit(self): - self.t.remove((EX.s1, EX.p1, EX.o1)) - self.t.add((EX.s1, EX.p1, EX.o1)) - self.assert_graph_equal( - self.t, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - (EX.s1, EX.p1, EX.o1), - ], - ) - self.t.commit() - self.assert_graph_equal( - self.g, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - (EX.s1, EX.p1, EX.o1), - ], - ) - - def test_add_rollback(self): - self.t.add((EX.s1, EX.p1, EX.o1)) - self.t.rollback() - self.assert_graph_equal( - self.g, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ], - ) - - def test_remove_rollback(self): - self.t.remove((EX.s0, EX.p0, EX.o0)) - self.t.rollback() - self.assert_graph_equal( - self.g, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ], - ) - - def test_multiple_remove_rollback(self): - self.t.remove((EX.s0, EX.p0, None)) - self.t.rollback() - self.assert_graph_equal( - self.g, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ], - ) - - def test_noop_add_rollback(self): - self.t.add((EX.s0, EX.p0, EX.o0)) - self.t.rollback() - self.assert_graph_equal( - self.g, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ], - ) - - def test_noop_remove_rollback(self): - self.t.add((EX.s0, EX.p0, EX.o0)) - self.t.rollback() - self.assert_graph_equal( - self.g, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ], - ) - - def test_add_remove_rollback(self): - self.t.add((EX.s1, EX.p1, EX.o1)) - self.t.remove((EX.s1, EX.p1, EX.o1)) - self.t.rollback() - self.assert_graph_equal( - self.g, - [ 
- (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ], - ) - - def test_remove_add_rollback(self): - self.t.remove((EX.s1, EX.p1, EX.o1)) - self.t.add((EX.s1, EX.p1, EX.o1)) - self.t.rollback() - self.assert_graph_equal( - self.g, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ], - ) - - -class TestAuditableStoreEmptyGraph(BaseTestAuditableStore): - def setUp(self): - self.g = Graph() - self.t = Graph(AuditableStore(self.g.store), self.g.identifier) - - def test_add_commit(self): - self.t.add((EX.s1, EX.p1, EX.o1)) - self.assert_graph_equal( - self.t, - [ - (EX.s1, EX.p1, EX.o1), - ], - ) - self.t.commit() - self.assert_graph_equal( - self.g, - [ - (EX.s1, EX.p1, EX.o1), - ], - ) - - def test_add_rollback(self): - self.t.add((EX.s1, EX.p1, EX.o1)) - self.t.rollback() - self.assert_graph_equal(self.g, []) - - -class TestAuditableStoreConccurent(BaseTestAuditableStore): - def setUp(self): - self.g = Graph() - self.g.add((EX.s0, EX.p0, EX.o0)) - self.g.add((EX.s0, EX.p0, EX.o0bis)) - self.t1 = Graph(AuditableStore(self.g.store), self.g.identifier) - self.t2 = Graph(AuditableStore(self.g.store), self.g.identifier) - self.t1.add((EX.s1, EX.p1, EX.o1)) - self.t2.add((EX.s2, EX.p2, EX.o2)) - self.t1.remove((EX.s0, EX.p0, EX.o0)) - self.t2.remove((EX.s0, EX.p0, EX.o0bis)) - - def test_commit_commit(self): - self.t1.commit() - self.t2.commit() - self.assert_graph_equal( - self.g, - [ - (EX.s1, EX.p1, EX.o1), - (EX.s2, EX.p2, EX.o2), - ], - ) - - def test_commit_rollback(self): - self.t1.commit() - self.t2.rollback() - self.assert_graph_equal( - self.g, - [ - (EX.s1, EX.p1, EX.o1), - (EX.s0, EX.p0, EX.o0bis), - ], - ) - - def test_rollback_commit(self): - self.t1.rollback() - self.t2.commit() - self.assert_graph_equal( - self.g, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s2, EX.p2, EX.o2), - ], - ) - - def test_rollback_rollback(self): - self.t1.rollback() - self.t2.rollback() - self.assert_graph_equal( - self.g, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ], - ) - - -class TestAuditableStoreEmbeded(BaseTestAuditableStore): - def setUp(self): - self.g = Graph() - self.g.add((EX.s0, EX.p0, EX.o0)) - self.g.add((EX.s0, EX.p0, EX.o0bis)) - - self.t1 = Graph(AuditableStore(self.g.store), self.g.identifier) - self.t1.add((EX.s1, EX.p1, EX.o1)) - self.t1.remove((EX.s0, EX.p0, EX.o0bis)) - - self.t2 = Graph(AuditableStore(self.t1.store), self.t1.identifier) - self.t2.add((EX.s2, EX.p2, EX.o2)) - self.t2.remove((EX.s1, EX.p1, EX.o1)) - - def test_commit_commit(self): - self.assert_graph_equal( - self.t2, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s2, EX.p2, EX.o2), - ], - ) - self.t2.commit() - self.assert_graph_equal( - self.t1, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s2, EX.p2, EX.o2), - ], - ) - self.t1.commit() - self.assert_graph_equal( - self.g, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s2, EX.p2, EX.o2), - ], - ) - - def test_commit_rollback(self): - self.t2.commit() - self.t1.rollback() - self.assert_graph_equal( - self.g, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ], - ) - - def test_rollback_commit(self): - self.t2.rollback() - self.assert_graph_equal( - self.t1, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s1, EX.p1, EX.o1), - ], - ) - self.t1.commit() - self.assert_graph_equal( - self.g, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s1, EX.p1, EX.o1), - ], - ) - - def test_rollback_rollback(self): - self.t2.rollback() - self.t1.rollback() - self.assert_graph_equal( - self.g, - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ], - ) diff --git a/test/test_store_berkeleydb.py 
b/test/test_store_berkeleydb.py deleted file mode 100644 index f96fb9bc2..000000000 --- a/test/test_store_berkeleydb.py +++ /dev/null @@ -1,130 +0,0 @@ -import unittest -from tempfile import mktemp -from rdflib import ConjunctiveGraph, URIRef -from rdflib.store import VALID_STORE -from rdflib.plugins.stores.berkeleydb import has_bsddb - - -class BerkeleyDBTestCase(unittest.TestCase): - def setUp(self): - if not has_bsddb: - self.skipTest("skipping as berkleydb is missing") - self.store_name = "BerkeleyDB" - self.path = mktemp() - self.g = ConjunctiveGraph(store=self.store_name) - self.rt = self.g.open(self.path, create=True) - assert self.rt == VALID_STORE, "The underlying store is corrupt" - assert ( - len(self.g) == 0 - ), "There must be zero triples in the graph just after store (file) creation" - data = """ - PREFIX : - - :a :b :c . - :d :e :f . - :d :g :h . - """ - self.g.parse(data=data, format="ttl") - - def tearDown(self): - self.g.close() - - def test_write(self): - assert ( - len(self.g) == 3 - ), "There must be three triples in the graph after the first data chunk parse" - data2 = """ - PREFIX : - - :d :i :j . - """ - self.g.parse(data=data2, format="ttl") - assert ( - len(self.g) == 4 - ), "There must be four triples in the graph after the second data chunk parse" - data3 = """ - PREFIX : - - :d :i :j . - """ - self.g.parse(data=data3, format="ttl") - assert ( - len(self.g) == 4 - ), "There must still be four triples in the graph after the thrd data chunk parse" - - def test_read(self): - sx = None - for s in self.g.subjects( - predicate=URIRef("https://example.org/e"), - object=URIRef("https://example.org/f"), - ): - sx = s - assert sx == URIRef("https://example.org/d") - - def test_sparql_query(self): - q = """ - PREFIX : - - SELECT (COUNT(*) AS ?c) - WHERE { - :d ?p ?o . - }""" - - c = 0 - for row in self.g.query(q): - c = int(row.c) - assert c == 2, "SPARQL COUNT must return 2" - - def test_sparql_insert(self): - q = """ - PREFIX : - - INSERT DATA { - :x :y :z . - }""" - - self.g.update(q) - assert len(self.g) == 4, "After extra triple insert, length must be 4" - - def test_multigraph(self): - q = """ - PREFIX : - - INSERT DATA { - GRAPH :m { - :x :y :z . - } - GRAPH :n { - :x :y :z . 
- } - }""" - - self.g.update(q) - - q = """ - SELECT (COUNT(?g) AS ?c) - WHERE { - SELECT DISTINCT ?g - WHERE { - GRAPH ?g { - ?s ?p ?o - } - } - } - """ - c = 0 - for row in self.g.query(q): - c = int(row.c) - assert c == 3, "SPARQL COUNT must return 3 (default, :m & :n)" - - def test_open_shut(self): - assert len(self.g) == 3, "Initially we must have 3 triples from setUp" - self.g.close() - self.g = None - - # reopen the graph - self.g = ConjunctiveGraph("BerkeleyDB") - self.g.open(self.path, create=False) - assert ( - len(self.g) == 3 - ), "After close and reopen, we should still have the 3 originally added triples" diff --git a/test/test_store_memorystore.py b/test/test_store_memorystore.py deleted file mode 100644 index 41b603569..000000000 --- a/test/test_store_memorystore.py +++ /dev/null @@ -1,59 +0,0 @@ -import unittest -import rdflib - -rdflib.plugin.register( - "SimpleMemory", rdflib.store.Store, "rdflib.plugins.stores.memory", "SimpleMemory" -) -rdflib.plugin.register( - "Memory", rdflib.store.Store, "rdflib.plugins.stores.memory", "Memory" -) - - -class SimpleStoreTestCase(unittest.TestCase): - def test_memory_store(self): - g = rdflib.Graph("SimpleMemory") - subj1 = rdflib.URIRef("http://example.org/foo#bar1") - pred1 = rdflib.URIRef("http://example.org/foo#bar2") - obj1 = rdflib.URIRef("http://example.org/foo#bar3") - triple1 = (subj1, pred1, obj1) - triple2 = ( - subj1, - rdflib.URIRef("http://example.org/foo#bar4"), - rdflib.URIRef("http://example.org/foo#bar5"), - ) - g.add(triple1) - self.assertTrue(len(g) == 1) - g.add(triple2) - self.assertTrue(len(list(g.triples((subj1, None, None)))) == 2) - self.assertTrue(len(list(g.triples((None, pred1, None)))) == 1) - self.assertTrue(len(list(g.triples((None, None, obj1)))) == 1) - g.remove(triple1) - self.assertTrue(len(g) == 1) - g.serialize() - - -class MemoryStoreTestCase(unittest.TestCase): - def test_memory_store(self): - g = rdflib.Graph("Memory") - subj1 = rdflib.URIRef("http://example.org/foo#bar1") - pred1 = rdflib.URIRef("http://example.org/foo#bar2") - obj1 = rdflib.URIRef("http://example.org/foo#bar3") - triple1 = (subj1, pred1, obj1) - triple2 = ( - subj1, - rdflib.URIRef("http://example.org/foo#bar4"), - rdflib.URIRef("http://example.org/foo#bar5"), - ) - g.add(triple1) - self.assertTrue(len(g) == 1) - g.add(triple2) - self.assertTrue(len(list(g.triples((subj1, None, None)))) == 2) - self.assertTrue(len(list(g.triples((None, pred1, None)))) == 1) - self.assertTrue(len(list(g.triples((None, None, obj1)))) == 1) - g.remove(triple1) - self.assertTrue(len(g) == 1) - g.serialize() - - -if __name__ == "__main__": - unittest.main(defaultTest="test_suite") diff --git a/test/test_store_sparqlupdatestore.py b/test/test_store_sparqlupdatestore.py deleted file mode 100644 index 6c942f8ef..000000000 --- a/test/test_store_sparqlupdatestore.py +++ /dev/null @@ -1,362 +0,0 @@ -# -*- coding: utf-8 -*- - -import unittest -import re - -from rdflib import ConjunctiveGraph, URIRef, Literal, BNode, Graph -from urllib.request import urlopen - -HOST = "http://localhost:3031" -DB = "/db/" - -# this assumes SPARQL1.1 query/update endpoints running locally at -# http://localhost:3031/db/ -# -# The ConjunctiveGraph tests below require that the SPARQL endpoint renders its -# default graph as the union of all known graphs! This is incompatible with the -# endpoint behavior required by our Dataset tests in test_dataset.py, so you -# need to run a second SPARQL endpoint on a non standard port, -# e.g. 
fuseki started with: -# ./fuseki-server --port 3031 --memTDB --update --set tdb:unionDefaultGraph=true /db - -# THIS WILL DELETE ALL DATA IN THE /db dataset - -michel = URIRef("urn:example:michel") -tarek = URIRef("urn:example:tarek") -bob = URIRef("urn:example:bob") -likes = URIRef("urn:example:likes") -hates = URIRef("urn:example:hates") -pizza = URIRef("urn:example:pizza") -cheese = URIRef("urn:example:cheese") - -graphuri = URIRef("urn:example:graph") -othergraphuri = URIRef("urn:example:othergraph") - -try: - assert len(urlopen(HOST).read()) > 0 - skip = False -except: - skip = True - - -@unittest.skipIf(skip, HOST + " is unavailable.") -class TestSparql11(unittest.TestCase): - def setUp(self): - self.longMessage = True - self.graph = ConjunctiveGraph("SPARQLUpdateStore") - - root = HOST + DB - self.graph.open((root + "sparql", root + "update")) - - # clean out the store - for c in self.graph.contexts(): - c.remove((None, None, None)) - assert len(c) == 0 - - def tearDown(self): - self.graph.close() - - def testSimpleGraph(self): - g = self.graph.get_context(graphuri) - g.add((tarek, likes, pizza)) - g.add((bob, likes, pizza)) - g.add((bob, likes, cheese)) - - g2 = self.graph.get_context(othergraphuri) - g2.add((michel, likes, pizza)) - - self.assertEqual(3, len(g), "graph contains 3 triples") - self.assertEqual(1, len(g2), "other graph contains 1 triple") - - r = g.query("SELECT * WHERE { ?s . }") - self.assertEqual(2, len(list(r)), "two people like pizza") - - r = g.triples((None, likes, pizza)) - self.assertEqual(2, len(list(r)), "two people like pizza") - - # Test initBindings - r = g.query( - "SELECT * WHERE { ?s . }", initBindings={"s": tarek}, - ) - self.assertEqual(1, len(list(r)), "i was asking only about tarek") - - r = g.triples((tarek, likes, pizza)) - self.assertEqual(1, len(list(r)), "i was asking only about tarek") - - r = g.triples((tarek, likes, cheese)) - self.assertEqual(0, len(list(r)), "tarek doesn't like cheese") - - g2.add((tarek, likes, pizza)) - g.remove((tarek, likes, pizza)) - r = g.query("SELECT * WHERE { ?s . }") - self.assertEqual(1, len(list(r)), "only bob likes pizza") - - def testConjunctiveDefault(self): - g = self.graph.get_context(graphuri) - g.add((tarek, likes, pizza)) - g2 = self.graph.get_context(othergraphuri) - g2.add((bob, likes, pizza)) - g.add((tarek, hates, cheese)) - - self.assertEqual(2, len(g), "graph contains 2 triples") - - # the following are actually bad tests as they depend on your endpoint, - # as pointed out in the sparqlstore.py code: - # - # For ConjunctiveGraphs, reading is done from the "default graph" Exactly - # what this means depends on your endpoint, because SPARQL does not offer a - # simple way to query the union of all graphs as it would be expected for a - # ConjuntiveGraph. - ## - # Fuseki/TDB has a flag for specifying that the default graph - # is the union of all graphs (tdb:unionDefaultGraph in the Fuseki config). - self.assertEqual( - 3, - len(self.graph), - "default union graph should contain three triples but contains:\n" - "%s" % list(self.graph), - ) - - r = self.graph.query("SELECT * WHERE { ?s . }") - self.assertEqual(2, len(list(r)), "two people like pizza") - - r = self.graph.query( - "SELECT * WHERE { ?s . 
}", initBindings={"s": tarek}, - ) - self.assertEqual(1, len(list(r)), "i was asking only about tarek") - - r = self.graph.triples((tarek, likes, pizza)) - self.assertEqual(1, len(list(r)), "i was asking only about tarek") - - r = self.graph.triples((tarek, likes, cheese)) - self.assertEqual(0, len(list(r)), "tarek doesn't like cheese") - - g2.remove((bob, likes, pizza)) - - r = self.graph.query("SELECT * WHERE { ?s . }") - self.assertEqual(1, len(list(r)), "only tarek likes pizza") - - def testUpdate(self): - self.graph.update( - "INSERT DATA { GRAPH { . } }" - ) - - g = self.graph.get_context(graphuri) - self.assertEqual(1, len(g), "graph contains 1 triples") - - def testUpdateWithInitNs(self): - self.graph.update( - "INSERT DATA { GRAPH ns:graph { ns:michel ns:likes ns:pizza . } }", - initNs={"ns": URIRef("urn:example:")}, - ) - - g = self.graph.get_context(graphuri) - self.assertEqual( - set(g.triples((None, None, None))), - set([(michel, likes, pizza)]), - "only michel likes pizza", - ) - - def testUpdateWithInitBindings(self): - self.graph.update( - "INSERT { GRAPH { ?a ?b ?c . } } WherE { }", - initBindings={ - "a": URIRef("urn:example:michel"), - "b": URIRef("urn:example:likes"), - "c": URIRef("urn:example:pizza"), - }, - ) - - g = self.graph.get_context(graphuri) - self.assertEqual( - set(g.triples((None, None, None))), - set([(michel, likes, pizza)]), - "only michel likes pizza", - ) - - def testUpdateWithBlankNode(self): - self.graph.update( - "INSERT DATA { GRAPH { _:blankA } }" - ) - g = self.graph.get_context(graphuri) - for t in g.triples((None, None, None)): - self.assertTrue(isinstance(t[0], BNode)) - self.assertEqual(t[1].n3(), "") - self.assertEqual(t[2].n3(), "") - - def testUpdateWithBlankNodeSerializeAndParse(self): - self.graph.update( - "INSERT DATA { GRAPH { _:blankA } }" - ) - g = self.graph.get_context(graphuri) - string = g.serialize(format="ntriples") - raised = False - try: - Graph().parse(data=string, format="ntriples") - except Exception as e: - raised = True - self.assertFalse(raised, "Exception raised when parsing: " + string) - - def testMultipleUpdateWithInitBindings(self): - self.graph.update( - "INSERT { GRAPH { ?a ?b ?c . } } WHERE { };" - "INSERT { GRAPH { ?d ?b ?c . 
} } WHERE { }", - initBindings={ - "a": URIRef("urn:example:michel"), - "b": URIRef("urn:example:likes"), - "c": URIRef("urn:example:pizza"), - "d": URIRef("urn:example:bob"), - }, - ) - - g = self.graph.get_context(graphuri) - self.assertEqual( - set(g.triples((None, None, None))), - set([(michel, likes, pizza), (bob, likes, pizza)]), - "michel and bob like pizza", - ) - - def testNamedGraphUpdate(self): - g = self.graph.get_context(graphuri) - r1 = "INSERT DATA { }" - g.update(r1) - self.assertEqual( - set(g.triples((None, None, None))), - set([(michel, likes, pizza)]), - "only michel likes pizza", - ) - - r2 = ( - "DELETE { } " - + "INSERT { } WHERE {}" - ) - g.update(r2) - self.assertEqual( - set(g.triples((None, None, None))), - set([(bob, likes, pizza)]), - "only bob likes pizza", - ) - says = URIRef("urn:says") - - # Strings with unbalanced curly braces - tricky_strs = [ - "With an unbalanced curly brace %s " % brace for brace in ["{", "}"] - ] - for tricky_str in tricky_strs: - r3 = ( - """INSERT { ?b "%s" } - WHERE { ?b } """ - % tricky_str - ) - g.update(r3) - - values = set() - for v in g.objects(bob, says): - values.add(str(v)) - self.assertEqual(values, set(tricky_strs)) - - # Complicated Strings - r4strings = [] - r4strings.append(r'''"1: adfk { ' \\\" \" { "''') - r4strings.append(r'''"2: adfk } #éï \\"''') - - r4strings.append(r"""'3: adfk { " \\\' \' { '""") - r4strings.append(r"""'4: adfk } #éï \\'""") - - r4strings.append(r'''"""5: adfk { ' \\\" \" { """''') - r4strings.append(r'''"""6: adfk } #éï \\"""''') - r4strings.append('"""7: ad adsfj \n { \n sadfj"""') - - r4strings.append(r"""'''8: adfk { " \\\' \' { '''""") - r4strings.append(r"""'''9: adfk } #éï \\'''""") - r4strings.append("'''10: ad adsfj \n { \n sadfj'''") - - r4 = "\n".join( - ["INSERT DATA { %s } ;" % s for s in r4strings] - ) - g.update(r4) - values = set() - for v in g.objects(michel, says): - values.add(str(v)) - self.assertEqual( - values, - set( - [ - re.sub( - r"\\(.)", - r"\1", - re.sub(r"^'''|'''$|^'|'$|" + r'^"""|"""$|^"|"$', r"", s), - ) - for s in r4strings - ] - ), - ) - - # IRI Containing ' or # - # The fragment identifier must not be misinterpreted as a comment - # (commenting out the end of the block). - # The ' must not be interpreted as the start of a string, causing the } - # in the literal to be identified as the end of the block. - r5 = """INSERT DATA { , "'}" }""" - - g.update(r5) - values = set() - for v in g.objects(michel, hates): - values.add(str(v)) - self.assertEqual(values, set(["urn:example:foo'bar?baz;a=1&b=2#fragment", "'}"])) - - # Comments - r6 = """ - INSERT DATA { - . # No closing brace: } - . 
- } - #Final { } comment""" - - g.update(r6) - values = set() - for v in g.objects(bob, hates): - values.add(v) - self.assertEqual(values, set([bob, michel])) - - def testNamedGraphUpdateWithInitBindings(self): - g = self.graph.get_context(graphuri) - r = "INSERT { ?a ?b ?c } WHERE {}" - g.update(r, initBindings={"a": michel, "b": likes, "c": pizza}) - self.assertEqual( - set(g.triples((None, None, None))), - set([(michel, likes, pizza)]), - "only michel likes pizza", - ) - - def testEmptyNamedGraph(self): - empty_graph_iri = "urn:empty-graph-1" - self.graph.update("CREATE GRAPH <%s>" % empty_graph_iri) - named_graphs = [ - str(r[0]) for r in self.graph.query("SELECT ?name WHERE { GRAPH ?name {} }") - ] - # Some SPARQL endpoint backends (like TDB) are not able to find empty named graphs - # (at least with this query) - if empty_graph_iri in named_graphs: - self.assertTrue( - empty_graph_iri in [str(g.identifier) for g in self.graph.contexts()] - ) - - def testEmptyLiteral(self): - # test for https://github.com/RDFLib/rdflib/issues/457 - # also see test_issue457.py which is sparql store independent! - g = self.graph.get_context(graphuri) - g.add( - ( - URIRef("http://example.com/s"), - URIRef("http://example.com/p"), - Literal(""), - ) - ) - - o = tuple(g)[0][2] - self.assertEqual(o, Literal(""), repr(o)) - - -if __name__ == "__main__": - unittest.main() diff --git a/test/test_store_triple_store.py b/test/test_store_triple_store.py deleted file mode 100644 index f37bea33e..000000000 --- a/test/test_store_triple_store.py +++ /dev/null @@ -1,35 +0,0 @@ -import unittest - -from rdflib.term import BNode, Literal -from rdflib.namespace import RDFS -from rdflib.graph import Graph - - -class GraphTest(unittest.TestCase): - backend = "default" - path = "store" - - def setUp(self): - self.store = Graph(store=self.backend) - self.store.open(self.path) - self.remove_me = (BNode(), RDFS.label, Literal("remove_me")) - self.store.add(self.remove_me) - - def tearDown(self): - self.store.close() - - def testAdd(self): - subject = BNode() - self.store.add((subject, RDFS.label, Literal("foo"))) - - def testRemove(self): - self.store.remove(self.remove_me) - self.store.remove((None, None, None)) - - def testTriples(self): - for s, p, o in self.store: - pass - - -if __name__ == "__main__": - unittest.main() From 4a02af7c6fc302749820f06e2080aad0d01fe4b5 Mon Sep 17 00:00:00 2001 From: Graham Higgins Date: Fri, 18 Mar 2022 10:53:48 +0000 Subject: [PATCH 05/12] add omitted test data file --- test/consistent_test_data/timbl-card.nquads | 86 +++++++++++++++++++++ 1 file changed, 86 insertions(+) create mode 100644 test/consistent_test_data/timbl-card.nquads diff --git a/test/consistent_test_data/timbl-card.nquads b/test/consistent_test_data/timbl-card.nquads new file mode 100644 index 000000000..90fe2c144 --- /dev/null +++ b/test/consistent_test_data/timbl-card.nquads @@ -0,0 +1,86 @@ + "Design Issues for the World Wide Web" . + . + _:ub1bL214C17 . + . + "Tim Berners-Lee's FOAF file" . + . + . + . + . + . + "timbl's blog on DIG" . + . + . + . + "Identity, Reference and the Web workshop 2006" . + . + "The Next Wave of the Web (Plenary Panel)" . + . + . + "Designing the Web for an Open Society" . + . + . + . + "Tim Berners-Lee's editable profile" . + . + . + . + . + . + . + . + "Tim Berners-Lee" . + . + . + . + _:ub1bL112C13 . + "https://www.w3.org/People/Berners-Lee/card#i" . + . + "Tim Berners-Lee" . + _:ub1bL102C9 . + . + . + . + . + . + . + . + "#ffffff" . + "#00467E" . + . + . + . + . + _:ub1bL131C21 . 
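The card data is plain N-Quads, so it loads directly with rdflib's nquads parser; quads without an explicit graph label, as in this file, land in the default graph. A minimal sketch, using the path added by this patch:

    from rdflib import ConjunctiveGraph

    g = ConjunctiveGraph()
    g.parse("test/consistent_test_data/timbl-card.nquads", format="nquads")

    # contexts() enumerates the graphs the parser populated
    print(len(g), "statements across", len(list(g.contexts())), "context(s)")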
+ "Berners-Lee" . + "Timothy" . + . + . + . + "965c47c5a70db7407210cef6e4e6f5374a525c5c" . + "Timothy Berners-Lee" . + "TimBL" . + "timbl" . + . + "Sir" . + . + . +_:ub1bL102C9 . +_:ub1bL102C9 "Cambridge" . +_:ub1bL102C9 "02139" . +_:ub1bL102C9 "MA" . +_:ub1bL102C9 "32 Vassar Street" . +_:ub1bL112C13 _:ub1bL114C15 . +_:ub1bL112C13 _:ub1bL113C19 . +_:ub1bL113C19 "42.361860" . +_:ub1bL113C19 "-71.091840" . +_:ub1bL114C15 "Cambridge" . +_:ub1bL114C15 "USA" . +_:ub1bL114C15 "02139" . +_:ub1bL114C15 "MIT CSAIL Building 32" . +_:ub1bL114C15 "32 Vassar Street" . +_:ub1bL131C21 "42.361860" . +_:ub1bL131C21 "-71.091840" . +_:ub1bL214C17 . +_:ub1bL214C17 "65537"^^ . +_:ub1bL214C17 "ebe99c737bd3670239600547e5e2eb1d1497da39947b6576c3c44ffeca32cf0f2f7cbee3c47001278a90fc7fc5bcf292f741eb1fcd6bbe7f90650afb519cf13e81b2bffc6e02063ee5a55781d420b1dfaf61c15758480e66d47fb0dcb5fa7b9f7f1052e5ccbd01beee9553c3b6b51f4daf1fce991294cd09a3d1d636bc6c7656e4455d0aff06daec740ed0084aa6866fcae1359de61cc12dbe37c8fa42e977c6e727a8258bb9a3f265b27e3766fe0697f6aa0bcc81c3f026e387bd7bbc81580dc1853af2daa099186a9f59da526474ef6ec0a3d84cf400be3261b6b649dea1f78184862d34d685d2d587f09acc14cd8e578fdd2283387821296f0af39b8d8845"^^ . \ No newline at end of file From 29bacc537e238aed5d38c009ed9f3001ba5bfc86 Mon Sep 17 00:00:00 2001 From: Graham Higgins Date: Fri, 18 Mar 2022 17:23:11 +0000 Subject: [PATCH 06/12] strange but true --- test/{test_graph => }/test_graph.py | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename test/{test_graph => }/test_graph.py (100%) diff --git a/test/test_graph/test_graph.py b/test/test_graph.py similarity index 100% rename from test/test_graph/test_graph.py rename to test/test_graph.py From 9be9d837b55c93eba38cfd20f24f6646dbbaaee7 Mon Sep 17 00:00:00 2001 From: Graham Higgins Date: Fri, 18 Mar 2022 17:58:12 +0000 Subject: [PATCH 07/12] revert unintended change --- test/test_store/test_store_berkeleydb.py | 239 +++++++++++------------ 1 file changed, 112 insertions(+), 127 deletions(-) diff --git a/test/test_store/test_store_berkeleydb.py b/test/test_store/test_store_berkeleydb.py index 9b68605e4..f96fb9bc2 100644 --- a/test/test_store/test_store_berkeleydb.py +++ b/test/test_store/test_store_berkeleydb.py @@ -1,145 +1,130 @@ -import pytest +import unittest from tempfile import mktemp -from rdflib import Dataset, URIRef +from rdflib import ConjunctiveGraph, URIRef from rdflib.store import VALID_STORE from rdflib.plugins.stores.berkeleydb import has_bsddb -pytestmark = pytest.mark.skipif( - not has_bsddb, reason="skipping berkeleydb tests, modile not available" -) - - -@pytest.fixture -def get_dataset(): - path = mktemp() - dataset = Dataset("BerkeleyDB") - rt = dataset.open(path, create=True) - assert rt == VALID_STORE, "The underlying store is corrupt" - assert ( - len(dataset) == 0 - ), "There must be zero triples in the graph just after store (file) creation" - data = """ - PREFIX : - - :a :b :c . - :d :e :f . - :d :g :h . - """ - dataset.default_graph.parse(data=data, format="ttl") - - yield path, dataset - - dataset.close() - dataset.destroy(path) - - -def test_write(get_dataset): - path, dataset = get_dataset - assert ( - len(dataset.default_graph) == 3 - ), "There must be three triples in the graph after the first data chunk parse" - data2 = """ - PREFIX : - :d :i :j . 
- """ - dataset.parse(data=data2, format="ttl") - assert ( - len(dataset) == 4 - ), "There must be four triples in the graph after the second data chunk parse" - data3 = """ +class BerkeleyDBTestCase(unittest.TestCase): + def setUp(self): + if not has_bsddb: + self.skipTest("skipping as berkleydb is missing") + self.store_name = "BerkeleyDB" + self.path = mktemp() + self.g = ConjunctiveGraph(store=self.store_name) + self.rt = self.g.open(self.path, create=True) + assert self.rt == VALID_STORE, "The underlying store is corrupt" + assert ( + len(self.g) == 0 + ), "There must be zero triples in the graph just after store (file) creation" + data = """ + PREFIX : + + :a :b :c . + :d :e :f . + :d :g :h . + """ + self.g.parse(data=data, format="ttl") + + def tearDown(self): + self.g.close() + + def test_write(self): + assert ( + len(self.g) == 3 + ), "There must be three triples in the graph after the first data chunk parse" + data2 = """ + PREFIX : + + :d :i :j . + """ + self.g.parse(data=data2, format="ttl") + assert ( + len(self.g) == 4 + ), "There must be four triples in the graph after the second data chunk parse" + data3 = """ + PREFIX : + + :d :i :j . + """ + self.g.parse(data=data3, format="ttl") + assert ( + len(self.g) == 4 + ), "There must still be four triples in the graph after the thrd data chunk parse" + + def test_read(self): + sx = None + for s in self.g.subjects( + predicate=URIRef("https://example.org/e"), + object=URIRef("https://example.org/f"), + ): + sx = s + assert sx == URIRef("https://example.org/d") + + def test_sparql_query(self): + q = """ PREFIX : - :d :i :j . - """ - dataset.parse(data=data3, format="ttl") - assert ( - len(dataset) == 4 - ), "There must still be four triples in the graph after the third data chunk parse" - - -def test_read(get_dataset): - path, dataset = get_dataset - sx = None - for s in dataset.subjects( - predicate=URIRef("https://example.org/e"), - object=URIRef("https://example.org/f"), - ): - sx = s - assert sx == URIRef("https://example.org/d") - - -def test_sparql_query(get_dataset): - path, dataset = get_dataset - q = """ - PREFIX : - - SELECT (COUNT(*) AS ?c) - WHERE { - :d ?p ?o . - }""" - - c = 0 - for row in dataset.query(q): - c = int(row.c) - assert c == 2, "SPARQL COUNT must return 2" - + SELECT (COUNT(*) AS ?c) + WHERE { + :d ?p ?o . + }""" -def test_sparql_insert(get_dataset): - path, dataset = get_dataset - q = """ - PREFIX : + c = 0 + for row in self.g.query(q): + c = int(row.c) + assert c == 2, "SPARQL COUNT must return 2" - INSERT DATA { - :x :y :z . - }""" + def test_sparql_insert(self): + q = """ + PREFIX : - dataset.update(q) - assert len(dataset) == 4, "After extra triple insert, length must be 4" + INSERT DATA { + :x :y :z . + }""" + self.g.update(q) + assert len(self.g) == 4, "After extra triple insert, length must be 4" -def test_multigraph(get_dataset): - path, dataset = get_dataset - q = """ - PREFIX : + def test_multigraph(self): + q = """ + PREFIX : - INSERT DATA { - GRAPH :m { - :x :y :z . - } - GRAPH :n { - :x :y :z . - } - }""" + INSERT DATA { + GRAPH :m { + :x :y :z . + } + GRAPH :n { + :x :y :z . 
+ } + }""" - dataset.update(q) + self.g.update(q) - q = """ - SELECT (COUNT(?g) AS ?c) - WHERE { - SELECT DISTINCT ?g + q = """ + SELECT (COUNT(?g) AS ?c) WHERE { - GRAPH ?g { - ?s ?p ?o + SELECT DISTINCT ?g + WHERE { + GRAPH ?g { + ?s ?p ?o + } } } - } - """ - c = 0 - for row in dataset.query(q): - c = int(row.c) - assert c == 2, "SPARQL COUNT must return 2 (:m & :n)" - - -def test_open_shut(get_dataset): - path, dataset = get_dataset - assert len(dataset) == 3, "Initially we must have 3 triples from setUp" - dataset.close() - dataset = None - - # reopen the graph - dataset = Dataset("BerkeleyDB") - dataset.open(path, create=False) - assert ( - len(dataset) == 3 - ), "After close and reopen, we should still have the 3 originally added triples" + """ + c = 0 + for row in self.g.query(q): + c = int(row.c) + assert c == 3, "SPARQL COUNT must return 3 (default, :m & :n)" + + def test_open_shut(self): + assert len(self.g) == 3, "Initially we must have 3 triples from setUp" + self.g.close() + self.g = None + + # reopen the graph + self.g = ConjunctiveGraph("BerkeleyDB") + self.g.open(self.path, create=False) + assert ( + len(self.g) == 3 + ), "After close and reopen, we should still have the 3 originally added triples" From dae90a5222f9cb5d44238a2902b6d4c5c5b34eff Mon Sep 17 00:00:00 2001 From: Graham Higgins Date: Sat, 19 Mar 2022 12:21:00 +0000 Subject: [PATCH 08/12] revert unintentionally-changed test_nquads --- test/test_nquads.py | 340 ++++++++++++++++++-------------------------- 1 file changed, 138 insertions(+), 202 deletions(-) diff --git a/test/test_nquads.py b/test/test_nquads.py index bb1da983d..ac5a6dcd1 100644 --- a/test/test_nquads.py +++ b/test/test_nquads.py @@ -1,207 +1,143 @@ import os -import shutil -import tempfile +import unittest +from rdflib import ConjunctiveGraph, URIRef, Namespace +from test import TEST_DIR -import pytest - -from rdflib import ConjunctiveGraph, Namespace, URIRef, plugin -from rdflib.store import VALID_STORE - -TEST_DIR = os.path.abspath(os.path.dirname(__file__)) TEST_BASE = "test/nquads.rdflib" -pluginstores = [] - -for s in plugin.plugins(None, plugin.Store): - if s.name in ( - "default", - "Memory", - "Auditable", - "Concurrent", - "SimpleMemory", - "SPARQLStore", - "SPARQLUpdateStore", - ): - continue # inappropriate for these tests - - pluginstores.append(s.name) - - -@pytest.fixture( - scope="function", - params=pluginstores, -) -def get_graph(request): - store = request.param - path = tempfile.mktemp() - try: - shutil.rmtree(path) - except Exception: - pass - - try: - graph = ConjunctiveGraph(store=store) - except ImportError: - pytest.skip("Dependencies for store '%s' not available!" 
% store) - - if store != "default": - rt = graph.open(configuration=path, create=True) - assert rt == VALID_STORE, "The underlying store is corrupt" - - assert ( - len(graph) == 0 - ), "There must be zero triples in the graph just after store (file) creation" - - nq_path = os.path.relpath( - os.path.join(TEST_DIR, "nquads.rdflib/example.nquads"), os.curdir - ) - with open(nq_path, "rb") as data: - graph.parse(data, format="nquads") - - yield graph - - graph.close() - graph.destroy(path) - try: - shutil.rmtree(path) - except Exception: - pass - - -def test_01_simple_open(get_graph): - graph = get_graph - assert len(graph.store) == 449 - - -def test_02_contexts(get_graph): - # There should be 16 separate contexts - graph = get_graph - assert len([x for x in graph.store.contexts()]) == 16 - - -def test_03_get_value(get_graph): - # is the name of entity E10009 "Arco Publications"? - # (in graph http://bibliographica.org/entity/E10009) - # Looking for: - # - # - # "Arco Publications" - # - - graph = get_graph - s = URIRef("http://bibliographica.org/entity/E10009") - FOAF = Namespace("http://xmlns.com/foaf/0.1/") - - assert graph.value(subject=s, predicate=FOAF.name).eq("Arco Publications") - - -def test_context_is_optional(get_graph): - graph = get_graph - nq_path = os.path.relpath( - os.path.join(TEST_DIR, "nquads.rdflib/test6.nq"), os.curdir - ) - with open(nq_path, "rb") as data: - graph.parse(data, format="nquads") - assert len(graph) > 0 - - -def test_serialize(): - g = ConjunctiveGraph() - uri1 = URIRef("http://example.org/mygraph1") - uri2 = URIRef("http://example.org/mygraph2") - - bob = URIRef("urn:example:bob") - likes = URIRef("urn:example:likes") - pizza = URIRef("urn:example:pizza") - - g.get_context(uri1).add((bob, likes, pizza)) - g.get_context(uri2).add((bob, likes, pizza)) - - s = g.serialize(format="nquads", encoding="utf-8") - assert len([x for x in s.split(b"\n") if x.strip()]) == 2 - - g2 = ConjunctiveGraph() - g2.parse(data=s, format="nquads") - - assert len(g) == len(g2) - assert sorted(x.identifier for x in g.contexts()) == sorted( - x.identifier for x in g2.contexts() - ) - - -@pytest.fixture -def get_data(): - data = open("test/nquads.rdflib/bnode_context.nquads", "rb") - data_obnodes = open("test/nquads.rdflib/bnode_context_obj_bnodes.nquads", "rb") - yield data, data_obnodes - - data.close() - - -def test_parse_shared_bnode_context(get_data): - data, data_obnodes = get_data - bnode_ctx = dict() - g = ConjunctiveGraph() - h = ConjunctiveGraph() - g.parse(data, format="nquads", bnode_context=bnode_ctx) - data.seek(0) - h.parse(data, format="nquads", bnode_context=bnode_ctx) - assert set(h.subjects()) == set(g.subjects()) - - -def test_parse_shared_bnode_context_same_graph(get_data): - data, data_obnodes = get_data - - bnode_ctx = dict() - g = ConjunctiveGraph() - g.parse(data_obnodes, format="nquads", bnode_context=bnode_ctx) - o1 = set(g.objects()) - data_obnodes.seek(0) - g.parse(data_obnodes, format="nquads", bnode_context=bnode_ctx) - o2 = set(g.objects()) - assert o1 == o2 - - -def test_parse_distinct_bnode_context(get_data): - data, data_obnodes = get_data - g = ConjunctiveGraph() - g.parse(data, format="nquads", bnode_context=dict()) - s1 = set(g.subjects()) - data.seek(0) - g.parse(data, format="nquads", bnode_context=dict()) - s2 = set(g.subjects()) - assert set() != s2 - s1 - - -def test_parse_distinct_bnode_contexts_between_graphs(get_data): - data, data_obnodes = get_data - g = ConjunctiveGraph() - h = ConjunctiveGraph() - g.parse(data, format="nquads") - 
s1 = set(g.subjects()) - data.seek(0) - h.parse(data, format="nquads") - s2 = set(h.subjects()) - assert s1 != s2 - - -def test_parse_distinct_bnode_contexts_named_graphs(get_data): - data, data_obnodes = get_data - g = ConjunctiveGraph() - h = ConjunctiveGraph() - g.parse(data, format="nquads") - data.seek(0) - h.parse(data, format="nquads") - assert set(h.contexts()) != set(g.contexts()) - - -def test_parse_shared_bnode_contexts_named_graphs(get_data): - data, data_obnodes = get_data - bnode_ctx = dict() - g = ConjunctiveGraph() - h = ConjunctiveGraph() - g.parse(data, format="nquads", bnode_context=bnode_ctx) - data.seek(0) - h.parse(data, format="nquads", bnode_context=bnode_ctx) - assert set(h.contexts()) == set(g.contexts()) +class NQuadsParserTest(unittest.TestCase): + def _load_example(self): + g = ConjunctiveGraph() + nq_path = os.path.relpath( + os.path.join(TEST_DIR, "nquads.rdflib/example.nquads"), os.curdir + ) + with open(nq_path, "rb") as data: + g.parse(data, format="nquads") + return g + + def test_01_simple_open(self): + g = self._load_example() + assert len(g.store) == 449 + + def test_02_contexts(self): + # There should be 16 separate contexts + g = self._load_example() + assert len([x for x in g.store.contexts()]) == 16 + + def test_03_get_value(self): + # is the name of entity E10009 "Arco Publications"? + # (in graph http://bibliographica.org/entity/E10009) + # Looking for: + # + # + # "Arco Publications" + # + + g = self._load_example() + s = URIRef("http://bibliographica.org/entity/E10009") + FOAF = Namespace("http://xmlns.com/foaf/0.1/") + self.assertTrue(g.value(s, FOAF.name).eq("Arco Publications")) + + def test_context_is_optional(self): + g = ConjunctiveGraph() + nq_path = os.path.relpath( + os.path.join(TEST_DIR, "nquads.rdflib/test6.nq"), os.curdir + ) + with open(nq_path, "rb") as data: + g.parse(data, format="nquads") + assert len(g) > 0 + + def test_serialize(self): + g = ConjunctiveGraph() + uri1 = URIRef("http://example.org/mygraph1") + uri2 = URIRef("http://example.org/mygraph2") + + bob = URIRef("urn:example:bob") + likes = URIRef("urn:example:likes") + pizza = URIRef("urn:example:pizza") + + g.get_context(uri1).add((bob, likes, pizza)) + g.get_context(uri2).add((bob, likes, pizza)) + + s = g.serialize(format="nquads", encoding="utf-8") + self.assertEqual(len([x for x in s.split(b"\n") if x.strip()]), 2) + + g2 = ConjunctiveGraph() + g2.parse(data=s, format="nquads") + + self.assertEqual(len(g), len(g2)) + self.assertEqual( + sorted(x.identifier for x in g.contexts()), + sorted(x.identifier for x in g2.contexts()), + ) + + +class BnodeContextTest(unittest.TestCase): + def setUp(self): + self.data = open("test/nquads.rdflib/bnode_context.nquads", "rb") + self.data_obnodes = open( + "test/nquads.rdflib/bnode_context_obj_bnodes.nquads", "rb" + ) + + def tearDown(self): + self.data.close() + + def test_parse_shared_bnode_context(self): + bnode_ctx = dict() + g = ConjunctiveGraph() + h = ConjunctiveGraph() + g.parse(self.data, format="nquads", bnode_context=bnode_ctx) + self.data.seek(0) + h.parse(self.data, format="nquads", bnode_context=bnode_ctx) + self.assertEqual(set(h.subjects()), set(g.subjects())) + + def test_parse_shared_bnode_context_same_graph(self): + bnode_ctx = dict() + g = ConjunctiveGraph() + g.parse(self.data_obnodes, format="nquads", bnode_context=bnode_ctx) + o1 = set(g.objects()) + self.data_obnodes.seek(0) + g.parse(self.data_obnodes, format="nquads", bnode_context=bnode_ctx) + o2 = set(g.objects()) + self.assertEqual(o1, o2) + + def 
test_parse_distinct_bnode_context(self): + g = ConjunctiveGraph() + g.parse(self.data, format="nquads", bnode_context=dict()) + s1 = set(g.subjects()) + self.data.seek(0) + g.parse(self.data, format="nquads", bnode_context=dict()) + s2 = set(g.subjects()) + self.assertNotEqual(set(), s2 - s1) + + def test_parse_distinct_bnode_contexts_between_graphs(self): + g = ConjunctiveGraph() + h = ConjunctiveGraph() + g.parse(self.data, format="nquads") + s1 = set(g.subjects()) + self.data.seek(0) + h.parse(self.data, format="nquads") + s2 = set(h.subjects()) + self.assertNotEqual(s1, s2) + + def test_parse_distinct_bnode_contexts_named_graphs(self): + g = ConjunctiveGraph() + h = ConjunctiveGraph() + g.parse(self.data, format="nquads") + self.data.seek(0) + h.parse(self.data, format="nquads") + self.assertNotEqual(set(h.contexts()), set(g.contexts())) + + def test_parse_shared_bnode_contexts_named_graphs(self): + bnode_ctx = dict() + g = ConjunctiveGraph() + h = ConjunctiveGraph() + g.parse(self.data, format="nquads", bnode_context=bnode_ctx) + self.data.seek(0) + h.parse(self.data, format="nquads", bnode_context=bnode_ctx) + self.assertEqual(set(h.contexts()), set(g.contexts())) + + +if __name__ == "__main__": + unittest.main() From 70c12c79402fcdde22d068f9bf7b66b998067ab5 Mon Sep 17 00:00:00 2001 From: Graham Higgins Date: Sat, 19 Mar 2022 12:30:14 +0000 Subject: [PATCH 09/12] revert unintentionally-changed test files --- .../test_conjunctivegraph_generators.py | 25 +- ..._conjunctivegraph_operator_combinations.py | 22 +- test/test_dataset/test_dataset.py | 396 +++++----- test/test_dataset/test_dataset_generators.py | 21 +- test/test_graph/test_graph_context.py | 740 +++++++++--------- test/test_graph/test_graph_generators.py | 19 +- 6 files changed, 606 insertions(+), 617 deletions(-) diff --git a/test/test_conjunctivegraph/test_conjunctivegraph_generators.py b/test/test_conjunctivegraph/test_conjunctivegraph_generators.py index 9c134977a..07e88a4f7 100644 --- a/test/test_conjunctivegraph/test_conjunctivegraph_generators.py +++ b/test/test_conjunctivegraph/test_conjunctivegraph_generators.py @@ -1,22 +1,21 @@ import os from rdflib import ConjunctiveGraph, URIRef -from test.data import ( - CONSISTENT_DATA_DIR, - michel, - tarek, - bob, - likes, - hates, - pizza, - cheese, - context1, -) + timblcardn3 = open( - os.path.join(CONSISTENT_DATA_DIR, "timbl-card.n3") + os.path.join(os.path.dirname(__file__), "consistent_test_data", "timbl-card.n3") ).read() +michel = URIRef("urn:example:michel") +tarek = URIRef("urn:example:tarek") +bob = URIRef("urn:example:bob") +likes = URIRef("urn:example:likes") +hates = URIRef("urn:example:hates") +pizza = URIRef("urn:example:pizza") +cheese = URIRef("urn:example:cheese") + + def add_stuff(graph): graph.add((tarek, likes, pizza)) graph.add((tarek, likes, cheese)) @@ -89,7 +88,7 @@ def test_parse_berners_lee_card_into_conjunctivegraph_default(): def test_parse_berners_lee_card_into_named_graph(): - graph = ConjunctiveGraph(identifier=context1) + graph = ConjunctiveGraph(identifier=URIRef("context-1")) graph.parse(data=timblcardn3, format="n3") assert len(list(graph.subjects())) == no_of_statements_in_card assert len(list(graph.subjects(unique=True))) == no_of_unique_subjects diff --git a/test/test_conjunctivegraph/test_conjunctivegraph_operator_combinations.py b/test/test_conjunctivegraph/test_conjunctivegraph_operator_combinations.py index 0726790d5..c639ceadb 100644 --- a/test/test_conjunctivegraph/test_conjunctivegraph_operator_combinations.py +++ 
b/test/test_conjunctivegraph/test_conjunctivegraph_operator_combinations.py @@ -4,17 +4,21 @@ ConjunctiveGraph, URIRef, ) -from test.data import ( - CONSISTENT_DATA_DIR, - michel, - tarek, - likes, - pizza, - cheese, -) + + +michel = URIRef("urn:example:michel") +tarek = URIRef("urn:example:tarek") +bob = URIRef("urn:example:bob") +likes = URIRef("urn:example:likes") +hates = URIRef("urn:example:hates") +pizza = URIRef("urn:example:pizza") +cheese = URIRef("urn:example:cheese") + +c1 = URIRef("urn:example:context-1") +c2 = URIRef("urn:example:context-2") sportquadstrig = open( - os.path.join(CONSISTENT_DATA_DIR, "sportquads.trig") + os.path.join(os.path.dirname(__file__), "consistent_test_data", "sportquads.trig") ).read() diff --git a/test/test_dataset/test_dataset.py b/test/test_dataset/test_dataset.py index 3af172f6c..c08162ded 100644 --- a/test/test_dataset/test_dataset.py +++ b/test/test_dataset/test_dataset.py @@ -1,25 +1,14 @@ -# -*- coding: utf-8 -*- +import sys import os -import shutil -import tempfile from typing import Optional +import unittest -import pytest +from tempfile import mkdtemp, mkstemp +import shutil -from rdflib import FOAF, XSD, BNode, Literal, URIRef, plugin -from rdflib.graph import DATASET_DEFAULT_GRAPH_ID, Dataset, Graph -from rdflib.plugins.stores.sparqlstore import SPARQLUpdateStore -from test.data import ( - CONSISTENT_DATA_DIR, - michel, - tarek, - bob, - likes, - hates, - pizza, - cheese, - context1, -) +import pytest +from rdflib import Dataset, URIRef, plugin +from rdflib.graph import DATASET_DEFAULT_GRAPH_ID # Will also run SPARQLUpdateStore tests against local SPARQL1.1 endpoint if # available. This assumes SPARQL1.1 query/update endpoints running locally at @@ -36,222 +25,215 @@ HOST = "http://localhost:3030" DB = "/db/" -dgb = URIRef("http://rdflib/net/") - -timblcardn3 = open( - os.path.join(CONSISTENT_DATA_DIR, "timbl-card.n3") -).read() - -timblcardnquads = open( - os.path.join(CONSISTENT_DATA_DIR, "timbl-card.nquads") -).read() - -no_of_statements_in_card = 86 -no_of_unique_subjects = 20 -no_of_unique_predicates = 58 -no_of_unique_objects = 62 - - -pluginstores = [] - -for s in plugin.plugins(None, plugin.Store): - if s.name in ("default", "Memory", "Auditable", "Concurrent", "SPARQLStore"): - continue # these are tested by default - - if not s.getClass().graph_aware: - continue - - if s.name == "SPARQLUpdateStore": - from urllib.request import urlopen - - try: - assert len(urlopen(HOST).read()) > 0 - except Exception: - continue - - pluginstores.append(s.name) - -@pytest.fixture( - scope="function", - params=pluginstores, -) -def get_dataset(request): - store = request.param +class DatasetTestCase(unittest.TestCase): + store = "default" + skip_reason = None + slow = True + tmppath = None - try: - dataset = Dataset(store=store) - except ImportError: - pytest.skip("Dependencies for store '%s' not available!" % store) - - graph = Dataset(store=store) - - if not graph.store.graph_aware: - return - - if store == "SPARQLUpdateStore": - root = HOST + DB - path = root + "sparql", root + "update" - else: - path = tempfile.mkdtemp() - - graph.open(path, create=True if store != "SPARQLUpdateStore" else False) - - if store == "SPARQLUpdateStore": + def setUp(self): + if self.skip_reason is not None: + self.skipTest(skip_reason) try: - graph.store.update("CLEAR ALL") - except Exception as e: - if "SPARQLStore does not support BNodes! 
" in str(e): - pass + self.graph = Dataset(store=self.store) + except ImportError: + pytest.skip("Dependencies for store '%s' not available!" % self.store) + if self.store == "SQLite": + _, self.tmppath = mkstemp(prefix="test", dir="/tmp", suffix=".sqlite") + elif self.store == "SPARQLUpdateStore": + root = HOST + DB + self.graph.open((root + "sparql", root + "update")) + else: + self.tmppath = mkdtemp() + + if self.store != "SPARQLUpdateStore": + self.graph.open(self.tmppath, create=True) + self.michel = URIRef("urn:example:michel") + self.tarek = URIRef("urn:example:tarek") + self.bob = URIRef("urn:example:bob") + self.likes = URIRef("urn:example:likes") + self.hates = URIRef("urn:example:hates") + self.pizza = URIRef("urn:example:pizza") + self.cheese = URIRef("urn:cheese") + + # Use regular URIs because SPARQL endpoints like Fuseki alter short names + self.c1 = URIRef("urn:example:context-1") + self.c2 = URIRef("urn:example:context-2") + + # delete the graph for each test! + self.graph.remove((None, None, None)) + for c in self.graph.contexts(): + c.remove((None, None, None)) + assert len(c) == 0 + self.graph.remove_graph(c) + + def tearDown(self): + self.graph.close() + if self.store == "SPARQLUpdateStore": + pass + else: + if os.path.isdir(self.tmppath): + shutil.rmtree(self.tmppath) else: - raise Exception(e) + os.remove(self.tmppath) - yield store, graph - - if store == "SPARQLUpdateStore": - try: - graph.store.update("CLEAR ALL") - except Exception as e: - if "SPARQLStore does not support BNodes! " in str(e): - pass - else: - raise Exception(e) - graph.close() - else: - graph.close() - graph.destroy(path) - if os.path.isdir(path): - shutil.rmtree(path) - else: - try: - os.remove(path) - except: - pass + def testGraphAware(self): + if not self.graph.store.graph_aware: + return -def test_graph_aware(get_dataset): + g = self.graph + g1 = g.graph(self.c1) - store, graph = get_dataset + # Some SPARQL endpoint backends (e.g. TDB) do not consider + # empty named graphs + if self.store != "SPARQLUpdateStore": + # added graph exists + self.assertEqual( + set(x.identifier for x in self.graph.contexts()), + set([self.c1, DATASET_DEFAULT_GRAPH_ID]), + ) - if not graph.store.graph_aware: - return + # added graph is empty + self.assertEqual(len(g1), 0) - g = graph - g1 = g.graph(context1) + g1.add((self.tarek, self.likes, self.pizza)) - # Some SPARQL endpoint backends (e.g. TDB) do not consider - # empty named graphs - if store != "SPARQLUpdateStore": - # added graph exists - assert set(x.identifier for x in graph.contexts()) == set( - [context1, DATASET_DEFAULT_GRAPH_ID] + # added graph still exists + self.assertEqual( + set(x.identifier for x in self.graph.contexts()), + set([self.c1, DATASET_DEFAULT_GRAPH_ID]), ) - # added graph is empty - assert len(g1) == 0 + # added graph contains one triple + self.assertEqual(len(g1), 1) - g1.add((tarek, likes, pizza)) + g1.remove((self.tarek, self.likes, self.pizza)) - # added graph still exists - assert set(x.identifier for x in graph.contexts()) == set( - [context1, DATASET_DEFAULT_GRAPH_ID] - ) - - # added graph contains one triple - assert len(g1) == 1 + # added graph is empty + self.assertEqual(len(g1), 0) - g1.remove((tarek, likes, pizza)) + # Some SPARQL endpoint backends (e.g. 
TDB) do not consider + # empty named graphs + if self.store != "SPARQLUpdateStore": + # graph still exists, although empty + self.assertEqual( + set(x.identifier for x in self.graph.contexts()), + set([self.c1, DATASET_DEFAULT_GRAPH_ID]), + ) - # added graph is empty - assert len(g1) == 0 + g.remove_graph(self.c1) - # Some SPARQL endpoint backends (e.g. TDB) do not consider - # empty named graphs - if store != "SPARQLUpdateStore": - # graph still exists, although empty - assert set(x.identifier for x in graph.contexts()) == set( - [context1, DATASET_DEFAULT_GRAPH_ID] + # graph is gone + self.assertEqual( + set(x.identifier for x in self.graph.contexts()), + set([DATASET_DEFAULT_GRAPH_ID]), ) - g.remove_graph(context1) - - # graph is gone - assert set(x.identifier for x in graph.contexts()) == set( - [DATASET_DEFAULT_GRAPH_ID] - ) - - -def test_default_graph(get_dataset): - # Something the default graph is read-only (e.g. TDB in union mode) - - store, graph = get_dataset - if store == "SPARQLUpdateStore": - print( - "Please make sure updating the default graph " - "is supported by your SPARQL endpoint" + def testDefaultGraph(self): + # Something the default graph is read-only (e.g. TDB in union mode) + if self.store == "SPARQLUpdateStore": + print( + "Please make sure updating the default graph " + "is supported by your SPARQL endpoint" + ) + + self.graph.add((self.tarek, self.likes, self.pizza)) + self.assertEqual(len(self.graph), 1) + # only default exists + self.assertEqual( + set(x.identifier for x in self.graph.contexts()), + set([DATASET_DEFAULT_GRAPH_ID]), ) - graph.add((tarek, likes, pizza)) - assert len(graph) == 1 - # only default exists - assert set(x.identifier for x in graph.contexts()) == set( - [DATASET_DEFAULT_GRAPH_ID] - ) - - # removing default graph removes triples but not actual graph - graph.remove_graph(DATASET_DEFAULT_GRAPH_ID) - - assert len(graph) == 0 - # default still exists - assert set(x.identifier for x in graph.contexts()) == set( - [DATASET_DEFAULT_GRAPH_ID] - ) + # removing default graph removes triples but not actual graph + self.graph.remove_graph(DATASET_DEFAULT_GRAPH_ID) - -def test_not_union(get_dataset): - - store, graph = get_dataset - # Union depends on the SPARQL endpoint configuration - if store == "SPARQLUpdateStore": - print( - "Please make sure your SPARQL endpoint has not configured " - "its default graph as the union of the named graphs" + self.assertEqual(len(self.graph), 0) + # default still exists + self.assertEqual( + set(x.identifier for x in self.graph.contexts()), + set([DATASET_DEFAULT_GRAPH_ID]), ) - g1 = graph.graph(context1) - g1.add((tarek, likes, pizza)) - - assert list(graph.objects(tarek, None)) == [] - assert list(g1.objects(tarek, None)) == [pizza] - -def test_iter(get_dataset): - - store, d = get_dataset - """PR 1382: adds __iter__ to Dataset""" - # d = Dataset() - uri_a = URIRef("https://example.com/a") - uri_b = URIRef("https://example.com/b") - uri_c = URIRef("https://example.com/c") - uri_d = URIRef("https://example.com/d") + def testNotUnion(self): + # Union depends on the SPARQL endpoint configuration + if self.store == "SPARQLUpdateStore": + print( + "Please make sure your SPARQL endpoint has not configured " + "its default graph as the union of the named graphs" + ) + g1 = self.graph.graph(self.c1) + g1.add((self.tarek, self.likes, self.pizza)) + + self.assertEqual(list(self.graph.objects(self.tarek, None)), []) + self.assertEqual(list(g1.objects(self.tarek, None)), [self.pizza]) + + def testIter(self): + """PR 1382: 
adds __iter__ to Dataset""" + d = Dataset() + uri_a = URIRef("https://example.com/a") + uri_b = URIRef("https://example.com/b") + uri_c = URIRef("https://example.com/c") + uri_d = URIRef("https://example.com/d") + + d.add_graph(URIRef("https://example.com/g1")) + d.add((uri_a, uri_b, uri_c, URIRef("https://example.com/g1"))) + d.add( + (uri_a, uri_b, uri_c, URIRef("https://example.com/g1")) + ) # pointless addition: duplicates above + + d.add_graph(URIRef("https://example.com/g2")) + d.add((uri_a, uri_b, uri_c, URIRef("https://example.com/g2"))) + d.add((uri_a, uri_b, uri_d, URIRef("https://example.com/g1"))) # new, uri_d + + # traditional iterator + i_trad = 0 + for t in d.quads((None, None, None)): + i_trad += 1 + + # new Dataset.__iter__ iterator + i_new = 0 + for t in d: + i_new += 1 + + self.assertEqual(i_new, i_trad) # both should be 3 + + +# dynamically create classes for each registered Store + +pluginname = None +if __name__ == "__main__": + if len(sys.argv) > 1: + pluginname = sys.argv[1] + +tests = 0 + +for s in plugin.plugins(pluginname, plugin.Store): + skip_reason: Optional[str] = None + if s.name in ("default", "Memory", "Auditable", "Concurrent", "SPARQLStore"): + continue # these are tested by default - d.add_graph(URIRef("https://example.com/g1")) - d.add((uri_a, uri_b, uri_c, URIRef("https://example.com/g1"))) - d.add( - (uri_a, uri_b, uri_c, URIRef("https://example.com/g1")) - ) # pointless addition: duplicates above + if not s.getClass().graph_aware: + continue - d.add_graph(URIRef("https://example.com/g2")) - d.add((uri_a, uri_b, uri_c, URIRef("https://example.com/g2"))) - d.add((uri_a, uri_b, uri_d, URIRef("https://example.com/g1"))) # new, uri_d + if s.name == "SPARQLUpdateStore": + from urllib.request import urlopen - # traditional iterator - i_trad = 0 - for t in d.quads((None, None, None)): - i_trad += 1 + try: + assert len(urlopen(HOST).read()) > 0 + except BaseException: + skip_reason = "No SPARQL endpoint for %s (tests skipped)\n" % s.name + sys.stderr.write(skip_reason) + + locals()["t%d" % tests] = type( + "%sContextTestCase" % s.name, + (DatasetTestCase,), + {"store": s.name, "skip_reason": skip_reason}, + ) + tests += 1 - # new Dataset.__iter__ iterator - i_new = 0 - for t in d: - i_new += 1 - assert i_new == i_trad # both should be 3 +if __name__ == "__main__": + unittest.main() diff --git a/test/test_dataset/test_dataset_generators.py b/test/test_dataset/test_dataset_generators.py index f6d6d5223..874e48554 100644 --- a/test/test_dataset/test_dataset_generators.py +++ b/test/test_dataset/test_dataset_generators.py @@ -1,21 +1,20 @@ import os from rdflib import Dataset, URIRef -from test.data import ( - CONSISTENT_DATA_DIR, - michel, - tarek, - bob, - likes, - hates, - pizza, - cheese, -) timblcardn3 = open( - os.path.join(CONSISTENT_DATA_DIR, "timbl-card.n3") + os.path.join(os.path.dirname(__file__), "consistent_test_data", "timbl-card.n3") ).read() +michel = URIRef("urn:example:michel") +tarek = URIRef("urn:example:tarek") +bob = URIRef("urn:example:bob") +likes = URIRef("urn:example:likes") +hates = URIRef("urn:example:hates") +pizza = URIRef("urn:example:pizza") +cheese = URIRef("urn:example:cheese") + + def add_stuff(graph): graph.add((tarek, likes, pizza)) graph.add((tarek, likes, cheese)) diff --git a/test/test_graph/test_graph_context.py b/test/test_graph/test_graph_context.py index ac4d332bb..ab4df544f 100644 --- a/test/test_graph/test_graph_context.py +++ b/test/test_graph/test_graph_context.py @@ -1,381 +1,389 @@ +import sys import os +import 
unittest + +from tempfile import mkdtemp, mkstemp import shutil -import tempfile import pytest - -from rdflib import BNode, ConjunctiveGraph, Graph, URIRef, plugin -from rdflib.store import VALID_STORE - -michel = URIRef("urn:example:michel") -tarek = URIRef("urn:example:tarek") -bob = URIRef("urn:example:bob") -likes = URIRef("urn:example:likes") -hates = URIRef("urn:example:hates") -pizza = URIRef("urn:example:pizza") -cheese = URIRef("urn:example:cheese") - -c1 = URIRef("urn:example:context-1") -c2 = URIRef("urn:example:context-2") - - -pluginstores = [] - -for s in plugin.plugins(None, plugin.Store): +from rdflib import Graph, ConjunctiveGraph, URIRef, BNode, plugin + +class ContextTestCase(unittest.TestCase): + store = "default" + slow = True + tmppath = None + + def setUp(self): + try: + self.graph = ConjunctiveGraph(store=self.store) + except ImportError: + pytest.skip("Dependencies for store '%s' not available!" % self.store) + if self.store == "SQLite": + _, self.tmppath = mkstemp(prefix="test", dir="/tmp", suffix=".sqlite") + else: + self.tmppath = mkdtemp() + self.graph.open(self.tmppath, create=True) + self.michel = URIRef("michel") + self.tarek = URIRef("tarek") + self.bob = URIRef("bob") + self.likes = URIRef("likes") + self.hates = URIRef("hates") + self.pizza = URIRef("pizza") + self.cheese = URIRef("cheese") + + self.c1 = URIRef("context-1") + self.c2 = URIRef("context-2") + + # delete the graph for each test! + self.graph.remove((None, None, None)) + + def tearDown(self): + self.graph.close() + if os.path.isdir(self.tmppath): + shutil.rmtree(self.tmppath) + else: + os.remove(self.tmppath) + + def addStuff(self): + tarek = self.tarek + michel = self.michel + bob = self.bob + likes = self.likes + hates = self.hates + pizza = self.pizza + cheese = self.cheese + c1 = self.c1 + graph = Graph(self.graph.store, c1) + + graph.add((tarek, likes, pizza)) + graph.add((tarek, likes, cheese)) + graph.add((michel, likes, pizza)) + graph.add((michel, likes, cheese)) + graph.add((bob, likes, cheese)) + graph.add((bob, hates, pizza)) + graph.add((bob, hates, michel)) # gasp! + + def removeStuff(self): + tarek = self.tarek + michel = self.michel + bob = self.bob + likes = self.likes + hates = self.hates + pizza = self.pizza + cheese = self.cheese + c1 = self.c1 + graph = Graph(self.graph.store, c1) + + graph.remove((tarek, likes, pizza)) + graph.remove((tarek, likes, cheese)) + graph.remove((michel, likes, pizza)) + graph.remove((michel, likes, cheese)) + graph.remove((bob, likes, cheese)) + graph.remove((bob, hates, pizza)) + graph.remove((bob, hates, michel)) # gasp! + + def addStuffInMultipleContexts(self): + c1 = self.c1 + c2 = self.c2 + triple = (self.pizza, self.hates, self.tarek) # revenge! 
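The helper being restored here depends on a property of context-aware stores that is easy to miss: adding the same triple to several contexts of one store creates one logical triple in the conjunctive view, not three. A minimal sketch with the default in-memory store:

    from rdflib import ConjunctiveGraph, Graph, URIRef

    cg = ConjunctiveGraph()
    triple = (URIRef("urn:example:pizza"),
              URIRef("urn:example:hates"),
              URIRef("urn:example:tarek"))

    cg.add(triple)                                        # default context
    Graph(cg.store, URIRef("urn:example:context-1")).add(triple)
    Graph(cg.store, URIRef("urn:example:context-2")).add(triple)

    assert len(cg) == 1                          # distinct triples, not quads
    assert len(list(cg.contexts(triple))) == 3   # default + the two named

This is exactly what testLenInMultipleContexts below asserts with its "only + 1" comment.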
+ + # add to default context + self.graph.add(triple) + # add to context 1 + graph = Graph(self.graph.store, c1) + graph.add(triple) + # add to context 2 + graph = Graph(self.graph.store, c2) + graph.add(triple) + + def testConjunction(self): + if self.store == "SQLite": + pytest.skip("Skipping known issue with __len__") + self.addStuffInMultipleContexts() + triple = (self.pizza, self.likes, self.pizza) + # add to context 1 + graph = Graph(self.graph.store, self.c1) + graph.add(triple) + self.assertEqual(len(self.graph), len(graph)) + + def testAdd(self): + self.addStuff() + + def testRemove(self): + self.addStuff() + self.removeStuff() + + def testLenInOneContext(self): + c1 = self.c1 + # make sure context is empty + + self.graph.remove_context(self.graph.get_context(c1)) + graph = Graph(self.graph.store, c1) + oldLen = len(self.graph) + + for i in range(0, 10): + graph.add((BNode(), self.hates, self.hates)) + self.assertEqual(len(graph), oldLen + 10) + self.assertEqual(len(self.graph.get_context(c1)), oldLen + 10) + self.graph.remove_context(self.graph.get_context(c1)) + self.assertEqual(len(self.graph), oldLen) + self.assertEqual(len(graph), 0) + + def testLenInMultipleContexts(self): + if self.store == "SQLite": + pytest.skip("Skipping known issue with __len__") + oldLen = len(self.graph) + self.addStuffInMultipleContexts() + + # addStuffInMultipleContexts is adding the same triple to + # three different contexts. So it's only + 1 + self.assertEqual(len(self.graph), oldLen + 1) + + graph = Graph(self.graph.store, self.c1) + self.assertEqual(len(graph), oldLen + 1) + + def testRemoveInMultipleContexts(self): + c1 = self.c1 + c2 = self.c2 + triple = (self.pizza, self.hates, self.tarek) # revenge! + + self.addStuffInMultipleContexts() + + # triple should be still in store after removing it from c1 + c2 + self.assertTrue(triple in self.graph) + graph = Graph(self.graph.store, c1) + graph.remove(triple) + self.assertTrue(triple in self.graph) + graph = Graph(self.graph.store, c2) + graph.remove(triple) + self.assertTrue(triple in self.graph) + self.graph.remove(triple) + # now gone! + self.assertTrue(triple not in self.graph) + + # add again and see if remove without context removes all triples! + self.addStuffInMultipleContexts() + self.graph.remove(triple) + self.assertTrue(triple not in self.graph) + + def testContexts(self): + triple = (self.pizza, self.hates, self.tarek) # revenge! 
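Removal follows the complementary rule: deleting through one context leaves the triple visible in the union until every context drops it, whereas a context-less remove on the conjunctive graph clears it everywhere at once. In isolation (in-memory store assumed):

    from rdflib import ConjunctiveGraph, Graph, URIRef

    cg = ConjunctiveGraph()
    t = (URIRef("urn:example:s"),
         URIRef("urn:example:p"),
         URIRef("urn:example:o"))
    c1 = Graph(cg.store, URIRef("urn:example:context-1"))
    c2 = Graph(cg.store, URIRef("urn:example:context-2"))
    c1.add(t)
    c2.add(t)

    c1.remove(t)           # only context-1 loses the triple
    assert t in cg         # still reachable through context-2

    cg.remove(t)           # context-less remove clears all contexts
    assert t not in cg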
+ + self.addStuffInMultipleContexts() + + def cid(c): + return c.identifier + + self.assertTrue(self.c1 in map(cid, self.graph.contexts())) + self.assertTrue(self.c2 in map(cid, self.graph.contexts())) + + contextList = list(map(cid, list(self.graph.contexts(triple)))) + self.assertTrue(self.c1 in contextList, (self.c1, contextList)) + self.assertTrue(self.c2 in contextList, (self.c2, contextList)) + + def testRemoveContext(self): + c1 = self.c1 + + self.addStuffInMultipleContexts() + self.assertEqual(len(Graph(self.graph.store, c1)), 1) + self.assertEqual(len(self.graph.get_context(c1)), 1) + + self.graph.remove_context(self.graph.get_context(c1)) + self.assertTrue(self.c1 not in self.graph.contexts()) + + def testRemoveAny(self): + Any = None + self.addStuffInMultipleContexts() + self.graph.remove((Any, Any, Any)) + self.assertEqual(len(self.graph), 0) + + def testTriples(self): + tarek = self.tarek + michel = self.michel + bob = self.bob + likes = self.likes + hates = self.hates + pizza = self.pizza + cheese = self.cheese + c1 = self.c1 + asserte = self.assertEqual + triples = self.graph.triples + graph = self.graph + c1graph = Graph(self.graph.store, c1) + c1triples = c1graph.triples + Any = None + + self.addStuff() + + # unbound subjects with context + asserte(len(list(c1triples((Any, likes, pizza)))), 2) + asserte(len(list(c1triples((Any, hates, pizza)))), 1) + asserte(len(list(c1triples((Any, likes, cheese)))), 3) + asserte(len(list(c1triples((Any, hates, cheese)))), 0) + + # unbound subjects without context, same results! + asserte(len(list(triples((Any, likes, pizza)))), 2) + asserte(len(list(triples((Any, hates, pizza)))), 1) + asserte(len(list(triples((Any, likes, cheese)))), 3) + asserte(len(list(triples((Any, hates, cheese)))), 0) + + # unbound objects with context + asserte(len(list(c1triples((michel, likes, Any)))), 2) + asserte(len(list(c1triples((tarek, likes, Any)))), 2) + asserte(len(list(c1triples((bob, hates, Any)))), 2) + asserte(len(list(c1triples((bob, likes, Any)))), 1) + + # unbound objects without context, same results! + asserte(len(list(triples((michel, likes, Any)))), 2) + asserte(len(list(triples((tarek, likes, Any)))), 2) + asserte(len(list(triples((bob, hates, Any)))), 2) + asserte(len(list(triples((bob, likes, Any)))), 1) + + # unbound predicates with context + asserte(len(list(c1triples((michel, Any, cheese)))), 1) + asserte(len(list(c1triples((tarek, Any, cheese)))), 1) + asserte(len(list(c1triples((bob, Any, pizza)))), 1) + asserte(len(list(c1triples((bob, Any, michel)))), 1) + + # unbound predicates without context, same results! + asserte(len(list(triples((michel, Any, cheese)))), 1) + asserte(len(list(triples((tarek, Any, cheese)))), 1) + asserte(len(list(triples((bob, Any, pizza)))), 1) + asserte(len(list(triples((bob, Any, michel)))), 1) + + # unbound subject, objects with context + asserte(len(list(c1triples((Any, hates, Any)))), 2) + asserte(len(list(c1triples((Any, likes, Any)))), 5) + + # unbound subject, objects without context, same results! + asserte(len(list(triples((Any, hates, Any)))), 2) + asserte(len(list(triples((Any, likes, Any)))), 5) + + # unbound predicates, objects with context + asserte(len(list(c1triples((michel, Any, Any)))), 2) + asserte(len(list(c1triples((bob, Any, Any)))), 3) + asserte(len(list(c1triples((tarek, Any, Any)))), 2) + + # unbound predicates, objects without context, same results! 
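The long run of assertions here reduces to one API rule: None is a wildcard in any position of a triple pattern, and subjects()/predicates()/objects() are thin wrappers over the same matching. For instance:

    from rdflib import Graph, URIRef

    EX = "urn:example:"
    g = Graph()
    likes = URIRef(EX + "likes")
    for name, food in [("tarek", "pizza"), ("michel", "pizza"), ("bob", "cheese")]:
        g.add((URIRef(EX + name), likes, URIRef(EX + food)))

    # None matches anything in its position
    assert len(list(g.triples((None, likes, URIRef(EX + "pizza"))))) == 2
    assert set(g.subjects(likes, URIRef(EX + "cheese"))) == {URIRef(EX + "bob")}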
+ asserte(len(list(triples((michel, Any, Any)))), 2) + asserte(len(list(triples((bob, Any, Any)))), 3) + asserte(len(list(triples((tarek, Any, Any)))), 2) + + # unbound subjects, predicates with context + asserte(len(list(c1triples((Any, Any, pizza)))), 3) + asserte(len(list(c1triples((Any, Any, cheese)))), 3) + asserte(len(list(c1triples((Any, Any, michel)))), 1) + + # unbound subjects, predicates without context, same results! + asserte(len(list(triples((Any, Any, pizza)))), 3) + asserte(len(list(triples((Any, Any, cheese)))), 3) + asserte(len(list(triples((Any, Any, michel)))), 1) + + # all unbound with context + asserte(len(list(c1triples((Any, Any, Any)))), 7) + # all unbound without context, same result! + asserte(len(list(triples((Any, Any, Any)))), 7) + + for c in [graph, self.graph.get_context(c1)]: + # unbound subjects + asserte(set(c.subjects(likes, pizza)), set((michel, tarek))) + asserte(set(c.subjects(hates, pizza)), set((bob,))) + asserte(set(c.subjects(likes, cheese)), set([tarek, bob, michel])) + asserte(set(c.subjects(hates, cheese)), set()) + + # unbound objects + asserte(set(c.objects(michel, likes)), set([cheese, pizza])) + asserte(set(c.objects(tarek, likes)), set([cheese, pizza])) + asserte(set(c.objects(bob, hates)), set([michel, pizza])) + asserte(set(c.objects(bob, likes)), set([cheese])) + + # unbound predicates + asserte(set(c.predicates(michel, cheese)), set([likes])) + asserte(set(c.predicates(tarek, cheese)), set([likes])) + asserte(set(c.predicates(bob, pizza)), set([hates])) + asserte(set(c.predicates(bob, michel)), set([hates])) + + asserte(set(c.subject_objects(hates)), set([(bob, pizza), (bob, michel)])) + asserte( + set(c.subject_objects(likes)), + set( + [ + (tarek, cheese), + (michel, cheese), + (michel, pizza), + (bob, cheese), + (tarek, pizza), + ] + ), + ) + + asserte( + set(c.predicate_objects(michel)), set([(likes, cheese), (likes, pizza)]) + ) + asserte( + set(c.predicate_objects(bob)), + set([(likes, cheese), (hates, pizza), (hates, michel)]), + ) + asserte( + set(c.predicate_objects(tarek)), set([(likes, cheese), (likes, pizza)]) + ) + + asserte( + set(c.subject_predicates(pizza)), + set([(bob, hates), (tarek, likes), (michel, likes)]), + ) + asserte( + set(c.subject_predicates(cheese)), + set([(bob, likes), (tarek, likes), (michel, likes)]), + ) + asserte(set(c.subject_predicates(michel)), set([(bob, hates)])) + + asserte( + set(c), + set( + [ + (bob, hates, michel), + (bob, likes, cheese), + (tarek, likes, pizza), + (michel, likes, pizza), + (michel, likes, cheese), + (bob, hates, pizza), + (tarek, likes, cheese), + ] + ), + ) + + # remove stuff and make sure the graph is empty again + self.removeStuff() + asserte(len(list(c1triples((Any, Any, Any)))), 0) + asserte(len(list(triples((Any, Any, Any)))), 0) + + +# dynamically create classes for each registered Store +pluginname = None +if __name__ == "__main__": + if len(sys.argv) > 1: + pluginname = sys.argv[1] + +tests = 0 +for s in plugin.plugins(pluginname, plugin.Store): if s.name in ( "default", "Memory", "Auditable", "Concurrent", - "SimpleMemory", "SPARQLStore", "SPARQLUpdateStore", ): - continue # inappropriate for these tests - - pluginstores.append(s.name) - - -@pytest.fixture( - scope="function", - params=pluginstores, -) -def get_graph(request): - store = request.param - path = tempfile.mktemp() - try: - shutil.rmtree(path) - except Exception: - pass - - try: - graph = ConjunctiveGraph(store=store) - except ImportError: - pytest.skip("Dependencies for store '%s' not available!" 
% store) - - if store != "default": - rt = graph.open(configuration=path, create=True) - assert rt == VALID_STORE, "The underlying store is corrupt" - - assert ( - len(graph) == 0 - ), "There must be zero triples in the graph just after store (file) creation" - - yield graph - - graph.close() - graph.store.destroy(path) - - if os.path.isdir(path): - shutil.rmtree(path) - elif os.path.exists(path): - os.remove(path) - - -def populate_c1(graph): - context1 = Graph(graph.store, c1) - - context1.add((tarek, likes, pizza)) - context1.add((tarek, likes, cheese)) - context1.add((michel, likes, pizza)) - context1.add((michel, likes, cheese)) - context1.add((bob, likes, cheese)) - context1.add((bob, hates, pizza)) - context1.add((bob, hates, michel)) # gasp! - - -def depopulate_c1(graph): - context1 = Graph(graph.store, c1) - - context1.remove((tarek, likes, pizza)) - context1.remove((tarek, likes, cheese)) - context1.remove((michel, likes, pizza)) - context1.remove((michel, likes, cheese)) - context1.remove((bob, likes, cheese)) - context1.remove((bob, hates, pizza)) - context1.remove((bob, hates, michel)) # gasp! - - -def add_triple_to_default_context_context1_and_context2(graph): - triple = (pizza, hates, tarek) # revenge! - - # add to default context - graph.add(triple) - - # add to context 1 - context1 = Graph(graph.store, c1) - context1.add(triple) - - # add to context 2 - context2 = Graph(graph.store, c2) - context2.add(triple) - - -def test_conjunction(get_graph): - graph = get_graph - - if graph.store == "Shelf": - pytest.skip("Skipping known issue with __len__") - - add_triple_to_default_context_context1_and_context2(graph) - triple = (pizza, likes, pizza) - - # add to context 1 - context1 = Graph(graph.store, c1) - context1.add(triple) - assert len(context1) == len(graph) - - -def test_add(get_graph): - graph = get_graph - - populate_c1(graph) - - -def test_remove(get_graph): - graph = get_graph - - populate_c1(graph) - depopulate_c1(graph) - - -def test_len_in_one_context(get_graph): - graph = get_graph - # make sure context is empty - - graph.remove_context(graph.get_context(c1)) - context1 = Graph(graph.store, c1) - oldLen = len(graph) - - for i in range(0, 10): - context1.add((BNode(), hates, hates)) - assert len(context1) == oldLen + 10 - - assert len(graph.get_context(c1)) == oldLen + 10 - - graph.remove_context(graph.get_context(c1)) - - assert len(graph) == oldLen - assert len(graph) == 0 - - -def test_len_in_multiple_contexts(get_graph): - graph = get_graph - - if graph.store == "Shelf": - pytest.skip("Skipping known issue with __len__") - - oldLen = len(graph) - add_triple_to_default_context_context1_and_context2(graph) - - # add_triple_to_default_context_context1_and_context2 is adding the same triple to - # three different contexts. So it's only + 1 - assert len(graph) == oldLen + 1 - - context1 = Graph(graph.store, c1) - assert len(context1) == oldLen + 1 - - -def test_remove_in_multiple_contexts(get_graph): - graph = get_graph - - triple = (pizza, hates, tarek) # revenge! - - add_triple_to_default_context_context1_and_context2(graph) - - # triple should be still in store after removing it from c1 + c2 - assert triple in graph - context1 = Graph(graph.store, c1) - context1.remove(triple) - - assert triple in graph - context2 = Graph(graph.store, c2) - context2.remove(triple) - assert triple in graph - graph.remove(triple) - # now gone! - assert triple not in graph - - # add again and see if remove without context removes all triples! 
- add_triple_to_default_context_context1_and_context2(graph) - graph.remove(triple) - assert triple not in graph - - -def test_contexts(get_graph): - graph = get_graph - triple = (pizza, hates, tarek) # revenge! - - add_triple_to_default_context_context1_and_context2(graph) - - def cid(c): - return c.identifier - - assert c1 in map(cid, graph.contexts()) - assert c2 in map(cid, graph.contexts()) - - contextList = list(map(cid, list(graph.contexts(triple)))) - assert c1 in contextList, (c1, contextList) - assert c2 in contextList, (c2, contextList) - - -def test_remove_context(get_graph): - graph = get_graph - - add_triple_to_default_context_context1_and_context2(graph) - - assert len(Graph(graph.store, c1)) == 1 - assert len(graph.get_context(c1)) == 1 - - graph.remove_context(graph.get_context(c1)) - assert c1 not in graph.contexts() - - -def test_remove_any(get_graph): - graph = get_graph - Any = None - add_triple_to_default_context_context1_and_context2(graph) - graph.remove((Any, Any, Any)) - assert len(graph) == 0 - - -def test_triples(get_graph): - graph = get_graph + continue # these are tested by default + if not s.getClass().context_aware: + continue - triples = graph.triples - Any = None - populate_c1(graph) + locals()["t%d" % tests] = type( + "%sContextTestCase" % s.name, (ContextTestCase,), {"store": s.name} + ) + tests += 1 - context1 = Graph(graph.store, c1) - context1triples = context1.triples - # unbound subjects with context - assert len(list(context1triples((Any, likes, pizza)))) == 2, graph.store - assert len(list(context1triples((Any, hates, pizza)))) == 1 - assert len(list(context1triples((Any, likes, cheese)))) == 3 - assert len(list(context1triples((Any, hates, cheese)))) == 0 - - # unbound subjects without context, same results! - assert len(list(triples((Any, likes, pizza)))) == 2 - assert len(list(triples((Any, hates, pizza)))) == 1 - assert len(list(triples((Any, likes, cheese)))) == 3 - assert len(list(triples((Any, hates, cheese)))) == 0 - - # unbound objects with context - assert len(list(context1triples((michel, likes, Any)))) == 2 - assert len(list(context1triples((tarek, likes, Any)))) == 2 - assert len(list(context1triples((bob, hates, Any)))) == 2 - assert len(list(context1triples((bob, likes, Any)))) == 1 - - # unbound objects without context, same results! - assert len(list(triples((michel, likes, Any)))) == 2 - assert len(list(triples((tarek, likes, Any)))) == 2 - assert len(list(triples((bob, hates, Any)))) == 2 - assert len(list(triples((bob, likes, Any)))) == 1 - - # unbound predicates with context - assert len(list(context1triples((michel, Any, cheese)))) == 1 - assert len(list(context1triples((tarek, Any, cheese)))) == 1 - assert len(list(context1triples((bob, Any, pizza)))) == 1 - assert len(list(context1triples((bob, Any, michel)))) == 1 - - # unbound predicates without context, same results! - assert len(list(triples((michel, Any, cheese)))) == 1 - assert len(list(triples((tarek, Any, cheese)))) == 1 - assert len(list(triples((bob, Any, pizza)))) == 1 - assert len(list(triples((bob, Any, michel)))) == 1 - - # unbound subject, objects with context - assert len(list(context1triples((Any, hates, Any)))) == 2 - assert len(list(context1triples((Any, likes, Any)))) == 5 - - # unbound subject, objects without context, same results! 
- assert len(list(triples((Any, hates, Any)))) == 2 - assert len(list(triples((Any, likes, Any)))) == 5 - - # unbound predicates, objects with context - assert len(list(context1triples((michel, Any, Any)))) == 2 - assert len(list(context1triples((bob, Any, Any)))) == 3 - assert len(list(context1triples((tarek, Any, Any)))) == 2 - - # unbound predicates, objects without context, same results! - assert len(list(triples((michel, Any, Any)))) == 2 - assert len(list(triples((bob, Any, Any)))) == 3 - assert len(list(triples((tarek, Any, Any)))) == 2 - - # unbound subjects, predicates with context - assert len(list(context1triples((Any, Any, pizza)))) == 3 - assert len(list(context1triples((Any, Any, cheese)))) == 3 - assert len(list(context1triples((Any, Any, michel)))) == 1 - - # unbound subjects, predicates without context, same results! - assert len(list(triples((Any, Any, pizza)))) == 3 - assert len(list(triples((Any, Any, cheese)))) == 3 - assert len(list(triples((Any, Any, michel)))) == 1 - - # all unbound with context - assert len(list(context1triples((Any, Any, Any)))) == 7 - # all unbound without context, same result! - assert len(list(triples((Any, Any, Any)))) == 7 - - for c in [graph, graph.get_context(c1)]: - # unbound subjects - assert set(c.subjects(likes, pizza)) == set((michel, tarek)) - assert set(c.subjects(hates, pizza)) == set((bob,)) - assert set(c.subjects(likes, cheese)) == set([tarek, bob, michel]) - assert set(c.subjects(hates, cheese)) == set() - - # unbound objects - assert set(c.objects(michel, likes)) == set([cheese, pizza]) - assert set(c.objects(tarek, likes)) == set([cheese, pizza]) - assert set(c.objects(bob, hates)) == set([michel, pizza]) - assert set(c.objects(bob, likes)) == set([cheese]) - - # unbound predicates - assert set(c.predicates(michel, cheese)) == set([likes]) - assert set(c.predicates(tarek, cheese)) == set([likes]) - assert set(c.predicates(bob, pizza)) == set([hates]) - assert set(c.predicates(bob, michel)) == set([hates]) - - assert set(c.subject_objects(hates)) == set([(bob, pizza), (bob, michel)]) - assert set(c.subject_objects(likes)) == set( - [ - (tarek, cheese), - (michel, cheese), - (michel, pizza), - (bob, cheese), - (tarek, pizza), - ] - ) - - assert set(c.predicate_objects(michel)) == set( - [(likes, cheese), (likes, pizza)] - ) - assert set(c.predicate_objects(bob)) == set( - [(likes, cheese), (hates, pizza), (hates, michel)] - ) - assert set(c.predicate_objects(tarek)) == set([(likes, cheese), (likes, pizza)]) - - assert set(c.subject_predicates(pizza)) == set( - [(bob, hates), (tarek, likes), (michel, likes)] - ) - assert set(c.subject_predicates(cheese)) == set( - [(bob, likes), (tarek, likes), (michel, likes)] - ) - assert set(c.subject_predicates(michel)) == set([(bob, hates)]) - - assert set(c) == set( - [ - (bob, hates, michel), - (bob, likes, cheese), - (tarek, likes, pizza), - (michel, likes, pizza), - (michel, likes, cheese), - (bob, hates, pizza), - (tarek, likes, cheese), - ] - ) - # remove stuff and make sure the graph is empty again - depopulate_c1(graph) - assert len(list(context1triples((Any, Any, Any)))) == 0 - assert len(list(triples((Any, Any, Any)))) == 0 +if __name__ == "__main__": + unittest.main() diff --git a/test/test_graph/test_graph_generators.py b/test/test_graph/test_graph_generators.py index b0b0077d7..d013aaec8 100644 --- a/test/test_graph/test_graph_generators.py +++ b/test/test_graph/test_graph_generators.py @@ -1,19 +1,16 @@ import os from rdflib import Graph, URIRef -from test.data import ( - 
CONSISTENT_DATA_DIR, - michel, - tarek, - bob, - likes, - hates, - pizza, - cheese, -) +michel = URIRef("urn:example:michel") +tarek = URIRef("urn:example:tarek") +bob = URIRef("urn:example:bob") +likes = URIRef("urn:example:likes") +hates = URIRef("urn:example:hates") +pizza = URIRef("urn:example:pizza") +cheese = URIRef("urn:example:cheese") timblcardn3 = open( - os.path.join(CONSISTENT_DATA_DIR, "timbl-card.n3") + os.path.join(os.path.dirname(__file__), "consistent_test_data", "timbl-card.n3") ).read() From 7926e9a3fe1f8de17812d7e3c210f216991993ff Mon Sep 17 00:00:00 2001 From: Graham Higgins Date: Sat, 19 Mar 2022 12:38:14 +0000 Subject: [PATCH 10/12] revert more unintentionally-changed test files --- test/test_store/test_store_auditable.py | 843 ++++++++---------- test/test_store/test_store_memorystore.py | 80 +- test/test_store/test_store_sparqlstore.py | 21 +- .../test_store_sparqlupdatestore.py | 627 +++++++------ .../test_store_sparqlupdatestore_mock.py | 8 +- test/test_store/test_store_triple_store.py | 49 +- 6 files changed, 782 insertions(+), 846 deletions(-) diff --git a/test/test_store/test_store_auditable.py b/test/test_store/test_store_auditable.py index 6ef581a39..f5c1188c9 100644 --- a/test/test_store/test_store_auditable.py +++ b/test/test_store/test_store_auditable.py @@ -1,473 +1,386 @@ # -*- coding=utf8 -*- -import os -import shutil -import tempfile +import unittest -import pytest - -from rdflib import Graph, Namespace, plugin +from rdflib import Graph, Namespace from rdflib.plugins.stores.auditable import AuditableStore -from rdflib.store import VALID_STORE EX = Namespace("http://example.org/") -def get_plugin_stores(): - pluginstores = [] - - for s in plugin.plugins(None, plugin.Store): - if s.name in ( - "default", - "Memory", - "Auditable", - "Concurrent", - "SimpleMemory", - "SPARQLStore", - "SPARQLUpdateStore", - ): - continue # excluded from these tests - +class BaseTestAuditableStore(unittest.TestCase): + def assert_graph_equal(self, g1, g2): try: - graph = Graph(store=s.name) - pluginstores.append(s.name) - except ImportError: - pass - return pluginstores - - -@pytest.fixture( - scope="function", - params=get_plugin_stores(), -) -def get_graph(request): - storename = request.param - - g = Graph(store=storename) - - path = tempfile.mktemp() - - try: - shutil.rmtree(path) - except Exception: - pass - - rt = g.open(configuration=path, create=True) - assert rt == VALID_STORE, "The underlying store is corrupt" - - g.add((EX.s0, EX.p0, EX.o0)) - g.add((EX.s0, EX.p0, EX.o0bis)) - - t = Graph(AuditableStore(g.store), g.identifier) - - yield g, t - - g.close() - - g.destroy(configuration=path) - - try: - shutil.rmtree(path) - except Exception: - pass - - -def test_add_commit(get_graph): - g, t = get_graph - t.add((EX.s1, EX.p1, EX.o1)) - assert set(t) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - (EX.s1, EX.p1, EX.o1), - ] - ) - - t.commit() - assert set(g) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - (EX.s1, EX.p1, EX.o1), - ] - ) - - -def test_remove_commit(get_graph): - g, t = get_graph - t.remove((EX.s0, EX.p0, EX.o0)) - assert set(t) == set( - [ - (EX.s0, EX.p0, EX.o0bis), - ] - ) - t.commit() - assert set(g) == set( - [ - (EX.s0, EX.p0, EX.o0bis), - ] - ) - - -def test_multiple_remove_commit(get_graph): - g, t = get_graph - t.remove((EX.s0, EX.p0, None)) - assert set(t) == set([]) - t.commit() - assert set(g) == set([]) - - -def test_noop_add_commit(get_graph): - g, t = get_graph - t.add((EX.s0, EX.p0, EX.o0)) - assert 
set(t) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ] - ) - t.commit() - assert set(g) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ] - ) - - -def test_noop_remove_commit(get_graph): - g, t = get_graph - t.add((EX.s0, EX.p0, EX.o0)) - assert set(t) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ] - ) - - t.commit() - assert set(g) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ] - ) - - -def test_add_remove_commit(get_graph): - g, t = get_graph - t.add((EX.s1, EX.p1, EX.o1)) - t.remove((EX.s1, EX.p1, EX.o1)) - assert set(t) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ] - ) - t.commit() - assert set(g) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ] - ) - - -def test_remove_add_commit(get_graph): - g, t = get_graph - t.remove((EX.s1, EX.p1, EX.o1)) - t.add((EX.s1, EX.p1, EX.o1)) - assert set(t) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - (EX.s1, EX.p1, EX.o1), - ] - ) - t.commit() - assert set(g) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - (EX.s1, EX.p1, EX.o1), - ] - ) - - -def test_add_rollback(get_graph): - g, t = get_graph - t.add((EX.s1, EX.p1, EX.o1)) - t.rollback() - assert set(g) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ] - ) - - -def test_remove_rollback(get_graph): - g, t = get_graph - t.remove((EX.s0, EX.p0, EX.o0)) - t.rollback() - assert set(g) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ] - ) - - -def test_multiple_remove_rollback(get_graph): - g, t = get_graph - t.remove((EX.s0, EX.p0, None)) - t.rollback() - assert set(g) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ] - ) - - -def test_noop_add_rollback(get_graph): - g, t = get_graph - t.add((EX.s0, EX.p0, EX.o0)) - t.rollback() - assert set(g) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ] - ) - - -def test_noop_remove_rollback(get_graph): - g, t = get_graph - t.add((EX.s0, EX.p0, EX.o0)) - t.rollback() - assert set(g) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ] - ) - - -def test_add_remove_rollback(get_graph): - g, t = get_graph - t.add((EX.s1, EX.p1, EX.o1)) - t.remove((EX.s1, EX.p1, EX.o1)) - t.rollback() - assert set(g) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ] - ) - - -def test_remove_add_rollback(get_graph): - g, t = get_graph - t.remove((EX.s1, EX.p1, EX.o1)) - t.add((EX.s1, EX.p1, EX.o1)) - t.rollback() - assert set(g) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ] - ) - - -@pytest.fixture( - scope="function", - params=get_plugin_stores(), -) -def get_empty_graph(request): - storename = request.param - - path = tempfile.mktemp() - - try: - shutil.rmtree(path) - except Exception: - pass - - g = Graph(store=storename) - rt = g.open(configuration=path, create=True) - assert rt == VALID_STORE, "The underlying store is corrupt" - - t = Graph(AuditableStore(g.store), g.identifier) - - yield g, t - g.close() - g.destroy(configuration=path) - - -def test_add_commit_empty(get_empty_graph): - g, t = get_empty_graph - t.add((EX.s1, EX.p1, EX.o1)) - assert set(t) == set( - [ - (EX.s1, EX.p1, EX.o1), - ] - ) - t.commit() - assert set(g) == set( - [ - (EX.s1, EX.p1, EX.o1), - ] - ) - - -def test_add_rollback_empty(get_empty_graph): - g, t = get_empty_graph - t.add((EX.s1, EX.p1, EX.o1)) - t.rollback() - assert set(g) == set([]) - - -@pytest.fixture -def get_concurrent_graph(): - g = Graph() - 
g.add((EX.s0, EX.p0, EX.o0)) - g.add((EX.s0, EX.p0, EX.o0bis)) - t1 = Graph(AuditableStore(g.store), g.identifier) - t2 = Graph(AuditableStore(g.store), g.identifier) - t1.add((EX.s1, EX.p1, EX.o1)) - t2.add((EX.s2, EX.p2, EX.o2)) - t1.remove((EX.s0, EX.p0, EX.o0)) - t2.remove((EX.s0, EX.p0, EX.o0bis)) - - yield g, t1, t2 - - -def test_commit_commit(get_concurrent_graph): - g, t1, t2 = get_concurrent_graph - t1.commit() - t2.commit() - assert set(g) == set( - [ - (EX.s1, EX.p1, EX.o1), - (EX.s2, EX.p2, EX.o2), - ] - ) - - -def test_commit_rollback(get_concurrent_graph): - g, t1, t2 = get_concurrent_graph - t1.commit() - t2.rollback() - assert set(g) == set( - [ - (EX.s1, EX.p1, EX.o1), - (EX.s0, EX.p0, EX.o0bis), - ] - ) - - -def test_rollback_commit(get_concurrent_graph): - g, t1, t2 = get_concurrent_graph - t1.rollback() - t2.commit() - assert set(g) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s2, EX.p2, EX.o2), - ] - ) - - -def test_rollback_rollback(get_concurrent_graph): - g, t1, t2 = get_concurrent_graph - t1.rollback() - t2.rollback() - assert set(g) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ] - ) - - -@pytest.fixture -def get_embedded_graph(): - g = Graph() - g.add((EX.s0, EX.p0, EX.o0)) - g.add((EX.s0, EX.p0, EX.o0bis)) - - t1 = Graph(AuditableStore(g.store), g.identifier) - t1.add((EX.s1, EX.p1, EX.o1)) - t1.remove((EX.s0, EX.p0, EX.o0bis)) - - t2 = Graph(AuditableStore(t1.store), t1.identifier) - t2.add((EX.s2, EX.p2, EX.o2)) - t2.remove((EX.s1, EX.p1, EX.o1)) - - yield g, t1, t2 - - -def test_commit_commit_embedded(get_embedded_graph): - g, t1, t2 = get_embedded_graph - assert set(t2) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s2, EX.p2, EX.o2), - ] - ) - t2.commit() - assert set(t1) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s2, EX.p2, EX.o2), - ] - ) - t1.commit() - assert set(g) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s2, EX.p2, EX.o2), - ] - ) - - -def test_commit_rollback_embedded(get_embedded_graph): - g, t1, t2 = get_embedded_graph - t2.commit() - t1.rollback() - assert set(g) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ] - ) - - -def test_rollback_commit_embedded(get_embedded_graph): - g, t1, t2 = get_embedded_graph - t2.rollback() - assert set(t1) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s1, EX.p1, EX.o1), - ] - ) - t1.commit() - assert set(g) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s1, EX.p1, EX.o1), - ] - ) - - -def test_rollback_rollback_embedded(get_embedded_graph): - g, t1, t2 = get_embedded_graph - t2.rollback() - t1.rollback() - assert set(g) == set( - [ - (EX.s0, EX.p0, EX.o0), - (EX.s0, EX.p0, EX.o0bis), - ] - ) + return self.assertSetEqual(set(g1), set(g2)) + except AttributeError: + # python2.6 does not have assertSetEqual + assert set(g1) == set(g2) + + +class TestAuditableStore(BaseTestAuditableStore): + def setUp(self): + self.g = Graph() + self.g.add((EX.s0, EX.p0, EX.o0)) + self.g.add((EX.s0, EX.p0, EX.o0bis)) + + self.t = Graph(AuditableStore(self.g.store), self.g.identifier) + + def test_add_commit(self): + self.t.add((EX.s1, EX.p1, EX.o1)) + self.assert_graph_equal( + self.t, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + (EX.s1, EX.p1, EX.o1), + ], + ) + self.t.commit() + self.assert_graph_equal( + self.g, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + (EX.s1, EX.p1, EX.o1), + ], + ) + + def test_remove_commit(self): + self.t.remove((EX.s0, EX.p0, EX.o0)) + self.assert_graph_equal( + self.t, + [ + (EX.s0, EX.p0, EX.o0bis), + ], + ) + self.t.commit() + 
self.assert_graph_equal( + self.g, + [ + (EX.s0, EX.p0, EX.o0bis), + ], + ) + + def test_multiple_remove_commit(self): + self.t.remove((EX.s0, EX.p0, None)) + self.assert_graph_equal(self.t, []) + self.t.commit() + self.assert_graph_equal(self.g, []) + + def test_noop_add_commit(self): + self.t.add((EX.s0, EX.p0, EX.o0)) + self.assert_graph_equal( + self.t, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ], + ) + self.t.commit() + self.assert_graph_equal( + self.g, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ], + ) + + def test_noop_remove_commit(self): + self.t.add((EX.s0, EX.p0, EX.o0)) + self.assert_graph_equal( + self.t, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ], + ) + self.t.commit() + self.assert_graph_equal( + self.g, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ], + ) + + def test_add_remove_commit(self): + self.t.add((EX.s1, EX.p1, EX.o1)) + self.t.remove((EX.s1, EX.p1, EX.o1)) + self.assert_graph_equal( + self.t, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ], + ) + self.t.commit() + self.assert_graph_equal( + self.g, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ], + ) + + def test_remove_add_commit(self): + self.t.remove((EX.s1, EX.p1, EX.o1)) + self.t.add((EX.s1, EX.p1, EX.o1)) + self.assert_graph_equal( + self.t, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + (EX.s1, EX.p1, EX.o1), + ], + ) + self.t.commit() + self.assert_graph_equal( + self.g, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + (EX.s1, EX.p1, EX.o1), + ], + ) + + def test_add_rollback(self): + self.t.add((EX.s1, EX.p1, EX.o1)) + self.t.rollback() + self.assert_graph_equal( + self.g, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ], + ) + + def test_remove_rollback(self): + self.t.remove((EX.s0, EX.p0, EX.o0)) + self.t.rollback() + self.assert_graph_equal( + self.g, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ], + ) + + def test_multiple_remove_rollback(self): + self.t.remove((EX.s0, EX.p0, None)) + self.t.rollback() + self.assert_graph_equal( + self.g, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ], + ) + + def test_noop_add_rollback(self): + self.t.add((EX.s0, EX.p0, EX.o0)) + self.t.rollback() + self.assert_graph_equal( + self.g, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ], + ) + + def test_noop_remove_rollback(self): + self.t.add((EX.s0, EX.p0, EX.o0)) + self.t.rollback() + self.assert_graph_equal( + self.g, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ], + ) + + def test_add_remove_rollback(self): + self.t.add((EX.s1, EX.p1, EX.o1)) + self.t.remove((EX.s1, EX.p1, EX.o1)) + self.t.rollback() + self.assert_graph_equal( + self.g, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ], + ) + + def test_remove_add_rollback(self): + self.t.remove((EX.s1, EX.p1, EX.o1)) + self.t.add((EX.s1, EX.p1, EX.o1)) + self.t.rollback() + self.assert_graph_equal( + self.g, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ], + ) + + +class TestAuditableStoreEmptyGraph(BaseTestAuditableStore): + def setUp(self): + self.g = Graph() + self.t = Graph(AuditableStore(self.g.store), self.g.identifier) + + def test_add_commit(self): + self.t.add((EX.s1, EX.p1, EX.o1)) + self.assert_graph_equal( + self.t, + [ + (EX.s1, EX.p1, EX.o1), + ], + ) + self.t.commit() + self.assert_graph_equal( + self.g, + [ + (EX.s1, EX.p1, EX.o1), + ], + ) + + def test_add_rollback(self): + self.t.add((EX.s1, EX.p1, EX.o1)) + self.t.rollback() + 
self.assert_graph_equal(self.g, []) + + +class TestAuditableStoreConccurent(BaseTestAuditableStore): + def setUp(self): + self.g = Graph() + self.g.add((EX.s0, EX.p0, EX.o0)) + self.g.add((EX.s0, EX.p0, EX.o0bis)) + self.t1 = Graph(AuditableStore(self.g.store), self.g.identifier) + self.t2 = Graph(AuditableStore(self.g.store), self.g.identifier) + self.t1.add((EX.s1, EX.p1, EX.o1)) + self.t2.add((EX.s2, EX.p2, EX.o2)) + self.t1.remove((EX.s0, EX.p0, EX.o0)) + self.t2.remove((EX.s0, EX.p0, EX.o0bis)) + + def test_commit_commit(self): + self.t1.commit() + self.t2.commit() + self.assert_graph_equal( + self.g, + [ + (EX.s1, EX.p1, EX.o1), + (EX.s2, EX.p2, EX.o2), + ], + ) + + def test_commit_rollback(self): + self.t1.commit() + self.t2.rollback() + self.assert_graph_equal( + self.g, + [ + (EX.s1, EX.p1, EX.o1), + (EX.s0, EX.p0, EX.o0bis), + ], + ) + + def test_rollback_commit(self): + self.t1.rollback() + self.t2.commit() + self.assert_graph_equal( + self.g, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s2, EX.p2, EX.o2), + ], + ) + + def test_rollback_rollback(self): + self.t1.rollback() + self.t2.rollback() + self.assert_graph_equal( + self.g, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ], + ) + + +class TestAuditableStoreEmbeded(BaseTestAuditableStore): + def setUp(self): + self.g = Graph() + self.g.add((EX.s0, EX.p0, EX.o0)) + self.g.add((EX.s0, EX.p0, EX.o0bis)) + + self.t1 = Graph(AuditableStore(self.g.store), self.g.identifier) + self.t1.add((EX.s1, EX.p1, EX.o1)) + self.t1.remove((EX.s0, EX.p0, EX.o0bis)) + + self.t2 = Graph(AuditableStore(self.t1.store), self.t1.identifier) + self.t2.add((EX.s2, EX.p2, EX.o2)) + self.t2.remove((EX.s1, EX.p1, EX.o1)) + + def test_commit_commit(self): + self.assert_graph_equal( + self.t2, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s2, EX.p2, EX.o2), + ], + ) + self.t2.commit() + self.assert_graph_equal( + self.t1, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s2, EX.p2, EX.o2), + ], + ) + self.t1.commit() + self.assert_graph_equal( + self.g, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s2, EX.p2, EX.o2), + ], + ) + + def test_commit_rollback(self): + self.t2.commit() + self.t1.rollback() + self.assert_graph_equal( + self.g, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ], + ) + + def test_rollback_commit(self): + self.t2.rollback() + self.assert_graph_equal( + self.t1, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s1, EX.p1, EX.o1), + ], + ) + self.t1.commit() + self.assert_graph_equal( + self.g, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s1, EX.p1, EX.o1), + ], + ) + + def test_rollback_rollback(self): + self.t2.rollback() + self.t1.rollback() + self.assert_graph_equal( + self.g, + [ + (EX.s0, EX.p0, EX.o0), + (EX.s0, EX.p0, EX.o0bis), + ], + ) diff --git a/test/test_store/test_store_memorystore.py b/test/test_store/test_store_memorystore.py index 905dc58b6..41b603569 100644 --- a/test/test_store/test_store_memorystore.py +++ b/test/test_store/test_store_memorystore.py @@ -1,31 +1,59 @@ -import pytest - +import unittest import rdflib +rdflib.plugin.register( + "SimpleMemory", rdflib.store.Store, "rdflib.plugins.stores.memory", "SimpleMemory" +) +rdflib.plugin.register( + "Memory", rdflib.store.Store, "rdflib.plugins.stores.memory", "Memory" +) + + +class SimpleStoreTestCase(unittest.TestCase): + def test_memory_store(self): + g = rdflib.Graph("SimpleMemory") + subj1 = rdflib.URIRef("http://example.org/foo#bar1") + pred1 = rdflib.URIRef("http://example.org/foo#bar2") + obj1 = rdflib.URIRef("http://example.org/foo#bar3") + triple1 = (subj1, pred1, obj1) + triple2 = ( + subj1, + 
rdflib.URIRef("http://example.org/foo#bar4"), + rdflib.URIRef("http://example.org/foo#bar5"), + ) + g.add(triple1) + self.assertTrue(len(g) == 1) + g.add(triple2) + self.assertTrue(len(list(g.triples((subj1, None, None)))) == 2) + self.assertTrue(len(list(g.triples((None, pred1, None)))) == 1) + self.assertTrue(len(list(g.triples((None, None, obj1)))) == 1) + g.remove(triple1) + self.assertTrue(len(g) == 1) + g.serialize() + -@pytest.fixture(scope="function", params=["SimpleMemory", "Memory"]) -def get_graph(request): - g = rdflib.Graph(request.param) - yield g +class MemoryStoreTestCase(unittest.TestCase): + def test_memory_store(self): + g = rdflib.Graph("Memory") + subj1 = rdflib.URIRef("http://example.org/foo#bar1") + pred1 = rdflib.URIRef("http://example.org/foo#bar2") + obj1 = rdflib.URIRef("http://example.org/foo#bar3") + triple1 = (subj1, pred1, obj1) + triple2 = ( + subj1, + rdflib.URIRef("http://example.org/foo#bar4"), + rdflib.URIRef("http://example.org/foo#bar5"), + ) + g.add(triple1) + self.assertTrue(len(g) == 1) + g.add(triple2) + self.assertTrue(len(list(g.triples((subj1, None, None)))) == 2) + self.assertTrue(len(list(g.triples((None, pred1, None)))) == 1) + self.assertTrue(len(list(g.triples((None, None, obj1)))) == 1) + g.remove(triple1) + self.assertTrue(len(g) == 1) + g.serialize() -def test_memory_store(get_graph): - g = get_graph - subj1 = rdflib.URIRef("http://example.org/foo#bar1") - pred1 = rdflib.URIRef("http://example.org/foo#bar2") - obj1 = rdflib.URIRef("http://example.org/foo#bar3") - triple1 = (subj1, pred1, obj1) - triple2 = ( - subj1, - rdflib.URIRef("http://example.org/foo#bar4"), - rdflib.URIRef("http://example.org/foo#bar5"), - ) - g.add(triple1) - assert len(g) == 1 - g.add(triple2) - assert len(list(g.triples((subj1, None, None)))) == 2 - assert len(list(g.triples((None, pred1, None)))) == 1 - assert len(list(g.triples((None, None, obj1)))) == 1 - g.remove(triple1) - assert len(g) == 1 - assert len(g.serialize()) > 0 +if __name__ == "__main__": + unittest.main(defaultTest="test_suite") diff --git a/test/test_store/test_store_sparqlstore.py b/test/test_store/test_store_sparqlstore.py index b504e2e39..e7d7b4dac 100644 --- a/test/test_store/test_store_sparqlstore.py +++ b/test/test_store/test_store_sparqlstore.py @@ -1,19 +1,20 @@ -import re -import socket +from rdflib import Graph, URIRef, Literal import unittest from http.server import BaseHTTPRequestHandler, HTTPServer +import socket from threading import Thread -from typing import Callable, ClassVar, Type from unittest.mock import patch - -import pytest - -from rdflib import Graph, Literal, URIRef -from rdflib.namespace import FOAF, RDF, RDFS, XMLNS, XSD +from rdflib.namespace import RDF, XSD, XMLNS, FOAF, RDFS from rdflib.plugins.stores.sparqlstore import SPARQLConnector +from typing import ClassVar, Callable, Type +import pytest +import re -from test import helper -from test.testutils import MockHTTPResponse, ServedSimpleHTTPMock +from . 
 import helper
+from .testutils import (
+    MockHTTPResponse,
+    ServedSimpleHTTPMock,
+)
 
 
 class TestSPARQLStoreGraph:
diff --git a/test/test_store/test_store_sparqlupdatestore.py b/test/test_store/test_store_sparqlupdatestore.py
index e47eaacf2..6c942f8ef 100644
--- a/test/test_store/test_store_sparqlupdatestore.py
+++ b/test/test_store/test_store_sparqlupdatestore.py
@@ -1,20 +1,10 @@
 # -*- coding: utf-8 -*-
-import re
 import unittest
+import re
+
+from rdflib import ConjunctiveGraph, URIRef, Literal, BNode, Graph
 from urllib.request import urlopen
 
-import pytest
-
-from rdflib import BNode, ConjunctiveGraph, Graph, Literal, URIRef
-from test.data import (
-    tarek,
-    bob,
-    michel,
-    likes,
-    hates,
-    cheese,
-    pizza,
-)
 
 HOST = "http://localhost:3031"
 DB = "/db/"
@@ -31,335 +21,342 @@
 
 # THIS WILL DELETE ALL DATA IN THE /db dataset
 
+michel = URIRef("urn:example:michel")
+tarek = URIRef("urn:example:tarek")
+bob = URIRef("urn:example:bob")
+likes = URIRef("urn:example:likes")
+hates = URIRef("urn:example:hates")
+pizza = URIRef("urn:example:pizza")
+cheese = URIRef("urn:example:cheese")
+
 graphuri = URIRef("urn:example:graph")
 othergraphuri = URIRef("urn:example:othergraph")
 
 try:
     assert len(urlopen(HOST).read()) > 0
+    skip = False
 except:
-    pytest.skip(f"skipping because {HOST} is unavailable", allow_module_level=True)
+    skip = True
+
+
+@unittest.skipIf(skip, HOST + " is unavailable.")
+class TestSparql11(unittest.TestCase):
+    def setUp(self):
+        self.longMessage = True
+        self.graph = ConjunctiveGraph("SPARQLUpdateStore")
+
+        root = HOST + DB
+        self.graph.open((root + "sparql", root + "update"))
+
+        # clean out the store
+        for c in self.graph.contexts():
+            c.remove((None, None, None))
+            assert len(c) == 0
+
+    def tearDown(self):
+        self.graph.close()
+
+    def testSimpleGraph(self):
+        g = self.graph.get_context(graphuri)
+        g.add((tarek, likes, pizza))
+        g.add((bob, likes, pizza))
+        g.add((bob, likes, cheese))
+
+        g2 = self.graph.get_context(othergraphuri)
+        g2.add((michel, likes, pizza))
+
+        self.assertEqual(3, len(g), "graph contains 3 triples")
+        self.assertEqual(1, len(g2), "other graph contains 1 triple")
 
-@pytest.fixture
-def get_graph():
+        r = g.query("SELECT * WHERE { ?s <urn:example:likes> <urn:example:pizza> . }")
+        self.assertEqual(2, len(list(r)), "two people like pizza")
 
-    longMessage = True
-    graph = ConjunctiveGraph("SPARQLUpdateStore")
+        r = g.triples((None, likes, pizza))
+        self.assertEqual(2, len(list(r)), "two people like pizza")
 
-    root = HOST + DB
-    graph.open((root + "sparql", root + "update"))
+        # Test initBindings
+        r = g.query(
+            "SELECT * WHERE { ?s <urn:example:likes> <urn:example:pizza> . }", initBindings={"s": tarek},
+        )
+        self.assertEqual(1, len(list(r)), "i was asking only about tarek")
+
+        r = g.triples((tarek, likes, pizza))
+        self.assertEqual(1, len(list(r)), "i was asking only about tarek")
+
+        r = g.triples((tarek, likes, cheese))
+        self.assertEqual(0, len(list(r)), "tarek doesn't like cheese")
+
+        g2.add((tarek, likes, pizza))
+        g.remove((tarek, likes, pizza))
+        r = g.query("SELECT * WHERE { ?s <urn:example:likes> <urn:example:pizza> . }")
+        self.assertEqual(1, len(list(r)), "only bob likes pizza")
+
+    def testConjunctiveDefault(self):
+        g = self.graph.get_context(graphuri)
+        g.add((tarek, likes, pizza))
+        g2 = self.graph.get_context(othergraphuri)
+        g2.add((bob, likes, pizza))
+        g.add((tarek, hates, cheese))
+
+        self.assertEqual(2, len(g), "graph contains 2 triples")
+
+        # the following are actually bad tests as they depend on your endpoint,
+        # as pointed out in the sparqlstore.py code:
+        #
+        # For ConjunctiveGraphs, reading is done from the "default graph" Exactly
+        # what this means depends on your endpoint, because SPARQL does not offer a
+        # simple way to query the union of all graphs as it would be expected for a
+        # ConjuntiveGraph.
+        ##
+        # Fuseki/TDB has a flag for specifying that the default graph
+        # is the union of all graphs (tdb:unionDefaultGraph in the Fuseki config).
+        self.assertEqual(
+            3,
+            len(self.graph),
+            "default union graph should contain three triples but contains:\n"
+            "%s" % list(self.graph),
+        )
 
-    # clean out the store
-    for c in graph.contexts():
-        c.remove((None, None, None))
-        assert len(c) == 0
+        r = self.graph.query("SELECT * WHERE { ?s <urn:example:likes> <urn:example:pizza> . }")
+        self.assertEqual(2, len(list(r)), "two people like pizza")
 
-    yield graph
+        r = self.graph.query(
+            "SELECT * WHERE { ?s <urn:example:likes> <urn:example:pizza> . }", initBindings={"s": tarek},
+        )
+        self.assertEqual(1, len(list(r)), "i was asking only about tarek")
 
-    graph.close()
+        r = self.graph.triples((tarek, likes, pizza))
+        self.assertEqual(1, len(list(r)), "i was asking only about tarek")
+
+        r = self.graph.triples((tarek, likes, cheese))
+        self.assertEqual(0, len(list(r)), "tarek doesn't like cheese")
+
+        g2.remove((bob, likes, pizza))
+
+        r = self.graph.query("SELECT * WHERE { ?s <urn:example:likes> <urn:example:pizza> . }")
+        self.assertEqual(1, len(list(r)), "only tarek likes pizza")
+
+    def testUpdate(self):
+        self.graph.update(
+            "INSERT DATA { GRAPH <urn:example:graph> { <urn:example:michel> <urn:example:likes> <urn:example:pizza> . } }"
+        )
+
+        g = self.graph.get_context(graphuri)
+        self.assertEqual(1, len(g), "graph contains 1 triples")
+
+    def testUpdateWithInitNs(self):
+        self.graph.update(
+            "INSERT DATA { GRAPH ns:graph { ns:michel ns:likes ns:pizza . } }",
+            initNs={"ns": URIRef("urn:example:")},
+        )
+
+        g = self.graph.get_context(graphuri)
+        self.assertEqual(
+            set(g.triples((None, None, None))),
+            set([(michel, likes, pizza)]),
+            "only michel likes pizza",
+        )
+
+    def testUpdateWithInitBindings(self):
+        self.graph.update(
+            "INSERT { GRAPH <urn:example:graph> { ?a ?b ?c . } } WherE { }",
+            initBindings={
+                "a": URIRef("urn:example:michel"),
+                "b": URIRef("urn:example:likes"),
+                "c": URIRef("urn:example:pizza"),
+            },
+        )
+
+        g = self.graph.get_context(graphuri)
+        self.assertEqual(
+            set(g.triples((None, None, None))),
+            set([(michel, likes, pizza)]),
+            "only michel likes pizza",
+        )
 
-    r = g.triples((tarek, likes, cheese))
+    def testUpdateWithBlankNode(self):
+        self.graph.update(
+            "INSERT DATA { GRAPH <urn:example:graph> { _:blankA <urn:example:type> <urn:example:Blank> } }"
+        )
+        g = self.graph.get_context(graphuri)
+        for t in g.triples((None, None, None)):
+            self.assertTrue(isinstance(t[0], BNode))
+            self.assertEqual(t[1].n3(), "<urn:example:type>")
+            self.assertEqual(t[2].n3(), "<urn:example:Blank>")
+
+    def testUpdateWithBlankNodeSerializeAndParse(self):
+        self.graph.update(
+            "INSERT DATA { GRAPH <urn:example:graph> { _:blankA <urn:example:type> <urn:example:Blank> } }"
+        )
+        g = self.graph.get_context(graphuri)
+        string = g.serialize(format="ntriples")
+        raised = False
+        try:
+            Graph().parse(data=string, format="ntriples")
+        except Exception as e:
+            raised = True
+        self.assertFalse(raised, "Exception raised when parsing: " + string)
+
+    def testMultipleUpdateWithInitBindings(self):
+        self.graph.update(
+            "INSERT { GRAPH <urn:example:graph> { ?a ?b ?c . } } WHERE { };"
+            "INSERT { GRAPH <urn:example:graph> { ?d ?b ?c . } } WHERE { }",
+            initBindings={
+                "a": URIRef("urn:example:michel"),
+                "b": URIRef("urn:example:likes"),
+                "c": URIRef("urn:example:pizza"),
+                "d": URIRef("urn:example:bob"),
+            },
+        )
 
-    g2.add((tarek, likes, pizza))
-    g.remove((tarek, likes, pizza))
-    r = g.query("SELECT * WHERE { ?s <urn:example:likes> <urn:example:pizza> . }")
-    assert 1 == len(list(r)), "only bob likes pizza"
+        g = self.graph.get_context(graphuri)
+        self.assertEqual(
+            set(g.triples((None, None, None))),
+            set([(michel, likes, pizza), (bob, likes, pizza)]),
+            "michel and bob like pizza",
+        )
 
+    def testNamedGraphUpdate(self):
+        g = self.graph.get_context(graphuri)
+        r1 = "INSERT DATA { <urn:example:michel> <urn:example:likes> <urn:example:pizza> }"
+        g.update(r1)
+        self.assertEqual(
+            set(g.triples((None, None, None))),
+            set([(michel, likes, pizza)]),
+            "only michel likes pizza",
+        )
 
-def test_conjunctive_default(get_graph):
-    graph = get_graph
-    g = graph.get_context(graphuri)
-    g.add((tarek, likes, pizza))
-    g2 = graph.get_context(othergraphuri)
-    g2.add((bob, likes, pizza))
-    g.add((tarek, hates, cheese))
-
-    assert 2 == len(g), "graph contains 2 triples"
-
-    # the following are actually bad tests as they depend on your endpoint,
-    # as pointed out in the sparqlstore.py code:
-    #
-    # For ConjunctiveGraphs, reading is done from the "default graph" Exactly
-    # what this means depends on your endpoint, because SPARQL does not offer a
-    # simple way to query the union of all graphs as it would be expected for a
-    # ConjuntiveGraph.
-    ##
-    # Fuseki/TDB has a flag for specifying that the default graph
-    # is the union of all graphs (tdb:unionDefaultGraph in the Fuseki config).
-    assert (
-        len(graph) == 3
-    ), f"default union graph should contain three triples but contains:\n{list(graph)}"
-
-    r = graph.query("SELECT * WHERE { ?s <urn:example:likes> <urn:example:pizza> . }")
-    assert 2 == len(list(r)), "two people like pizza"
-
-    r = graph.query(
-        "SELECT * WHERE { ?s <urn:example:likes> <urn:example:pizza> . }",
-        initBindings={"s": tarek},
-    )
-    assert 1 == len(list(r)), "i was asking only about tarek"
-
-    r = graph.triples((tarek, likes, pizza))
-    assert 1 == len(list(r)), "i was asking only about tarek"
-
-    r = graph.triples((tarek, likes, cheese))
-    assert 0 == len(list(r)), "tarek doesn't like cheese"
-
-    g2.remove((bob, likes, pizza))
-
-    r = graph.query("SELECT * WHERE { ?s <urn:example:likes> <urn:example:pizza> . }")
-    assert 1 == len(list(r)), "only tarek likes pizza"
-
-
-def test_update(get_graph):
-    graph = get_graph
-    graph.update(
-        "INSERT DATA { GRAPH <urn:example:graph> { <urn:example:michel> <urn:example:likes> <urn:example:pizza> . } }"
-    )
-
-    g = graph.get_context(graphuri)
-    assert 1 == len(g), "graph contains 1 triples"
-
-
-def test_update_with_initns(get_graph):
-    graph = get_graph
-    graph.update(
-        "INSERT DATA { GRAPH ns:graph { ns:michel ns:likes ns:pizza . } }",
-        initNs={"ns": URIRef("urn:example:")},
-    )
-
-    g = graph.get_context(graphuri)
-    assert set(g.triples((None, None, None))) == set(
-        [(michel, likes, pizza)]
-    ), "only michel likes pizza"
-
-
-def test_update_with_init_bindings(get_graph):
-    graph = get_graph
-    graph.update(
-        "INSERT { GRAPH <urn:example:graph> { ?a ?b ?c . } } WherE { }",
-        initBindings={
-            "a": URIRef("urn:example:michel"),
-            "b": URIRef("urn:example:likes"),
-            "c": URIRef("urn:example:pizza"),
-        },
-    )
-
-    g = graph.get_context(graphuri)
-    assert set(g.triples((None, None, None))) == set(
-        [(michel, likes, pizza)]
-    ), "only michel likes pizza"
-
-
-def test_update_with_blank_node(get_graph):
-    graph = get_graph
-    graph.update(
-        "INSERT DATA { GRAPH <urn:example:graph> { _:blankA <urn:example:type> <urn:example:Blank> } }"
-    )
-    g = graph.get_context(graphuri)
-    for t in g.triples((None, None, None)):
-        assert isinstance(t[0], BNode)
-        assert t[1].n3() == "<urn:example:type>"
-        assert t[2].n3() == "<urn:example:Blank>"
-
-
-def test_update_with_blank_node_serialize_and_parse(get_graph):
-    graph = get_graph
-    graph.update(
-        "INSERT DATA { GRAPH <urn:example:graph> { _:blankA <urn:example:type> <urn:example:Blank> } }"
-    )
-    g = graph.get_context(graphuri)
-    string = g.serialize(format="ntriples")
-    raised = False
-    try:
-        Graph().parse(data=string, format="ntriples")
-    except Exception as e:
-        raised = True
-    assert raised is False, "Exception raised when parsing: " + string
-
-
-def test_multiple_update_with_init_bindings(get_graph):
-    graph = get_graph
-    graph.update(
-        "INSERT { GRAPH <urn:example:graph> { ?a ?b ?c . } } WHERE { };"
-        "INSERT { GRAPH <urn:example:graph> { ?d ?b ?c . } } WHERE { }",
-        initBindings={
-            "a": URIRef("urn:example:michel"),
-            "b": URIRef("urn:example:likes"),
-            "c": URIRef("urn:example:pizza"),
-            "d": URIRef("urn:example:bob"),
-        },
-    )
-
-    g = graph.get_context(graphuri)
-    assert set(g.triples((None, None, None))) == set(
-        [(michel, likes, pizza), (bob, likes, pizza)]
-    ), "michel and bob like pizza"
-
-
-def test_named_graph_update(get_graph):
-    graph = get_graph
-    g = graph.get_context(graphuri)
-    r1 = "INSERT DATA { <urn:example:michel> <urn:example:likes> <urn:example:pizza> }"
-    g.update(r1)
-    assert set(g.triples((None, None, None))) == set(
-        [(michel, likes, pizza)]
-    ), "only michel likes pizza"
-
-    r2 = (
-        "DELETE { <urn:example:michel> <urn:example:likes> <urn:example:pizza> } "
-        + "INSERT { <urn:example:bob> <urn:example:likes> <urn:example:pizza> } WHERE {}"
-    )
-    g.update(r2)
-    assert set(g.triples((None, None, None))) == set(
-        [(bob, likes, pizza)]
-    ), "only bob likes pizza"
-
-    says = URIRef("urn:says")
-
-    # Strings with unbalanced curly braces
-    tricky_strs = ["With an unbalanced curly brace %s " % brace for brace in ["{", "}"]]
-    for tricky_str in tricky_strs:
-        r3 = (
-            """INSERT { ?b <urn:says> "%s" }
-            WHERE { ?b <urn:example:likes> <urn:example:pizza> } """
-            % tricky_str
+        r2 = (
+            "DELETE { <urn:example:michel> <urn:example:likes> <urn:example:pizza> } "
+            + "INSERT { <urn:example:bob> <urn:example:likes> <urn:example:pizza> } WHERE {}"
         )
-        g.update(r3)
-
-    values = set()
-    for v in g.objects(bob, says):
-        values.add(str(v))
-    assert values == set(tricky_strs)
-
-    # Complicated Strings
-    r4strings = []
-    r4strings.append(r'''"1: adfk { ' \\\" \" { "''')
-    r4strings.append(r'''"2: adfk } #éï \\"''')
-
-    r4strings.append(r"""'3: adfk { " \\\' \' { '""")
-    r4strings.append(r"""'4: adfk } #éï \\'""")
-
-    r4strings.append(r'''"""5: adfk { ' \\\" \" { """''')
-    r4strings.append(r'''"""6: adfk } #éï \\"""''')
-    r4strings.append('"""7: ad adsfj \n { \n sadfj"""')
-
-    r4strings.append(r"""'''8: adfk { " \\\' \' { '''""")
-    r4strings.append(r"""'''9: adfk } #éï \\'''""")
-    r4strings.append("'''10: ad adsfj \n { \n sadfj'''")
-
-    r4 = "\n".join(
-        ["INSERT DATA { <urn:example:michel> <urn:says> %s } ;" % s for s in r4strings]
-    )
-    g.update(r4)
-    values = set()
-    for v in g.objects(michel, says):
-        values.add(str(v))
-    assert values == set(
-        [
-            re.sub(
-                r"\\(.)",
-                r"\1",
-                re.sub(r"^'''|'''$|^'|'$|" + r'^"""|"""$|^"|"$', r"", s),
+        g.update(r2)
+        self.assertEqual(
+            set(g.triples((None, None, None))),
+            set([(bob, likes, pizza)]),
+            "only bob likes pizza",
+        )
+        says = URIRef("urn:says")
+
+        # Strings with unbalanced curly braces
+        tricky_strs = [
+            "With an unbalanced curly brace %s " % brace for brace in ["{", "}"]
+        ]
+        for tricky_str in tricky_strs:
+            r3 = (
+                """INSERT { ?b <urn:says> "%s" }
+            WHERE { ?b <urn:example:likes> <urn:example:pizza> } """
+                % tricky_str
             )
-            for s in r4strings
+            g.update(r3)
+
+        values = set()
+        for v in g.objects(bob, says):
+            values.add(str(v))
+        self.assertEqual(values, set(tricky_strs))
+
+        # Complicated Strings
+        r4strings = []
+        r4strings.append(r'''"1: adfk { ' \\\" \" { "''')
+        r4strings.append(r'''"2: adfk } #éï \\"''')
+
+        r4strings.append(r"""'3: adfk { " \\\' \' { '""")
+        r4strings.append(r"""'4: adfk } #éï \\'""")
+
+        r4strings.append(r'''"""5: adfk { ' \\\" \" { """''')
+        r4strings.append(r'''"""6: adfk } #éï \\"""''')
+        r4strings.append('"""7: ad adsfj \n { \n sadfj"""')
+
+        r4strings.append(r"""'''8: adfk { " \\\' \' { '''""")
+        r4strings.append(r"""'''9: adfk } #éï \\'''""")
+        r4strings.append("'''10: ad adsfj \n { \n sadfj'''")
+
+        r4 = "\n".join(
+            ["INSERT DATA { <urn:example:michel> <urn:says> %s } ;" % s for s in r4strings]
+        )
+        g.update(r4)
+        values = set()
+        for v in g.objects(michel, says):
+            values.add(str(v))
+        self.assertEqual(
+            values,
+            set(
+                [
+                    re.sub(
+                        r"\\(.)",
+                        r"\1",
+                        re.sub(r"^'''|'''$|^'|'$|" + r'^"""|"""$|^"|"$', r"", s),
+                    )
+                    for s in r4strings
+                ]
+            ),
+        )
+
+        # IRI Containing ' or #
+        # The fragment identifier must not be misinterpreted as a comment
+        # (commenting out the end of the block).
+        # The ' must not be interpreted as the start of a string, causing the }
+        # in the literal to be identified as the end of the block.
+        r5 = """INSERT DATA { <urn:example:michel> <urn:example:hates> <urn:example:foo'bar?baz;a=1&b=2#fragment>, "'}" }"""
+
+        g.update(r5)
+        values = set()
+        for v in g.objects(michel, hates):
+            values.add(str(v))
+        self.assertEqual(values, set(["urn:example:foo'bar?baz;a=1&b=2#fragment", "'}"]))
+
+        # Comments
+        r6 = """
+        INSERT DATA {
+            <urn:example:bob> <urn:example:hates> <urn:example:bob> . # No closing brace: }
+            <urn:example:bob> <urn:example:hates> <urn:example:michel>.
+        }
+        #Final { } comment"""
+
+        g.update(r6)
+        values = set()
+        for v in g.objects(bob, hates):
+            values.add(v)
+        self.assertEqual(values, set([bob, michel]))
+
+    def testNamedGraphUpdateWithInitBindings(self):
+        g = self.graph.get_context(graphuri)
+        r = "INSERT { ?a ?b ?c } WHERE {}"
+        g.update(r, initBindings={"a": michel, "b": likes, "c": pizza})
+        self.assertEqual(
+            set(g.triples((None, None, None))),
+            set([(michel, likes, pizza)]),
+            "only michel likes pizza",
+        )
+
+    def testEmptyNamedGraph(self):
+        empty_graph_iri = "urn:empty-graph-1"
+        self.graph.update("CREATE GRAPH <%s>" % empty_graph_iri)
+        named_graphs = [
+            str(r[0]) for r in self.graph.query("SELECT ?name WHERE { GRAPH ?name {} }")
         ]
-    )
-
-    # IRI Containing ' or #
-    # The fragment identifier must not be misinterpreted as a comment
-    # (commenting out the end of the block).
-    # The ' must not be interpreted as the start of a string, causing the }
-    # in the literal to be identified as the end of the block.
-    r5 = """INSERT DATA { <urn:example:michel> <urn:example:hates> <urn:example:foo'bar?baz;a=1&b=2#fragment>, "'}" }"""
-
-    g.update(r5)
-    values = set()
-    for v in g.objects(michel, hates):
-        values.add(str(v))
-    assert values == set(["urn:example:foo'bar?baz;a=1&b=2#fragment", "'}"])
-
-    # Comments
-    r6 = """
-    INSERT DATA {
-        <urn:example:bob> <urn:example:hates> <urn:example:bob> . # No closing brace: }
-        <urn:example:bob> <urn:example:hates> <urn:example:michel>.
-    }
-    #Final { } comment"""
-
-    g.update(r6)
-    values = set()
-    for v in g.objects(bob, hates):
-        values.add(v)
-    assert values == set([bob, michel])
-
-
-def test_named_graph_update_with_init_bindings(get_graph):
-    graph = get_graph
-    g = graph.get_context(graphuri)
-    r = "INSERT { ?a ?b ?c } WHERE {}"
-    g.update(r, initBindings={"a": michel, "b": likes, "c": pizza})
-    assert set(g.triples((None, None, None))) == set(
-        [(michel, likes, pizza)]
-    ), "only michel likes pizza"
-
-
-def test_empty_named_graph(get_graph):
-    graph = get_graph
-    empty_graph_iri = "urn:empty-graph-1"
-    graph.update("CREATE GRAPH <%s>" % empty_graph_iri)
-    named_graphs = [
-        str(r[0]) for r in graph.query("SELECT ?name WHERE { GRAPH ?name {} }")
-    ]
-    # Some SPARQL endpoint backends (like TDB) are not able to find empty named graphs
-    # (at least with this query)
-    if empty_graph_iri in named_graphs:
-        assert empty_graph_iri in [str(g.identifier) for g in graph.contexts()]
-
-
-def test_empty_literal(get_graph):
-    graph = get_graph
-    # test for https://github.com/RDFLib/rdflib/issues/457
-    # also see test_issue457.py which is sparql store independent!
+        # Some SPARQL endpoint backends (like TDB) are not able to find empty named graphs
+        # (at least with this query)
+        if empty_graph_iri in named_graphs:
+            self.assertTrue(
+                empty_graph_iri in [str(g.identifier) for g in self.graph.contexts()]
+            )
+
+    def testEmptyLiteral(self):
+        # test for https://github.com/RDFLib/rdflib/issues/457
+        # also see test_issue457.py which is sparql store independent!
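+        # an empty string Literal must survive the round-trip through the
+        # store and come back as Literal(""), not be dropped or mangled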
+ g = self.graph.get_context(graphuri) + g.add( + ( + URIRef("http://example.com/s"), + URIRef("http://example.com/p"), + Literal(""), + ) ) - ) - o = tuple(g)[0][2] - assert o == Literal(""), repr(o) + o = tuple(g)[0][2] + self.assertEqual(o, Literal(""), repr(o)) + + +if __name__ == "__main__": + unittest.main() diff --git a/test/test_store/test_store_sparqlupdatestore_mock.py b/test/test_store/test_store_sparqlupdatestore_mock.py index c3246e78e..f812c3deb 100644 --- a/test/test_store/test_store_sparqlupdatestore_mock.py +++ b/test/test_store/test_store_sparqlupdatestore_mock.py @@ -1,10 +1,8 @@ -import unittest +from rdflib.graph import ConjunctiveGraph from typing import ClassVar - from rdflib import Namespace -from rdflib.graph import ConjunctiveGraph - -from test.testutils import MockHTTPResponse, ServedSimpleHTTPMock +from .testutils import MockHTTPResponse, ServedSimpleHTTPMock +import unittest EG = Namespace("http://example.org/") diff --git a/test/test_store/test_store_triple_store.py b/test/test_store/test_store_triple_store.py index e37530647..f37bea33e 100644 --- a/test/test_store/test_store_triple_store.py +++ b/test/test_store/test_store_triple_store.py @@ -1,36 +1,35 @@ -import pytest +import unittest -from rdflib.graph import Graph -from rdflib.namespace import RDFS from rdflib.term import BNode, Literal +from rdflib.namespace import RDFS +from rdflib.graph import Graph -remove_me = (BNode(), RDFS.label, Literal("remove_me")) - - -@pytest.fixture(scope="function") -def get_store(request): - store = Graph(store="default") - store.open("store") - store.add(remove_me) - yield store +class GraphTest(unittest.TestCase): + backend = "default" + path = "store" - store.close() + def setUp(self): + self.store = Graph(store=self.backend) + self.store.open(self.path) + self.remove_me = (BNode(), RDFS.label, Literal("remove_me")) + self.store.add(self.remove_me) + def tearDown(self): + self.store.close() -def test_add(get_store): - store = get_store - subject = BNode() - store.add((subject, RDFS.label, Literal("foo"))) + def testAdd(self): + subject = BNode() + self.store.add((subject, RDFS.label, Literal("foo"))) + def testRemove(self): + self.store.remove(self.remove_me) + self.store.remove((None, None, None)) -def test_remove(get_store): - store = get_store - store.remove(remove_me) - store.remove((None, None, None)) + def testTriples(self): + for s, p, o in self.store: + pass -def test_triples(get_store): - store = get_store - for s, p, o in store: - pass +if __name__ == "__main__": + unittest.main() From 4ed7a3fe5bfa8447405ae4f5de0eb88e9aec63d0 Mon Sep 17 00:00:00 2001 From: Graham Higgins Date: Sat, 19 Mar 2022 12:54:03 +0000 Subject: [PATCH 11/12] update imports and filepaths --- test/test_conjunctivegraph/test_conjunctivegraph_generators.py | 2 +- .../test_conjunctivegraph_operator_combinations.py | 2 +- test/test_dataset/test_dataset_generators.py | 2 +- test/test_graph/test_graph_generators.py | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) diff --git a/test/test_conjunctivegraph/test_conjunctivegraph_generators.py b/test/test_conjunctivegraph/test_conjunctivegraph_generators.py index 07e88a4f7..7642c2361 100644 --- a/test/test_conjunctivegraph/test_conjunctivegraph_generators.py +++ b/test/test_conjunctivegraph/test_conjunctivegraph_generators.py @@ -3,7 +3,7 @@ timblcardn3 = open( - os.path.join(os.path.dirname(__file__), "consistent_test_data", "timbl-card.n3") + os.path.join(os.path.dirname(__file__), "..", "consistent_test_data", "timbl-card.n3") ).read() diff 
--git a/test/test_conjunctivegraph/test_conjunctivegraph_operator_combinations.py b/test/test_conjunctivegraph/test_conjunctivegraph_operator_combinations.py index c639ceadb..13d204211 100644 --- a/test/test_conjunctivegraph/test_conjunctivegraph_operator_combinations.py +++ b/test/test_conjunctivegraph/test_conjunctivegraph_operator_combinations.py @@ -18,7 +18,7 @@ c2 = URIRef("urn:example:context-2") sportquadstrig = open( - os.path.join(os.path.dirname(__file__), "consistent_test_data", "sportquads.trig") + os.path.join(os.path.dirname(__file__), "..", "consistent_test_data", "sportquads.trig") ).read() diff --git a/test/test_dataset/test_dataset_generators.py b/test/test_dataset/test_dataset_generators.py index 874e48554..c69513360 100644 --- a/test/test_dataset/test_dataset_generators.py +++ b/test/test_dataset/test_dataset_generators.py @@ -2,7 +2,7 @@ from rdflib import Dataset, URIRef timblcardn3 = open( - os.path.join(os.path.dirname(__file__), "consistent_test_data", "timbl-card.n3") + os.path.join(os.path.dirname(__file__), "..", "consistent_test_data", "timbl-card.n3") ).read() diff --git a/test/test_graph/test_graph_generators.py b/test/test_graph/test_graph_generators.py index d013aaec8..958364d39 100644 --- a/test/test_graph/test_graph_generators.py +++ b/test/test_graph/test_graph_generators.py @@ -10,7 +10,7 @@ cheese = URIRef("urn:example:cheese") timblcardn3 = open( - os.path.join(os.path.dirname(__file__), "consistent_test_data", "timbl-card.n3") + os.path.join(os.path.dirname(__file__), "..", "consistent_test_data", "timbl-card.n3") ).read() From 97421c6b405b0e2fc214fbb6f9418f7f8d443ec0 Mon Sep 17 00:00:00 2001 From: Graham Higgins Date: Sat, 19 Mar 2022 12:54:21 +0000 Subject: [PATCH 12/12] update imports and filepaths --- test/test_store/test_store_sparqlstore.py | 4 ++-- test/test_store/test_store_sparqlupdatestore_mock.py | 2 +- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/test/test_store/test_store_sparqlstore.py b/test/test_store/test_store_sparqlstore.py index e7d7b4dac..d26ac9bb4 100644 --- a/test/test_store/test_store_sparqlstore.py +++ b/test/test_store/test_store_sparqlstore.py @@ -10,8 +10,8 @@ import pytest import re -from . import helper -from .testutils import ( +from test import helper +from test.testutils import ( MockHTTPResponse, ServedSimpleHTTPMock, ) diff --git a/test/test_store/test_store_sparqlupdatestore_mock.py b/test/test_store/test_store_sparqlupdatestore_mock.py index f812c3deb..5d35223f9 100644 --- a/test/test_store/test_store_sparqlupdatestore_mock.py +++ b/test/test_store/test_store_sparqlupdatestore_mock.py @@ -1,7 +1,7 @@ from rdflib.graph import ConjunctiveGraph from typing import ClassVar from rdflib import Namespace -from .testutils import MockHTTPResponse, ServedSimpleHTTPMock +from test.testutils import MockHTTPResponse, ServedSimpleHTTPMock import unittest EG = Namespace("http://example.org/")
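A note on the path handling that PATCH 11 settles on: the test modules now live one
directory below test/, so shared fixture files are resolved relative to the test
module itself via os.path.dirname(__file__). A minimal sketch of that pattern under
the same layout (the read_test_data helper is illustrative only, not part of these
patches):

    import os

    # one level up from the test module's directory, mirroring the
    # "..", "consistent_test_data" joins introduced in PATCH 11
    TEST_DATA_DIR = os.path.join(
        os.path.dirname(__file__), "..", "consistent_test_data"
    )

    def read_test_data(filename: str) -> str:
        # read a shared fixture such as timbl-card.n3 or sportquads.trig;
        # the context manager closes the file, unlike the bare
        # open(...).read() calls in the diffs above
        with open(os.path.join(TEST_DATA_DIR, filename)) as f:
            return f.read()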