From d3c67336316c19338b4337abb2e7aa16064d68e0 Mon Sep 17 00:00:00 2001
From: Jeen Broekstra
Date: Mon, 29 Oct 2018 11:03:10 +1100
Subject: [PATCH 01/54] eclipse/rdf4j#78 fix join optimizer messing with
bind/subselect combo
Signed-off-by: Jeen Broekstra
---
evaluation/pom.xml | 6 +++
.../evaluation/impl/QueryJoinOptimizer.java | 12 +++---
.../impl/QueryJoinOptimizerTest.java | 41 +++++++++++++++++++
3 files changed, 53 insertions(+), 6 deletions(-)
diff --git a/evaluation/pom.xml b/evaluation/pom.xml
index 4fa7d5bc9..ae756843b 100644
--- a/evaluation/pom.xml
+++ b/evaluation/pom.xml
@@ -59,6 +59,7 @@
 			<groupId>junit</groupId>
 			<artifactId>junit</artifactId>
+			<scope>test</scope>
 		</dependency>
 		<dependency>
 			<groupId>org.mapdb</groupId>
@@ -68,6 +69,11 @@
 			<groupId>com.google.guava</groupId>
 			<artifactId>guava</artifactId>
 		</dependency>
+		<dependency>
+			<groupId>org.assertj</groupId>
+			<artifactId>assertj-core</artifactId>
+			<scope>test</scope>
+		</dependency>
diff --git a/evaluation/src/main/java/org/eclipse/rdf4j/query/algebra/evaluation/impl/QueryJoinOptimizer.java b/evaluation/src/main/java/org/eclipse/rdf4j/query/algebra/evaluation/impl/QueryJoinOptimizer.java
index 63532ff53..66d484f80 100644
--- a/evaluation/src/main/java/org/eclipse/rdf4j/query/algebra/evaluation/impl/QueryJoinOptimizer.java
+++ b/evaluation/src/main/java/org/eclipse/rdf4j/query/algebra/evaluation/impl/QueryJoinOptimizer.java
@@ -92,15 +92,15 @@ public void meet(Join node) {
// Reorder the subselects and extensions to a more optimal sequence
List<TupleExpr> priorityArgs = new ArrayList<TupleExpr>(joinArgs.size());
- // first get all subselects and order them
- List<TupleExpr> orderedSubselects = reorderSubselects(getSubSelects(joinArgs));
- joinArgs.removeAll(orderedSubselects);
- priorityArgs.addAll(orderedSubselects);
-
- // second get all extensions (BIND clause)
+ // get all extensions (BIND clause)
List<TupleExpr> orderedExtensions = getExtensions(joinArgs);
joinArgs.removeAll(orderedExtensions);
priorityArgs.addAll(orderedExtensions);
+
+ // get all subselects and order them
+ List<TupleExpr> orderedSubselects = reorderSubselects(getSubSelects(joinArgs));
+ joinArgs.removeAll(orderedSubselects);
+ priorityArgs.addAll(orderedSubselects);
// We order all remaining join arguments based on cardinality and
// variable frequency statistics
diff --git a/evaluation/src/test/java/org/eclipse/rdf4j/query/algebra/evaluation/impl/QueryJoinOptimizerTest.java b/evaluation/src/test/java/org/eclipse/rdf4j/query/algebra/evaluation/impl/QueryJoinOptimizerTest.java
index fce9a4529..83d735c1f 100644
--- a/evaluation/src/test/java/org/eclipse/rdf4j/query/algebra/evaluation/impl/QueryJoinOptimizerTest.java
+++ b/evaluation/src/test/java/org/eclipse/rdf4j/query/algebra/evaluation/impl/QueryJoinOptimizerTest.java
@@ -9,6 +9,7 @@
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotEquals;
+import static org.assertj.core.api.Assertions.assertThat;
import org.eclipse.rdf4j.RDF4JException;
import org.eclipse.rdf4j.query.MalformedQueryException;
@@ -16,6 +17,7 @@
import org.eclipse.rdf4j.query.UnsupportedQueryLanguageException;
import org.eclipse.rdf4j.query.algebra.BinaryTupleOperator;
import org.eclipse.rdf4j.query.algebra.Extension;
+import org.eclipse.rdf4j.query.algebra.Join;
import org.eclipse.rdf4j.query.algebra.QueryModelNode;
import org.eclipse.rdf4j.query.algebra.QueryRoot;
import org.eclipse.rdf4j.query.algebra.TupleExpr;
@@ -109,6 +111,32 @@ public void testSES2116JoinBind()
Assert.assertTrue("Extension must be evaluated before StatementPattern", leaf.getParentNode() instanceof Extension);
}
+ @Test
+ public void bindSubselectJoinOrder() throws Exception
+ {
+ String query = "SELECT * WHERE {\n" +
+ " BIND (bnode() as ?ct01) \n" +
+ " { SELECT ?s WHERE {\n" +
+ " ?s ?p ?o .\n" +
+ " }\n" +
+ " LIMIT 10\n" +
+ " }\n" +
+ "}";
+
+ SPARQLParser parser = new SPARQLParser();
+ ParsedQuery q = parser.parseQuery(query, null);
+ QueryJoinOptimizer opt = new QueryJoinOptimizer();
+ QueryRoot optRoot = new QueryRoot(q.getTupleExpr());
+ opt.optimize(optRoot, null, null);
+
+ JoinFinder joinFinder = new JoinFinder();
+ optRoot.visit(joinFinder);
+ Join join = joinFinder.getJoin();
+
+ assertThat(join.getLeftArg()).as("BIND clause should be left-most argument of join").isInstanceOf(Extension.class);
+ }
+
+
private TupleExpr findLeaf(TupleExpr expr) {
if (expr instanceof UnaryTupleOperator) {
return findLeaf(((UnaryTupleOperator)expr).getArg());
@@ -136,4 +164,17 @@ private void testOptimizer(String expectedQuery, String actualQuery)
private void assertQueryModelTrees(QueryModelNode expected, QueryModelNode actual) {
assertEquals(expected, actual);
}
+
+ class JoinFinder extends AbstractQueryModelVisitor<RuntimeException> {
+
+ private Join join;
+
+ public void meet(Join join) {
+ this.join = join;
+ }
+
+ public Join getJoin() {
+ return join;
+ }
+ }
}
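For illustration only (not part of the patch): a minimal sketch of the behaviour this change restores, using the same RDF4J 2.x parser and optimizer APIs as the test above; the class name and query are made up for the example.

import org.eclipse.rdf4j.query.algebra.TupleExpr;
import org.eclipse.rdf4j.query.algebra.evaluation.impl.QueryJoinOptimizer;
import org.eclipse.rdf4j.query.parser.ParsedQuery;
import org.eclipse.rdf4j.query.parser.sparql.SPARQLParser;

public class BindSubselectOrderSketch {

	public static void main(String[] args) {
		// a BIND clause joined with a sub-select, as in the test above
		String query = "SELECT * WHERE { BIND(bnode() AS ?b) { SELECT ?s WHERE { ?s ?p ?o } LIMIT 10 } }";
		ParsedQuery parsed = new SPARQLParser().parseQuery(query, null);
		TupleExpr expr = parsed.getTupleExpr();

		new QueryJoinOptimizer().optimize(expr, null, null);

		// With this patch the Extension produced by the BIND clause stays the
		// left-hand (first evaluated) argument of the Join, ahead of the sub-select.
		System.out.println(expr);
	}
}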
From 89107e1300d385276b1f0c5c16dc56e4fcaef58a Mon Sep 17 00:00:00 2001
From: Jeen Broekstra
Date: Sat, 17 Nov 2018 11:06:28 +1100
Subject: [PATCH 02/54] eclipse/rdf4j#1168 fix handling of language tags in
concat function
Signed-off-by: Jeen Broekstra
---
evaluation/pom.xml | 28 +++--
.../evaluation/function/string/Concat.java | 37 +++---
.../function/string/ConcatTest.java | 105 ++++++++++++++++++
3 files changed, 137 insertions(+), 33 deletions(-)
create mode 100644 evaluation/src/test/java/org/eclipse/rdf4j/query/algebra/evaluation/function/string/ConcatTest.java
diff --git a/evaluation/pom.xml b/evaluation/pom.xml
index 4fa7d5bc9..a581bbdf0 100644
--- a/evaluation/pom.xml
+++ b/evaluation/pom.xml
@@ -1,4 +1,6 @@
-
+4.0.0
@@ -29,7 +31,8 @@
rdf4j-query${project.version}
-
+
${project.groupId}rdf4j-repository-sparql
@@ -45,7 +48,7 @@
rdf4j-util${project.version}
-
+
org.slf4jslf4j-api
@@ -59,7 +62,14 @@
 			<groupId>junit</groupId>
 			<artifactId>junit</artifactId>
+			<scope>test</scope>
+		</dependency>
+		<dependency>
+			<groupId>org.assertj</groupId>
+			<artifactId>assertj-core</artifactId>
+			<scope>test</scope>
+		</dependency>
 		<dependency>
 			<groupId>org.mapdb</groupId>
 			<artifactId>mapdb</artifactId>
@@ -69,12 +79,12 @@
guava
-
-
-
- com.github.siom79.japicmp
- japicmp-maven-plugin
-
+
+
+
+ com.github.siom79.japicmp
+ japicmp-maven-plugin
+
diff --git a/evaluation/src/main/java/org/eclipse/rdf4j/query/algebra/evaluation/function/string/Concat.java b/evaluation/src/main/java/org/eclipse/rdf4j/query/algebra/evaluation/function/string/Concat.java
index 0ffddac65..4a28d2c20 100644
--- a/evaluation/src/main/java/org/eclipse/rdf4j/query/algebra/evaluation/function/string/Concat.java
+++ b/evaluation/src/main/java/org/eclipse/rdf4j/query/algebra/evaluation/function/string/Concat.java
@@ -15,6 +15,7 @@
import org.eclipse.rdf4j.model.vocabulary.XMLSchema;
import org.eclipse.rdf4j.query.algebra.evaluation.ValueExprEvaluationException;
import org.eclipse.rdf4j.query.algebra.evaluation.function.Function;
+import org.eclipse.rdf4j.query.algebra.evaluation.util.QueryEvaluationUtil;
/**
* The SPARQL built-in {@link Function} CONCAT, as defined in
@@ -36,23 +37,25 @@ public Literal evaluate(ValueFactory valueFactory, Value... args)
}
StringBuilder concatBuilder = new StringBuilder();
- String languageTag = null;
-
+ String commonLanguageTag = null;
boolean useLanguageTag = true;
- boolean useDatatype = true;
for (Value arg : args) {
if (arg instanceof Literal) {
Literal lit = (Literal)arg;
+ if (!QueryEvaluationUtil.isStringLiteral(lit)) {
+ throw new ValueExprEvaluationException("unexpected datatype for CONCAT operand: " + lit);
+ }
+
// verify that every literal argument has the same language tag. If
// not, the operator result should not use a language tag.
if (useLanguageTag && Literals.isLanguageLiteral(lit)) {
- if (languageTag == null) {
- languageTag = lit.getLanguage().get();
+ if (commonLanguageTag == null) {
+ commonLanguageTag = lit.getLanguage().get();
}
- else if (!languageTag.equals(lit.getLanguage())) {
- languageTag = null;
+ else if (!commonLanguageTag.equals(lit.getLanguage().orElse(null))) {
+ commonLanguageTag = null;
useLanguageTag = false;
}
}
@@ -60,32 +63,18 @@ else if (!languageTag.equals(lit.getLanguage())) {
useLanguageTag = false;
}
- // check datatype: concat only expects plain, language-tagged or
- // string-typed literals. If all arguments are of type xsd:string,
- // the result also should be,
- // otherwise the result will not have a datatype.
- if (lit.getDatatype() == null) {
- useDatatype = false;
- }
- else if (!lit.getDatatype().equals(XMLSchema.STRING)) {
- throw new ValueExprEvaluationException("unexpected data type for concat operand: " + arg);
- }
-
concatBuilder.append(lit.getLabel());
}
else {
throw new ValueExprEvaluationException(
- "unexpected argument type for concat operator: " + arg);
+ "unexpected argument type for CONCAT operator: " + arg);
}
}
Literal result = null;
- if (useDatatype) {
- result = valueFactory.createLiteral(concatBuilder.toString(), XMLSchema.STRING);
- }
- else if (useLanguageTag) {
- result = valueFactory.createLiteral(concatBuilder.toString(), languageTag);
+ if (useLanguageTag) {
+ result = valueFactory.createLiteral(concatBuilder.toString(), commonLanguageTag);
}
else {
result = valueFactory.createLiteral(concatBuilder.toString());
diff --git a/evaluation/src/test/java/org/eclipse/rdf4j/query/algebra/evaluation/function/string/ConcatTest.java b/evaluation/src/test/java/org/eclipse/rdf4j/query/algebra/evaluation/function/string/ConcatTest.java
new file mode 100644
index 000000000..447be91b0
--- /dev/null
+++ b/evaluation/src/test/java/org/eclipse/rdf4j/query/algebra/evaluation/function/string/ConcatTest.java
@@ -0,0 +1,105 @@
+/*******************************************************************************
+ * Copyright (c) 2018 Eclipse RDF4J contributors.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Distribution License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *******************************************************************************/
+package org.eclipse.rdf4j.query.algebra.evaluation.function.string;
+
+import org.eclipse.rdf4j.model.Literal;
+import org.eclipse.rdf4j.model.ValueFactory;
+import org.eclipse.rdf4j.model.impl.BooleanLiteral;
+import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
+import org.eclipse.rdf4j.model.vocabulary.RDF;
+import org.eclipse.rdf4j.model.vocabulary.XMLSchema;
+import org.eclipse.rdf4j.query.algebra.evaluation.ValueExprEvaluationException;
+import org.junit.Before;
+import org.junit.Test;
+
+import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.Assert.fail;
+
+public class ConcatTest {
+
+ private Concat concatFunc;
+
+ private static final ValueFactory vf = SimpleValueFactory.getInstance();
+
+ private static final Literal foo = vf.createLiteral("foo");
+
+ private static final Literal bar = vf.createLiteral("bar");
+
+ private static final Literal foo_en = vf.createLiteral("foo", "en");
+
+ private static final Literal bar_en = vf.createLiteral("bar", "en");
+
+ private static final Literal foo_nl = vf.createLiteral("foo", "nl");
+
+ @Before
+ public void setUp()
+ throws Exception
+ {
+ concatFunc = new Concat();
+ }
+
+ @Test
+ public void stringLiteralHandling() {
+ Literal result = concatFunc.evaluate(vf, foo, bar);
+
+ assertThat(result.stringValue()).isEqualTo("foobar");
+ assertThat(result.getDatatype()).isEqualTo(XMLSchema.STRING);
+ assertThat(result.getLanguage().isPresent()).isFalse();
+ }
+
+ @Test
+ public void commonLanguageLiteralHandling() {
+ Literal result = concatFunc.evaluate(vf, foo_en, bar_en);
+
+ assertThat(result.stringValue()).isEqualTo("foobar");
+ assertThat(result.getDatatype()).isEqualTo(RDF.LANGSTRING);
+ assertThat(result.getLanguage().get()).isEqualTo("en");
+
+ }
+
+ @Test
+ public void mixedLanguageLiteralHandling() {
+ Literal result = concatFunc.evaluate(vf, foo_nl, bar_en);
+
+ assertThat(result.stringValue()).isEqualTo("foobar");
+ assertThat(result.getDatatype()).isEqualTo(XMLSchema.STRING);
+ assertThat(result.getLanguage().isPresent()).isFalse();
+ }
+
+ @Test
+ public void mixedLiteralHandling() {
+ Literal result = concatFunc.evaluate(vf, foo, bar_en);
+
+ assertThat(result.stringValue()).isEqualTo("foobar");
+ assertThat(result.getDatatype()).isEqualTo(XMLSchema.STRING);
+ assertThat(result.getLanguage().isPresent()).isFalse();
+ }
+
+ @Test
+ public void nonStringLiteralHandling() {
+ try {
+ concatFunc.evaluate(vf, RDF.TYPE, BooleanLiteral.TRUE);
+ fail("CONCAT expected to fail on non-stringliteral argument");
+ }
+ catch (ValueExprEvaluationException e) {
+ // ignore, expected
+ }
+ }
+
+ @Test
+ public void nonLiteralHandling() {
+ try {
+ concatFunc.evaluate(vf, RDF.TYPE, bar_en);
+ fail("CONCAT expected to fail on non-literal argument");
+ }
+ catch (ValueExprEvaluationException e) {
+ // ignore, expected
+ }
+ }
+
+}
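For illustration only (not part of the patch): a minimal sketch of the CONCAT behaviour the new tests pin down, run as a SPARQL query against an in-memory SailRepository; the class name and query are illustrative.

import org.eclipse.rdf4j.query.BindingSet;
import org.eclipse.rdf4j.query.QueryLanguage;
import org.eclipse.rdf4j.query.TupleQueryResult;
import org.eclipse.rdf4j.repository.Repository;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.sail.SailRepository;
import org.eclipse.rdf4j.sail.memory.MemoryStore;

public class ConcatLanguageTagSketch {

	public static void main(String[] args) {
		Repository repo = new SailRepository(new MemoryStore());
		repo.initialize();
		try (RepositoryConnection con = repo.getConnection()) {
			// same language tag on every argument: the tag is kept;
			// mixed (or missing) tags: the result is a plain string literal
			String query = "SELECT ?same ?mixed WHERE { "
					+ "BIND(CONCAT(\"foo\"@en, \"bar\"@en) AS ?same) "
					+ "BIND(CONCAT(\"foo\"@nl, \"bar\"@en) AS ?mixed) }";
			TupleQueryResult result = con.prepareTupleQuery(QueryLanguage.SPARQL, query).evaluate();
			BindingSet bs = result.next();
			System.out.println(bs.getValue("same"));  // "foobar" with language tag @en
			System.out.println(bs.getValue("mixed")); // plain "foobar", no language tag
			result.close();
		}
		repo.shutDown();
	}
}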
From 0803e012c114fc83368c151f9e82f8ad64724e4e Mon Sep 17 00:00:00 2001
From: Jeen Broekstra
Date: Tue, 11 Dec 2018 20:14:02 +1100
Subject: [PATCH 03/54] eclipse/rdf4j#1196 temporarily disable tests involving
ESIntegTestCase
Signed-off-by: Jeen Broekstra
---
.../rdf4j/sail/elasticsearch/ElasticsearchIndexTest.java | 2 ++
.../sail/elasticsearch/ElasticsearchSailGeoSPARQLTest.java | 1 +
.../elasticsearch/ElasticsearchSailIndexedPropertiesTest.java | 2 ++
.../eclipse/rdf4j/sail/elasticsearch/ElasticsearchSailTest.java | 2 ++
4 files changed, 7 insertions(+)
diff --git a/compliance/elasticsearch/src/test/java/org/eclipse/rdf4j/sail/elasticsearch/ElasticsearchIndexTest.java b/compliance/elasticsearch/src/test/java/org/eclipse/rdf4j/sail/elasticsearch/ElasticsearchIndexTest.java
index 5d91f8aee..7fd2cefaa 100644
--- a/compliance/elasticsearch/src/test/java/org/eclipse/rdf4j/sail/elasticsearch/ElasticsearchIndexTest.java
+++ b/compliance/elasticsearch/src/test/java/org/eclipse/rdf4j/sail/elasticsearch/ElasticsearchIndexTest.java
@@ -41,10 +41,12 @@
import org.elasticsearch.test.ESIntegTestCase.SuppressLocalMode;
import org.junit.After;
import org.junit.Before;
+import org.junit.Ignore;
import org.junit.Test;
@ClusterScope(numDataNodes = 1)
@SuppressLocalMode
+@Ignore("timeouts on JIPP due to ES cluster being spun up - see https://github.com/eclipse/rdf4j/issues/1196")
public class ElasticsearchIndexTest extends ESIntegTestCase {
private static final ValueFactory vf = SimpleValueFactory.getInstance();
diff --git a/compliance/elasticsearch/src/test/java/org/eclipse/rdf4j/sail/elasticsearch/ElasticsearchSailGeoSPARQLTest.java b/compliance/elasticsearch/src/test/java/org/eclipse/rdf4j/sail/elasticsearch/ElasticsearchSailGeoSPARQLTest.java
index 23387680f..9fd040233 100644
--- a/compliance/elasticsearch/src/test/java/org/eclipse/rdf4j/sail/elasticsearch/ElasticsearchSailGeoSPARQLTest.java
+++ b/compliance/elasticsearch/src/test/java/org/eclipse/rdf4j/sail/elasticsearch/ElasticsearchSailGeoSPARQLTest.java
@@ -28,6 +28,7 @@
@ClusterScope(numDataNodes = 1)
@SuppressLocalMode
+@Ignore("timeouts on JIPP due to ES cluster being spun up - see https://github.com/eclipse/rdf4j/issues/1196")
public class ElasticsearchSailGeoSPARQLTest extends ESIntegTestCase {
AbstractLuceneSailGeoSPARQLTest delegateTest;
diff --git a/compliance/elasticsearch/src/test/java/org/eclipse/rdf4j/sail/elasticsearch/ElasticsearchSailIndexedPropertiesTest.java b/compliance/elasticsearch/src/test/java/org/eclipse/rdf4j/sail/elasticsearch/ElasticsearchSailIndexedPropertiesTest.java
index 87f69b66d..fc62a637b 100644
--- a/compliance/elasticsearch/src/test/java/org/eclipse/rdf4j/sail/elasticsearch/ElasticsearchSailIndexedPropertiesTest.java
+++ b/compliance/elasticsearch/src/test/java/org/eclipse/rdf4j/sail/elasticsearch/ElasticsearchSailIndexedPropertiesTest.java
@@ -23,10 +23,12 @@
import org.elasticsearch.test.ESIntegTestCase.SuppressLocalMode;
import org.junit.After;
import org.junit.Before;
+import org.junit.Ignore;
import org.junit.Test;
@ClusterScope(numDataNodes = 1)
@SuppressLocalMode
+@Ignore("timeouts on JIPP due to ES cluster being spun up - see https://github.com/eclipse/rdf4j/issues/1196")
public class ElasticsearchSailIndexedPropertiesTest extends ESIntegTestCase {
AbstractLuceneSailIndexedPropertiesTest delegateTest;
diff --git a/compliance/elasticsearch/src/test/java/org/eclipse/rdf4j/sail/elasticsearch/ElasticsearchSailTest.java b/compliance/elasticsearch/src/test/java/org/eclipse/rdf4j/sail/elasticsearch/ElasticsearchSailTest.java
index ee4e37645..a48724d38 100644
--- a/compliance/elasticsearch/src/test/java/org/eclipse/rdf4j/sail/elasticsearch/ElasticsearchSailTest.java
+++ b/compliance/elasticsearch/src/test/java/org/eclipse/rdf4j/sail/elasticsearch/ElasticsearchSailTest.java
@@ -23,10 +23,12 @@
import org.elasticsearch.test.ESIntegTestCase.SuppressLocalMode;
import org.junit.After;
import org.junit.Before;
+import org.junit.Ignore;
import org.junit.Test;
@ClusterScope(numDataNodes = 1)
@SuppressLocalMode
+@Ignore("timeouts on JIPP due to ES cluster being spun up - see https://github.com/eclipse/rdf4j/issues/1196")
public class ElasticsearchSailTest extends ESIntegTestCase {
AbstractLuceneSailTest delegateTest;
From c2622e1e2821d52e868229341d20b29fdc63f39c Mon Sep 17 00:00:00 2001
From: Jeen Broekstra
Date: Wed, 12 Dec 2018 09:54:32 +1100
Subject: [PATCH 04/54] eclipse/rdf4j#1195 bump maven shade plugin version
Signed-off-by: Jeen Broekstra
---
pom.xml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/pom.xml b/pom.xml
index 1a0a13970..93665f3fb 100644
--- a/pom.xml
+++ b/pom.xml
@@ -753,7 +753,7 @@
 					<groupId>org.apache.maven.plugins</groupId>
 					<artifactId>maven-shade-plugin</artifactId>
-					<version>3.0.0</version>
+					<version>3.2.1</version>
From 0c158671a8064c0477ea278176869fdc4be30ea6 Mon Sep 17 00:00:00 2001
From: Bart Hanssens
Date: Wed, 19 Dec 2018 18:37:42 +0100
Subject: [PATCH 05/54] Upgrade to latest Mockito 2
Signed-off-by: Bart Hanssens
---
.../function/geosparql/GeometricBinaryFunctionTest.java | 2 +-
.../geosparql/GeometricRelationFunctionTest.java | 2 +-
.../function/geosparql/GeometricUnaryFunctionTest.java | 2 +-
pom.xml | 9 +++++----
4 files changed, 8 insertions(+), 7 deletions(-)
diff --git a/geosparql/src/test/java/org/eclipse/rdf4j/query/algebra/evaluation/function/geosparql/GeometricBinaryFunctionTest.java b/geosparql/src/test/java/org/eclipse/rdf4j/query/algebra/evaluation/function/geosparql/GeometricBinaryFunctionTest.java
index 7f1943688..08c11880d 100644
--- a/geosparql/src/test/java/org/eclipse/rdf4j/query/algebra/evaluation/function/geosparql/GeometricBinaryFunctionTest.java
+++ b/geosparql/src/test/java/org/eclipse/rdf4j/query/algebra/evaluation/function/geosparql/GeometricBinaryFunctionTest.java
@@ -14,7 +14,7 @@
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.junit.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public abstract class GeometricBinaryFunctionTest {
diff --git a/geosparql/src/test/java/org/eclipse/rdf4j/query/algebra/evaluation/function/geosparql/GeometricRelationFunctionTest.java b/geosparql/src/test/java/org/eclipse/rdf4j/query/algebra/evaluation/function/geosparql/GeometricRelationFunctionTest.java
index 8f11201e7..2cf7616e0 100644
--- a/geosparql/src/test/java/org/eclipse/rdf4j/query/algebra/evaluation/function/geosparql/GeometricRelationFunctionTest.java
+++ b/geosparql/src/test/java/org/eclipse/rdf4j/query/algebra/evaluation/function/geosparql/GeometricRelationFunctionTest.java
@@ -14,7 +14,7 @@
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.junit.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public abstract class GeometricRelationFunctionTest {
diff --git a/geosparql/src/test/java/org/eclipse/rdf4j/query/algebra/evaluation/function/geosparql/GeometricUnaryFunctionTest.java b/geosparql/src/test/java/org/eclipse/rdf4j/query/algebra/evaluation/function/geosparql/GeometricUnaryFunctionTest.java
index fe32be311..32c0c2ff4 100644
--- a/geosparql/src/test/java/org/eclipse/rdf4j/query/algebra/evaluation/function/geosparql/GeometricUnaryFunctionTest.java
+++ b/geosparql/src/test/java/org/eclipse/rdf4j/query/algebra/evaluation/function/geosparql/GeometricUnaryFunctionTest.java
@@ -14,7 +14,7 @@
import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mockito;
-import org.mockito.runners.MockitoJUnitRunner;
+import org.mockito.junit.MockitoJUnitRunner;
@RunWith(MockitoJUnitRunner.class)
public abstract class GeometricUnaryFunctionTest {
diff --git a/pom.xml b/pom.xml
index 3e4860b0b..9091d1b94 100644
--- a/pom.xml
+++ b/pom.xml
@@ -484,10 +484,9 @@
 				<scope>test</scope>
 			</dependency>
-
 			<dependency>
 				<groupId>org.mockito</groupId>
 				<artifactId>mockito-core</artifactId>
-				<version>1.10.19</version>
+				<version>2.23.4</version>
 				<scope>test</scope>
 			</dependency>
@@ -496,7 +495,6 @@
 				<version>3.9.1</version>
 				<scope>test</scope>
 			</dependency>
-
 			<dependency>
 				<groupId>com.github.jsonld-java</groupId>
 				<artifactId>jsonld-java</artifactId>
@@ -799,6 +797,9 @@
1.8
+
+ META-INF/**/module-info
+ org.apache.logging.log4j:log4j-apiorg.elasticsearch:elasticsearch
@@ -813,7 +814,7 @@
 						<groupId>org.codehaus.mojo</groupId>
 						<artifactId>extra-enforcer-rules</artifactId>
-						<version>1.0-beta-6</version>
+						<version>1.0</version>
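For reference (not part of the patch): the only source-level change this upgrade requires is the JUnit 4 runner package move from org.mockito.runners to org.mockito.junit, as the three test diffs above show. A minimal sketch with a made-up test class:

import static org.mockito.Mockito.verify;

import org.junit.Test;
import org.junit.runner.RunWith;
import org.mockito.Mock;
import org.mockito.junit.MockitoJUnitRunner;

@RunWith(MockitoJUnitRunner.class)
public class MockitoTwoRunnerSketch {

	@Mock
	private Runnable task;

	@Test
	public void runnerInitializesMocks() {
		task.run();
		verify(task).run(); // the @Mock field was injected by the Mockito 2 runner
	}
}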
From e6f3c12b67058cd9a907def76a4ebc00c4527f90 Mon Sep 17 00:00:00 2001
From: Bart Hanssens
Date: Thu, 20 Dec 2018 13:22:31 +0100
Subject: [PATCH 06/54] Remove unused cglib
Signed-off-by: Bart Hanssens
---
pom.xml | 6 ------
1 file changed, 6 deletions(-)
diff --git a/pom.xml b/pom.xml
index 9091d1b94..e291e9e57 100644
--- a/pom.xml
+++ b/pom.xml
@@ -424,12 +424,6 @@
-
-		<dependency>
-			<groupId>cglib</groupId>
-			<artifactId>cglib</artifactId>
-			<version>3.1</version>
-		</dependency>
 		<dependency>
 			<groupId>com.google.guava</groupId>
 			<artifactId>guava</artifactId>
From b8b438ed071f1d9ee11751d443764feb906fe323 Mon Sep 17 00:00:00 2001
From: Bart Hanssens
Date: Thu, 20 Dec 2018 20:04:49 +0100
Subject: [PATCH 07/54] Updated lucene version in config
Signed-off-by: Bart Hanssens
---
compliance/solr/solr/cores/embedded/conf/solrconfig.xml | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/compliance/solr/solr/cores/embedded/conf/solrconfig.xml b/compliance/solr/solr/cores/embedded/conf/solrconfig.xml
index fe32b1f0b..f16445486 100644
--- a/compliance/solr/solr/cores/embedded/conf/solrconfig.xml
+++ b/compliance/solr/solr/cores/embedded/conf/solrconfig.xml
@@ -1,6 +1,6 @@
-	<luceneMatchVersion>5.0.0</luceneMatchVersion>
+	<luceneMatchVersion>6.0.0</luceneMatchVersion>
 	<dataDir>target/test-data</dataDir>
From be80565e7d09a49f6d56ca505ce925b658a5d66f Mon Sep 17 00:00:00 2001
From: Jeen Broekstra
Date: Mon, 24 Dec 2018 11:23:20 +1100
Subject: [PATCH 08/54] fix reactor order and include shacl in shaded jar
Signed-off-by: Jeen Broekstra
---
pom.xml | 2 +-
storage/pom.xml | 1 +
2 files changed, 2 insertions(+), 1 deletion(-)
diff --git a/pom.xml b/pom.xml
index 93665f3fb..a88b2cf5b 100644
--- a/pom.xml
+++ b/pom.xml
@@ -68,8 +68,8 @@
 		<module>sail-spin</module>
 		<module>solr</module>
 		<module>spin</module>
-		<module>storage</module>
 		<module>shacl</module>
+		<module>storage</module>
diff --git a/storage/pom.xml b/storage/pom.xml
index 8e2473777..d6acfa979 100644
--- a/storage/pom.xml
+++ b/storage/pom.xml
@@ -430,6 +430,7 @@
 								<include>org.eclipse.rdf4j:rdf4j-sail-*</include>
 								<include>org.eclipse.rdf4j:rdf4j-lucene-spin</include>
 								<include>org.eclipse.rdf4j:rdf4j-spin</include>
+								<include>org.eclipse.rdf4j:rdf4j-shacl</include>
From 44994ef2365c52bf5ec691cc5923bb4f8299e2c0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ha=CC=8Avard=20Ottestad?=
Date: Sun, 9 Dec 2018 18:38:16 +0100
Subject: [PATCH 09/54] eclipse/rdf4j#1197 add statements with correct graph
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Signed-off-by: Håvard Ottestad
---
...SchemaCachingRDFSInferencerConnection.java | 62 ++++++++++---------
1 file changed, 33 insertions(+), 29 deletions(-)
diff --git a/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/SchemaCachingRDFSInferencerConnection.java b/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/SchemaCachingRDFSInferencerConnection.java
index c2ff00234..52b8a51a1 100644
--- a/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/SchemaCachingRDFSInferencerConnection.java
+++ b/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/SchemaCachingRDFSInferencerConnection.java
@@ -196,10 +196,10 @@ private void addStatement(boolean actuallyAdd, Resource subject, IRI predicate,
}
if (sail.useAllRdfsRules) {
- addInferredStatement(subject, RDF.TYPE, RDFS.RESOURCE);
+ addInferredStatement(subject, RDF.TYPE, RDFS.RESOURCE, resources);
if (object instanceof Resource) {
- addInferredStatement((Resource)object, RDF.TYPE, RDFS.RESOURCE);
+ addInferredStatement((Resource)object, RDF.TYPE, RDFS.RESOURCE, resources);
}
}
@@ -208,13 +208,13 @@ private void addStatement(boolean actuallyAdd, Resource subject, IRI predicate,
try {
int i = Integer.parseInt(predicate.getLocalName().substring(1));
if (i >= 1) {
- addInferredStatement(subject, RDFS.MEMBER, object);
+ addInferredStatement(subject, RDFS.MEMBER, object, resources);
- addInferredStatement(predicate, RDF.TYPE, RDFS.RESOURCE);
- addInferredStatement(predicate, RDF.TYPE, RDFS.CONTAINERMEMBERSHIPPROPERTY);
- addInferredStatement(predicate, RDF.TYPE, RDF.PROPERTY);
- addInferredStatement(predicate, RDFS.SUBPROPERTYOF, predicate);
- addInferredStatement(predicate, RDFS.SUBPROPERTYOF, RDFS.MEMBER);
+ addInferredStatement(predicate, RDF.TYPE, RDFS.RESOURCE, resources);
+ addInferredStatement(predicate, RDF.TYPE, RDFS.CONTAINERMEMBERSHIPPROPERTY, resources);
+ addInferredStatement(predicate, RDF.TYPE, RDF.PROPERTY, resources);
+ addInferredStatement(predicate, RDFS.SUBPROPERTYOF, predicate, resources);
+ addInferredStatement(predicate, RDFS.SUBPROPERTYOF, RDFS.MEMBER, resources);
}
}
@@ -241,32 +241,38 @@ private void addStatement(boolean actuallyAdd, Resource subject, IRI predicate,
sail.resolveTypes((Resource)object).stream().peek(inferredType -> {
if (sail.useAllRdfsRules && inferredType.equals(RDFS.CLASS)) {
- addInferredStatement(subject, RDFS.SUBCLASSOF, RDFS.RESOURCE);
+ addInferredStatement(subject, RDFS.SUBCLASSOF, RDFS.RESOURCE, resources);
}
}).filter(inferredType -> !inferredType.equals(object)).forEach(
- inferredType -> addInferredStatement(subject, RDF.TYPE, inferredType));
+ inferredType -> addInferredStatement(subject, RDF.TYPE, inferredType, resources));
}
- sail.resolveProperties(predicate).stream().filter(
- inferredProperty -> !inferredProperty.equals(predicate)).filter(
- inferredPropery -> inferredPropery instanceof IRI).map(
- inferredPropery -> ((IRI)inferredPropery)).forEach(
- inferredProperty -> addInferredStatement(subject, inferredProperty,
- object));
+ sail.resolveProperties(predicate)
+ .stream()
+ .filter(inferredProperty -> !inferredProperty.equals(predicate))
+ .filter(inferredPropery -> inferredPropery instanceof IRI)
+ .map(inferredPropery -> ((IRI) inferredPropery))
+ .forEach(inferredProperty -> addInferredStatement(subject, inferredProperty, object, resources));
if (object instanceof Resource) {
- sail.resolveRangeTypes(predicate).stream().peek(inferredType -> {
- if (sail.useAllRdfsRules && inferredType.equals(RDFS.CLASS)) {
- addInferredStatement(((Resource)object), RDFS.SUBCLASSOF, RDFS.RESOURCE);
- }
- }).forEach(inferredType -> addInferredStatement(((Resource)object), RDF.TYPE, inferredType));
+ sail.resolveRangeTypes(predicate)
+ .stream()
+ .peek(inferredType -> {
+ if (sail.useAllRdfsRules && inferredType.equals(RDFS.CLASS)) {
+ addInferredStatement(((Resource) object), RDFS.SUBCLASSOF, RDFS.RESOURCE, resources);
+ }
+ })
+ .forEach(inferredType -> addInferredStatement(((Resource) object), RDF.TYPE, inferredType, resources));
}
- sail.resolveDomainTypes(predicate).stream().peek(inferredType -> {
- if (sail.useAllRdfsRules && inferredType.equals(RDFS.CLASS)) {
- addInferredStatement(subject, RDFS.SUBCLASSOF, RDFS.RESOURCE);
- }
- }).forEach(inferredType -> addInferredStatement((subject), RDF.TYPE, inferredType));
+ sail.resolveDomainTypes(predicate)
+ .stream()
+ .peek(inferredType -> {
+ if (sail.useAllRdfsRules && inferredType.equals(RDFS.CLASS)) {
+ addInferredStatement(subject, RDFS.SUBCLASSOF, RDFS.RESOURCE, resources);
+ }
+ })
+ .forEach(inferredType -> addInferredStatement((subject), RDF.TYPE, inferredType, resources));
}
@@ -522,8 +528,7 @@ void addAxiomStatements() {
statement = vf.createStatement(RDFS.CONTAINERMEMBERSHIPPROPERTY, RDFS.SUBCLASSOF, RDFS.RESOURCE);
processForSchemaCache(statement);
addInferredStatement(statement.getSubject(), statement.getPredicate(), statement.getObject());
- statement = vf.createStatement(RDFS.CONTAINERMEMBERSHIPPROPERTY, RDFS.SUBCLASSOF,
- RDFS.CONTAINERMEMBERSHIPPROPERTY);
+ statement = vf.createStatement(RDFS.CONTAINERMEMBERSHIPPROPERTY, RDFS.SUBCLASSOF, RDFS.CONTAINERMEMBERSHIPPROPERTY);
processForSchemaCache(statement);
addInferredStatement(statement.getSubject(), statement.getPredicate(), statement.getObject());
statement = vf.createStatement(RDFS.CONTAINERMEMBERSHIPPROPERTY, RDFS.SUBCLASSOF, RDF.PROPERTY);
@@ -718,7 +723,6 @@ public void rollback()
sail.rolledBackAfterModifyingSchemaCache = true;
}
- statementsRemoved = false;
statementsRemoved = false;
sail.releaseExclusiveWriteLock();
From 57f512c8de6ba205177cb9690333f2821a43b322 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ha=CC=8Avard=20Ottestad?=
Date: Tue, 11 Dec 2018 22:08:08 +0100
Subject: [PATCH 10/54] fixed tests locally instead of in the test suite and
also added a new test
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Signed-off-by: Håvard Ottestad
---
...FSchemaMemoryRepositoryConnectionTest.java | 124 +++++++++++++++++
...erencerNativeRepositoryConnectionTest.java | 129 ++++++++++++++++++
2 files changed, 253 insertions(+)
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SchemaCachingRDFSInferencerRDFSchemaMemoryRepositoryConnectionTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SchemaCachingRDFSInferencerRDFSchemaMemoryRepositoryConnectionTest.java
index 1f11d7c04..947ca4619 100644
--- a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SchemaCachingRDFSInferencerRDFSchemaMemoryRepositoryConnectionTest.java
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SchemaCachingRDFSInferencerRDFSchemaMemoryRepositoryConnectionTest.java
@@ -8,11 +8,22 @@
package org.eclipse.rdf4j.repository.sail.memory;
import org.eclipse.rdf4j.IsolationLevel;
+import org.eclipse.rdf4j.common.iteration.Iterations;
+import org.eclipse.rdf4j.model.IRI;
+import org.eclipse.rdf4j.model.Statement;
+import org.eclipse.rdf4j.model.vocabulary.RDF;
+import org.eclipse.rdf4j.model.vocabulary.RDFS;
import org.eclipse.rdf4j.repository.RDFSchemaRepositoryConnectionTest;
import org.eclipse.rdf4j.repository.Repository;
import org.eclipse.rdf4j.repository.sail.SailRepository;
import org.eclipse.rdf4j.sail.inferencer.fc.SchemaCachingRDFSInferencer;
import org.eclipse.rdf4j.sail.memory.MemoryStore;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import java.util.stream.Stream;
+
+import static org.junit.Assert.assertEquals;
public class SchemaCachingRDFSInferencerRDFSchemaMemoryRepositoryConnectionTest
extends RDFSchemaRepositoryConnectionTest
@@ -29,4 +40,117 @@ protected Repository createRepository() {
return new SailRepository(new SchemaCachingRDFSInferencer(new MemoryStore(), true));
}
+
+ @Ignore
+ @Test
+ @Override
+ public void testDefaultContext()
+ throws Exception
+ {
+ // ignore
+ }
+
+ @Ignore
+ @Test
+ @Override
+ public void testDefaultInsertContext()
+ throws Exception
+ {
+ // ignore
+ }
+
+ @Ignore
+ @Test
+ @Override
+ public void testExclusiveNullContext()
+ throws Exception
+ {
+ // ignore
+ }
+
+
+ @Override
+ @Test
+ @Ignore
+ public void testQueryDefaultGraph()
+ throws Exception
+ {
+ // ignore
+ }
+
+
+ @Override
+ @Test
+ @Ignore
+ public void testDeleteDefaultGraph()
+ throws Exception
+ {
+ // ignore
+ }
+
+ @Override
+ @Test
+ @Ignore
+ public void testContextStatementsNotDuplicated()
+ throws Exception
+ {
+ // ignore
+ }
+
+ @Override
+ @Test
+ @Ignore
+ public void testContextStatementsNotDuplicated2()
+ throws Exception
+ {
+ // ignore
+ }
+
+
+ @Test
+ public void testContextTbox()
+ {
+
+// Man subClassOf Human g1
+// Human subClassOf Animal g2
+// -> Man subClassOf Animal ??
+
+ IRI man = vf.createIRI("http://example.org/Man");
+ IRI human = vf.createIRI("http://example.org/Human");
+ IRI animal = vf.createIRI("http://example.org/Animal");
+ IRI bob = vf.createIRI("http://example.org/bob");
+
+ IRI graph1 = vf.createIRI("http://example.org/graph1");
+ IRI graph2 = vf.createIRI("http://example.org/graph2");
+ IRI graph3 = vf.createIRI("http://example.org/graph3");
+
+
+ testCon.add(man, RDFS.SUBCLASSOF, human, graph1);
+ testCon.add(human, RDFS.SUBCLASSOF, animal, graph2);
+ testCon.add(bob, RDF.TYPE, man, graph3);
+
+
+ /*
+ The SchemaCachingRDFSInferencer correctly adds inferred A-box statements to the correct graph,
+ but does not add inferred T-box statements to the correct graph.
+ */
+
+
+ System.out.println("-----------");
+ try (Stream<Statement> stream = Iterations.stream(testCon.getStatements(man, RDFS.SUBCLASSOF, null, true))) {
+ stream.forEach(System.out::println);
+ }
+ System.out.println("-----------");
+ try (Stream<Statement> stream = Iterations.stream(testCon.getStatements(bob, RDF.TYPE, null, true))) {
+ stream
+ .peek(statement -> assertEquals(statement.getContext(), graph3))
+ .forEach(System.out::println);
+ }
+
+ System.out.println("-----------");
+
+
+ }
+
+
}
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/nativerdf/SchemaCachingRDFSInferencerNativeRepositoryConnectionTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/nativerdf/SchemaCachingRDFSInferencerNativeRepositoryConnectionTest.java
index 6332cb060..ed3ed0f19 100644
--- a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/nativerdf/SchemaCachingRDFSInferencerNativeRepositoryConnectionTest.java
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/nativerdf/SchemaCachingRDFSInferencerNativeRepositoryConnectionTest.java
@@ -9,14 +9,29 @@
import java.io.File;
import java.io.IOException;
+import java.util.stream.Stream;
import org.eclipse.rdf4j.IsolationLevel;
import org.eclipse.rdf4j.common.io.FileUtil;
+import org.eclipse.rdf4j.common.iteration.Iterations;
+import org.eclipse.rdf4j.model.IRI;
+import org.eclipse.rdf4j.model.Resource;
+import org.eclipse.rdf4j.model.Statement;
+import org.eclipse.rdf4j.model.vocabulary.FOAF;
+import org.eclipse.rdf4j.model.vocabulary.RDF;
+import org.eclipse.rdf4j.model.vocabulary.RDFS;
import org.eclipse.rdf4j.repository.RDFSchemaRepositoryConnectionTest;
import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.RepositoryResult;
import org.eclipse.rdf4j.repository.sail.SailRepository;
import org.eclipse.rdf4j.sail.inferencer.fc.SchemaCachingRDFSInferencer;
import org.eclipse.rdf4j.sail.nativerdf.NativeStore;
+import org.junit.Ignore;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
public class SchemaCachingRDFSInferencerNativeRepositoryConnectionTest
extends RDFSchemaRepositoryConnectionTest
@@ -48,4 +63,118 @@ public void tearDown()
FileUtil.deleteDir(dataDir);
}
}
+
+
+
+ @Ignore
+ @Test
+ @Override
+ public void testDefaultContext()
+ throws Exception
+ {
+ // ignore
+ }
+
+ @Ignore
+ @Test
+ @Override
+ public void testDefaultInsertContext()
+ throws Exception
+ {
+ // ignore
+ }
+
+ @Ignore
+ @Test
+ @Override
+ public void testExclusiveNullContext()
+ throws Exception
+ {
+ // ignore
+ }
+
+
+ @Override
+ @Test
+ @Ignore
+ public void testQueryDefaultGraph()
+ throws Exception
+ {
+ // ignore
+ }
+
+
+ @Override
+ @Test
+ @Ignore
+ public void testDeleteDefaultGraph()
+ throws Exception
+ {
+ // ignore
+ }
+
+ @Override
+ @Test
+ @Ignore
+ public void testContextStatementsNotDuplicated()
+ throws Exception
+ {
+ // ignore
+ }
+
+ @Override
+ @Test
+ @Ignore
+ public void testContextStatementsNotDuplicated2()
+ throws Exception
+ {
+ // ignore
+ }
+
+
+ @Test
+ public void testContextTbox()
+ {
+
+// Man subClassOf Human g1
+// Human subClassOf Animal g2
+// -> Man subClassOf Animal ??
+
+ IRI man = vf.createIRI("http://example.org/Man");
+ IRI human = vf.createIRI("http://example.org/Human");
+ IRI animal = vf.createIRI("http://example.org/Animal");
+ IRI bob = vf.createIRI("http://example.org/bob");
+
+ IRI graph1 = vf.createIRI("http://example.org/graph1");
+ IRI graph2 = vf.createIRI("http://example.org/graph2");
+ IRI graph3 = vf.createIRI("http://example.org/graph3");
+
+
+ testCon.add(man, RDFS.SUBCLASSOF, human, graph1);
+ testCon.add(human, RDFS.SUBCLASSOF, animal, graph2);
+ testCon.add(bob, RDF.TYPE, man, graph3);
+
+
+ /*
+ The SchemaCachingRDFSInferencer correctly adds inferred A-box statements to the correct graph,
+ but does not add inferred T-box statements to the correct graph.
+ */
+
+
+ System.out.println("-----------");
+ try (Stream<Statement> stream = Iterations.stream(testCon.getStatements(man, RDFS.SUBCLASSOF, null, true))) {
+ stream.forEach(System.out::println);
+ }
+ System.out.println("-----------");
+ try (Stream<Statement> stream = Iterations.stream(testCon.getStatements(bob, RDF.TYPE, null, true))) {
+ stream
+ .peek(statement -> assertEquals(statement.getContext(), graph3))
+ .forEach(System.out::println);
+ }
+
+ System.out.println("-----------");
+
+
+ }
+
}
From 06a0721bd87a20b593f832af0baafcf94fff5261 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ha=CC=8Avard=20Ottestad?=
Date: Sun, 16 Dec 2018 13:00:48 +0100
Subject: [PATCH 11/54] eclipse/rdf4j#1197 make inferred context behaviour
configurable
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Signed-off-by: Håvard Ottestad
---
...FSchemaMemoryRepositoryConnectionTest.java | 4 +-
...erencerNativeRepositoryConnectionTest.java | 5 +-
.../rdf4j/sail/fc/InferredContextTest.java | 83 +++++++++++++++++++
...chingRDFSInferencerMemInferencingTest.java | 3 +-
.../fc/SchemaCachingRDFSInferencer.java | 14 ++++
...SchemaCachingRDFSInferencerConnection.java | 43 ++++++----
6 files changed, 131 insertions(+), 21 deletions(-)
create mode 100644 compliance/store/src/test/java/org/eclipse/rdf4j/sail/fc/InferredContextTest.java
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SchemaCachingRDFSInferencerRDFSchemaMemoryRepositoryConnectionTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SchemaCachingRDFSInferencerRDFSchemaMemoryRepositoryConnectionTest.java
index 947ca4619..89eb71df5 100644
--- a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SchemaCachingRDFSInferencerRDFSchemaMemoryRepositoryConnectionTest.java
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SchemaCachingRDFSInferencerRDFSchemaMemoryRepositoryConnectionTest.java
@@ -37,7 +37,9 @@ public SchemaCachingRDFSInferencerRDFSchemaMemoryRepositoryConnectionTest(
@Override
protected Repository createRepository() {
- return new SailRepository(new SchemaCachingRDFSInferencer(new MemoryStore(), true));
+ SchemaCachingRDFSInferencer sail = new SchemaCachingRDFSInferencer(new MemoryStore(), true);
+ sail.setAddInferredStatementsToDefaultContext(false);
+ return new SailRepository(sail);
}
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/nativerdf/SchemaCachingRDFSInferencerNativeRepositoryConnectionTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/nativerdf/SchemaCachingRDFSInferencerNativeRepositoryConnectionTest.java
index ed3ed0f19..4077750b4 100644
--- a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/nativerdf/SchemaCachingRDFSInferencerNativeRepositoryConnectionTest.java
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/nativerdf/SchemaCachingRDFSInferencerNativeRepositoryConnectionTest.java
@@ -48,8 +48,9 @@ protected Repository createRepository()
throws IOException
{
dataDir = FileUtil.createTempDir("nativestore");
- return new SailRepository(
- new SchemaCachingRDFSInferencer(new NativeStore(dataDir, "spoc"), true));
+ SchemaCachingRDFSInferencer sail = new SchemaCachingRDFSInferencer(new NativeStore(dataDir, "spoc"), true);
+ sail.setAddInferredStatementsToDefaultContext(false);
+ return new SailRepository(sail);
}
@Override
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/sail/fc/InferredContextTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/sail/fc/InferredContextTest.java
new file mode 100644
index 000000000..2c0ca5fc0
--- /dev/null
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/sail/fc/InferredContextTest.java
@@ -0,0 +1,83 @@
+package org.eclipse.rdf4j.sail.fc;
+
+import org.eclipse.rdf4j.common.iteration.CloseableIteration;
+import org.eclipse.rdf4j.model.BNode;
+import org.eclipse.rdf4j.model.Statement;
+import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
+import org.eclipse.rdf4j.model.vocabulary.RDF;
+import org.eclipse.rdf4j.model.vocabulary.RDFS;
+import org.eclipse.rdf4j.sail.SailException;
+import org.eclipse.rdf4j.sail.inferencer.fc.SchemaCachingRDFSInferencer;
+import org.eclipse.rdf4j.sail.inferencer.fc.SchemaCachingRDFSInferencerConnection;
+import org.eclipse.rdf4j.sail.memory.MemoryStore;
+import org.junit.Test;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+
+public class InferredContextTest {
+
+ SimpleValueFactory vf = SimpleValueFactory.getInstance();
+
+ BNode bNode = vf.createBNode();
+ BNode type = vf.createBNode();
+ BNode context = vf.createBNode();
+
+
+ @Test
+ public void testInferredContextNull() {
+ SchemaCachingRDFSInferencer sail = new SchemaCachingRDFSInferencer(new MemoryStore());
+
+ sail.initialize();
+ sail.setAddInferredStatementsToDefaultContext(true);
+
+
+ try (SchemaCachingRDFSInferencerConnection connection = sail.getConnection()) {
+ connection.begin();
+ connection.addStatement(bNode, RDF.TYPE, type, context);
+ connection.commit();
+
+ assertFalse(connection.hasStatement(bNode, RDF.TYPE, RDFS.RESOURCE, true, context));
+ assertTrue(connection.hasStatement(bNode, RDF.TYPE, RDFS.RESOURCE, true));
+
+ }
+
+ }
+
+ @Test
+ public void testInferredContextNoNull() {
+ SchemaCachingRDFSInferencer sail = new SchemaCachingRDFSInferencer(new MemoryStore());
+
+ sail.initialize();
+ sail.setAddInferredStatementsToDefaultContext(false);
+
+ try (SchemaCachingRDFSInferencerConnection connection = sail.getConnection()) {
+ connection.begin();
+ connection.addStatement(bNode, RDF.TYPE, type, context);
+ connection.commit();
+
+ assertTrue(connection.hasStatement(bNode, RDF.TYPE, RDFS.RESOURCE, true, context));
+
+ try (CloseableIteration<? extends Statement, SailException> statements = connection.getStatements(bNode, RDF.TYPE, RDFS.RESOURCE, true)) {
+ while (statements.hasNext()) {
+ Statement next = statements.next();
+ assertEquals("Context should be equal", context, next.getContext());
+ }
+ }
+ }
+
+ }
+
+ @Test
+ public void testDefaultBehaviour() {
+ SchemaCachingRDFSInferencer sail = new SchemaCachingRDFSInferencer(new MemoryStore());
+
+ assertTrue(
+ "Current default behaviour should be to add all statements to default context",
+ sail.isAddInferredStatementsToDefaultContext()
+ );
+
+ }
+
+}
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/sail/memory/SchemaCachingRDFSInferencerMemInferencingTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/sail/memory/SchemaCachingRDFSInferencerMemInferencingTest.java
index d9908a543..1fd106dd7 100644
--- a/compliance/store/src/test/java/org/eclipse/rdf4j/sail/memory/SchemaCachingRDFSInferencerMemInferencingTest.java
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/sail/memory/SchemaCachingRDFSInferencerMemInferencingTest.java
@@ -29,7 +29,8 @@ public class SchemaCachingRDFSInferencerMemInferencingTest extends InferencingTe
@Override
protected Sail createSail() {
- Sail sailStack = new SchemaCachingRDFSInferencer(new MemoryStore(), true);
+ SchemaCachingRDFSInferencer sailStack = new SchemaCachingRDFSInferencer(new MemoryStore(), true);
+ sailStack.setAddInferredStatementsToDefaultContext(false);
return sailStack;
}
diff --git a/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/SchemaCachingRDFSInferencer.java b/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/SchemaCachingRDFSInferencer.java
index f448a5aec..c8cc65282 100644
--- a/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/SchemaCachingRDFSInferencer.java
+++ b/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/SchemaCachingRDFSInferencer.java
@@ -93,6 +93,13 @@ public class SchemaCachingRDFSInferencer extends NotifyingSailWrapper {
// The previous transaction rolled back
boolean rolledBackAfterModifyingSchemaCache;
+ // Inferred statements can either be added to the default context
+ // or to the same context as the originally inserted statement.
+ // For the time being, the default behaviour will be to add the
+ // statements to the default context.
+ // THIS BEHAVIOUR WILL BE SWITCHED ON THE NEXT MAJOR RELEASE
+ private boolean addInferredStatementsToDefaultContext = true;
+
/**
* Instantiate a SchemaCachingRDFSInferencer.
*
@@ -615,4 +622,11 @@ public List getSupportedIsolationLevels() {
return levels;
}
+ public boolean isAddInferredStatementsToDefaultContext() {
+ return addInferredStatementsToDefaultContext;
+ }
+
+ public void setAddInferredStatementsToDefaultContext(boolean addInferredStatementsToDefaultContext) {
+ this.addInferredStatementsToDefaultContext = addInferredStatementsToDefaultContext;
+ }
}
diff --git a/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/SchemaCachingRDFSInferencerConnection.java b/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/SchemaCachingRDFSInferencerConnection.java
index 52b8a51a1..2b0562740 100644
--- a/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/SchemaCachingRDFSInferencerConnection.java
+++ b/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/SchemaCachingRDFSInferencerConnection.java
@@ -187,19 +187,28 @@ public void addStatement(Resource subject, IRI predicate, Value object, Resource
// actuallyAdd
private void addStatement(boolean actuallyAdd, Resource subject, IRI predicate, Value object,
- Resource... resources)
+ Resource... context)
throws SailException
{
+
+ Resource[] inferredContext;
+ if(sail.isAddInferredStatementsToDefaultContext()){
+ inferredContext = new Resource[0];
+ } else {
+ inferredContext = context;
+ }
+
+
sail.acquireExclusiveWriteLock();
if (sail.schema == null) {
processForSchemaCache(sail.getValueFactory().createStatement(subject, predicate, object));
}
if (sail.useAllRdfsRules) {
- addInferredStatement(subject, RDF.TYPE, RDFS.RESOURCE, resources);
+ addInferredStatement(subject, RDF.TYPE, RDFS.RESOURCE, inferredContext);
if (object instanceof Resource) {
- addInferredStatement((Resource)object, RDF.TYPE, RDFS.RESOURCE, resources);
+ addInferredStatement((Resource)object, RDF.TYPE, RDFS.RESOURCE, inferredContext);
}
}
@@ -208,13 +217,13 @@ private void addStatement(boolean actuallyAdd, Resource subject, IRI predicate,
try {
int i = Integer.parseInt(predicate.getLocalName().substring(1));
if (i >= 1) {
- addInferredStatement(subject, RDFS.MEMBER, object, resources);
+ addInferredStatement(subject, RDFS.MEMBER, object, inferredContext);
- addInferredStatement(predicate, RDF.TYPE, RDFS.RESOURCE, resources);
- addInferredStatement(predicate, RDF.TYPE, RDFS.CONTAINERMEMBERSHIPPROPERTY, resources);
- addInferredStatement(predicate, RDF.TYPE, RDF.PROPERTY, resources);
- addInferredStatement(predicate, RDFS.SUBPROPERTYOF, predicate, resources);
- addInferredStatement(predicate, RDFS.SUBPROPERTYOF, RDFS.MEMBER, resources);
+ addInferredStatement(predicate, RDF.TYPE, RDFS.RESOURCE, inferredContext);
+ addInferredStatement(predicate, RDF.TYPE, RDFS.CONTAINERMEMBERSHIPPROPERTY, inferredContext);
+ addInferredStatement(predicate, RDF.TYPE, RDF.PROPERTY, inferredContext);
+ addInferredStatement(predicate, RDFS.SUBPROPERTYOF, predicate, inferredContext);
+ addInferredStatement(predicate, RDFS.SUBPROPERTYOF, RDFS.MEMBER, inferredContext);
}
}
@@ -230,7 +239,7 @@ private void addStatement(boolean actuallyAdd, Resource subject, IRI predicate,
}
if (actuallyAdd) {
- connection.addStatement(subject, predicate, object, resources);
+ connection.addStatement(subject, predicate, object, context);
}
@@ -241,10 +250,10 @@ private void addStatement(boolean actuallyAdd, Resource subject, IRI predicate,
sail.resolveTypes((Resource)object).stream().peek(inferredType -> {
if (sail.useAllRdfsRules && inferredType.equals(RDFS.CLASS)) {
- addInferredStatement(subject, RDFS.SUBCLASSOF, RDFS.RESOURCE, resources);
+ addInferredStatement(subject, RDFS.SUBCLASSOF, RDFS.RESOURCE, inferredContext);
}
}).filter(inferredType -> !inferredType.equals(object)).forEach(
- inferredType -> addInferredStatement(subject, RDF.TYPE, inferredType, resources));
+ inferredType -> addInferredStatement(subject, RDF.TYPE, inferredType, inferredContext));
}
sail.resolveProperties(predicate)
@@ -252,27 +261,27 @@ private void addStatement(boolean actuallyAdd, Resource subject, IRI predicate,
.filter(inferredProperty -> !inferredProperty.equals(predicate))
.filter(inferredPropery -> inferredPropery instanceof IRI)
.map(inferredPropery -> ((IRI) inferredPropery))
- .forEach(inferredProperty -> addInferredStatement(subject, inferredProperty, object, resources));
+ .forEach(inferredProperty -> addInferredStatement(subject, inferredProperty, object, inferredContext));
if (object instanceof Resource) {
sail.resolveRangeTypes(predicate)
.stream()
.peek(inferredType -> {
if (sail.useAllRdfsRules && inferredType.equals(RDFS.CLASS)) {
- addInferredStatement(((Resource) object), RDFS.SUBCLASSOF, RDFS.RESOURCE, resources);
+ addInferredStatement(((Resource) object), RDFS.SUBCLASSOF, RDFS.RESOURCE, inferredContext);
}
})
- .forEach(inferredType -> addInferredStatement(((Resource) object), RDF.TYPE, inferredType, resources));
+ .forEach(inferredType -> addInferredStatement(((Resource) object), RDF.TYPE, inferredType, inferredContext));
}
sail.resolveDomainTypes(predicate)
.stream()
.peek(inferredType -> {
if (sail.useAllRdfsRules && inferredType.equals(RDFS.CLASS)) {
- addInferredStatement(subject, RDFS.SUBCLASSOF, RDFS.RESOURCE, resources);
+ addInferredStatement(subject, RDFS.SUBCLASSOF, RDFS.RESOURCE, inferredContext);
}
})
- .forEach(inferredType -> addInferredStatement((subject), RDF.TYPE, inferredType, resources));
+ .forEach(inferredType -> addInferredStatement((subject), RDF.TYPE, inferredType, inferredContext));
}
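For illustration only (not part of the patch): a minimal sketch of the new switch in use, relying on the setAddInferredStatementsToDefaultContext method introduced above and a MemoryStore; class and example IRIs are made up.

import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.model.vocabulary.RDF;
import org.eclipse.rdf4j.model.vocabulary.RDFS;
import org.eclipse.rdf4j.sail.inferencer.fc.SchemaCachingRDFSInferencer;
import org.eclipse.rdf4j.sail.inferencer.fc.SchemaCachingRDFSInferencerConnection;
import org.eclipse.rdf4j.sail.memory.MemoryStore;

public class InferredContextSketch {

	public static void main(String[] args) {
		ValueFactory vf = SimpleValueFactory.getInstance();
		IRI graph = vf.createIRI("http://example.org/graph1");
		IRI bob = vf.createIRI("http://example.org/bob");
		IRI man = vf.createIRI("http://example.org/Man");

		SchemaCachingRDFSInferencer sail = new SchemaCachingRDFSInferencer(new MemoryStore());
		sail.initialize();
		// false: inferred statements follow the context of the statement that
		// triggered them; true (the current default): they go to the default context
		sail.setAddInferredStatementsToDefaultContext(false);

		try (SchemaCachingRDFSInferencerConnection con = sail.getConnection()) {
			con.begin();
			con.addStatement(bob, RDF.TYPE, man, graph);
			con.commit();
			// the inferred (bob rdf:type rdfs:Resource) statement now lives in graph1
			System.out.println(con.hasStatement(bob, RDF.TYPE, RDFS.RESOURCE, true, graph));
		}
		sail.shutDown();
	}
}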
From 01e0651f19979fc607c7b30c15fae5c524c15634 Mon Sep 17 00:00:00 2001
From: Jeen Broekstra
Date: Thu, 27 Dec 2018 11:34:03 +1100
Subject: [PATCH 12/54] GH-1230 migrate compliance tests to rdf4j-storage
Signed-off-by: Jeen Broekstra
---
compliance/pom.xml | 3 +-
compliance/sparql/pom.xml | 242 +++++
.../sparql/ArbitraryLengthPathTest.java | 137 +++
.../sparql/MemoryComplexSPARQLQueryTest.java | 27 +
.../parser/sparql/MemorySPARQLUpdateTest.java | 29 +
.../parser/sparql/SPARQLEmbeddedServer.java | 129 +++
.../sparql/SPARQLServiceEvaluationTest.java | 827 ++++++++++++++++++
.../parser/sparql/manifest/EarlReport.java | 186 ++++
.../W3CApprovedSPARQL10QueryTest.java | 74 ++
.../W3CApprovedSPARQL10SyntaxTest.java | 50 ++
.../W3CApprovedSPARQL11QueryTest.java | 70 ++
.../W3CApprovedSPARQL11SyntaxTest.java | 58 ++
.../W3CApprovedSPARQL11UpdateTest.java | 61 ++
.../FederationSPARQL10QueryTest.java | 73 ++
.../FederationSPARQL11QueryTest.java | 75 ++
.../sail/federation/FederationSparqlTest.java | 52 ++
.../src/test/resources/logback-test.xml | 15 +
17 files changed, 2107 insertions(+), 1 deletion(-)
create mode 100644 compliance/sparql/pom.xml
create mode 100644 compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/ArbitraryLengthPathTest.java
create mode 100644 compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/MemoryComplexSPARQLQueryTest.java
create mode 100644 compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/MemorySPARQLUpdateTest.java
create mode 100644 compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/SPARQLEmbeddedServer.java
create mode 100644 compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/SPARQLServiceEvaluationTest.java
create mode 100644 compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/EarlReport.java
create mode 100644 compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/W3CApprovedSPARQL10QueryTest.java
create mode 100644 compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/W3CApprovedSPARQL10SyntaxTest.java
create mode 100644 compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/W3CApprovedSPARQL11QueryTest.java
create mode 100644 compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/W3CApprovedSPARQL11SyntaxTest.java
create mode 100644 compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/W3CApprovedSPARQL11UpdateTest.java
create mode 100644 compliance/sparql/src/test/java/org/eclipse/rdf4j/sail/federation/FederationSPARQL10QueryTest.java
create mode 100644 compliance/sparql/src/test/java/org/eclipse/rdf4j/sail/federation/FederationSPARQL11QueryTest.java
create mode 100644 compliance/sparql/src/test/java/org/eclipse/rdf4j/sail/federation/FederationSparqlTest.java
create mode 100644 compliance/sparql/src/test/resources/logback-test.xml
diff --git a/compliance/pom.xml b/compliance/pom.xml
index 58cfabd86..f28046b0b 100644
--- a/compliance/pom.xml
+++ b/compliance/pom.xml
@@ -13,7 +13,8 @@
 		<module>model</module>
-		<module>lucene</module>
+		<module>sparql</module>
+		<module>lucene</module>
 		<module>elasticsearch</module>
 		<module>solr</module>
 		<module>store</module>
diff --git a/compliance/sparql/pom.xml b/compliance/sparql/pom.xml
new file mode 100644
index 000000000..fb4368650
--- /dev/null
+++ b/compliance/sparql/pom.xml
@@ -0,0 +1,242 @@
+
+
+ 4.0.0
+
+
+ org.eclipse.rdf4j
+ rdf4j-storage-compliance
+ 2.5-SNAPSHOT
+
+
+ rdf4j-sparql-compliance
+ war
+
+ RDF4J SPARQL query parser compliance tests
+ Tests for the SPARQL query language implementation
+
+
+
+ 2.4.2
+ 7.0.2.v20100331
+
+
+
+ ${project.groupId}
+ rdf4j-model
+ ${project.version}
+
+
+
+ ${project.groupId}
+ rdf4j-sparql-testsuite
+ ${project.version}
+
+
+
+ ${project.groupId}
+ rdf4j-rio-api
+ ${project.version}
+
+
+
+ ${project.groupId}
+ rdf4j-rio-turtle
+ ${project.version}
+ runtime
+
+
+
+ ${project.groupId}
+ rdf4j-rio-rdfxml
+ ${project.version}
+ runtime
+
+
+
+ ${project.groupId}
+ rdf4j-query
+ ${project.version}
+
+
+
+ ${project.groupId}
+ rdf4j-queryresultio-api
+ ${project.version}
+
+
+
+ ${project.groupId}
+ rdf4j-queryresultio-sparqlxml
+ ${project.version}
+ runtime
+
+
+
+ ${project.groupId}
+ rdf4j-queryparser-api
+ ${project.version}
+
+
+
+ ${project.groupId}
+ rdf4j-queryparser-sparql
+ ${project.version}
+ runtime
+
+
+
+ ${project.groupId}
+ rdf4j-repository-api
+ ${project.version}
+
+
+
+ ${project.groupId}
+ rdf4j-repository-dataset
+ ${project.version}
+
+
+
+ ${project.groupId}
+ rdf4j-repository-contextaware
+ ${project.version}
+
+
+
+ ${project.groupId}
+ rdf4j-repository-sail
+ ${project.version}
+
+
+
+ ${project.groupId}
+ rdf4j-sail-api
+ ${project.version}
+
+
+
+ ${project.groupId}
+ rdf4j-http-server
+ ${rdf4j.server.version}
+ war
+
+
+
+ ${project.groupId}
+ rdf4j-sail-memory
+ ${project.version}
+
+
+
+ ${project.groupId}
+ rdf4j-util
+ ${project.version}
+
+
+
+ ${project.groupId}
+ rdf4j-sail-federation
+ ${project.version}
+
+
+
+ org.slf4j
+ slf4j-api
+
+
+
+ ch.qos.logback
+ logback-classic
+ test
+
+
+
+ junit
+ junit
+ compile
+
+
+
+ ${project.groupId}
+ rdf4j-http-protocol
+ ${project.version}
+
+
+
+ ${project.groupId}
+ rdf4j-repository-manager
+ ${project.version}
+
+
+
+
+ org.eclipse.jetty
+ jetty-server
+ ${jetty.version}
+
+
+
+ org.eclipse.jetty
+ jetty-webapp
+ ${jetty.version}
+
+
+
+ org.mortbay.jetty
+ jetty-jsp-2.1
+ ${jetty.version}
+ runtime
+
+
+
+ org.slf4j
+ jcl-over-slf4j
+ test
+
+
+ org.slf4j
+ log4j-over-slf4j
+ test
+
+
+
+
+
+
+ org.apache.maven.plugins
+ maven-war-plugin
+
+ ${project.build.directory}/rdf4j-server
+
+
+
+ org.apache.maven.plugins
+ maven-surefire-plugin
+
+ true
+
+
+
+ org.apache.maven.plugins
+ maven-failsafe-plugin
+
+
+ integration-tests
+ integration-test
+
+ integration-test
+
+
+
+ verify
+ verify
+
+ verify
+
+
+
+
+
+
+
diff --git a/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/ArbitraryLengthPathTest.java b/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/ArbitraryLengthPathTest.java
new file mode 100644
index 000000000..2fc7736f3
--- /dev/null
+++ b/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/ArbitraryLengthPathTest.java
@@ -0,0 +1,137 @@
+/*******************************************************************************
+ * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Distribution License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *******************************************************************************/
+package org.eclipse.rdf4j.query.parser.sparql;
+
+import org.eclipse.rdf4j.model.ValueFactory;
+import org.eclipse.rdf4j.model.vocabulary.RDF;
+import org.eclipse.rdf4j.model.vocabulary.RDFS;
+import org.eclipse.rdf4j.query.QueryLanguage;
+import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.RepositoryConnection;
+import org.eclipse.rdf4j.repository.RepositoryException;
+import org.eclipse.rdf4j.repository.sail.SailRepository;
+import org.eclipse.rdf4j.sail.memory.MemoryStore;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Test;
+
+import junit.framework.TestCase;
+
+/**
+ * @author james
+ */
+public class ArbitraryLengthPathTest extends TestCase {
+
+ private Repository repo;
+
+ private RepositoryConnection con;
+
+ @Before
+ public void setUp()
+ throws Exception
+ {
+ repo = new SailRepository(new MemoryStore());
+ repo.initialize();
+ con = repo.getConnection();
+ }
+
+ @After
+ public void tearDown()
+ throws Exception
+ {
+ con.close();
+ repo.shutDown();
+ }
+
+ @Test
+ public void test10()
+ throws Exception
+ {
+ populate(10);
+ String sparql = "ASK { <urn:test:root> <urn:test:hasChild>* <urn:test:node-end> }";
+ assertTrue(con.prepareBooleanQuery(QueryLanguage.SPARQL, sparql).evaluate());
+ }
+
+ @Test
+ public void test100()
+ throws Exception
+ {
+ populate(100);
+ String sparql = "ASK { <urn:test:root> <urn:test:hasChild>* <urn:test:node-end> }";
+ assertTrue(con.prepareBooleanQuery(QueryLanguage.SPARQL, sparql).evaluate());
+ }
+
+ @Test
+ public void test1000()
+ throws Exception
+ {
+ populate(1000);
+ String sparql = "ASK { <urn:test:root> <urn:test:hasChild>* <urn:test:node-end> }";
+ assertTrue(con.prepareBooleanQuery(QueryLanguage.SPARQL, sparql).evaluate());
+ }
+
+ @Test
+ public void test10000()
+ throws Exception
+ {
+ populate(10000);
+ String sparql = "ASK { <urn:test:root> <urn:test:hasChild>* <urn:test:node-end> }";
+ assertTrue(con.prepareBooleanQuery(QueryLanguage.SPARQL, sparql).evaluate());
+ }
+
+ @Test
+ public void test100000()
+ throws Exception
+ {
+ populate(100000);
+ String sparql = "ASK { <urn:test:root> <urn:test:hasChild>* <urn:test:node-end> }";
+ assertTrue(con.prepareBooleanQuery(QueryLanguage.SPARQL, sparql).evaluate());
+ }
+
+ @Test
+ public void testDirection()
+ throws Exception
+ {
+ ValueFactory vf = con.getValueFactory();
+ con.add(vf.createIRI("urn:test:a"), vf.createIRI("urn:test:rel"), vf.createIRI("urn:test:b"));
+ con.add(vf.createIRI("urn:test:b"), vf.createIRI("urn:test:rel"), vf.createIRI("urn:test:a"));
+ String sparql = "ASK { <urn:test:a> <urn:test:rel>* <urn:test:b> . <urn:test:b> <urn:test:rel>* <urn:test:a> }";
+ assertTrue(con.prepareBooleanQuery(QueryLanguage.SPARQL, sparql).evaluate());
+ }
+
+ @Test
+ public void testSimilarPatterns()
+ throws Exception
+ {
+ ValueFactory vf = con.getValueFactory();
+ con.add(vf.createIRI("urn:test:a"), RDF.TYPE, vf.createIRI("urn:test:c"));
+ con.add(vf.createIRI("urn:test:b"), RDF.TYPE, vf.createIRI("urn:test:d"));
+ con.add(vf.createIRI("urn:test:c"), RDFS.SUBCLASSOF, vf.createIRI("urn:test:e"));
+ con.add(vf.createIRI("urn:test:d"), RDFS.SUBCLASSOF, vf.createIRI("urn:test:f"));
+ String sparql = "ASK { \n"
+ + " values (?expectedTargetClass55555 ?expectedTargetClass5544T) {()}.\n"
+ + " a ?linkTargetClass55555 .\n"
+ + " ?linkTargetClass55555 rdfs:subClassOf* ?expectedTargetClass55555 .\n"
+ + " a ?linkTargetClass55556 .\n"
+ + " ?linkTargetClass55556 rdfs:subClassOf* ?expectedTargetClass5544T . }";
+ assertTrue(con.prepareBooleanQuery(QueryLanguage.SPARQL, sparql).evaluate());
+ }
+
+ private void populate(int n)
+ throws RepositoryException
+ {
+ ValueFactory vf = con.getValueFactory();
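+ // attach n numbered children to urn:test:root, plus a final urn:test:node-end child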
+ for (int i = 0; i < n; i++) {
+ con.add(vf.createIRI("urn:test:root"), vf.createIRI("urn:test:hasChild"),
+ vf.createIRI("urn:test:node" + i));
+ }
+ con.add(vf.createIRI("urn:test:root"), vf.createIRI("urn:test:hasChild"),
+ vf.createIRI("urn:test:node-end"));
+ }
+
+}
diff --git a/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/MemoryComplexSPARQLQueryTest.java b/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/MemoryComplexSPARQLQueryTest.java
new file mode 100644
index 000000000..1c08edf45
--- /dev/null
+++ b/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/MemoryComplexSPARQLQueryTest.java
@@ -0,0 +1,27 @@
+/*******************************************************************************
+ * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Distribution License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *******************************************************************************/
+package org.eclipse.rdf4j.query.parser.sparql;
+
+import org.eclipse.rdf4j.query.parser.sparql.ComplexSPARQLQueryTest;
+import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.sail.SailRepository;
+import org.eclipse.rdf4j.sail.memory.MemoryStore;
+
+/**
+ * @author jeen
+ */
+public class MemoryComplexSPARQLQueryTest extends ComplexSPARQLQueryTest {
+
+ @Override
+ protected Repository newRepository()
+ throws Exception
+ {
+ return new SailRepository(new MemoryStore());
+ }
+
+}
diff --git a/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/MemorySPARQLUpdateTest.java b/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/MemorySPARQLUpdateTest.java
new file mode 100644
index 000000000..272b878fa
--- /dev/null
+++ b/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/MemorySPARQLUpdateTest.java
@@ -0,0 +1,29 @@
+/*******************************************************************************
+ * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Distribution License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *******************************************************************************/
+package org.eclipse.rdf4j.query.parser.sparql;
+
+import org.eclipse.rdf4j.query.parser.sparql.SPARQLUpdateTest;
+import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.sail.SailRepository;
+import org.eclipse.rdf4j.sail.memory.MemoryStore;
+
+/**
+ * Test SPARQL 1.1 Update functionality on an in-memory store.
+ *
+ * @author Jeen Broekstra
+ */
+public class MemorySPARQLUpdateTest extends SPARQLUpdateTest {
+
+ @Override
+ protected Repository newRepository()
+ throws Exception
+ {
+ return new SailRepository(new MemoryStore());
+ }
+
+}
diff --git a/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/SPARQLEmbeddedServer.java b/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/SPARQLEmbeddedServer.java
new file mode 100644
index 000000000..bbfc2fdf3
--- /dev/null
+++ b/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/SPARQLEmbeddedServer.java
@@ -0,0 +1,129 @@
+/*******************************************************************************
+ * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Distribution License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *******************************************************************************/
+package org.eclipse.rdf4j.query.parser.sparql;
+
+import java.io.File;
+import java.util.List;
+
+import org.eclipse.jetty.server.Connector;
+import org.eclipse.jetty.server.Server;
+import org.eclipse.jetty.server.nio.BlockingChannelConnector;
+import org.eclipse.jetty.webapp.WebAppContext;
+import org.eclipse.rdf4j.http.protocol.Protocol;
+import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.RepositoryConnection;
+import org.eclipse.rdf4j.repository.RepositoryException;
+import org.eclipse.rdf4j.repository.config.RepositoryConfig;
+import org.eclipse.rdf4j.repository.config.RepositoryConfigException;
+import org.eclipse.rdf4j.repository.config.RepositoryConfigUtil;
+import org.eclipse.rdf4j.repository.http.HTTPRepository;
+import org.eclipse.rdf4j.repository.manager.SystemRepository;
+import org.eclipse.rdf4j.repository.sail.config.SailRepositoryConfig;
+import org.eclipse.rdf4j.sail.memory.config.MemoryStoreConfig;
+
+/**
+ * An embedded http server for SPARQL query testing. Initializes a memory store repository for each specified
+ * repositoryId.
+ *
+ * @author Andreas Schwarte
+ */
+public class SPARQLEmbeddedServer {
+
+ private static final String HOST = "localhost";
+
+ private static final int PORT = 18080;
+
+ private static final String SERVER_CONTEXT = "/rdf4j-server";
+
+ private final List<String> repositoryIds;
+
+ private final Server jetty;
+
+ /**
+ * @param repositoryIds
+ */
+ public SPARQLEmbeddedServer(List<String> repositoryIds) {
+ this.repositoryIds = repositoryIds;
+ System.clearProperty("DEBUG");
+
+ jetty = new Server();
+
+ Connector conn = new BlockingChannelConnector();
+ conn.setHost(HOST);
+ conn.setPort(PORT);
+ jetty.addConnector(conn);
+
+ WebAppContext webapp = new WebAppContext();
+ webapp.setContextPath(SERVER_CONTEXT);
+ // warPath configured in pom.xml maven-war-plugin configuration
+ webapp.setWar("./target/rdf4j-server");
+ jetty.setHandler(webapp);
+ }
+
+ /**
+ * @return the url to the repository with given id
+ */
+ public String getRepositoryUrl(String repoId) {
+ return Protocol.getRepositoryLocation(getServerUrl(), repoId);
+ }
+
+ /**
+ * @return the server url
+ */
+ public String getServerUrl() {
+ return "http://" + HOST + ":" + PORT + SERVER_CONTEXT;
+ }
+
+ public void start()
+ throws Exception
+ {
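+ // point the embedded RDF4J server at a scratch data directory under ./target before starting Jetty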
+ File dataDir = new File(System.getProperty("user.dir") + "/target/datadir");
+ dataDir.mkdirs();
+ System.setProperty("org.eclipse.rdf4j.appdata.basedir", dataDir.getAbsolutePath());
+
+ jetty.start();
+
+ createTestRepositories();
+ }
+
+ public void stop()
+ throws Exception
+ {
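+ // clear the SYSTEM repository (dropping the test repository configurations) before shutting down Jetty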
+ Repository systemRepo = new HTTPRepository(
+ Protocol.getRepositoryLocation(getServerUrl(), SystemRepository.ID));
+ RepositoryConnection con = systemRepo.getConnection();
+ try {
+ con.clear();
+ }
+ finally {
+ con.close();
+ systemRepo.shutDown();
+ }
+
+ jetty.stop();
+ System.clearProperty("org.mortbay.log.class");
+ }
+
+ private void createTestRepositories()
+ throws RepositoryException, RepositoryConfigException
+ {
+ Repository systemRep = new HTTPRepository(
+ Protocol.getRepositoryLocation(getServerUrl(), SystemRepository.ID));
+
+ // create a memory store for each provided repository id
+ for (String repId : repositoryIds) {
+ MemoryStoreConfig memStoreConfig = new MemoryStoreConfig();
+ memStoreConfig.setPersist(false);
+ SailRepositoryConfig sailRepConfig = new SailRepositoryConfig(memStoreConfig);
+ RepositoryConfig repConfig = new RepositoryConfig(repId, sailRepConfig);
+
+ RepositoryConfigUtil.updateRepositoryConfigs(systemRep, repConfig);
+ }
+
+ }
+}
diff --git a/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/SPARQLServiceEvaluationTest.java b/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/SPARQLServiceEvaluationTest.java
new file mode 100644
index 000000000..4a522bf1c
--- /dev/null
+++ b/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/SPARQLServiceEvaluationTest.java
@@ -0,0 +1,827 @@
+/*******************************************************************************
+ * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Distribution License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *******************************************************************************/
+package org.eclipse.rdf4j.query.parser.sparql;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.InputStreamReader;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collections;
+import java.util.LinkedHashSet;
+import java.util.List;
+import java.util.Optional;
+import java.util.Set;
+
+import junit.framework.TestCase;
+
+import org.eclipse.rdf4j.common.io.IOUtil;
+import org.eclipse.rdf4j.common.iteration.Iterations;
+import org.eclipse.rdf4j.common.text.StringUtil;
+import org.eclipse.rdf4j.model.IRI;
+import org.eclipse.rdf4j.model.Literal;
+import org.eclipse.rdf4j.model.Statement;
+import org.eclipse.rdf4j.model.Value;
+import org.eclipse.rdf4j.model.ValueFactory;
+import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
+import org.eclipse.rdf4j.model.util.Literals;
+import org.eclipse.rdf4j.model.util.Models;
+import org.eclipse.rdf4j.model.vocabulary.FOAF;
+import org.eclipse.rdf4j.query.BindingSet;
+import org.eclipse.rdf4j.query.BooleanQuery;
+import org.eclipse.rdf4j.query.GraphQuery;
+import org.eclipse.rdf4j.query.GraphQueryResult;
+import org.eclipse.rdf4j.query.MalformedQueryException;
+import org.eclipse.rdf4j.query.Query;
+import org.eclipse.rdf4j.query.QueryEvaluationException;
+import org.eclipse.rdf4j.query.QueryLanguage;
+import org.eclipse.rdf4j.query.QueryResults;
+import org.eclipse.rdf4j.query.TupleQuery;
+import org.eclipse.rdf4j.query.TupleQueryResult;
+import org.eclipse.rdf4j.query.dawg.DAWGTestResultSetUtil;
+import org.eclipse.rdf4j.query.impl.MutableTupleQueryResult;
+import org.eclipse.rdf4j.query.impl.TupleQueryResultBuilder;
+import org.eclipse.rdf4j.query.resultio.QueryResultFormat;
+import org.eclipse.rdf4j.query.resultio.QueryResultIO;
+import org.eclipse.rdf4j.query.resultio.TupleQueryResultFormat;
+import org.eclipse.rdf4j.query.resultio.TupleQueryResultParser;
+import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.RepositoryConnection;
+import org.eclipse.rdf4j.repository.RepositoryException;
+import org.eclipse.rdf4j.repository.http.HTTPRepository;
+import org.eclipse.rdf4j.repository.sail.SailRepository;
+import org.eclipse.rdf4j.rio.RDFFormat;
+import org.eclipse.rdf4j.rio.RDFParseException;
+import org.eclipse.rdf4j.rio.RDFParser;
+import org.eclipse.rdf4j.rio.Rio;
+import org.eclipse.rdf4j.rio.RDFParser.DatatypeHandling;
+import org.eclipse.rdf4j.rio.helpers.StatementCollector;
+import org.eclipse.rdf4j.sail.memory.MemoryStore;
+import org.junit.After;
+import org.junit.Before;
+import org.junit.Ignore;
+import org.junit.Test;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Test suite for evaluation of SPARQL queries involving SERVICE clauses. The test suite starts up an embedded
+ * Jetty server running RDF4J Server, which functions as the SPARQL endpoint to test against. The test is configured
+ * to execute the W3C service tests located in rdf4j-sparql-testsuite/src/main/resources/testcases-service
+ *
+ * @author Jeen Broekstra
+ * @author Andreas Schwarte
+ */
+public class SPARQLServiceEvaluationTest extends TestCase {
+
+ static final Logger logger = LoggerFactory.getLogger(SPARQLServiceEvaluationTest.class);
+
+ /**
+ * The maximal number of endpoints occurring in a (single) test case
+ */
+ protected static final int MAX_ENDPOINTS = 3;
+
+ private SPARQLEmbeddedServer server;
+
+ private SailRepository localRepository;
+
+ private List<HTTPRepository> remoteRepositories;
+
+ public SPARQLServiceEvaluationTest() {
+
+ }
+
+ /**
+ * @throws java.lang.Exception
+ */
+ @Before
+ public void setUp()
+ throws Exception
+ {
+ // set up the server: the maximal number of endpoints must be known
+ List<String> repositoryIds = new ArrayList<String>(MAX_ENDPOINTS);
+ for (int i = 1; i <= MAX_ENDPOINTS; i++)
+ repositoryIds.add("endpoint" + i);
+ server = new SPARQLEmbeddedServer(repositoryIds);
+
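+ // if the server fails to start, make sure it is stopped again so the port is released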
+ try {
+ server.start();
+ }
+ catch (Exception e) {
+ server.stop();
+ throw e;
+ }
+
+ remoteRepositories = new ArrayList<HTTPRepository>(MAX_ENDPOINTS);
+ for (int i = 1; i <= MAX_ENDPOINTS; i++) {
+ HTTPRepository r = new HTTPRepository(getRepositoryUrl(i));
+ r.initialize();
+ remoteRepositories.add(r);
+ }
+
+ localRepository = new SailRepository(new MemoryStore());
+ localRepository.initialize();
+ }
+
+ /**
+ * Get the repository URL. Initialized repositories are named endpoint1, endpoint2, ..
+ * endpoint%MAX_ENDPOINTS%.
+ *
+ * @param i
+ * the index of the repository, starting with 1
+ * @return
+ */
+ protected String getRepositoryUrl(int i) {
+ return server.getRepositoryUrl("endpoint" + i);
+ }
+
+ /**
+ * Get the repository. Initialized repositories are named endpoint1, endpoint2, .. endpoint%MAX_ENDPOINTS%.
+ *
+ * @param i
+ * the index of the repository, starting with 1
+ * @return
+ */
+ public HTTPRepository getRepository(int i) {
+ return remoteRepositories.get(i - 1);
+ }
+
+ /**
+ * Prepare a particular test, and load the specified data. Note: the repositories are cleared before
+ * loading data
+ *
+ * @param localData
+ * a local data file that is added to local repository, use null if there is no local data
+ * @param endpointData
+ * a list of endpoint data files, dataFile at index is loaded to endpoint%i%, use empty list for no
+ * remote data
+ * @throws Exception
+ */
+ protected void prepareTest(String localData, List<String> endpointData)
+ throws Exception
+ {
+
+ if (endpointData.size() > MAX_ENDPOINTS)
+ throw new RuntimeException("MAX_ENDPOINTs to low, " + endpointData.size()
+ + " repositories needed. Adjust configuration");
+
+ if (localData != null) {
+ loadDataSet(localRepository, localData);
+ }
+
+ int i = 1; // endpoint id, start with 1
+ for (String s : endpointData) {
+ loadDataSet(getRepository(i++), s);
+ }
+
+ }
+
+ /**
+ * Load a dataset. Note: the repositories are cleared before loading data
+ *
+ * @param rep
+ * @param datasetFile
+ * @throws RDFParseException
+ * @throws RepositoryException
+ * @throws IOException
+ */
+ protected void loadDataSet(Repository rep, String datasetFile)
+ throws RDFParseException, RepositoryException, IOException
+ {
+ logger.debug("loading dataset...");
+ InputStream dataset = SPARQLServiceEvaluationTest.class.getResourceAsStream(datasetFile);
+
+ if (dataset == null)
+ throw new IllegalArgumentException("Datasetfile " + datasetFile + " not found.");
+
+ RepositoryConnection con = rep.getConnection();
+ try {
+ con.clear();
+ con.add(dataset, "", Rio.getParserFormatForFileName(datasetFile).orElseThrow(
+ Rio.unsupportedFormat(datasetFile)));
+ }
+ finally {
+ dataset.close();
+ con.close();
+ }
+ logger.debug("dataset loaded.");
+ }
+
+ /**
+ * @throws java.lang.Exception
+ */
+ @After
+ public void tearDown()
+ throws Exception
+ {
+ try {
+ localRepository.shutDown();
+ }
+ finally {
+ server.stop();
+ }
+ }
+
+ /**
+ * Verify that BIND clause alias from the SERVICE clause gets added to the result set.
+ * @see #646
+ */
+ @Test
+ public void testValuesBindClauseHandling()
+ throws Exception
+ {
+ String query = "select * { service <" + getRepositoryUrl(1)
+ + "> { Bind(1 as ?val) . VALUES ?x {1 2} . } }";
+
+ try (RepositoryConnection conn = localRepository.getConnection()) {
+ TupleQuery tq = conn.prepareTupleQuery(query);
+ TupleQueryResult tqr = tq.evaluate();
+
+ assertNotNull(tqr);
+ assertTrue(tqr.hasNext());
+
+ List<BindingSet> result = QueryResults.asList(tqr);
+ assertEquals(2, result.size());
+ for (BindingSet bs : result) {
+ assertTrue(bs.hasBinding("val"));
+ assertEquals(1, Literals.getIntValue(bs.getValue("val"), 0));
+ assertTrue(bs.hasBinding("x"));
+ int x = Literals.getIntValue(bs.getValue("x"), 0);
+ assertTrue(x == 1 || x == 2);
+ }
+ }
+ }
+
+ /**
+ * Verify that all relevant variable names from the SERVICE clause get added to the result set when
+ * a BIND clause is present.
+ * @see #703
+ */
+ @Test
+ public void testVariableNameHandling()
+ throws Exception
+ {
+ String query = "select * { service <" + getRepositoryUrl(1)
+ + "> { ?s ?p ?o . Bind(str(?o) as ?val) . } }";
+
+ // add some data to the remote endpoint (we don't care about the exact contents)
+ prepareTest(null, Arrays.asList("/testcases-service/data13.ttl"));
+ try (RepositoryConnection conn = localRepository.getConnection()) {
+ TupleQuery tq = conn.prepareTupleQuery(query);
+ TupleQueryResult tqr = tq.evaluate();
+
+ assertNotNull(tqr);
+ assertTrue(tqr.hasNext());
+
+ List<BindingSet> result = QueryResults.asList(tqr);
+ assertTrue(result.size() > 0);
+ for (BindingSet bs : result) {
+ assertTrue(bs.hasBinding("val"));
+ assertTrue(bs.hasBinding("s"));
+ assertTrue(bs.hasBinding("p"));
+ assertTrue(bs.hasBinding("o"));
+ }
+ }
+ }
+
+ @Test
+ public void testSimpleServiceQuery()
+ throws RepositoryException
+ {
+ // test setup
+ String EX_NS = "http://example.org/";
+ ValueFactory f = localRepository.getValueFactory();
+ IRI bob = f.createIRI(EX_NS, "bob");
+ IRI alice = f.createIRI(EX_NS, "alice");
+ IRI william = f.createIRI(EX_NS, "william");
+
+ // clears the repository and adds new data
+ try {
+ prepareTest("/testcases-service/simple-default-graph.ttl",
+ Arrays.asList("/testcases-service/simple.ttl"));
+ }
+ catch (Exception e1) {
+ fail(e1.getMessage());
+ }
+
+ StringBuilder qb = new StringBuilder();
+ qb.append(" SELECT * \n");
+ qb.append(" WHERE { \n");
+ qb.append(" SERVICE <" + getRepositoryUrl(1) + "> { \n");
+ qb.append(" ?X <" + FOAF.NAME + "> ?Y \n ");
+ qb.append(" } \n ");
+ qb.append(" ?X a <" + FOAF.PERSON + "> . \n");
+ qb.append(" } \n");
+
+ RepositoryConnection conn = localRepository.getConnection();
+ try {
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, qb.toString());
+
+ TupleQueryResult tqr = tq.evaluate();
+
+ assertNotNull(tqr);
+ assertTrue(tqr.hasNext());
+
+ int count = 0;
+ while (tqr.hasNext()) {
+ BindingSet bs = tqr.next();
+ count++;
+
+ Value x = bs.getValue("X");
+ Value y = bs.getValue("Y");
+
+ assertFalse(william.equals(x));
+
+ assertTrue(bob.equals(x) || alice.equals(x));
+ if (bob.equals(x)) {
+ assertEquals(f.createLiteral("Bob"), y);
+ }
+ else if (alice.equals(x)) {
+ assertEquals(f.createLiteral("Alice"), y);
+ }
+ }
+
+ assertEquals(2, count);
+
+ }
+ catch (MalformedQueryException e) {
+ fail(e.getMessage());
+ }
+ catch (QueryEvaluationException e) {
+ fail(e.getMessage());
+ }
+ finally {
+ conn.close();
+ }
+ }
+
+ @Test
+ public void test1()
+ throws Exception
+ {
+ prepareTest("/testcases-service/data01.ttl", Arrays.asList("/testcases-service/data01endpoint.ttl"));
+ execute("/testcases-service/service01.rq", "/testcases-service/service01.srx", false);
+ }
+
+ @Test
+ public void test2()
+ throws Exception
+ {
+ prepareTest(null, Arrays.asList("/testcases-service/data02endpoint1.ttl",
+ "/testcases-service/data02endpoint2.ttl"));
+ execute("/testcases-service/service02.rq", "/testcases-service/service02.srx", false);
+ }
+
+ @Test
+ public void test3()
+ throws Exception
+ {
+ prepareTest(null, Arrays.asList("/testcases-service/data03endpoint1.ttl",
+ "/testcases-service/data03endpoint2.ttl"));
+ execute("/testcases-service/service03.rq", "/testcases-service/service03.srx", false);
+ }
+
+ @Test
+ public void test4()
+ throws Exception
+ {
+ prepareTest("/testcases-service/data04.ttl", Arrays.asList("/testcases-service/data04endpoint.ttl"));
+ execute("/testcases-service/service04.rq", "/testcases-service/service04.srx", false);
+ }
+
+ @Test
+ public void test5()
+ throws Exception
+ {
+ prepareTest("/testcases-service/data05.ttl", Arrays.asList("/testcases-service/data05endpoint1.ttl",
+ "/testcases-service/data05endpoint2.ttl"));
+ execute("/testcases-service/service05.rq", "/testcases-service/service05.srx", false);
+ }
+
+ @Test
+ public void test6()
+ throws Exception
+ {
+ prepareTest(null, Arrays.asList("/testcases-service/data06endpoint1.ttl"));
+ execute("/testcases-service/service06.rq", "/testcases-service/service06.srx", false);
+ }
+
+ @Test
+ public void test7()
+ throws Exception
+ {
+ // clears the repository and adds new data + execute
+ prepareTest("/testcases-service/data07.ttl", Collections. emptyList());
+ execute("/testcases-service/service07.rq", "/testcases-service/service07.srx", false);
+ }
+
+ @Test
+ public void test8()
+ throws Exception
+ {
+ /* test where the SERVICE expression is to be evaluated as ASK request */
+ prepareTest("/testcases-service/data08.ttl", Arrays.asList("/testcases-service/data08endpoint.ttl"));
+ execute("/testcases-service/service08.rq", "/testcases-service/service08.srx", false);
+ }
+
+ @Test
+ public void test9()
+ throws Exception
+ {
+ /* test where the service endpoint is bound at runtime through BIND */
+ prepareTest(null, Arrays.asList("/testcases-service/data09endpoint.ttl"));
+ execute("/testcases-service/service09.rq", "/testcases-service/service09.srx", false);
+ }
+
+ @Test
+ public void test10()
+ throws Exception
+ {
+ /* test how we deal with blank node */
+ prepareTest("/testcases-service/data10.ttl", Arrays.asList("/testcases-service/data10endpoint.ttl"));
+ execute("/testcases-service/service10.rq", "/testcases-service/service10.srx", false);
+ }
+
+ @Test
+ public void test11()
+ throws Exception
+ {
+ /* test vectored join with more intermediate results */
+ // clears the repository and adds new data + execute
+ prepareTest("/testcases-service/data11.ttl", Arrays.asList("/testcases-service/data11endpoint.ttl"));
+ execute("/testcases-service/service11.rq", "/testcases-service/service11.srx", false);
+ }
+
+ // test on remote DBpedia endpoint disabled. Only enable for manual testing,
+ // should not be enabled for
+ // Surefire or Hudson.
+ // /**
+ // * This is a manual test to see the Fallback in action. Query asks
+ // * DBpedia, which does not support BINDINGS
+ // *
+ // * @throws Exception
+ // */
+ // public void testFallbackWithDBpedia() throws Exception {
+ // /* test vectored join with more intermediate results */
+ // // clears the repository and adds new data + execute
+ // prepareTest("/testcases-service/data12.ttl",
+ // Collections.emptyList());
+ // execute("/testcases-service/service12.rq",
+ // "/testcases-service/service12.srx", false);
+ // }
+
+ @Test
+ public void test13()
+ throws Exception
+ {
+ /* test for bug SES-899: cross product is required */
+ prepareTest(null, Arrays.asList("/testcases-service/data13.ttl"));
+ execute("/testcases-service/service13.rq", "/testcases-service/service13.srx", false);
+ }
+
+ @Test
+ public void testEmptyServiceBlock()
+ throws Exception
+ {
+ /* test for bug SES-900: nullpointer for empty service block */
+ prepareTest(null, Arrays.asList("/testcases-service/data13.ttl"));
+ execute("/testcases-service/service14.rq", "/testcases-service/service14.srx", false);
+ }
+
+ @Test
+ public void testNotProjectedCount()
+ throws Exception
+ {
+ /* test projection of subqueries - SES-1000 */
+ prepareTest(null, Arrays.asList("/testcases-service/data17endpoint1.ttl"));
+ execute("/testcases-service/service17.rq", "/testcases-service/service17.srx", false);
+ }
+
+ @Test
+ public void testNonAsciiCharHandling()
+ throws Exception
+ {
+ /* SES-1056 */
+ prepareTest(null, Arrays.asList("/testcases-service/data18endpoint1.rdf"));
+ execute("/testcases-service/service18.rq", "/testcases-service/service18.srx", false);
+ }
+
+ /**
+ * Execute a testcase, both queryFile and expectedResultFile must be files located on the class path.
+ *
+ * @param queryFile
+ * @param expectedResultFile
+ * @param checkOrder
+ * @throws Exception
+ */
+ private void execute(String queryFile, String expectedResultFile, boolean checkOrder)
+ throws Exception
+ {
+ RepositoryConnection conn = localRepository.getConnection();
+ String queryString = readQueryString(queryFile);
+
+ try {
+ Query query = conn.prepareQuery(QueryLanguage.SPARQL, queryString);
+
+ if (query instanceof TupleQuery) {
+ TupleQueryResult queryResult = ((TupleQuery)query).evaluate();
+
+ TupleQueryResult expectedResult = readExpectedTupleQueryResult(expectedResultFile);
+
+ compareTupleQueryResults(queryResult, expectedResult, checkOrder);
+
+ }
+ else if (query instanceof GraphQuery) {
+ GraphQueryResult gqr = ((GraphQuery)query).evaluate();
+ Set<Statement> queryResult = Iterations.asSet(gqr);
+
+ Set<Statement> expectedResult = readExpectedGraphQueryResult(expectedResultFile);
+
+ compareGraphs(queryResult, expectedResult);
+
+ }
+ else if (query instanceof BooleanQuery) {
+ // TODO implement if needed
+ throw new RuntimeException("Not yet supported " + query.getClass());
+ }
+ else {
+ throw new RuntimeException("Unexpected query type: " + query.getClass());
+ }
+ }
+ finally {
+ conn.close();
+ }
+ }
+
+ /**
+ * Read the query string from the specified resource
+ *
+ * @param queryResource
+ * @return
+ * @throws RepositoryException
+ * @throws IOException
+ */
+ private String readQueryString(String queryResource)
+ throws RepositoryException, IOException
+ {
+ InputStream stream = SPARQLServiceEvaluationTest.class.getResourceAsStream(queryResource);
+ try {
+ return IOUtil.readString(new InputStreamReader(stream, "UTF-8"));
+ }
+ finally {
+ stream.close();
+ }
+ }
+
+ /**
+ * Read the expected tuple query result from the specified resource
+ *
+ * @param queryResource
+ * @return
+ * @throws RepositoryException
+ * @throws IOException
+ */
+ private TupleQueryResult readExpectedTupleQueryResult(String resultFile)
+ throws Exception
+ {
+ Optional<QueryResultFormat> tqrFormat = QueryResultIO.getParserFormatForFileName(resultFile);
+
+ if (tqrFormat.isPresent()) {
+ InputStream in = SPARQLServiceEvaluationTest.class.getResourceAsStream(resultFile);
+ try {
+ TupleQueryResultParser parser = QueryResultIO.createTupleParser(tqrFormat.get());
+ parser.setValueFactory(SimpleValueFactory.getInstance());
+
+ TupleQueryResultBuilder qrBuilder = new TupleQueryResultBuilder();
+ parser.setQueryResultHandler(qrBuilder);
+
+ parser.parseQueryResult(in);
+ return qrBuilder.getQueryResult();
+ }
+ finally {
+ in.close();
+ }
+ }
+ else {
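+ // no tuple result format matched: read the file as an RDF graph and convert it to a tuple result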
+ Set<Statement> resultGraph = readExpectedGraphQueryResult(resultFile);
+ return DAWGTestResultSetUtil.toTupleQueryResult(resultGraph);
+ }
+ }
+
+ /**
+ * Read the expected graph query result from the specified resource
+ *
+ * @param resultFile
+ * @return
+ * @throws Exception
+ */
+ private Set<Statement> readExpectedGraphQueryResult(String resultFile)
+ throws Exception
+ {
+ RDFFormat rdfFormat = Rio.getParserFormatForFileName(resultFile).orElseThrow(
+ Rio.unsupportedFormat(resultFile));
+
+ RDFParser parser = Rio.createParser(rdfFormat);
+ parser.setDatatypeHandling(DatatypeHandling.IGNORE);
+ parser.setPreserveBNodeIDs(true);
+ parser.setValueFactory(SimpleValueFactory.getInstance());
+
+ Set<Statement> result = new LinkedHashSet<Statement>();
+ parser.setRDFHandler(new StatementCollector(result));
+
+ InputStream in = SPARQLServiceEvaluationTest.class.getResourceAsStream(resultFile);
+ try {
+ parser.parse(in, null); // TODO check
+ }
+ finally {
+ in.close();
+ }
+
+ return result;
+ }
+
+ /**
+ * Compare two tuple query results
+ *
+ * @param queryResult
+ * @param expectedResult
+ * @param checkOrder
+ * @throws Exception
+ */
+ private void compareTupleQueryResults(TupleQueryResult queryResult, TupleQueryResult expectedResult,
+ boolean checkOrder)
+ throws Exception
+ {
+ // Create MutableTupleQueryResult to be able to re-iterate over the
+ // results
+ MutableTupleQueryResult queryResultTable = new MutableTupleQueryResult(queryResult);
+ MutableTupleQueryResult expectedResultTable = new MutableTupleQueryResult(expectedResult);
+
+ boolean resultsEqual;
+
+ resultsEqual = QueryResults.equals(queryResultTable, expectedResultTable);
+
+ if (checkOrder) {
+ // also check the order in which solutions occur.
+ queryResultTable.beforeFirst();
+ expectedResultTable.beforeFirst();
+
+ while (queryResultTable.hasNext()) {
+ BindingSet bs = queryResultTable.next();
+ BindingSet expectedBs = expectedResultTable.next();
+
+ if (!bs.equals(expectedBs)) {
+ resultsEqual = false;
+ break;
+ }
+ }
+ }
+
+ if (!resultsEqual) {
+ queryResultTable.beforeFirst();
+ expectedResultTable.beforeFirst();
+
+ /*
+ * StringBuilder message = new StringBuilder(128); message.append("\n============ ");
+ * message.append(getName()); message.append(" =======================\n"); message.append(
+ * "Expected result: \n"); while (expectedResultTable.hasNext()) {
+ * message.append(expectedResultTable.next()); message.append("\n"); }
+ * message.append("============="); StringUtil.appendN('=', getName().length(), message);
+ * message.append("========================\n"); message.append("Query result: \n"); while
+ * (queryResultTable.hasNext()) { message.append(queryResultTable.next()); message.append("\n"); }
+ * message.append("============="); StringUtil.appendN('=', getName().length(), message);
+ * message.append("========================\n");
+ */
+
+ List<BindingSet> queryBindings = Iterations.asList(queryResultTable);
+
+ List<BindingSet> expectedBindings = Iterations.asList(expectedResultTable);
+
+ List<BindingSet> missingBindings = new ArrayList<BindingSet>(expectedBindings);
+ missingBindings.removeAll(queryBindings);
+
+ List<BindingSet> unexpectedBindings = new ArrayList<BindingSet>(queryBindings);
+ unexpectedBindings.removeAll(expectedBindings);
+
+ StringBuilder message = new StringBuilder(128);
+ message.append("\n============ ");
+ message.append(getName());
+ message.append(" =======================\n");
+
+ if (!missingBindings.isEmpty()) {
+
+ message.append("Missing bindings: \n");
+ for (BindingSet bs : missingBindings) {
+ message.append(bs);
+ message.append("\n");
+ }
+
+ message.append("=============");
+ StringUtil.appendN('=', getName().length(), message);
+ message.append("========================\n");
+ }
+
+ if (!unexpectedBindings.isEmpty()) {
+ message.append("Unexpected bindings: \n");
+ for (BindingSet bs : unexpectedBindings) {
+ message.append(bs);
+ message.append("\n");
+ }
+
+ message.append("=============");
+ StringUtil.appendN('=', getName().length(), message);
+ message.append("========================\n");
+ }
+
+ if (checkOrder && missingBindings.isEmpty() && unexpectedBindings.isEmpty()) {
+ message.append("Results are not in expected order.\n");
+ message.append(" =======================\n");
+ message.append("query result: \n");
+ for (BindingSet bs : queryBindings) {
+ message.append(bs);
+ message.append("\n");
+ }
+ message.append(" =======================\n");
+ message.append("expected result: \n");
+ for (BindingSet bs : expectedBindings) {
+ message.append(bs);
+ message.append("\n");
+ }
+ message.append(" =======================\n");
+
+ System.out.print(message.toString());
+ }
+
+ logger.error(message.toString());
+ fail(message.toString());
+ }
+ /*
+ * debugging only: print out result when test succeeds else { queryResultTable.beforeFirst();
+ * List queryBindings = Iterations.asList(queryResultTable); StringBuilder message = new
+ * StringBuilder(128); message.append("\n============ "); message.append(getName()); message.append(
+ * " =======================\n"); message.append(" =======================\n"); message.append(
+ * "query result: \n"); for (BindingSet bs: queryBindings) { message.append(bs); message.append("\n");
+ * } System.out.print(message.toString()); }
+ */
+ }
+
+ /**
+ * Compare two graphs
+ *
+ * @param queryResult
+ * @param expectedResult
+ * @throws Exception
+ */
+ private void compareGraphs(Set<Statement> queryResult, Set<Statement> expectedResult)
+ throws Exception
+ {
+ if (!Models.isomorphic(expectedResult, queryResult)) {
+ // Don't use RepositoryUtil.difference, it reports incorrect diffs
+ /*
+ * Collection<? extends Statement> unexpectedStatements = RepositoryUtil.difference(queryResult,
+ * expectedResult); Collection<? extends Statement> missingStatements =
+ * RepositoryUtil.difference(expectedResult, queryResult); StringBuilder message = new
+ * StringBuilder(128); message.append("\n=======Diff: "); message.append(getName());
+ * message.append("========================\n"); if (!unexpectedStatements.isEmpty()) {
+ * message.append("Unexpected statements in result: \n"); for (Statement st :
+ * unexpectedStatements) { message.append(st.toString()); message.append("\n"); }
+ * message.append("============="); for (int i = 0; i < getName().length(); i++) {
+ * message.append("="); } message.append("========================\n"); } if
+ * (!missingStatements.isEmpty()) { message.append("Statements missing in result: \n"); for
+ * (Statement st : missingStatements) { message.append(st.toString()); message.append("\n"); }
+ * message.append("============="); for (int i = 0; i < getName().length(); i++) {
+ * message.append("="); } message.append("========================\n"); }
+ */
+ StringBuilder message = new StringBuilder(128);
+ message.append("\n============ ");
+ message.append(getName());
+ message.append(" =======================\n");
+ message.append("Expected result: \n");
+ for (Statement st : expectedResult) {
+ message.append(st.toString());
+ message.append("\n");
+ }
+ message.append("=============");
+ StringUtil.appendN('=', getName().length(), message);
+ message.append("========================\n");
+
+ message.append("Query result: \n");
+ for (Statement st : queryResult) {
+ message.append(st.toString());
+ message.append("\n");
+ }
+ message.append("=============");
+ StringUtil.appendN('=', getName().length(), message);
+ message.append("========================\n");
+
+ logger.error(message.toString());
+ fail(message.toString());
+ }
+ }
+
+}
diff --git a/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/EarlReport.java b/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/EarlReport.java
new file mode 100644
index 000000000..6802a6ea9
--- /dev/null
+++ b/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/EarlReport.java
@@ -0,0 +1,186 @@
+/*******************************************************************************
+ * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Distribution License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *******************************************************************************/
+package org.eclipse.rdf4j.query.parser.sparql.manifest;
+
+import java.io.File;
+import java.io.FileOutputStream;
+import java.text.SimpleDateFormat;
+import java.util.Date;
+
+import junit.framework.AssertionFailedError;
+import junit.framework.Test;
+import junit.framework.TestListener;
+import junit.framework.TestResult;
+
+import org.eclipse.rdf4j.model.BNode;
+import org.eclipse.rdf4j.model.Resource;
+import org.eclipse.rdf4j.model.ValueFactory;
+import org.eclipse.rdf4j.model.vocabulary.DC;
+import org.eclipse.rdf4j.model.vocabulary.DCTERMS;
+import org.eclipse.rdf4j.model.vocabulary.DOAP;
+import org.eclipse.rdf4j.model.vocabulary.EARL;
+import org.eclipse.rdf4j.model.vocabulary.RDF;
+import org.eclipse.rdf4j.model.vocabulary.XMLSchema;
+import org.eclipse.rdf4j.query.parser.sparql.manifest.SPARQL11SyntaxTest;
+import org.eclipse.rdf4j.query.parser.sparql.manifest.SPARQLQueryTest;
+import org.eclipse.rdf4j.query.parser.sparql.manifest.SPARQLUpdateConformanceTest;
+import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.RepositoryConnection;
+import org.eclipse.rdf4j.repository.RepositoryException;
+import org.eclipse.rdf4j.repository.sail.SailRepository;
+import org.eclipse.rdf4j.rio.RDFFormat;
+import org.eclipse.rdf4j.rio.RDFWriterFactory;
+import org.eclipse.rdf4j.rio.RDFWriterRegistry;
+import org.eclipse.rdf4j.rio.Rio;
+import org.eclipse.rdf4j.sail.memory.MemoryStore;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * @author Arjohn Kampman
+ */
+public class EarlReport {
+
+ protected static Repository earlRepository;
+
+ protected static ValueFactory vf;
+
+ protected static RepositoryConnection con;
+
+ protected static Resource projectNode;
+
+ protected static Resource asserterNode;
+
+ private static Logger logger = LoggerFactory.getLogger(EarlReport.class);
+
+ public static void main(String[] args)
+ throws Exception
+ {
+ earlRepository = new SailRepository(new MemoryStore());
+ earlRepository.initialize();
+ vf = earlRepository.getValueFactory();
+ con = earlRepository.getConnection();
+ con.begin();
+
+ con.setNamespace("rdf", RDF.NAMESPACE);
+ con.setNamespace("xsd", XMLSchema.NAMESPACE);
+ con.setNamespace("doap", DOAP.NAMESPACE);
+ con.setNamespace("earl", EARL.NAMESPACE);
+ con.setNamespace("dcterms", DCTERMS.NAMESPACE);
+
+ projectNode = vf.createBNode();
+ BNode releaseNode = vf.createBNode();
+ con.add(projectNode, RDF.TYPE, DOAP.PROJECT);
+ con.add(projectNode, DOAP.NAME, vf.createLiteral("OpenRDF Sesame"));
+ con.add(projectNode, DOAP.RELEASE, releaseNode);
+ con.add(projectNode, DOAP.HOMEPAGE, vf.createIRI("http://www.openrdf.org/"));
+ con.add(releaseNode, RDF.TYPE, DOAP.VERSION);
+ con.add(releaseNode, DOAP.NAME, vf.createLiteral("Sesame 2.7.0"));
+ SimpleDateFormat xsdDataFormat = new SimpleDateFormat("yyyy-MM-dd");
+ String currentDate = xsdDataFormat.format(new Date());
+ con.add(releaseNode, DOAP.CREATED, vf.createLiteral(currentDate, XMLSchema.DATE));
+
+ asserterNode = vf.createBNode();
+ con.add(asserterNode, RDF.TYPE, EARL.SOFTWARE);
+ con.add(asserterNode, DC.TITLE, vf.createLiteral("OpenRDF SPARQL 1.1 compliance tests"));
+
+ TestResult testResult = new TestResult();
+ EarlTestListener listener = new EarlTestListener();
+ testResult.addListener(listener);
+
+ logger.info("running query evaluation tests..");
+ W3CApprovedSPARQL11QueryTest.suite().run(testResult);
+
+ logger.info("running syntax tests...");
+ W3CApprovedSPARQL11SyntaxTest.suite().run(testResult);
+
+ logger.info("running update tests...");
+ W3CApprovedSPARQL11UpdateTest.suite().run(testResult);
+ logger.info("tests complete, generating EARL report...");
+
+ con.commit();
+
+ RDFWriterFactory factory = RDFWriterRegistry.getInstance().get(RDFFormat.TURTLE).orElseThrow(
+ Rio.unsupportedFormat(RDFFormat.TURTLE));
+ File outFile = File.createTempFile("sesame-sparql-compliance",
+ "." + RDFFormat.TURTLE.getDefaultFileExtension());
+ FileOutputStream out = new FileOutputStream(outFile);
+ try {
+ con.export(factory.getWriter(out));
+ }
+ finally {
+ out.close();
+ }
+
+ con.close();
+ earlRepository.shutDown();
+
+ logger.info("EARL output written to " + outFile);
+ }
+
+ protected static class EarlTestListener implements TestListener {
+
+ private int errorCount;
+
+ private int failureCount;
+
+ public void startTest(Test test) {
+ errorCount = failureCount = 0;
+ }
+
+ public void endTest(Test test) {
+ String testURI = null;
+ if (test instanceof SPARQLQueryTest) {
+ testURI = ((SPARQLQueryTest)test).testURI;
+ }
+ else if (test instanceof SPARQL11SyntaxTest) {
+ testURI = ((SPARQL11SyntaxTest)test).testURI;
+ }
+ else if (test instanceof SPARQLUpdateConformanceTest) {
+ testURI = ((SPARQLUpdateConformanceTest)test).testURI;
+ }
+ else {
+ throw new RuntimeException("Unexpected test type: " + test.getClass());
+ }
+
+ try {
+ BNode testNode = vf.createBNode();
+ BNode resultNode = vf.createBNode();
+ con.add(testNode, RDF.TYPE, EARL.ASSERTION);
+ con.add(testNode, EARL.ASSERTEDBY, asserterNode);
+ con.add(testNode, EARL.MODE, EARL.AUTOMATIC);
+ con.add(testNode, EARL.SUBJECT, projectNode);
+ con.add(testNode, EARL.TEST, vf.createIRI(testURI));
+ con.add(testNode, EARL.RESULT, resultNode);
+ con.add(resultNode, RDF.TYPE, EARL.TESTRESULT);
+
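+ // errors and assertion failures both map to EARL.FAIL; anything else is reported as EARL.PASS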
+ if (errorCount > 0) {
+ con.add(resultNode, EARL.OUTCOME, EARL.FAIL);
+ }
+ else if (failureCount > 0) {
+ con.add(resultNode, EARL.OUTCOME, EARL.FAIL);
+ }
+ else {
+ con.add(resultNode, EARL.OUTCOME, EARL.PASS);
+ }
+ }
+ catch (RepositoryException e) {
+ throw new RuntimeException(e);
+ }
+ }
+
+ public void addError(Test test, Throwable t) {
+ errorCount++;
+ }
+
+ public void addFailure(Test test, AssertionFailedError error) {
+ failureCount++;
+ }
+ }
+}
diff --git a/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/W3CApprovedSPARQL10QueryTest.java b/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/W3CApprovedSPARQL10QueryTest.java
new file mode 100644
index 000000000..761fc6bc3
--- /dev/null
+++ b/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/W3CApprovedSPARQL10QueryTest.java
@@ -0,0 +1,74 @@
+/*******************************************************************************
+ * Copyright (c) 2016 Eclipse RDF4J contributors.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Distribution License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *******************************************************************************/
+package org.eclipse.rdf4j.query.parser.sparql.manifest;
+
+import junit.framework.Test;
+
+import org.eclipse.rdf4j.query.Dataset;
+import org.eclipse.rdf4j.query.parser.sparql.manifest.SPARQL10ManifestTest;
+import org.eclipse.rdf4j.query.parser.sparql.manifest.SPARQLQueryTest;
+import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.dataset.DatasetRepository;
+import org.eclipse.rdf4j.repository.sail.SailRepository;
+import org.eclipse.rdf4j.sail.memory.MemoryStore;
+
+/**
+ * Checks conformance of SPARQL query evaluation against the W3C-approved SPARQL 1.0 query test cases
+ *
+ * @author Jeen Broekstra
+ */
+public class W3CApprovedSPARQL10QueryTest extends SPARQLQueryTest {
+
+ public static Test suite()
+ throws Exception
+ {
+ return SPARQL10ManifestTest.suite(new Factory() {
+
+ public W3CApprovedSPARQL10QueryTest createSPARQLQueryTest(String testURI, String name,
+ String queryFileURL, String resultFileURL, Dataset dataSet, boolean laxCardinality)
+ {
+ return createSPARQLQueryTest(testURI, name, queryFileURL, resultFileURL, dataSet,
+ laxCardinality, false);
+ }
+
+ public W3CApprovedSPARQL10QueryTest createSPARQLQueryTest(String testURI, String name,
+ String queryFileURL, String resultFileURL, Dataset dataSet, boolean laxCardinality,
+ boolean checkOrder)
+ {
+ String[] ignoredTests = {
+ // incompatible with SPARQL 1.1 - syntax for decimals was modified
+ "Basic - Term 6",
+ // incompatible with SPARQL 1.1 - syntax for decimals was modified
+ "Basic - Term 7",
+ // Test is incorrect: assumes timezoned date is comparable with non-timezoned
+ "date-2"};
+
+ return new W3CApprovedSPARQL10QueryTest(testURI, name, queryFileURL, resultFileURL, dataSet,
+ laxCardinality, checkOrder, ignoredTests);
+ }
+ });
+
+ }
+
+ protected W3CApprovedSPARQL10QueryTest(String testURI, String name, String queryFileURL,
+ String resultFileURL, Dataset dataSet, boolean laxCardinality, String... ignoredTests)
+ {
+ this(testURI, name, queryFileURL, resultFileURL, dataSet, laxCardinality, false, ignoredTests);
+ }
+
+ protected W3CApprovedSPARQL10QueryTest(String testURI, String name, String queryFileURL,
+ String resultFileURL, Dataset dataSet, boolean laxCardinality, boolean checkOrder,
+ String... ignoredTests)
+ {
+ super(testURI, name, queryFileURL, resultFileURL, dataSet, laxCardinality, checkOrder, ignoredTests);
+ }
+
+ protected Repository newRepository() {
+ return new DatasetRepository(new SailRepository(new MemoryStore()));
+ }
+}
diff --git a/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/W3CApprovedSPARQL10SyntaxTest.java b/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/W3CApprovedSPARQL10SyntaxTest.java
new file mode 100644
index 000000000..23b3fd053
--- /dev/null
+++ b/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/W3CApprovedSPARQL10SyntaxTest.java
@@ -0,0 +1,50 @@
+/*******************************************************************************
+ * Copyright (c) 2016 Eclipse RDF4J contributors.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Distribution License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *******************************************************************************/
+package org.eclipse.rdf4j.query.parser.sparql.manifest;
+
+import junit.framework.Test;
+
+import org.eclipse.rdf4j.query.MalformedQueryException;
+import org.eclipse.rdf4j.query.QueryLanguage;
+import org.eclipse.rdf4j.query.parser.ParsedOperation;
+import org.eclipse.rdf4j.query.parser.QueryParserUtil;
+import org.eclipse.rdf4j.query.parser.sparql.manifest.SPARQL11SyntaxTest;
+
+/**
+ * Checks conformance of SPARQL query parsing against the W3C-approved SPARQL 1.0 test cases
+ *
+ * @author Jeen Broekstra
+ */
+public class W3CApprovedSPARQL10SyntaxTest extends SPARQL11SyntaxTest {
+
+ public static Test suite()
+ throws Exception
+ {
+ return SPARQL11SyntaxTest.suite(new Factory() {
+
+ public SPARQL11SyntaxTest createSPARQLSyntaxTest(String testURI, String testName,
+ String testAction, boolean positiveTest)
+ {
+ return new W3CApprovedSPARQL10SyntaxTest(testURI, testName, testAction, positiveTest);
+ }
+ }, false);
+ }
+
+ public W3CApprovedSPARQL10SyntaxTest(String testURI, String name, String queryFileURL,
+ boolean positiveTest)
+ {
+ super(testURI, name, queryFileURL, positiveTest);
+ }
+
+ protected ParsedOperation parseOperation(String operation, String fileURL)
+ throws MalformedQueryException
+ {
+ return QueryParserUtil.parseOperation(QueryLanguage.SPARQL, operation, fileURL);
+ }
+
+}
diff --git a/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/W3CApprovedSPARQL11QueryTest.java b/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/W3CApprovedSPARQL11QueryTest.java
new file mode 100644
index 000000000..b494025a7
--- /dev/null
+++ b/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/W3CApprovedSPARQL11QueryTest.java
@@ -0,0 +1,70 @@
+/*******************************************************************************
+ * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Distribution License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *******************************************************************************/
+package org.eclipse.rdf4j.query.parser.sparql.manifest;
+
+import junit.framework.Test;
+
+import org.eclipse.rdf4j.query.Dataset;
+import org.eclipse.rdf4j.query.parser.sparql.manifest.SPARQL11ManifestTest;
+import org.eclipse.rdf4j.query.parser.sparql.manifest.SPARQLQueryTest;
+import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.dataset.DatasetRepository;
+import org.eclipse.rdf4j.repository.sail.SailRepository;
+import org.eclipse.rdf4j.sail.memory.MemoryStore;
+
+public class W3CApprovedSPARQL11QueryTest extends SPARQLQueryTest {
+
+ public static Test suite()
+ throws Exception
+ {
+ return SPARQL11ManifestTest.suite(new Factory() {
+
+ public W3CApprovedSPARQL11QueryTest createSPARQLQueryTest(String testURI, String name,
+ String queryFileURL, String resultFileURL, Dataset dataSet, boolean laxCardinality)
+ {
+ return createSPARQLQueryTest(testURI, name, queryFileURL, resultFileURL, dataSet,
+ laxCardinality, false);
+ }
+
+ public W3CApprovedSPARQL11QueryTest createSPARQLQueryTest(String testURI, String name,
+ String queryFileURL, String resultFileURL, Dataset dataSet, boolean laxCardinality,
+ boolean checkOrder)
+ {
+ String[] ignoredTests = {
+ // test case incompatible with RDF 1.1 - see
+ // http://lists.w3.org/Archives/Public/public-sparql-dev/2013AprJun/0006.html
+ "STRDT TypeErrors",
+ // test case incompatible with RDF 1.1 - see
+ // http://lists.w3.org/Archives/Public/public-sparql-dev/2013AprJun/0006.html
+ "STRLANG TypeErrors",
+ // known issue: SES-937
+ "sq03 - Subquery within graph pattern, graph variable is not bound" };
+
+ return new W3CApprovedSPARQL11QueryTest(testURI, name, queryFileURL, resultFileURL, dataSet,
+ laxCardinality, checkOrder, ignoredTests);
+ }
+ }, true, true, false, "service");
+ }
+
+ protected W3CApprovedSPARQL11QueryTest(String testURI, String name, String queryFileURL,
+ String resultFileURL, Dataset dataSet, boolean laxCardinality, String... ignoredTests)
+ {
+ this(testURI, name, queryFileURL, resultFileURL, dataSet, laxCardinality, false, ignoredTests);
+ }
+
+ protected W3CApprovedSPARQL11QueryTest(String testURI, String name, String queryFileURL,
+ String resultFileURL, Dataset dataSet, boolean laxCardinality, boolean checkOrder,
+ String... ignoredTests)
+ {
+ super(testURI, name, queryFileURL, resultFileURL, dataSet, laxCardinality, checkOrder, ignoredTests);
+ }
+
+ protected Repository newRepository() {
+ return new DatasetRepository(new SailRepository(new MemoryStore()));
+ }
+}
diff --git a/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/W3CApprovedSPARQL11SyntaxTest.java b/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/W3CApprovedSPARQL11SyntaxTest.java
new file mode 100644
index 000000000..8c9ea9dba
--- /dev/null
+++ b/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/W3CApprovedSPARQL11SyntaxTest.java
@@ -0,0 +1,58 @@
+/*******************************************************************************
+ * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Distribution License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *******************************************************************************/
+package org.eclipse.rdf4j.query.parser.sparql.manifest;
+
+import junit.framework.Test;
+
+import org.eclipse.rdf4j.query.MalformedQueryException;
+import org.eclipse.rdf4j.query.QueryLanguage;
+import org.eclipse.rdf4j.query.parser.ParsedOperation;
+import org.eclipse.rdf4j.query.parser.QueryParserUtil;
+import org.eclipse.rdf4j.query.parser.sparql.manifest.SPARQL11SyntaxTest;
+
+public class W3CApprovedSPARQL11SyntaxTest extends SPARQL11SyntaxTest {
+
+ public static Test suite()
+ throws Exception
+ {
+ return SPARQL11SyntaxTest.suite(new Factory() {
+
+ public SPARQL11SyntaxTest createSPARQLSyntaxTest(String testURI, String testName,
+ String testAction, boolean positiveTest)
+ {
+ return new W3CApprovedSPARQL11SyntaxTest(testURI, testName, testAction, positiveTest);
+ }
+ }, false);
+ }
+
+ public W3CApprovedSPARQL11SyntaxTest(String testURI, String name, String queryFileURL,
+ boolean positiveTest)
+ {
+ super(testURI, name, queryFileURL, positiveTest);
+ }
+
+ protected ParsedOperation parseOperation(String operation, String fileURL)
+ throws MalformedQueryException
+ {
+ return QueryParserUtil.parseOperation(QueryLanguage.SPARQL, operation, fileURL);
+ }
+
+ @Override
+ protected void runTest()
+ throws Exception
+ {
+ if (this.getName().contains("syntax-update-54")) {
+ // we skip this negative syntax test because it is an unnecessarily restrictive test that is almost
+ // impossible to implement correctly, and which in practice Sesame handles correctly simply by
+ // assigning different blank node ids.
+ }
+ else {
+ super.runTest();
+ }
+ }
+}
diff --git a/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/W3CApprovedSPARQL11UpdateTest.java b/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/W3CApprovedSPARQL11UpdateTest.java
new file mode 100644
index 000000000..923792e49
--- /dev/null
+++ b/compliance/sparql/src/test/java/org/eclipse/rdf4j/query/parser/sparql/manifest/W3CApprovedSPARQL11UpdateTest.java
@@ -0,0 +1,61 @@
+/*******************************************************************************
+ * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Distribution License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *******************************************************************************/
+package org.eclipse.rdf4j.query.parser.sparql.manifest;
+
+import java.util.Map;
+
+import org.eclipse.rdf4j.model.IRI;
+import org.eclipse.rdf4j.query.parser.sparql.manifest.SPARQL11ManifestTest;
+import org.eclipse.rdf4j.query.parser.sparql.manifest.SPARQLUpdateConformanceTest;
+import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.contextaware.ContextAwareRepository;
+import org.eclipse.rdf4j.repository.sail.SailRepository;
+import org.eclipse.rdf4j.sail.memory.MemoryStore;
+
+import junit.framework.Test;
+
+/**
+ * @author Jeen Broekstra
+ */
+public class W3CApprovedSPARQL11UpdateTest extends SPARQLUpdateConformanceTest {
+
+ public W3CApprovedSPARQL11UpdateTest(String testURI, String name, String requestFile, IRI defaultGraphURI,
+ Map inputNamedGraphs, IRI resultDefaultGraphURI,
+ Map resultNamedGraphs)
+ {
+ super(testURI, name, requestFile, defaultGraphURI, inputNamedGraphs, resultDefaultGraphURI,
+ resultNamedGraphs);
+ }
+
+ public static Test suite()
+ throws Exception
+ {
+ return SPARQL11ManifestTest.suite(new Factory() {
+
+ public W3CApprovedSPARQL11UpdateTest createSPARQLUpdateConformanceTest(String testURI,
+ String name, String requestFile, IRI defaultGraphURI, Map inputNamedGraphs,
+ IRI resultDefaultGraphURI, Map resultNamedGraphs)
+ {
+ return new W3CApprovedSPARQL11UpdateTest(testURI, name, requestFile, defaultGraphURI,
+ inputNamedGraphs, resultDefaultGraphURI, resultNamedGraphs);
+ }
+
+ }, true, true, false);
+ }
+
+ @Override
+ protected Repository newRepository()
+ throws Exception
+ {
+ SailRepository repo = new SailRepository(new MemoryStore());
+
+ return repo;
+ }
+
+}
diff --git a/compliance/sparql/src/test/java/org/eclipse/rdf4j/sail/federation/FederationSPARQL10QueryTest.java b/compliance/sparql/src/test/java/org/eclipse/rdf4j/sail/federation/FederationSPARQL10QueryTest.java
new file mode 100644
index 000000000..1470ec0ed
--- /dev/null
+++ b/compliance/sparql/src/test/java/org/eclipse/rdf4j/sail/federation/FederationSPARQL10QueryTest.java
@@ -0,0 +1,73 @@
+/*******************************************************************************
+ * Copyright (c) 2016 Eclipse RDF4J contributors.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Distribution License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *******************************************************************************/
+package org.eclipse.rdf4j.sail.federation;
+
+import junit.framework.Test;
+
+import org.eclipse.rdf4j.query.Dataset;
+import org.eclipse.rdf4j.query.parser.sparql.manifest.SPARQL10ManifestTest;
+import org.eclipse.rdf4j.query.parser.sparql.manifest.SPARQLQueryTest;
+import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.dataset.DatasetRepository;
+import org.eclipse.rdf4j.repository.sail.SailRepository;
+import org.eclipse.rdf4j.sail.memory.MemoryStore;
+
+public class FederationSPARQL10QueryTest extends SPARQLQueryTest {
+
+ public static Test suite()
+ throws Exception
+ {
+ return SPARQL10ManifestTest.suite(new Factory() {
+
+ public FederationSPARQL10QueryTest createSPARQLQueryTest(String testURI, String name,
+ String queryFileURL, String resultFileURL, Dataset dataSet, boolean laxCardinality)
+ {
+ return createSPARQLQueryTest(testURI, name, queryFileURL, resultFileURL, dataSet,
+ laxCardinality, false);
+ }
+
+ public FederationSPARQL10QueryTest createSPARQLQueryTest(String testURI, String name,
+ String queryFileURL, String resultFileURL, Dataset dataSet, boolean laxCardinality,
+ boolean checkOrder)
+ {
+ String[] ignoredTests = {
+ // incompatible with SPARQL 1.1 - syntax for decimals was modified
+ "Basic - Term 6",
+ // incompatible with SPARQL 1.1 - syntax for decimals was modified
+ "Basic - Term 7",
+ // Test is incorrect: assumes timezoned date is comparable with non-timezoned
+ "date-2"};
+
+ return new FederationSPARQL10QueryTest(testURI, name, queryFileURL, resultFileURL, dataSet,
+ laxCardinality, checkOrder, ignoredTests);
+ }
+ });
+
+ }
+
+ protected FederationSPARQL10QueryTest(String testURI, String name, String queryFileURL,
+ String resultFileURL, Dataset dataSet, boolean laxCardinality, String... ignoredTests)
+ {
+ this(testURI, name, queryFileURL, resultFileURL, dataSet, laxCardinality, false, ignoredTests);
+ }
+
+ protected FederationSPARQL10QueryTest(String testURI, String name, String queryFileURL,
+ String resultFileURL, Dataset dataSet, boolean laxCardinality, boolean checkOrder,
+ String... ignoredTests)
+ {
+ super(testURI, name, queryFileURL, resultFileURL, dataSet, laxCardinality, checkOrder, ignoredTests);
+ }
+
+ protected Repository newRepository() {
+ Federation sail = new Federation();
+ sail.addMember(new SailRepository(new MemoryStore()));
+ sail.addMember(new SailRepository(new MemoryStore()));
+ sail.addMember(new SailRepository(new MemoryStore()));
+ return new DatasetRepository(new SailRepository(sail));
+ }
+}
diff --git a/compliance/sparql/src/test/java/org/eclipse/rdf4j/sail/federation/FederationSPARQL11QueryTest.java b/compliance/sparql/src/test/java/org/eclipse/rdf4j/sail/federation/FederationSPARQL11QueryTest.java
new file mode 100644
index 000000000..8ada6c6b7
--- /dev/null
+++ b/compliance/sparql/src/test/java/org/eclipse/rdf4j/sail/federation/FederationSPARQL11QueryTest.java
@@ -0,0 +1,75 @@
+/*******************************************************************************
+ * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Distribution License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *******************************************************************************/
+package org.eclipse.rdf4j.sail.federation;
+
+import org.eclipse.rdf4j.query.Dataset;
+import org.eclipse.rdf4j.query.parser.sparql.manifest.SPARQL11ManifestTest;
+import org.eclipse.rdf4j.query.parser.sparql.manifest.SPARQLQueryTest;
+import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.dataset.DatasetRepository;
+import org.eclipse.rdf4j.repository.sail.SailRepository;
+import org.eclipse.rdf4j.sail.memory.MemoryStore;
+
+import junit.framework.Test;
+
+public class FederationSPARQL11QueryTest extends SPARQLQueryTest {
+
+ public static Test suite()
+ throws Exception
+ {
+ return SPARQL11ManifestTest.suite(new Factory() {
+
+ public SPARQLQueryTest createSPARQLQueryTest(String testURI, String name, String queryFileURL,
+ String resultFileURL, Dataset dataSet, boolean laxCardinality)
+ {
+ return new FederationSPARQL11QueryTest(testURI, name, queryFileURL, resultFileURL, dataSet,
+ laxCardinality);
+ }
+
+ public SPARQLQueryTest createSPARQLQueryTest(String testURI, String name, String queryFileURL,
+ String resultFileURL, Dataset dataSet, boolean laxCardinality, boolean checkOrder)
+ {
+ String[] ignoredTests = {
+ // test case incompatible with RDF 1.1 - see
+ // http://lists.w3.org/Archives/Public/public-sparql-dev/2013AprJun/0006.html
+ "STRDT TypeErrors",
+ // test case incompatible with RDF 1.1 - see
+ // http://lists.w3.org/Archives/Public/public-sparql-dev/2013AprJun/0006.html
+ "STRLANG TypeErrors",
+ // known issue: SES-937
+ "sq03 - Subquery within graph pattern, graph variable is not bound",
+ };
+
+ return new FederationSPARQL11QueryTest(testURI, name, queryFileURL, resultFileURL, dataSet,
+ laxCardinality, checkOrder, ignoredTests);
+ }
+ // skip 'service' tests for now since they require presence of remote sparql endpoints.
+ }, true, true, false, "service");
+ }
+
+ public FederationSPARQL11QueryTest(String testURI, String name, String queryFileURL, String resultFileURL,
+ Dataset dataSet, boolean laxCardinality, String... ignoredTests)
+ {
+ super(testURI, name, queryFileURL, resultFileURL, dataSet, laxCardinality, ignoredTests);
+ }
+
+ public FederationSPARQL11QueryTest(String testURI, String name, String queryFileURL, String resultFileURL,
+ Dataset dataSet, boolean laxCardinality, boolean checkOrder, String... ignoredTests)
+ {
+ super(testURI, name, queryFileURL, resultFileURL, dataSet, laxCardinality, checkOrder, ignoredTests);
+ }
+
+ @Override
+ protected Repository newRepository() {
+ Federation sail = new Federation();
+ sail.addMember(new SailRepository(new MemoryStore()));
+ sail.addMember(new SailRepository(new MemoryStore()));
+ sail.addMember(new SailRepository(new MemoryStore()));
+ return new DatasetRepository(new SailRepository(sail));
+ }
+}
diff --git a/compliance/sparql/src/test/java/org/eclipse/rdf4j/sail/federation/FederationSparqlTest.java b/compliance/sparql/src/test/java/org/eclipse/rdf4j/sail/federation/FederationSparqlTest.java
new file mode 100644
index 000000000..18315e31d
--- /dev/null
+++ b/compliance/sparql/src/test/java/org/eclipse/rdf4j/sail/federation/FederationSparqlTest.java
@@ -0,0 +1,52 @@
+/*******************************************************************************
+ * Copyright (c) 2016 Eclipse RDF4J contributors.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Distribution License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *******************************************************************************/
+package org.eclipse.rdf4j.sail.federation;
+
+import static org.junit.Assert.assertFalse;
+
+import org.eclipse.rdf4j.query.QueryLanguage;
+import org.eclipse.rdf4j.query.TupleQuery;
+import org.eclipse.rdf4j.query.TupleQueryResult;
+import org.eclipse.rdf4j.repository.RepositoryConnection;
+import org.eclipse.rdf4j.repository.sail.SailRepository;
+import org.eclipse.rdf4j.sail.memory.MemoryStore;
+import org.junit.Test;
+
+public class FederationSparqlTest {
+
+ @Test
+ public void test181Issue()
+ throws Exception
+ {
+ SailRepository repo1 = new SailRepository(new MemoryStore());
+ SailRepository repo2 = new SailRepository(new MemoryStore());
+ Federation fed = new Federation();
+ fed.addMember(repo1);
+ fed.addMember(repo2);
+ SailRepository repoFed = new SailRepository(fed);
+ repoFed.initialize();
+
+ try (RepositoryConnection conn = repo1.getConnection()) {
+ conn.add(getClass().getResource("/testcases-sparql-1.0-w3c/data-r2/algebra/var-scope-join-1.ttl"),
+ null, null, conn.getValueFactory().createIRI("http://example/g1"));
+ }
+ try (RepositoryConnection conn = repo2.getConnection()) {
+ conn.add(getClass().getResource("/testcases-sparql-1.0-w3c/data-r2/algebra/var-scope-join-1.ttl"),
+ null, null, conn.getValueFactory().createIRI("http://example/g2"));
+ }
+
+ String query = "PREFIX : SELECT * { graph :g1 {?X :name 'paul'} { graph :g2 {?Y :name 'george' . OPTIONAL { ?X :email ?Z } } } }";
+ boolean hasResults;
+ try (RepositoryConnection conn = repoFed.getConnection()) {
+ TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
+ TupleQueryResult tqr = tq.evaluate();
+ hasResults = tqr.hasNext();
+ }
+ assertFalse(hasResults);
+ }
+}
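Note on the test above: the TupleQueryResult is only probed with hasNext() and is not explicitly closed. A minimal variant (a sketch, reusing the org.eclipse.rdf4j.query.QueryResults utility that also appears in the EvaluationStrategyTest later in this series) drains and closes the result in one call:

	// requires: import org.eclipse.rdf4j.query.QueryResults;
	try (RepositoryConnection conn = repoFed.getConnection()) {
		TupleQuery tq = conn.prepareTupleQuery(QueryLanguage.SPARQL, query);
		// QueryResults.asList fully consumes the result and closes it
		hasResults = !QueryResults.asList(tq.evaluate()).isEmpty();
	}
	assertFalse(hasResults);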
diff --git a/compliance/sparql/src/test/resources/logback-test.xml b/compliance/sparql/src/test/resources/logback-test.xml
new file mode 100644
index 000000000..4a6f17fbf
--- /dev/null
+++ b/compliance/sparql/src/test/resources/logback-test.xml
@@ -0,0 +1,15 @@
+
+
+
+
+
+ %d{HH:mm:ss.SSS} [%thread] %-5level %msg%n
+
+
+
+
+
+
+
+
+
From d6da3c7933df593b95a721c810c693a909109d60 Mon Sep 17 00:00:00 2001
From: Jeen Broekstra
Date: Thu, 27 Dec 2018 12:00:18 +1100
Subject: [PATCH 13/54] GH-1230 renamed sail-model compliance tests to avoid
conflict with rdf4j-model-compliance in rdf4j client repo
Signed-off-by: Jeen Broekstra
---
compliance/pom.xml | 2 +-
compliance/{model => sail-model}/pom.xml | 8 ++++----
.../org/eclipse/rdf4j/model/SailModelNamespacesTest.java | 0
.../test/java/org/eclipse/rdf4j/model/SailModelTest.java | 0
.../src/test/resources/logback-test.xml | 0
5 files changed, 5 insertions(+), 5 deletions(-)
rename compliance/{model => sail-model}/pom.xml (89%)
rename compliance/{model => sail-model}/src/test/java/org/eclipse/rdf4j/model/SailModelNamespacesTest.java (100%)
rename compliance/{model => sail-model}/src/test/java/org/eclipse/rdf4j/model/SailModelTest.java (100%)
rename compliance/{model => sail-model}/src/test/resources/logback-test.xml (100%)
diff --git a/compliance/pom.xml b/compliance/pom.xml
index f28046b0b..ad417edbf 100644
--- a/compliance/pom.xml
+++ b/compliance/pom.xml
@@ -12,7 +12,7 @@
pom
- model
+ sail-model
 sparql
 lucene
 elasticsearch
diff --git a/compliance/model/pom.xml b/compliance/sail-model/pom.xml
similarity index 89%
rename from compliance/model/pom.xml
rename to compliance/sail-model/pom.xml
index 27b4dedaf..9ebe831de 100644
--- a/compliance/model/pom.xml
+++ b/compliance/sail-model/pom.xml
@@ -8,15 +8,15 @@
2.5-SNAPSHOT
- rdf4j-model-compliance
+ rdf4j-sail-model-compliance
- RDF4J Model compliance test
- Tests for RDF4J Model
+ RDF4J Sail Model compliance test
+ Tests for RDF4J Sail Model
 ${project.groupId}
- rdf4j-runtime
+ rdf4j-sail-model
 ${project.version}
diff --git a/compliance/model/src/test/java/org/eclipse/rdf4j/model/SailModelNamespacesTest.java b/compliance/sail-model/src/test/java/org/eclipse/rdf4j/model/SailModelNamespacesTest.java
similarity index 100%
rename from compliance/model/src/test/java/org/eclipse/rdf4j/model/SailModelNamespacesTest.java
rename to compliance/sail-model/src/test/java/org/eclipse/rdf4j/model/SailModelNamespacesTest.java
diff --git a/compliance/model/src/test/java/org/eclipse/rdf4j/model/SailModelTest.java b/compliance/sail-model/src/test/java/org/eclipse/rdf4j/model/SailModelTest.java
similarity index 100%
rename from compliance/model/src/test/java/org/eclipse/rdf4j/model/SailModelTest.java
rename to compliance/sail-model/src/test/java/org/eclipse/rdf4j/model/SailModelTest.java
diff --git a/compliance/model/src/test/resources/logback-test.xml b/compliance/sail-model/src/test/resources/logback-test.xml
similarity index 100%
rename from compliance/model/src/test/resources/logback-test.xml
rename to compliance/sail-model/src/test/resources/logback-test.xml
From 25aa4952c4886be44c909e241189714a1a02ca8f Mon Sep 17 00:00:00 2001
From: Jeen Broekstra
Date: Thu, 27 Dec 2018 13:18:53 +1100
Subject: [PATCH 14/54] GH-1230 remove reference to obsolete http-compliance
module
Signed-off-by: Jeen Broekstra
---
compliance/pom.xml | 6 ------
1 file changed, 6 deletions(-)
diff --git a/compliance/pom.xml b/compliance/pom.xml
index ad417edbf..24147a1aa 100644
--- a/compliance/pom.xml
+++ b/compliance/pom.xml
@@ -28,12 +28,6 @@
-
- ${project.groupId}
- rdf4j-http-compliance
- ${project.version}
-
-
 ${project.groupId}
 rdf4j-rio-testsuite
From 6e10859f4499063a0b5fa829aea3b49c6a313979 Mon Sep 17 00:00:00 2001
From: Jeen Broekstra
Date: Thu, 27 Dec 2018 14:50:47 +1100
Subject: [PATCH 15/54] GH-1230 reorg compliance/testsuites
Signed-off-by: Jeen Broekstra
---
.../sail/EvaluationStrategyTest.java | 136 +++++++++++++
.../memory/MemoryEvaluationStrategyTest.java | 2 +-
.../memory/MemoryOptimisticIsolationTest.java | 12 +-
.../SpinMemoryRepositoryConnectionTest.java | 26 ---
...pinRDFSMemoryRepositoryConnectionTest.java | 27 ---
.../NativeEvaluationStrategyTest.java | 2 +-
.../NativeOptimisticIsolationTest.java | 12 +-
.../sail/CustomGraphQueryInferencerTest.java | 187 ++++++++++++++++++
.../rdf4j/sail/memory/MemInferencingTest.java | 6 +-
...chingRDFSInferencerMemInferencingTest.java | 32 +--
.../nativerdf/NativeStoreInferencingTest.java | 7 +-
11 files changed, 371 insertions(+), 78 deletions(-)
create mode 100644 compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/EvaluationStrategyTest.java
create mode 100644 compliance/store/src/test/java/org/eclipse/rdf4j/sail/CustomGraphQueryInferencerTest.java
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/EvaluationStrategyTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/EvaluationStrategyTest.java
new file mode 100644
index 000000000..6091c78bc
--- /dev/null
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/EvaluationStrategyTest.java
@@ -0,0 +1,136 @@
+/*******************************************************************************
+ * Copyright (c) 2016 Eclipse RDF4J contributors.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Distribution License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *******************************************************************************/
+package org.eclipse.rdf4j.repository.sail;
+
+import static org.junit.Assert.assertEquals;
+
+import java.util.List;
+
+import org.eclipse.rdf4j.model.IRI;
+import org.eclipse.rdf4j.model.Literal;
+import org.eclipse.rdf4j.model.ValueFactory;
+import org.eclipse.rdf4j.model.vocabulary.RDFS;
+import org.eclipse.rdf4j.model.vocabulary.XMLSchema;
+import org.eclipse.rdf4j.query.BindingSet;
+import org.eclipse.rdf4j.query.QueryResults;
+import org.eclipse.rdf4j.query.algebra.evaluation.impl.ExtendedEvaluationStrategy;
+import org.eclipse.rdf4j.query.algebra.evaluation.impl.ExtendedEvaluationStrategyFactory;
+import org.eclipse.rdf4j.query.algebra.evaluation.impl.StrictEvaluationStrategy;
+import org.eclipse.rdf4j.query.algebra.evaluation.impl.StrictEvaluationStrategyFactory;
+import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.RepositoryConnection;
+import org.eclipse.rdf4j.repository.config.RepositoryConfig;
+import org.eclipse.rdf4j.repository.config.RepositoryImplConfig;
+import org.eclipse.rdf4j.repository.manager.RepositoryManager;
+import org.eclipse.rdf4j.repository.manager.RepositoryProvider;
+import org.eclipse.rdf4j.repository.sail.config.SailRepositoryConfig;
+import org.eclipse.rdf4j.sail.base.config.BaseSailConfig;
+import org.junit.Before;
+import org.junit.BeforeClass;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.TemporaryFolder;
+
+/**
+ * Test cases for behavior of {@link StrictEvaluationStrategy} and {@link ExtendedEvaluationStrategy} on base
+ * Sail implementations.
+ *
+ * @author Jeen Broekstra
+ */
+public abstract class EvaluationStrategyTest {
+
+ @BeforeClass
+ public static void setUpClass()
+ throws Exception
+ {
+ System.setProperty("org.eclipse.rdf4j.repository.debug", "true");
+ }
+
+ @Rule
+ public TemporaryFolder tempDir = new TemporaryFolder();
+
+ private Repository strictRepo;
+
+ private Repository extendedRepo;
+
+ private RepositoryManager manager;
+
+ /**
+ * @throws java.lang.Exception
+ */
+ @Before
+ public void setUp()
+ throws Exception
+ {
+ manager = RepositoryProvider.getRepositoryManager(tempDir.newFolder());
+
+ BaseSailConfig strictStoreConfig = getBaseSailConfig();
+ strictStoreConfig.setEvaluationStrategyFactoryClassName(
+ StrictEvaluationStrategyFactory.class.getName());
+
+ strictRepo = createRepo(strictStoreConfig, "test-strict");
+
+ BaseSailConfig extendedStoreConfig = getBaseSailConfig();
+ extendedStoreConfig.setEvaluationStrategyFactoryClassName(
+ ExtendedEvaluationStrategyFactory.class.getName());
+
+ extendedRepo = createRepo(extendedStoreConfig, "test-extended");
+ }
+
+ private Repository createRepo(BaseSailConfig config, String id) {
+ RepositoryImplConfig ric = new SailRepositoryConfig(config);
+ manager.addRepositoryConfig(new RepositoryConfig(id, ric));
+
+ return manager.getRepository(id);
+ }
+
+ @Test
+ public void testDatetimeSubtypesStrict() {
+ ValueFactory vf = strictRepo.getValueFactory();
+
+ try (RepositoryConnection conn = strictRepo.getConnection()) {
+ Literal l1 = vf.createLiteral("2009", XMLSchema.GYEAR);
+ Literal l2 = vf.createLiteral("2009-01", XMLSchema.GYEARMONTH);
+ IRI s1 = vf.createIRI("urn:s1");
+ IRI s2 = vf.createIRI("urn:s2");
+ conn.add(s1, RDFS.LABEL, l1);
+ conn.add(s2, RDFS.LABEL, l2);
+
+ String query = "SELECT * WHERE { ?s rdfs:label ?l . FILTER(?l >= \"2008\"^^xsd:gYear) }";
+
+ List<BindingSet> result = QueryResults.asList(conn.prepareTupleQuery(query).evaluate());
+ assertEquals(1, result.size());
+ }
+ }
+
+ @Test
+ public void testDatetimeSubtypesExtended() {
+ ValueFactory vf = extendedRepo.getValueFactory();
+
+ try (RepositoryConnection conn = extendedRepo.getConnection()) {
+ Literal l1 = vf.createLiteral("2009", XMLSchema.GYEAR);
+ Literal l2 = vf.createLiteral("2009-01", XMLSchema.GYEARMONTH);
+ IRI s1 = vf.createIRI("urn:s1");
+ IRI s2 = vf.createIRI("urn:s2");
+ conn.add(s1, RDFS.LABEL, l1);
+ conn.add(s2, RDFS.LABEL, l2);
+
+ String query = "SELECT * WHERE { ?s rdfs:label ?l . FILTER(?l >= \"2008\"^^xsd:gYear) }";
+
+ List<BindingSet> result = QueryResults.asList(conn.prepareTupleQuery(query).evaluate());
+ assertEquals(2, result.size());
+ }
+ }
+
+ /**
+ * Gets a configuration object for the base Sail that should be tested.
+ *
+ * @return a {@link BaseSailConfig}.
+ */
+ protected abstract BaseSailConfig getBaseSailConfig();
+}
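The abstract getBaseSailConfig() hook is what the concrete compliance tests in the following hunks implement. A minimal memory-store subclass would look roughly like this (a sketch; it assumes MemoryStoreConfig is the BaseSailConfig implementation to return, which is what the import changes in the next hunk suggest):

	package org.eclipse.rdf4j.repository.sail.memory;

	import org.eclipse.rdf4j.repository.sail.EvaluationStrategyTest;
	import org.eclipse.rdf4j.sail.base.config.BaseSailConfig;
	import org.eclipse.rdf4j.sail.memory.config.MemoryStoreConfig;

	public class MemoryEvaluationStrategyTest extends EvaluationStrategyTest {

		@Override
		protected BaseSailConfig getBaseSailConfig() {
			// in-memory, non-persistent store configuration
			return new MemoryStoreConfig();
		}
	}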
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/MemoryEvaluationStrategyTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/MemoryEvaluationStrategyTest.java
index 998595a41..7f95e6626 100644
--- a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/MemoryEvaluationStrategyTest.java
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/MemoryEvaluationStrategyTest.java
@@ -7,7 +7,7 @@
*******************************************************************************/
package org.eclipse.rdf4j.repository.sail.memory;
-import org.eclipse.rdf4j.repository.EvaluationStrategyTest;
+import org.eclipse.rdf4j.repository.sail.EvaluationStrategyTest;
import org.eclipse.rdf4j.sail.base.config.BaseSailConfig;
import org.eclipse.rdf4j.sail.memory.config.MemoryStoreConfig;
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/MemoryOptimisticIsolationTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/MemoryOptimisticIsolationTest.java
index 44822eebb..a3a6aa336 100644
--- a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/MemoryOptimisticIsolationTest.java
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/MemoryOptimisticIsolationTest.java
@@ -8,6 +8,9 @@
package org.eclipse.rdf4j.repository.sail.memory;
import org.eclipse.rdf4j.repository.OptimisticIsolationTest;
+import org.eclipse.rdf4j.repository.config.RepositoryImplConfig;
+import org.eclipse.rdf4j.repository.sail.config.SailRepositoryConfig;
+import org.eclipse.rdf4j.repository.sail.config.SailRepositoryFactory;
import org.eclipse.rdf4j.sail.memory.config.MemoryStoreFactory;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -19,13 +22,18 @@ public static void setUpClass()
throws Exception
{
System.setProperty("org.eclipse.rdf4j.repository.debug", "true");
- setSailFactory(new MemoryStoreFactory());
+ setRepositoryFactory(new SailRepositoryFactory() {
+ @Override
+ public RepositoryImplConfig getConfig() {
+ return new SailRepositoryConfig(new MemoryStoreFactory().getConfig());
+ }
+ });
}
@AfterClass
public static void tearDown()
throws Exception
{
- setSailFactory(null);
+ setRepositoryFactory(null);
}
}
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SpinMemoryRepositoryConnectionTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SpinMemoryRepositoryConnectionTest.java
index d1ef11eef..7fa150c29 100644
--- a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SpinMemoryRepositoryConnectionTest.java
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SpinMemoryRepositoryConnectionTest.java
@@ -35,30 +35,4 @@ protected Repository createRepository()
return new SailRepository(new SpinSail(new MemoryStore()));
}
- @Ignore
- @Test
- @Override
- public void testDefaultContext()
- throws Exception
- {
- // ignore
- }
-
- @Ignore
- @Test
- @Override
- public void testDefaultInsertContext()
- throws Exception
- {
- // ignore
- }
-
- @Ignore
- @Test
- @Override
- public void testExclusiveNullContext()
- throws Exception
- {
- // ignore
- }
}
\ No newline at end of file
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SpinRDFSMemoryRepositoryConnectionTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SpinRDFSMemoryRepositoryConnectionTest.java
index b4e01669d..e85386d45 100644
--- a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SpinRDFSMemoryRepositoryConnectionTest.java
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SpinRDFSMemoryRepositoryConnectionTest.java
@@ -37,31 +37,4 @@ protected Repository createRepository()
return new SailRepository(
new SpinSail(new ForwardChainingRDFSInferencer(new DedupingInferencer(new MemoryStore()))));
}
-
- @Ignore
- @Test
- @Override
- public void testDefaultContext()
- throws Exception
- {
- // ignore
- }
-
- @Ignore
- @Test
- @Override
- public void testDefaultInsertContext()
- throws Exception
- {
- // ignore
- }
-
- @Ignore
- @Test
- @Override
- public void testExclusiveNullContext()
- throws Exception
- {
- // ignore
- }
}
\ No newline at end of file
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/nativerdf/NativeEvaluationStrategyTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/nativerdf/NativeEvaluationStrategyTest.java
index 16d639a5d..3b635bfff 100644
--- a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/nativerdf/NativeEvaluationStrategyTest.java
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/nativerdf/NativeEvaluationStrategyTest.java
@@ -7,7 +7,7 @@
*******************************************************************************/
package org.eclipse.rdf4j.repository.sail.nativerdf;
-import org.eclipse.rdf4j.repository.EvaluationStrategyTest;
+import org.eclipse.rdf4j.repository.sail.EvaluationStrategyTest;
import org.eclipse.rdf4j.sail.base.config.BaseSailConfig;
import org.eclipse.rdf4j.sail.nativerdf.config.NativeStoreConfig;
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/nativerdf/NativeOptimisticIsolationTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/nativerdf/NativeOptimisticIsolationTest.java
index 0c81ce7c4..1a947ada3 100644
--- a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/nativerdf/NativeOptimisticIsolationTest.java
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/nativerdf/NativeOptimisticIsolationTest.java
@@ -8,6 +8,9 @@
package org.eclipse.rdf4j.repository.sail.nativerdf;
import org.eclipse.rdf4j.repository.OptimisticIsolationTest;
+import org.eclipse.rdf4j.repository.config.RepositoryImplConfig;
+import org.eclipse.rdf4j.repository.sail.config.SailRepositoryConfig;
+import org.eclipse.rdf4j.repository.sail.config.SailRepositoryFactory;
import org.eclipse.rdf4j.sail.nativerdf.config.NativeStoreFactory;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -18,13 +21,18 @@ public class NativeOptimisticIsolationTest extends OptimisticIsolationTest {
public static void setUpClass()
throws Exception
{
- setSailFactory(new NativeStoreFactory());
+ setRepositoryFactory(new SailRepositoryFactory() {
+ @Override
+ public RepositoryImplConfig getConfig() {
+ return new SailRepositoryConfig(new NativeStoreFactory().getConfig());
+ }
+ });
}
@AfterClass
public static void tearDown()
throws Exception
{
- setSailFactory(null);
+ setRepositoryFactory(null);
}
}
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/sail/CustomGraphQueryInferencerTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/sail/CustomGraphQueryInferencerTest.java
new file mode 100644
index 000000000..a82a10b4c
--- /dev/null
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/sail/CustomGraphQueryInferencerTest.java
@@ -0,0 +1,187 @@
+/*******************************************************************************
+ * Copyright (c) 2015 Eclipse RDF4J contributors, Aduna, and others.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Distribution License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *******************************************************************************/
+package org.eclipse.rdf4j.sail;
+
+import static org.assertj.core.api.Assertions.assertThat;
+
+import java.io.IOException;
+import java.io.StringReader;
+import java.util.Arrays;
+import java.util.Collection;
+
+import org.eclipse.rdf4j.common.io.ResourceUtil;
+import org.eclipse.rdf4j.common.iteration.Iterations;
+import org.eclipse.rdf4j.model.IRI;
+import org.eclipse.rdf4j.model.Value;
+import org.eclipse.rdf4j.model.ValueFactory;
+import org.eclipse.rdf4j.query.MalformedQueryException;
+import org.eclipse.rdf4j.query.QueryLanguage;
+import org.eclipse.rdf4j.query.UnsupportedQueryLanguageException;
+import org.eclipse.rdf4j.query.UpdateExecutionException;
+import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.RepositoryConnection;
+import org.eclipse.rdf4j.repository.RepositoryException;
+import org.eclipse.rdf4j.repository.sail.SailRepository;
+import org.eclipse.rdf4j.rio.RDFFormat;
+import org.eclipse.rdf4j.rio.RDFParseException;
+import org.eclipse.rdf4j.sail.inferencer.fc.CustomGraphQueryInferencer;
+import org.junit.BeforeClass;
+import org.junit.Test;
+import org.junit.runner.RunWith;
+import org.junit.runners.Parameterized;
+import org.junit.runners.Parameterized.Parameters;
+
+@RunWith(Parameterized.class)
+public abstract class CustomGraphQueryInferencerTest {
+
+ @BeforeClass
+ public static void setUpClass()
+ throws Exception
+ {
+ System.setProperty("org.eclipse.rdf4j.repository.debug", "true");
+ }
+
+ protected static class Expectation {
+
+ private final int initialCount, countAfterRemove, subjCount, predCount, objCount;
+
+ public Expectation(int initialCount, int countAfterRemove, int subjCount, int predCount,
+ int objCount)
+ {
+ this.initialCount = initialCount;
+ this.countAfterRemove = countAfterRemove;
+ this.subjCount = subjCount;
+ this.predCount = predCount;
+ this.objCount = objCount;
+ }
+ }
+
+ private static final String TEST_DIR_PREFIX = "/testcases/custom-query-inferencing/";
+
+ private static final String BASE = "http://foo.org/bar#";
+
+ private static final String PREDICATE = "predicate";
+
+ @Parameters(name = "{0}")
+ public static final Collection
*
* This reasoner is not a rule based reasoner and will be up to 80x faster than the
- * ForwardChainingRDFSInferencer, as well as being more complete.
+ * {@link ForwardChainingRDFSInferencer}, as well as being more complete.
*
*
* The sail puts no limitations on isolation level for read transactions, however all write/delete/update
@@ -100,6 +100,14 @@ public class SchemaCachingRDFSInferencer extends NotifyingSailWrapper {
// THIS BEHAVIOUR WILL BE SWITCHED ON THE NEXT MAJOR RELEASE
private boolean addInferredStatementsToDefaultContext = true;
+ /**
+ * Instantiate a new SchemaCachingRDFSInferencer
+ */
+ public SchemaCachingRDFSInferencer() {
+ super();
+ schema = null;
+ }
+
/**
* Instantiate a SchemaCachingRDFSInferencer.
*
diff --git a/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/config/SchemaCachingRDFSInferencerConfig.java b/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/config/SchemaCachingRDFSInferencerConfig.java
new file mode 100644
index 000000000..5e0bbf20c
--- /dev/null
+++ b/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/config/SchemaCachingRDFSInferencerConfig.java
@@ -0,0 +1,28 @@
+/*******************************************************************************
+ * Copyright (c) 2019 Eclipse RDF4J contributors.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Distribution License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *******************************************************************************/
+package org.eclipse.rdf4j.sail.inferencer.fc.config;
+
+import org.eclipse.rdf4j.sail.config.AbstractDelegatingSailImplConfig;
+import org.eclipse.rdf4j.sail.config.SailImplConfig;
+import org.eclipse.rdf4j.sail.inferencer.fc.SchemaCachingRDFSInferencer;
+
+/**
+ * {@link SailImplConfig} for the {@link SchemaCachingRDFSInferencer}
+ *
+ * @author Jeen Broekstra
+ */
+public class SchemaCachingRDFSInferencerConfig extends AbstractDelegatingSailImplConfig {
+
+ public SchemaCachingRDFSInferencerConfig() {
+ super(SchemaCachingRDFSInferencerFactory.SAIL_TYPE);
+ }
+
+ public SchemaCachingRDFSInferencerConfig(SailImplConfig delegate) {
+ super(SchemaCachingRDFSInferencerFactory.SAIL_TYPE, delegate);
+ }
+}
diff --git a/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/config/SchemaCachingRDFSInferencerFactory.java b/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/config/SchemaCachingRDFSInferencerFactory.java
new file mode 100644
index 000000000..6e6c7af74
--- /dev/null
+++ b/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/config/SchemaCachingRDFSInferencerFactory.java
@@ -0,0 +1,54 @@
+/*******************************************************************************
+ * Copyright (c) 2019 Eclipse RDF4J contributors.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Distribution License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *******************************************************************************/
+package org.eclipse.rdf4j.sail.inferencer.fc.config;
+
+import org.eclipse.rdf4j.sail.Sail;
+import org.eclipse.rdf4j.sail.config.SailConfigException;
+import org.eclipse.rdf4j.sail.config.SailFactory;
+import org.eclipse.rdf4j.sail.config.SailImplConfig;
+import org.eclipse.rdf4j.sail.inferencer.fc.ForwardChainingRDFSInferencer;
+import org.eclipse.rdf4j.sail.inferencer.fc.SchemaCachingRDFSInferencer;
+
+/**
+ * A {@link SailFactory} that creates {@link SchemaCachingRDFSInferencer}s based on RDF configuration data.
+ *
+ * @author Jeen Broekstra
+ */
+public class SchemaCachingRDFSInferencerFactory implements SailFactory {
+
+ /**
+ * The type of repositories that are created by this factory.
+ *
+ * @see SailFactory#getSailType()
+ */
+ public static final String SAIL_TYPE = "rdf4j:SchemaCachingRDFSInferencer";
+
+ /**
+ * Returns the Sail's type: rdf4j:SchemaCachingRDFSInferencer.
+ */
+ @Override
+ public String getSailType() {
+ return SAIL_TYPE;
+ }
+
+ @Override
+ public SailImplConfig getConfig() {
+ return new ForwardChainingRDFSInferencerConfig();
+ }
+
+ @Override
+ public Sail getSail(SailImplConfig config)
+ throws SailConfigException
+ {
+ if (!SAIL_TYPE.equals(config.getType())) {
+ throw new SailConfigException("Invalid Sail type: " + config.getType());
+ }
+
+ return new SchemaCachingRDFSInferencer();
+ }
+}
diff --git a/inferencer/src/main/resources/META-INF/services/org.eclipse.rdf4j.sail.config.SailFactory b/inferencer/src/main/resources/META-INF/services/org.eclipse.rdf4j.sail.config.SailFactory
index 56e9d202d..641aaa97e 100644
--- a/inferencer/src/main/resources/META-INF/services/org.eclipse.rdf4j.sail.config.SailFactory
+++ b/inferencer/src/main/resources/META-INF/services/org.eclipse.rdf4j.sail.config.SailFactory
@@ -1,3 +1,4 @@
+org.eclipse.rdf4j.sail.inferencer.fc.config.SchemaCachingRDFSInferencerFactory
org.eclipse.rdf4j.sail.inferencer.fc.config.ForwardChainingRDFSInferencerFactory
org.eclipse.rdf4j.sail.inferencer.fc.config.DirectTypeHierarchyInferencerFactory
org.eclipse.rdf4j.sail.inferencer.fc.config.CustomGraphQueryInferencerFactory
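With the factory registered via the service loader, the new sail type can also be assembled programmatically from config objects. A rough equivalent of an RDF-based configuration (a sketch; the MemoryStoreConfig delegate, the repository id and the already-initialised RepositoryManager variable "manager" are illustrative assumptions):

	// sketch: wrap a persistent memory store config in the new inferencer config
	SailImplConfig inferencerConfig =
			new SchemaCachingRDFSInferencerConfig(new MemoryStoreConfig(true));
	RepositoryImplConfig repoConfig = new SailRepositoryConfig(inferencerConfig);
	manager.addRepositoryConfig(new RepositoryConfig("inferencing-repo", repoConfig));
	Repository repo = manager.getRepository("inferencing-repo");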
From 928be956c9c4680e4ef1a323b55c82822ec10866 Mon Sep 17 00:00:00 2001
From: Jeen Broekstra
Date: Sat, 5 Jan 2019 16:29:01 +1100
Subject: [PATCH 20/54] use correct config class
Signed-off-by: Jeen Broekstra
---
.../fc/config/SchemaCachingRDFSInferencerFactory.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/config/SchemaCachingRDFSInferencerFactory.java b/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/config/SchemaCachingRDFSInferencerFactory.java
index 6e6c7af74..0d08a7985 100644
--- a/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/config/SchemaCachingRDFSInferencerFactory.java
+++ b/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/config/SchemaCachingRDFSInferencerFactory.java
@@ -38,7 +38,7 @@ public String getSailType() {
@Override
public SailImplConfig getConfig() {
- return new ForwardChainingRDFSInferencerConfig();
+ return new SchemaCachingRDFSInferencerConfig();
}
@Override
From 3036fbef04ab3d4ceeaf70cbd43cbbf3bdd95373 Mon Sep 17 00:00:00 2001
From: Jeen Broekstra
Date: Sat, 5 Jan 2019 16:30:51 +1100
Subject: [PATCH 21/54] add persistence test and fix default condition under
which to test
---
...FSchemaMemoryRepositoryConnectionTest.java | 1 -
...chingRDFSInferencerMemInferencingTest.java | 36 ++++++++++++++++++-
2 files changed, 35 insertions(+), 2 deletions(-)
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SchemaCachingRDFSInferencerRDFSchemaMemoryRepositoryConnectionTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SchemaCachingRDFSInferencerRDFSchemaMemoryRepositoryConnectionTest.java
index 4ce43ace4..0cfad764f 100644
--- a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SchemaCachingRDFSInferencerRDFSchemaMemoryRepositoryConnectionTest.java
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SchemaCachingRDFSInferencerRDFSchemaMemoryRepositoryConnectionTest.java
@@ -38,7 +38,6 @@ public SchemaCachingRDFSInferencerRDFSchemaMemoryRepositoryConnectionTest(
@Override
protected Repository createRepository() {
SchemaCachingRDFSInferencer sail = new SchemaCachingRDFSInferencer(new MemoryStore(), true);
- sail.setAddInferredStatementsToDefaultContext(false);
return new SailRepository(sail);
}
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/sail/memory/SchemaCachingRDFSInferencerMemInferencingTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/sail/memory/SchemaCachingRDFSInferencerMemInferencingTest.java
index e6aa61bb5..cfbb746e7 100644
--- a/compliance/store/src/test/java/org/eclipse/rdf4j/sail/memory/SchemaCachingRDFSInferencerMemInferencingTest.java
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/sail/memory/SchemaCachingRDFSInferencerMemInferencingTest.java
@@ -10,8 +10,10 @@
import static junit.framework.TestCase.assertFalse;
import static junit.framework.TestCase.assertTrue;
+import java.io.File;
import java.lang.reflect.InvocationTargetException;
+import org.assertj.core.util.Files;
import org.eclipse.rdf4j.model.BNode;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.ValueFactory;
@@ -29,10 +31,42 @@ public class SchemaCachingRDFSInferencerMemInferencingTest extends InferencingTe
@Override
protected Repository createRepository() {
SchemaCachingRDFSInferencer sailStack = new SchemaCachingRDFSInferencer(new MemoryStore(), true);
- sailStack.setAddInferredStatementsToDefaultContext(false);
+ //sailStack.setAddInferredStatementsToDefaultContext(false);
return new SailRepository(sailStack);
}
+ @Test
+ public void testPersistence() {
+ File datadir = Files.newTemporaryFolder();
+
+ SchemaCachingRDFSInferencer sailStack = new SchemaCachingRDFSInferencer(new MemoryStore(datadir), true);
+ SailRepository repo = new SailRepository(sailStack);
+ repo.initialize();
+ ValueFactory vf = repo.getValueFactory();
+
+ IRI s1 = vf.createIRI("foo:s1");
+ IRI c2 = vf.createIRI("foo:c2");
+ IRI c1 = vf.createIRI("foo:c1");
+
+ try (RepositoryConnection conn = repo.getConnection()) {
+ conn.begin();
+ conn.add(s1, RDF.TYPE, c1);
+ conn.add(c1, RDFS.SUBCLASSOF, c2);
+ conn.commit();
+ assertTrue(conn.hasStatement(s1, RDF.TYPE, c2, true));
+ }
+ repo.shutDown();
+
+ // re-init
+// sailStack = new SchemaCachingRDFSInferencer(new MemoryStore(datadir), true);
+// repo = new SailRepository(sailStack);
+ repo.initialize();
+
+ try (RepositoryConnection conn = repo.getConnection()) {
+ assertTrue(conn.hasStatement(s1, RDF.TYPE, c2, true));
+ }
+ }
+
@Test
public void testBlankNodePredicateInference() {
Repository sailRepository = createRepository();
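In testPersistence above the block that would re-create the inferencer stack is commented out, so after shutDown() the test simply re-initializes the same objects. A check that actually re-opens the store from the same data directory (a sketch based on the commented-out lines) would read:

	repo.shutDown();

	// re-open a fresh inferencer stack on the same data directory
	sailStack = new SchemaCachingRDFSInferencer(new MemoryStore(datadir), true);
	repo = new SailRepository(sailStack);
	repo.initialize();

	try (RepositoryConnection conn = repo.getConnection()) {
		assertTrue(conn.hasStatement(s1, RDF.TYPE, c2, true));
	}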
From 19ce3287a3ab468a70f64f41e503bec0c766285d Mon Sep 17 00:00:00 2001
From: Håvard Ottestad
Date: Sun, 6 Jan 2019 15:17:30 +0100
Subject: [PATCH 22/54] eclipse/rdf4j#1234 fixed handling of updates
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
Signed-off-by: Håvard Ottestad
---
...FSchemaMemoryRepositoryConnectionTest.java | 138 +++++++++++++++---
...SchemaCachingRDFSInferencerConnection.java | 6 +
2 files changed, 127 insertions(+), 17 deletions(-)
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SchemaCachingRDFSInferencerRDFSchemaMemoryRepositoryConnectionTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SchemaCachingRDFSInferencerRDFSchemaMemoryRepositoryConnectionTest.java
index 0cfad764f..8caa4a4f9 100644
--- a/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SchemaCachingRDFSInferencerRDFSchemaMemoryRepositoryConnectionTest.java
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/repository/sail/memory/SchemaCachingRDFSInferencerRDFSchemaMemoryRepositoryConnectionTest.java
@@ -15,7 +15,10 @@
import org.eclipse.rdf4j.model.vocabulary.RDFS;
import org.eclipse.rdf4j.repository.RDFSchemaRepositoryConnectionTest;
import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.RepositoryResult;
import org.eclipse.rdf4j.repository.sail.SailRepository;
+import org.eclipse.rdf4j.repository.sail.SailRepositoryConnection;
+import org.eclipse.rdf4j.sail.inferencer.fc.ForwardChainingRDFSInferencer;
import org.eclipse.rdf4j.sail.inferencer.fc.SchemaCachingRDFSInferencer;
import org.eclipse.rdf4j.sail.memory.MemoryStore;
import org.junit.Ignore;
@@ -23,21 +26,24 @@
import java.util.stream.Stream;
+import static junit.framework.TestCase.assertTrue;
import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertThat;
public class SchemaCachingRDFSInferencerRDFSchemaMemoryRepositoryConnectionTest
- extends RDFSchemaRepositoryConnectionTest
-{
+ extends RDFSchemaRepositoryConnectionTest {
public SchemaCachingRDFSInferencerRDFSchemaMemoryRepositoryConnectionTest(
- IsolationLevel level)
- {
+ IsolationLevel level) {
super(level);
}
@Override
protected Repository createRepository() {
SchemaCachingRDFSInferencer sail = new SchemaCachingRDFSInferencer(new MemoryStore(), true);
+ sail.setAddInferredStatementsToDefaultContext(false);
+
return new SailRepository(sail);
}
@@ -46,8 +52,7 @@ protected Repository createRepository() {
@Test
@Ignore
public void testQueryDefaultGraph()
- throws Exception
- {
+ throws Exception {
// ignore
}
@@ -56,8 +61,7 @@ public void testQueryDefaultGraph()
@Test
@Ignore
public void testDeleteDefaultGraph()
- throws Exception
- {
+ throws Exception {
// ignore
}
@@ -65,8 +69,7 @@ public void testDeleteDefaultGraph()
@Test
@Ignore
public void testContextStatementsNotDuplicated()
- throws Exception
- {
+ throws Exception {
// ignore
}
@@ -74,15 +77,13 @@ public void testContextStatementsNotDuplicated()
@Test
@Ignore
public void testContextStatementsNotDuplicated2()
- throws Exception
- {
+ throws Exception {
// ignore
}
@Test
- public void testContextTbox()
- {
+ public void testContextTbox() {
// Man subClassOf Human g1
// Human subClassOf Animal g2
@@ -110,13 +111,13 @@ public void testContextTbox()
System.out.println("-----------");
- try (Stream stream = Iterations.stream(testCon.getStatements(man, RDFS.SUBCLASSOF, null,true))) {
+ try (Stream stream = Iterations.stream(testCon.getStatements(man, RDFS.SUBCLASSOF, null, true))) {
stream.forEach(System.out::println);
}
System.out.println("-----------");
- try (Stream stream = Iterations.stream(testCon.getStatements(bob, RDF.TYPE, null,true))) {
+ try (Stream stream = Iterations.stream(testCon.getStatements(bob, RDF.TYPE, null, true))) {
stream
- .peek(statement -> assertEquals(statement.getContext(), graph3))
+ .peek(statement -> assertEquals(graph3, statement.getContext()))
.forEach(System.out::println);
}
@@ -126,4 +127,107 @@ public void testContextTbox()
}
+ @Test
+ public void testUpdateInsertData() {
+
+ SailRepository sail = new SailRepository(new SchemaCachingRDFSInferencer(new MemoryStore()));
+ sail.initialize();
+
+
+ try (SailRepositoryConnection connection = sail.getConnection()) {
+
+ IRI foo_s1 = connection.getValueFactory().createIRI("foo:s1");
+ IRI foo_C2 = connection.getValueFactory().createIRI("foo:C2");
+
+
+ connection.begin();
+ connection.prepareUpdate("insert data { a . rdfs:subClassOf } ").execute();
+ connection.commit();
+
+ assertTrue(connection.hasStatement(foo_s1, RDF.TYPE, foo_C2, true));
+
+
+ }
+
+ }
+
+ @Test
+ public void testUpdateInsert() {
+
+ SailRepository sail = new SailRepository(new SchemaCachingRDFSInferencer(new MemoryStore()));
+ sail.initialize();
+
+
+ try (SailRepositoryConnection connection = sail.getConnection()) {
+
+ IRI foo_s1 = connection.getValueFactory().createIRI("foo:s1");
+ IRI foo_C2 = connection.getValueFactory().createIRI("foo:C2");
+
+ connection.begin();
+ connection.prepareUpdate("insert { a . rdfs:subClassOf } where {?a ?b ?c}").execute();
+ connection.commit();
+
+ assertTrue(connection.hasStatement(foo_s1, RDF.TYPE, foo_C2, true));
+
+ }
+
+ }
+
+ @Test
+ public void testInsert() {
+
+ SailRepository sail = new SailRepository(new SchemaCachingRDFSInferencer(new MemoryStore()));
+ sail.initialize();
+
+
+ try (SailRepositoryConnection connection = sail.getConnection()) {
+
+ IRI foo_s1 = connection.getValueFactory().createIRI("foo:s1");
+ IRI foo_C2 = connection.getValueFactory().createIRI("foo:C2");
+ IRI foo_C1 = connection.getValueFactory().createIRI("foo:C1");
+
+
+ connection.begin();
+ connection.add(foo_s1, RDF.TYPE, foo_C1);
+ connection.add(foo_C1, RDFS.SUBCLASSOF, foo_C2);
+ connection.commit();
+
+ assertTrue(connection.hasStatement(foo_s1, RDF.TYPE, foo_C2, true));
+
+
+ }
+
+ }
+
+ @Test
+ public void testUpdateRemove() {
+
+ SailRepository sail = new SailRepository(new SchemaCachingRDFSInferencer(new MemoryStore()));
+ sail.initialize();
+
+
+ try (SailRepositoryConnection connection = sail.getConnection()) {
+
+ IRI foo_s1 = connection.getValueFactory().createIRI("foo:s1");
+ IRI foo_C2 = connection.getValueFactory().createIRI("foo:C2");
+
+
+ connection.begin();
+ connection.prepareUpdate("insert data { a . rdfs:subClassOf } ").execute();
+ connection.commit();
+
+ assertTrue(connection.hasStatement(foo_s1, RDF.TYPE, foo_C2, true));
+
+ connection.begin();
+ connection.prepareUpdate("delete data { a . rdfs:subClassOf } ").execute();
+ connection.commit();
+
+ assertFalse(connection.hasStatement(foo_s1, RDF.TYPE, foo_C2, true));
+
+
+ }
+
+ }
+
+
}
diff --git a/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/SchemaCachingRDFSInferencerConnection.java b/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/SchemaCachingRDFSInferencerConnection.java
index 2b0562740..4fac78e5a 100644
--- a/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/SchemaCachingRDFSInferencerConnection.java
+++ b/inferencer/src/main/java/org/eclipse/rdf4j/sail/inferencer/fc/SchemaCachingRDFSInferencerConnection.java
@@ -22,6 +22,7 @@
import org.eclipse.rdf4j.sail.SailConnectionListener;
import org.eclipse.rdf4j.sail.SailException;
import org.eclipse.rdf4j.sail.UnknownSailTransactionStateException;
+import org.eclipse.rdf4j.sail.UpdateContext;
import org.eclipse.rdf4j.sail.inferencer.InferencerConnection;
import org.eclipse.rdf4j.sail.inferencer.InferencerConnectionWrapper;
import org.slf4j.Logger;
@@ -810,4 +811,9 @@ public void statementRemoved(Statement st) {
statementsRemoved = true;
}
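+ // Statements added via SPARQL UPDATE operations arrive through this UpdateContext variant.
+ // Route them through the inferencing addStatement as well, so that updates also produce
+ // inferred statements (eclipse/rdf4j#1234).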
+ @Override
+ public void addStatement(UpdateContext modify, Resource subj, IRI pred, Value obj, Resource... contexts) throws SailException {
+ addStatement(false, subj, pred, obj, contexts);
+ super.addStatement(modify, subj, pred, obj, contexts);
+ }
}
From 11bafb7dfc9ad8ba933f1e7fa068d64ebeb282ea Mon Sep 17 00:00:00 2001
From: Jeen Broekstra
Date: Mon, 7 Jan 2019 19:48:04 +1100
Subject: [PATCH 23/54] eclipse/rdf4j#1226 added isolation level compliance
tests for inferencer
Signed-off-by: Jeen Broekstra
---
.../rdf4j/sail/fc/InferredContextTest.java | 1 -
...chingRDFSInferencerIsolationLevelTest.java | 32 +++++++++++++++++++
2 files changed, 32 insertions(+), 1 deletion(-)
create mode 100644 compliance/store/src/test/java/org/eclipse/rdf4j/sail/fc/SchemaCachingRDFSInferencerIsolationLevelTest.java
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/sail/fc/InferredContextTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/sail/fc/InferredContextTest.java
index 2c0ca5fc0..b6db36c56 100644
--- a/compliance/store/src/test/java/org/eclipse/rdf4j/sail/fc/InferredContextTest.java
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/sail/fc/InferredContextTest.java
@@ -32,7 +32,6 @@ public void testInferrecContextNull() {
sail.initialize();
sail.setAddInferredStatementsToDefaultContext(true);
-
try (SchemaCachingRDFSInferencerConnection connection = sail.getConnection()) {
connection.begin();
connection.addStatement(bNode, RDF.TYPE, type, context);
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/sail/fc/SchemaCachingRDFSInferencerIsolationLevelTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/sail/fc/SchemaCachingRDFSInferencerIsolationLevelTest.java
new file mode 100644
index 000000000..f616ad54b
--- /dev/null
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/sail/fc/SchemaCachingRDFSInferencerIsolationLevelTest.java
@@ -0,0 +1,32 @@
+/*******************************************************************************
+ * Copyright (c) 2019 Eclipse RDF4J contributors.
+ * All rights reserved. This program and the accompanying materials
+ * are made available under the terms of the Eclipse Distribution License v1.0
+ * which accompanies this distribution, and is available at
+ * http://www.eclipse.org/org/documents/edl-v10.php.
+ *******************************************************************************/
+package org.eclipse.rdf4j.sail.fc;
+
+import org.eclipse.rdf4j.sail.Sail;
+import org.eclipse.rdf4j.sail.SailException;
+import org.eclipse.rdf4j.sail.SailIsolationLevelTest;
+import org.eclipse.rdf4j.sail.inferencer.fc.SchemaCachingRDFSInferencer;
+import org.eclipse.rdf4j.sail.memory.MemoryStore;
+
+/**
+ * An extension of {@link SailIsolationLevelTest} for testing the {@link SchemaCachingRDFSInferencer}.
+ */
+public class SchemaCachingRDFSInferencerIsolationLevelTest extends SailIsolationLevelTest {
+
+ /*---------*
+ * Methods *
+ *---------*/
+
+ @Override
+ protected Sail createSail()
+ throws SailException
+ {
+ // TODO we are testing the inferencer, not the store. We should use a mock here instead of a real memory store.
+ return new SchemaCachingRDFSInferencer(new MemoryStore());
+ }
+}
From dcd14e75b240cce575b5c0904d1e9f4070b04c69 Mon Sep 17 00:00:00 2001
From: Jeen Broekstra
Date: Sun, 13 Jan 2019 12:15:41 +1100
Subject: [PATCH 24/54] rename initialize() to init(), replace usage, clean up
tests
---
.../rdf4j/sail/memory/MemoryStoreTest.java | 2 +-
.../memory/PersistentMemoryStoreTest.java | 23 +++++--------------
.../nativerdf/NativeStoreContextTest.java | 2 +-
.../rdf4j/sail/nativerdf/NativeStoreTest.java | 4 ++--
.../rdf4j/sail/federation/Federation.java | 14 ++++++++---
.../rdf4j/repository/sail/SailRepository.java | 2 +-
.../java/org/eclipse/rdf4j/sail/Sail.java | 18 +++++++++++++++
.../rdf4j/sail/helpers/AbstractSail.java | 9 +++++++-
8 files changed, 48 insertions(+), 26 deletions(-)
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/sail/memory/MemoryStoreTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/sail/memory/MemoryStoreTest.java
index 8b1815d07..90cea23cd 100644
--- a/compliance/store/src/test/java/org/eclipse/rdf4j/sail/memory/MemoryStoreTest.java
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/sail/memory/MemoryStoreTest.java
@@ -26,7 +26,7 @@ protected NotifyingSail createSail()
throws SailException
{
NotifyingSail sail = new MemoryStore();
- sail.initialize();
+ sail.init();
return sail;
}
}
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/sail/memory/PersistentMemoryStoreTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/sail/memory/PersistentMemoryStoreTest.java
index a9f69be22..6edac4c5b 100644
--- a/compliance/store/src/test/java/org/eclipse/rdf4j/sail/memory/PersistentMemoryStoreTest.java
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/sail/memory/PersistentMemoryStoreTest.java
@@ -8,13 +8,13 @@
package org.eclipse.rdf4j.sail.memory;
-import java.io.File;
import java.io.IOException;
-import org.eclipse.rdf4j.common.io.FileUtil;
import org.eclipse.rdf4j.sail.NotifyingSail;
import org.eclipse.rdf4j.sail.RDFNotifyingStoreTest;
import org.eclipse.rdf4j.sail.SailException;
+import org.junit.Rule;
+import org.junit.rules.TemporaryFolder;
/**
* An extension of RDFStoreTest for testing the class
@@ -22,16 +22,16 @@
*/
public class PersistentMemoryStoreTest extends RDFNotifyingStoreTest {
- private volatile File dataDir;
+ @Rule
+ public TemporaryFolder tempDir = new TemporaryFolder();
@Override
protected NotifyingSail createSail()
throws SailException
{
try {
- dataDir = FileUtil.createTempDir(PersistentMemoryStoreTest.class.getSimpleName());
- NotifyingSail sail = new MemoryStore(dataDir);
- sail.initialize();
+ NotifyingSail sail = new MemoryStore(tempDir.newFolder(PersistentMemoryStoreTest.class.getSimpleName()));
+ sail.init();
return sail;
}
catch (IOException e) {
@@ -39,15 +39,4 @@ protected NotifyingSail createSail()
}
}
- @Override
- public void tearDown()
- throws Exception
- {
- try {
- super.tearDown();
- }
- finally {
- FileUtil.deleteDir(dataDir);
- }
- }
}
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/sail/nativerdf/NativeStoreContextTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/sail/nativerdf/NativeStoreContextTest.java
index 41c848a45..027a0b008 100644
--- a/compliance/store/src/test/java/org/eclipse/rdf4j/sail/nativerdf/NativeStoreContextTest.java
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/sail/nativerdf/NativeStoreContextTest.java
@@ -37,7 +37,7 @@ protected NotifyingSail createSail()
{
try {
NotifyingSail sail = new NativeStore(tempDir.newFolder("nativestore"), "spoc,posc");
- sail.initialize();
+ sail.init();
return sail;
}
catch (IOException e) {
diff --git a/compliance/store/src/test/java/org/eclipse/rdf4j/sail/nativerdf/NativeStoreTest.java b/compliance/store/src/test/java/org/eclipse/rdf4j/sail/nativerdf/NativeStoreTest.java
index f08b6bbd5..c00a61ae6 100644
--- a/compliance/store/src/test/java/org/eclipse/rdf4j/sail/nativerdf/NativeStoreTest.java
+++ b/compliance/store/src/test/java/org/eclipse/rdf4j/sail/nativerdf/NativeStoreTest.java
@@ -41,7 +41,7 @@ protected NotifyingSail createSail()
{
try {
NotifyingSail sail = new NativeStore(tempDir.newFolder("nativestore"), "spoc,posc");
- sail.initialize();
+ sail.init();
return sail;
}
catch (IOException e) {
@@ -61,7 +61,7 @@ public void testGetNamespacePersistence()
con.close();
sail.shutDown();
- sail.initialize();
+ sail.init();
con = sail.getConnection();
assertEquals(RDF.NAMESPACE, con.getNamespace("rdf"));
diff --git a/federation/src/main/java/org/eclipse/rdf4j/sail/federation/Federation.java b/federation/src/main/java/org/eclipse/rdf4j/sail/federation/Federation.java
index 4b0e298e2..2172b4994 100644
--- a/federation/src/main/java/org/eclipse/rdf4j/sail/federation/Federation.java
+++ b/federation/src/main/java/org/eclipse/rdf4j/sail/federation/Federation.java
@@ -254,20 +254,28 @@ public void setHttpClient(HttpClient client) {
}
}
- @Override
+ @Deprecated
public void initialize()
throws SailException
+ {
+ init();
+ }
+
+
+ @Override
+ public void init()
+ throws SailException
{
for (Repository member : members) {
try {
- member.initialize();
+ member.init();
}
catch (RepositoryException e) {
throw new SailException(e);
}
}
}
-
+
@Override
public void shutDown()
throws SailException
diff --git a/repository-sail/src/main/java/org/eclipse/rdf4j/repository/sail/SailRepository.java b/repository-sail/src/main/java/org/eclipse/rdf4j/repository/sail/SailRepository.java
index a558c84f3..00e8c39cc 100644
--- a/repository-sail/src/main/java/org/eclipse/rdf4j/repository/sail/SailRepository.java
+++ b/repository-sail/src/main/java/org/eclipse/rdf4j/repository/sail/SailRepository.java
@@ -150,7 +150,7 @@ protected void initializeInternal()
throws RepositoryException
{
try {
- sail.initialize();
+ sail.init();
}
catch (SailLockedException e) {
String l = e.getLockedBy();
diff --git a/sail-api/src/main/java/org/eclipse/rdf4j/sail/Sail.java b/sail-api/src/main/java/org/eclipse/rdf4j/sail/Sail.java
index 2da686e58..e22769faf 100644
--- a/sail-api/src/main/java/org/eclipse/rdf4j/sail/Sail.java
+++ b/sail-api/src/main/java/org/eclipse/rdf4j/sail/Sail.java
@@ -48,9 +48,27 @@ public interface Sail {
* If the Sail could not be initialized.
* @throws IllegalStateException
* If the Sail has already been initialized.
+ * @deprecated Use {@link #init()} instead.
*/
+ @Deprecated
void initialize()
throws SailException;
+
+ /**
+ * Initializes the Sail. Care should be taken that required initialization parameters have been set before
+ * this method is called. Please consult the specific Sail implementation for information about the
+ * relevant parameters.
+ *
+ * @throws SailException
+ * If the Sail could not be initialized.
+ * @throws IllegalStateException
+ * If the Sail has already been initialized.
+ *
+ * @since 2.5
+ */
+ default void init() throws SailException {
+ initialize();
+ }
/**
* Shuts down the Sail, giving it the opportunity to synchronize any stale data. Care should be taken that
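
For callers, the change above is a straight swap of entry points. The following is a minimal, hypothetical usage sketch (not part of this patch), assuming a plain MemoryStore as the Sail implementation:

import org.eclipse.rdf4j.sail.Sail;
import org.eclipse.rdf4j.sail.memory.MemoryStore;

public class InitMigrationSketch {
    public static void main(String[] args) {
        Sail sail = new MemoryStore(); // any Sail implementation works here
        sail.init();                   // replaces the now-deprecated sail.initialize()
        try {
            // ... use the sail ...
        }
        finally {
            sail.shutDown();
        }
    }
}
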
diff --git a/sail-api/src/main/java/org/eclipse/rdf4j/sail/helpers/AbstractSail.java b/sail-api/src/main/java/org/eclipse/rdf4j/sail/helpers/AbstractSail.java
index 597a22027..fe47111fd 100644
--- a/sail-api/src/main/java/org/eclipse/rdf4j/sail/helpers/AbstractSail.java
+++ b/sail-api/src/main/java/org/eclipse/rdf4j/sail/helpers/AbstractSail.java
@@ -173,8 +173,15 @@ protected boolean isInitialized() {
return initialized;
}
+
@Override
- public void initialize()
+ public void initialize() throws SailException
+ {
+ init();
+ }
+
+ @Override
+ public void init()
throws SailException
{
initializationLock.writeLock().lock();
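
Because AbstractSail.initialize() now simply forwards to init(), existing code that still calls the deprecated method keeps working. A small, hypothetical JUnit 4 sketch (not part of this patch series) illustrating that both entry points lead to the same initialization path:

import org.eclipse.rdf4j.sail.memory.MemoryStore;
import org.junit.Test;

public class InitDelegationSketch {

    @Test
    @SuppressWarnings("deprecation")
    public void deprecatedInitializeStillWorks() {
        MemoryStore store = new MemoryStore();
        store.initialize(); // deprecated entry point, delegates to init()
        store.shutDown();

        MemoryStore store2 = new MemoryStore();
        store2.init();      // preferred entry point
        store2.shutDown();
    }
}
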
From 81de2113e1285aa469cade203dcc6fe957bb6e83 Mon Sep 17 00:00:00 2001
From: Jeen Broekstra
Date: Sun, 13 Jan 2019 14:14:22 +1100
Subject: [PATCH 25/54] revert to use of deprecated Repository.initialize()
pending merge
---
.../main/java/org/eclipse/rdf4j/sail/federation/Federation.java | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/federation/src/main/java/org/eclipse/rdf4j/sail/federation/Federation.java b/federation/src/main/java/org/eclipse/rdf4j/sail/federation/Federation.java
index 2172b4994..ceb3c83b7 100644
--- a/federation/src/main/java/org/eclipse/rdf4j/sail/federation/Federation.java
+++ b/federation/src/main/java/org/eclipse/rdf4j/sail/federation/Federation.java
@@ -268,7 +268,7 @@ public void init()
{
for (Repository member : members) {
try {
- member.init();
+ member.initialize();
}
catch (RepositoryException e) {
throw new SailException(e);
From 65d1fd51ba4a2b1eb3e2fcd254a38211fa9ada71 Mon Sep 17 00:00:00 2001
From: Jeen Broekstra
Date: Sun, 13 Jan 2019 17:12:15 +1100
Subject: [PATCH 26/54] refactor to remove public use of Repository API in SAIL
constructor [WIP]
---
.../eclipse/rdf4j/sail/shacl/ShaclSail.java | 127 ++++++++++---
.../rdf4j/sail/shacl/ShaclSailConnection.java | 96 +++++++---
.../sail/shacl/ReduceNumberOfPlansTest.java | 104 +++++-----
.../eclipse/rdf4j/sail/shacl/ShaclTest.java | 89 ++++-----
.../eclipse/rdf4j/sail/shacl/TempTest.java | 66 +++----
.../sail/shacl/TrackAddedStatementsTest.java | 178 ++++++++----------
.../org/eclipse/rdf4j/sail/shacl/Utils.java | 66 +++++--
.../rdf4j/sail/shacl/VisulizerTest.java | 18 +-
.../benchmark/DatatypeBenchmarkEmpty.java | 4 +-
.../benchmark/DatatypeBenchmarkPrefilled.java | 4 +-
.../benchmark/MinCountBenchmarkEmpty.java | 4 +-
.../benchmark/MinCountBenchmarkPrefilled.java | 4 +-
.../MinCountPrefilledVsEmptyBenchmark.java | 16 +-
13 files changed, 451 insertions(+), 325 deletions(-)
diff --git a/shacl/src/main/java/org/eclipse/rdf4j/sail/shacl/ShaclSail.java b/shacl/src/main/java/org/eclipse/rdf4j/sail/shacl/ShaclSail.java
index 16083929a..de8e26216 100644
--- a/shacl/src/main/java/org/eclipse/rdf4j/sail/shacl/ShaclSail.java
+++ b/shacl/src/main/java/org/eclipse/rdf4j/sail/shacl/ShaclSail.java
@@ -8,65 +8,119 @@
package org.eclipse.rdf4j.sail.shacl;
-import org.apache.commons.io.FileUtils;
+import java.io.File;
+import java.io.IOException;
+import java.nio.file.Files;
+import java.util.List;
+
import org.apache.commons.io.IOUtils;
+import org.eclipse.rdf4j.model.IRI;
+import org.eclipse.rdf4j.model.Resource;
+import org.eclipse.rdf4j.model.Value;
+import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
+import org.eclipse.rdf4j.repository.Repository;
+import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.sail.SailRepository;
import org.eclipse.rdf4j.repository.sail.SailRepositoryConnection;
import org.eclipse.rdf4j.sail.NotifyingSail;
import org.eclipse.rdf4j.sail.NotifyingSailConnection;
+import org.eclipse.rdf4j.sail.Sail;
import org.eclipse.rdf4j.sail.SailException;
+import org.eclipse.rdf4j.sail.config.SailConfigException;
import org.eclipse.rdf4j.sail.helpers.NotifyingSailWrapper;
+import org.eclipse.rdf4j.sail.memory.MemoryStore;
import org.eclipse.rdf4j.sail.shacl.AST.NodeShape;
-import java.io.IOException;
-import java.util.List;
-
/**
+ * A {@link Sail} implementation that adds support for the Shapes Constraint Language (SHACL)
+ *
* @author Heshan Jayasinghe
+ * @see <a href="https://www.w3.org/TR/shacl/">SHACL W3C Recommendation</a>
*/
public class ShaclSail extends NotifyingSailWrapper {
+ /**
+ * The virtual context identifier for persisting the SHACL shapes information.
+ */
+ public final static IRI SHAPE_GRAPH = SimpleValueFactory.getInstance().createIRI("http://rdf4j.org/schema/schacl#ShapeGraph");
+
public List<NodeShape> nodeShapes;
+
boolean debugPrintPlans = false;
ShaclSailConfig config = new ShaclSailConfig();
+
private static String SH_OR_UPDATE_QUERY;
+
private static String SH_OR_NODE_SHAPE_UPDATE_QUERY;
+ /**
+ * An initialized {@link Repository} for storing/retrieving shapes data.
+ */
+ private SailRepository shapesRepo;
+
static {
try {
- SH_OR_UPDATE_QUERY = IOUtils.toString(ShaclSail.class.getClassLoader().getResourceAsStream("shacl-sparql-inference/sh_or.rq"), "UTF-8");
- SH_OR_NODE_SHAPE_UPDATE_QUERY = IOUtils.toString(ShaclSail.class.getClassLoader().getResourceAsStream("shacl-sparql-inference/sh_or_node_shape.rq"), "UTF-8");
- } catch (IOException e) {
+ SH_OR_UPDATE_QUERY = IOUtils.toString(
+ ShaclSail.class.getClassLoader().getResourceAsStream("shacl-sparql-inference/sh_or.rq"),
+ "UTF-8");
+ SH_OR_NODE_SHAPE_UPDATE_QUERY = IOUtils.toString(
+ ShaclSail.class.getClassLoader().getResourceAsStream(
+ "shacl-sparql-inference/sh_or_node_shape.rq"),
+ "UTF-8");
+ }
+ catch (IOException e) {
throw new IllegalStateException(e);
}
}
- public ShaclSail(NotifyingSail baseSail, SailRepository shaclSail) {
+ public ShaclSail(NotifyingSail baseSail) {
super(baseSail);
- try (SailRepositoryConnection shaclSailConnection = shaclSail.getConnection()) {
- runInferencingSparqlQueries(shaclSailConnection);
- nodeShapes = NodeShape.Factory.getShapes(shaclSailConnection);
+ String path = null;
+ if (baseSail.getDataDir() != null) {
+ path = baseSail.getDataDir().getPath();
+ }
+ else {
+ try {
+ path = Files.createTempDirectory("shacl-shapes").toString();
+ } catch (IOException e) {
+ throw new SailConfigException(e);
+ }
}
+ if (path.endsWith("/")) {
+ path = path.substring(0, path.length() -1);
+ }
+ path = path + "-shapes-graph/";
+
+ shapesRepo = new SailRepository(new MemoryStore(new File(path)));
+ shapesRepo.initialize();
}
- private void runInferencingSparqlQueries(SailRepositoryConnection shaclSailConnection) {
-
+ @Override
+ public void initialize() throws SailException {
+ super.initialize();
+ try (SailRepositoryConnection shapesRepoConnection = shapesRepo.getConnection()) {
+ runInferencingSparqlQueries(shapesRepoConnection);
+ nodeShapes = NodeShape.Factory.getShapes(shapesRepoConnection);
+ }
+ }
- long prevSize;
- long currentSize= shaclSailConnection.size();
- do {
- prevSize = currentSize;
- shaclSailConnection.prepareUpdate(SH_OR_NODE_SHAPE_UPDATE_QUERY).execute();
- shaclSailConnection.prepareUpdate(SH_OR_UPDATE_QUERY).execute();
- currentSize = shaclSailConnection.size();
- }while(prevSize != currentSize);
+ @Override
+ public void shutDown() throws SailException {
+ try {
+ shapesRepo.shutDown();
+ }
+ finally {
+ shapesRepo = null;
+ }
+ super.shutDown();
}
+
+
@Override
- public NotifyingSailConnection getConnection()
- throws SailException {
+ public NotifyingSailConnection getConnection() throws SailException {
return new ShaclSailConnection(this, super.getConnection(), super.getConnection());
}
@@ -85,6 +139,33 @@ public boolean isDebugPrintPlans() {
public void setDebugPrintPlans(boolean debugPrintPlans) {
this.debugPrintPlans = debugPrintPlans;
}
+
+ protected void addShapesStatement(Resource subj, IRI pred, Value obj) {
+ try (RepositoryConnection conn = shapesRepo.getConnection()) {
+ conn.add(subj, pred, obj);
+ }
+ }
+
+
+ protected void removeShapesStatements(Resource subj, IRI pred, Value obj) {
+ try (RepositoryConnection conn = shapesRepo.getConnection()) {
+ conn.remove(subj, pred, obj);
+ }
+ }
+
+ private void runInferencingSparqlQueries(SailRepositoryConnection shaclSailConnection) {
+
+ long prevSize;
+ long currentSize = shaclSailConnection.size();
+ do {
+ prevSize = currentSize;
+ shaclSailConnection.prepareUpdate(SH_OR_NODE_SHAPE_UPDATE_QUERY).execute();
+ shaclSailConnection.prepareUpdate(SH_OR_UPDATE_QUERY).execute();
+ currentSize = shaclSailConnection.size();
+ }
+ while (prevSize != currentSize);
+ }
+
}
class ShaclSailConfig {
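
With the Repository argument gone from the constructor, shapes data now reaches the ShaclSail through the SHAPE_GRAPH context, which ShaclSailConnection diverts to the internal shapes repository. A minimal, hypothetical sketch (the shape IRIs are illustrative and not part of this patch):

import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.model.vocabulary.RDF;
import org.eclipse.rdf4j.sail.NotifyingSailConnection;
import org.eclipse.rdf4j.sail.memory.MemoryStore;
import org.eclipse.rdf4j.sail.shacl.ShaclSail;

public class ShapeGraphSketch {
    public static void main(String[] args) {
        ValueFactory vf = SimpleValueFactory.getInstance();
        IRI personShape = vf.createIRI("http://example.com/ns#PersonShape"); // illustrative

        ShaclSail shaclSail = new ShaclSail(new MemoryStore());
        shaclSail.initialize();

        try (NotifyingSailConnection conn = shaclSail.getConnection()) {
            conn.begin();
            // statements in the SHAPE_GRAPH context are routed to the shapes repository
            conn.addStatement(personShape, RDF.TYPE,
                    vf.createIRI("http://www.w3.org/ns/shacl#NodeShape"),
                    ShaclSail.SHAPE_GRAPH);
            conn.commit();
        }
        shaclSail.shutDown();
    }
}
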
diff --git a/shacl/src/main/java/org/eclipse/rdf4j/sail/shacl/ShaclSailConnection.java b/shacl/src/main/java/org/eclipse/rdf4j/sail/shacl/ShaclSailConnection.java
index 219a0099b..f5ee0d97b 100644
--- a/shacl/src/main/java/org/eclipse/rdf4j/sail/shacl/ShaclSailConnection.java
+++ b/shacl/src/main/java/org/eclipse/rdf4j/sail/shacl/ShaclSailConnection.java
@@ -11,7 +11,10 @@
import org.eclipse.rdf4j.IsolationLevel;
import org.eclipse.rdf4j.IsolationLevels;
import org.eclipse.rdf4j.common.iteration.Iterations;
+import org.eclipse.rdf4j.model.IRI;
+import org.eclipse.rdf4j.model.Resource;
import org.eclipse.rdf4j.model.Statement;
+import org.eclipse.rdf4j.model.Value;
import org.eclipse.rdf4j.repository.Repository;
import org.eclipse.rdf4j.repository.RepositoryConnection;
import org.eclipse.rdf4j.repository.sail.SailRepository;
@@ -39,7 +42,9 @@ public class ShaclSailConnection extends NotifyingSailConnectionWrapper {
private final Logger logger = LoggerFactory.getLogger(getClass());
private NotifyingSailConnection previousStateConnection;
+
private Repository addedStatements;
+
private Repository removedStatements;
public final ShaclSail sail;
@@ -47,9 +52,12 @@ public class ShaclSailConnection extends NotifyingSailConnectionWrapper {
public Stats stats;
private HashSet<Statement> addedStatementsSet = new HashSet<>();
+
private HashSet<Statement> removedStatementsSet = new HashSet<>();
- ShaclSailConnection(ShaclSail sail, NotifyingSailConnection connection, NotifyingSailConnection previousStateConnection) {
+ ShaclSailConnection(ShaclSail sail, NotifyingSailConnection connection,
+ NotifyingSailConnection previousStateConnection)
+ {
super(connection);
this.previousStateConnection = previousStateConnection;
this.sail = sail;
@@ -58,23 +66,23 @@ public class ShaclSailConnection extends NotifyingSailConnectionWrapper {
addConnectionListener(new SailConnectionListener() {
- @Override
- public void statementAdded(Statement statement) {
- boolean add = addedStatementsSet.add(statement);
- if (!add) {
- removedStatementsSet.remove(statement);
- }
+ @Override
+ public void statementAdded(Statement statement) {
+ boolean add = addedStatementsSet.add(statement);
+ if (!add) {
+ removedStatementsSet.remove(statement);
+ }
- }
+ }
- @Override
- public void statementRemoved(Statement statement) {
- boolean add = removedStatementsSet.add(statement);
- if (!add) {
- addedStatementsSet.remove(statement);
- }
- }
- }
+ @Override
+ public void statementRemoved(Statement statement) {
+ boolean add = removedStatementsSet.add(statement);
+ if (!add) {
+ addedStatementsSet.remove(statement);
+ }
+ }
+ }
);
}
@@ -98,8 +106,7 @@ public void begin() throws SailException {
}
@Override
- public void begin(IsolationLevel level)
- throws SailException {
+ public void begin(IsolationLevel level) throws SailException {
assert addedStatements == null;
assert removedStatements == null;
@@ -107,7 +114,7 @@ public void begin(IsolationLevel level)
stats = new Stats();
// start two transactions; synchronize on the underlying sail so that we get two transactions in immediate succession
- synchronized (sail){
+ synchronized (sail) {
super.begin(level);
previousStateConnection.begin(IsolationLevels.SNAPSHOT);
}
@@ -123,8 +130,7 @@ private SailRepository getNewMemorySail() {
}
@Override
- public void commit()
- throws SailException {
+ public void commit() throws SailException {
synchronized (sail) {
try {
boolean valid = validate();
@@ -133,15 +139,39 @@ public void commit()
if (!valid) {
rollback();
throw new SailException("Failed SHACL validation");
- } else {
+ }
+ else {
super.commit();
}
- } finally {
+ }
+ finally {
cleanup();
}
}
}
+ @Override
+ public void addStatement(Resource subj, IRI pred, Value obj, Resource... contexts) throws SailException {
+ if (contexts.length == 1 && contexts[0].equals(ShaclSail.SHAPE_GRAPH)) {
+ sail.addShapesStatement(subj, pred, obj);
+ }
+ else {
+ super.addStatement(subj, pred, obj, contexts);
+ }
+ }
+
+ @Override
+ public void removeStatements(Resource subj, IRI pred, Value obj, Resource... contexts)
+ throws SailException
+ {
+ if (contexts.length == 1 && contexts[0].equals(ShaclSail.SHAPE_GRAPH)) {
+ sail.removeShapesStatements(subj, pred, obj);
+ }
+ else {
+ super.removeStatements(subj, pred, obj, contexts);
+ }
+ }
+
@Override
public void rollback() throws SailException {
synchronized (sail) {
@@ -166,7 +196,6 @@ private void cleanup() {
stats = null;
}
-
private boolean validate() {
if (!sail.config.validationEnabled) {
@@ -185,7 +214,13 @@ private boolean validate() {
boolean valid = collect.size() == 0;
if (!valid) {
- logger.warn("SHACL not valid. The following experimental debug results were produced: \n\tNodeShape: {} \n\t\t{}", nodeShape.toString(), String.join("\n\t\t", collect.stream().map(a -> a.toString()+" -cause-> "+a.getCause()).collect(Collectors.toList())));
+ logger.warn(
+ "SHACL not valid. The following experimental debug results were produced: \n\tNodeShape: {} \n\t\t{}",
+ nodeShape.toString(),
+ String.join("\n\t\t",
+ collect.stream().map(
+ a -> a.toString() + " -cause-> " + a.getCause()).collect(
+ Collectors.toList())));
}
allValid = allValid && valid;
}
@@ -200,20 +235,20 @@ void fillAddedAndRemovedStatementRepositories() {
addedStatements = getNewMemorySail();
removedStatements = getNewMemorySail();
-
addedStatementsSet.forEach(stats::added);
removedStatementsSet.forEach(stats::removed);
-
try (RepositoryConnection connection = addedStatements.getConnection()) {
connection.begin(IsolationLevels.NONE);
- addedStatementsSet.stream().filter(statement -> !removedStatementsSet.contains(statement)).forEach(connection::add);
+ addedStatementsSet.stream().filter(
+ statement -> !removedStatementsSet.contains(statement)).forEach(connection::add);
connection.commit();
}
try (RepositoryConnection connection = removedStatements.getConnection()) {
connection.begin(IsolationLevels.NONE);
- removedStatementsSet.stream().filter(statement -> !addedStatementsSet.contains(statement)).forEach(connection::add);
+ removedStatementsSet.stream().filter(
+ statement -> !addedStatementsSet.contains(statement)).forEach(connection::add);
connection.commit();
}
}
@@ -226,10 +261,10 @@ synchronized public void close() throws SailException {
super.close();
}
-
public class Stats {
boolean hasAdded;
+
boolean hasRemoved;
public void added(Statement statement) {
@@ -250,4 +285,3 @@ public boolean hasRemoved() {
}
}
}
-
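
On the connection side, commit() now validates the transaction and, when validation fails, rolls back and throws a SailException. A hypothetical caller-side sketch (the shapes loading and the violating data are elided placeholders):

import org.eclipse.rdf4j.sail.NotifyingSailConnection;
import org.eclipse.rdf4j.sail.SailException;
import org.eclipse.rdf4j.sail.memory.MemoryStore;
import org.eclipse.rdf4j.sail.shacl.ShaclSail;

public class ValidationFailureSketch {
    public static void main(String[] args) {
        ShaclSail shaclSail = new ShaclSail(new MemoryStore());
        shaclSail.initialize();
        // ... load shapes into ShaclSail.SHAPE_GRAPH as sketched above ...
        try (NotifyingSailConnection conn = shaclSail.getConnection()) {
            conn.begin();
            // ... add data statements that violate a loaded shape ...
            try {
                conn.commit();
            }
            catch (SailException e) {
                // the transaction has already been rolled back by commit()
                System.err.println("SHACL validation failed: " + e.getMessage());
            }
        }
        finally {
            shaclSail.shutDown();
        }
    }
}
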
diff --git a/shacl/src/test/java/org/eclipse/rdf4j/sail/shacl/ReduceNumberOfPlansTest.java b/shacl/src/test/java/org/eclipse/rdf4j/sail/shacl/ReduceNumberOfPlansTest.java
index 564cd476f..95215371d 100644
--- a/shacl/src/test/java/org/eclipse/rdf4j/sail/shacl/ReduceNumberOfPlansTest.java
+++ b/shacl/src/test/java/org/eclipse/rdf4j/sail/shacl/ReduceNumberOfPlansTest.java
@@ -8,114 +8,114 @@
package org.eclipse.rdf4j.sail.shacl;
+import static junit.framework.TestCase.assertEquals;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.stream.Collectors;
+
+import org.eclipse.rdf4j.RDF4JException;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.vocabulary.RDF;
-import org.eclipse.rdf4j.repository.sail.SailRepository;
-import org.eclipse.rdf4j.repository.sail.SailRepositoryConnection;
+import org.eclipse.rdf4j.rio.RDFParseException;
+import org.eclipse.rdf4j.rio.UnsupportedRDFormatException;
import org.eclipse.rdf4j.sail.memory.MemoryStore;
import org.eclipse.rdf4j.sail.shacl.planNodes.PlanNode;
import org.junit.Test;
-import java.util.List;
-import java.util.stream.Collectors;
-
-import static junit.framework.TestCase.assertEquals;
-
/**
* @author Håvard Ottestad
*/
public class ReduceNumberOfPlansTest {
@Test
- public void testAddingTypeStatement() {
- SailRepository shaclSail = new SailRepository(new ShaclSail(new MemoryStore(), Utils.getSailRepository("reduceNumberOfPlansTest/shacl.ttl")));
+ public void testAddingTypeStatement()
+ throws RDFParseException, UnsupportedRDFormatException, IOException
+ {
+ ShaclSail shaclSail = new ShaclSail(new MemoryStore());
shaclSail.initialize();
+ Utils.loadShapeData(shaclSail, "reduceNumberOfPlansTest/shacl.ttl");
-
- try (SailRepositoryConnection connection = shaclSail.getConnection()) {
-
+ try (ShaclSailConnection connection = (ShaclSailConnection)shaclSail.getConnection()) {
connection.begin();
- ShaclSailConnection sailConnection = (ShaclSailConnection) connection.getSailConnection();
-
- sailConnection.fillAddedAndRemovedStatementRepositories();
- List<PlanNode> collect = sailConnection.sail.nodeShapes.stream().flatMap(shape -> shape.generatePlans(sailConnection, shape, false).stream()).collect(Collectors.toList());
+ connection.fillAddedAndRemovedStatementRepositories();
+ List<PlanNode> collect = shaclSail.nodeShapes.stream().flatMap(
+ shape -> shape.generatePlans(connection, shape, false).stream()).collect(
+ Collectors.toList());
assertEquals(0, collect.size());
IRI person1 = Utils.Ex.createIri();
- connection.add(person1, RDF.TYPE, Utils.Ex.Person);
- sailConnection.fillAddedAndRemovedStatementRepositories();
+ connection.addStatement(person1, RDF.TYPE, Utils.Ex.Person);
+ connection.fillAddedAndRemovedStatementRepositories();
- List<PlanNode> collect2 = sailConnection.sail.nodeShapes.stream().flatMap(shape -> shape.generatePlans(sailConnection, shape, false).stream()).collect(Collectors.toList());
+ List<PlanNode> collect2 = shaclSail.nodeShapes.stream().flatMap(
+ shape -> shape.generatePlans(connection, shape, false).stream()).collect(
+ Collectors.toList());
assertEquals(2, collect2.size());
- ValueFactory vf = connection.getValueFactory();
- connection.add(person1, Utils.Ex.ssn, vf.createLiteral("a"));
- connection.add(person1, Utils.Ex.ssn, vf.createLiteral("b"));
- connection.add(person1, Utils.Ex.name, vf.createLiteral("c"));
-
+ ValueFactory vf = shaclSail.getValueFactory();
+ connection.addStatement(person1, Utils.Ex.ssn, vf.createLiteral("a"));
+ connection.addStatement(person1, Utils.Ex.ssn, vf.createLiteral("b"));
+ connection.addStatement(person1, Utils.Ex.name, vf.createLiteral("c"));
connection.commit();
-
}
}
@Test
- public void testRemovingPredicate() {
- SailRepository shaclSail = new SailRepository(new ShaclSail(new MemoryStore(), Utils.getSailRepository("reduceNumberOfPlansTest/shacl.ttl")));
+ public void testRemovingPredicate() throws RDF4JException, UnsupportedRDFormatException, IOException {
+ ShaclSail shaclSail = new ShaclSail(new MemoryStore());
shaclSail.initialize();
+ Utils.loadShapeData(shaclSail, "reduceNumberOfPlansTest/shacl.ttl");
-
- try (SailRepositoryConnection connection = shaclSail.getConnection()) {
+ try (ShaclSailConnection connection = (ShaclSailConnection)shaclSail.getConnection()) {
connection.begin();
- ShaclSailConnection sailConnection = (ShaclSailConnection) connection.getSailConnection();
-
IRI person1 = Utils.Ex.createIri();
- ValueFactory vf = connection.getValueFactory();
- connection.add(person1, RDF.TYPE, Utils.Ex.Person);
- connection.add(person1, Utils.Ex.ssn, vf.createLiteral("a"));
- connection.add(person1, Utils.Ex.ssn, vf.createLiteral("b"));
- connection.add(person1, Utils.Ex.name, vf.createLiteral("c"));
-
-
+ ValueFactory vf = shaclSail.getValueFactory();
+ connection.addStatement(person1, RDF.TYPE, Utils.Ex.Person);
+ connection.addStatement(person1, Utils.Ex.ssn, vf.createLiteral("a"));
+ connection.addStatement(person1, Utils.Ex.ssn, vf.createLiteral("b"));
+ connection.addStatement(person1, Utils.Ex.name, vf.createLiteral("c"));
connection.commit();
-
connection.begin();
+ connection.removeStatements(person1, Utils.Ex.ssn, vf.createLiteral("b"));
- connection.remove(person1, Utils.Ex.ssn, vf.createLiteral("b"));
-
- sailConnection.fillAddedAndRemovedStatementRepositories();
+ connection.fillAddedAndRemovedStatementRepositories();
- List<PlanNode> collect1 = sailConnection.sail.nodeShapes.stream().flatMap(shape -> shape.generatePlans(sailConnection, shape, false).stream()).collect(Collectors.toList());
+ List<PlanNode> collect1 = shaclSail.nodeShapes.stream().flatMap(
+ shape -> shape.generatePlans(connection, shape, false).stream()).collect(
+ Collectors.toList());
assertEquals(1, collect1.size());
- connection.remove(person1, Utils.Ex.ssn, vf.createLiteral("a"));
+ connection.removeStatements(person1, Utils.Ex.ssn, vf.createLiteral("a"));
+ connection.fillAddedAndRemovedStatementRepositories();
- sailConnection.fillAddedAndRemovedStatementRepositories();
-
- List<PlanNode> collect2 = sailConnection.sail.nodeShapes.stream().flatMap(shape -> shape.generatePlans(sailConnection, shape, false).stream()).collect(Collectors.toList());
+ List<PlanNode> collect2 = shaclSail.nodeShapes.stream().flatMap(
+ shape -> shape.generatePlans(connection, shape, false).stream()).collect(
+ Collectors.toList());
assertEquals(1, collect2.size());
- connection.remove(person1, Utils.Ex.name, vf.createLiteral("c"));
- sailConnection.fillAddedAndRemovedStatementRepositories();
+ connection.removeStatements(person1, Utils.Ex.name, vf.createLiteral("c"));
+ connection.fillAddedAndRemovedStatementRepositories();
- List<PlanNode> collect3 = sailConnection.sail.nodeShapes.stream().flatMap(shape -> shape.generatePlans(sailConnection, shape, false).stream()).collect(Collectors.toList());
+ List<PlanNode> collect3 = shaclSail.nodeShapes.stream().flatMap(
+ shape -> shape.generatePlans(connection, shape, false).stream()).collect(
+ Collectors.toList());
assertEquals(2, collect3.size());
-
connection.rollback();
-
}
}
diff --git a/shacl/src/test/java/org/eclipse/rdf4j/sail/shacl/ShaclTest.java b/shacl/src/test/java/org/eclipse/rdf4j/sail/shacl/ShaclTest.java
index b7b830779..3ae7f1e88 100644
--- a/shacl/src/test/java/org/eclipse/rdf4j/sail/shacl/ShaclTest.java
+++ b/shacl/src/test/java/org/eclipse/rdf4j/sail/shacl/ShaclTest.java
@@ -35,23 +35,18 @@
@RunWith(Parameterized.class)
public class ShaclTest {
- static final List<String> testCasePaths = Arrays.asList(
- "test-cases/datatype/simple",
- "test-cases/minCount/simple",
- "test-cases/maxCount/simple",
- "test-cases/or/inheritance",
- "test-cases/or/inheritance-deep",
- "test-cases/or/inheritance-deep-minCountMaxCount",
- "test-cases/or/inheritanceNodeShape",
- "test-cases/or/datatype",
- "test-cases/or/minCountMaxCount",
- "test-cases/or/maxCount",
- "test-cases/or/minCount"
+ static final List<String> testCasePaths = Arrays.asList("test-cases/datatype/simple",
+ "test-cases/minCount/simple", "test-cases/maxCount/simple", "test-cases/or/inheritance",
+ "test-cases/or/inheritance-deep", "test-cases/or/inheritance-deep-minCountMaxCount",
+ "test-cases/or/inheritanceNodeShape", "test-cases/or/datatype", "test-cases/or/minCountMaxCount",
+ "test-cases/or/maxCount", "test-cases/or/minCount"
);
private final String testCasePath;
+
private final String path;
+
private final ExpectedResult expectedResult;
public ShaclTest(String testCasePath, String path, ExpectedResult expectedResult) {
@@ -64,8 +59,6 @@ public ShaclTest(String testCasePath, String path, ExpectedResult expectedResult
LoggingNode.loggingEnabled = true;
}
-
-
@Parameterized.Parameters(name = "{2} - {1}")
public static Collection