Implements a crate for the testsuite, adds parser tests and a better isomorphism algorithm

pull/41/head
Tpt 4 years ago
parent 69f94777b6
commit c3ae01e701
  1. 4
      .gitmodules
  2. 5
      Cargo.toml
  3. 5
      lib/Cargo.toml
  4. 336
      lib/benches/sparql_query.rs
  5. 12
      lib/src/model/named_node.rs
  6. 7
      lib/src/model/xsd/decimal.rs
  7. 434
      lib/src/store/memory.rs
  8. 1
      lib/tests/rdf-tests
  9. 755
      lib/tests/sparql_test_cases.rs
  10. 24
      testsuite/Cargo.toml
  11. 39
      testsuite/benches/sparql_query.rs
  12. 2
      testsuite/oxigraph-tests/sparql/group_concat_with_null.rq
  13. 2
      testsuite/oxigraph-tests/sparql/group_concat_with_null.srx
  14. 2
      testsuite/oxigraph-tests/sparql/group_concat_with_null.ttl
  15. 0
      testsuite/oxigraph-tests/sparql/manifest.ttl
  16. 1
      testsuite/rdf-tests
  17. 82
      testsuite/src/files.rs
  18. 18
      testsuite/src/lib.rs
  19. 276
      testsuite/src/manifest.rs
  20. 80
      testsuite/src/parser_evaluator.rs
  21. 10
      testsuite/src/report.rs
  22. 484
      testsuite/src/sparql_evaluator.rs
  23. 77
      testsuite/src/vocab.rs
  24. 25
      testsuite/tests/oxigraph.rs
  25. 43
      testsuite/tests/parser.rs
  26. 125
      testsuite/tests/sparql.rs

4
.gitmodules vendored

@ -1,5 +1,5 @@
[submodule "lib/tests/rdf-tests"] [submodule "testsuite/rdf-tests"]
path = lib/tests/rdf-tests path = testsuite/rdf-tests
url = https://github.com/w3c/rdf-tests.git url = https://github.com/w3c/rdf-tests.git
[submodule "bench/bsbm-tools"] [submodule "bench/bsbm-tools"]
path = bench/bsbm-tools path = bench/bsbm-tools

@ -1,9 +1,10 @@
[workspace] [workspace]
members = [ members = [
"js",
"lib", "lib",
"server", "server",
"wikibase", "testsuite",
"js" "wikibase"
] ]
[profile.release] [profile.release]

@ -43,7 +43,6 @@ getrandom = {version="0.1", features=["wasm-bindgen"]}
[dev-dependencies] [dev-dependencies]
rayon = "1" rayon = "1"
criterion = "0.3" criterion = "0.3"
anyhow = "1"
[target.'cfg(target_arch = "wasm32")'.dev-dependencies] [target.'cfg(target_arch = "wasm32")'.dev-dependencies]
wasm-bindgen-test = "0.3" wasm-bindgen-test = "0.3"
@ -52,7 +51,3 @@ wasm-bindgen-test = "0.3"
name = "store" name = "store"
harness = false harness = false
required-features = ["sled", "rocksdb"] required-features = ["sled", "rocksdb"]
[[bench]]
name = "sparql_query"
harness = false

@ -1,336 +0,0 @@
use criterion::{criterion_group, criterion_main, Criterion};
use oxigraph::model::vocab::rdf;
use oxigraph::model::*;
use oxigraph::sparql::*;
use oxigraph::*;
use std::fs::File;
use std::io::{BufRead, BufReader, Read};
use std::path::PathBuf;
criterion_group!(sparql, sparql_w3c_syntax_bench);
criterion_main!(sparql);
fn sparql_w3c_syntax_bench(c: &mut Criterion) {
let manifest_urls = vec![
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/manifest-syntax.ttl",
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/syntax-query/manifest.ttl",
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/syntax-fed/manifest.ttl",
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/construct/manifest.ttl",
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/grouping/manifest.ttl",
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/aggregates/manifest.ttl",
];
let queries: Vec<_> = manifest_urls
.into_iter()
.flat_map(TestManifest::new)
.flat_map(|test| {
let test = test.unwrap();
if test.kind == "PositiveSyntaxTest" || test.kind == "PositiveSyntaxTest11" {
Some((read_file_to_string(&test.query).unwrap(), test.query))
} else {
None
}
})
.collect();
c.bench_function("query parser", |b| {
b.iter(|| {
for (query, base) in &queries {
Query::parse(query, Some(base)).unwrap();
}
})
});
}
fn load_graph_to_store(
url: &str,
store: &MemoryStore,
to_graph_name: Option<&NamedOrBlankNode>,
) -> Result<()> {
let syntax = if url.ends_with(".nt") {
GraphSyntax::NTriples
} else if url.ends_with(".ttl") {
GraphSyntax::Turtle
} else if url.ends_with(".rdf") {
GraphSyntax::RdfXml
} else {
return Err(Error::msg(format!(
"Serialization type not found for {}",
url
)));
};
store.load_graph(read_file(url)?, syntax, to_graph_name, Some(url))
}
fn to_relative_path(url: &str) -> Result<String> {
if url.starts_with("http://www.w3.org/2001/sw/DataAccess/tests/data-r2/") {
Ok(url.replace(
"http://www.w3.org/2001/sw/DataAccess/tests/",
"rdf-tests/sparql11/",
))
} else if url.starts_with("http://www.w3.org/2009/sparql/docs/tests/data-sparql11/") {
Ok(url.replace(
"http://www.w3.org/2009/sparql/docs/tests/",
"rdf-tests/sparql11/",
))
} else {
Err(Error::msg(format!("Not supported url for file: {}", url)))
}
}
fn read_file(url: &str) -> Result<impl BufRead> {
let mut base_path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
base_path.push("tests");
base_path.push(to_relative_path(url)?);
Ok(BufReader::new(File::open(&base_path).map_err(|e| {
Error::msg(format!(
"Opening file {} failed with {}",
base_path.display(),
e,
))
})?))
}
fn read_file_to_string(url: &str) -> Result<String> {
let mut string = String::default();
read_file(url)?.read_to_string(&mut string)?;
Ok(string)
}
mod rs {
use lazy_static::lazy_static;
use oxigraph::model::NamedNode;
lazy_static! {
pub static ref RESULT_SET: NamedNode =
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/result-set#ResultSet")
.unwrap();
pub static ref RESULT_VARIABLE: NamedNode = NamedNode::parse(
"http://www.w3.org/2001/sw/DataAccess/tests/result-set#resultVariable"
)
.unwrap();
pub static ref SOLUTION: NamedNode =
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/result-set#solution")
.unwrap();
pub static ref BINDING: NamedNode =
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/result-set#binding")
.unwrap();
pub static ref VALUE: NamedNode =
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/result-set#value")
.unwrap();
pub static ref VARIABLE: NamedNode =
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/result-set#variable")
.unwrap();
pub static ref INDEX: NamedNode =
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/result-set#index")
.unwrap();
pub static ref BOOLEAN: NamedNode =
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/result-set#boolean")
.unwrap();
}
}
mod mf {
use lazy_static::lazy_static;
use oxigraph::model::NamedNode;
lazy_static! {
pub static ref INCLUDE: NamedNode =
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#include")
.unwrap();
pub static ref ENTRIES: NamedNode =
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#entries")
.unwrap();
pub static ref NAME: NamedNode =
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#name")
.unwrap();
pub static ref ACTION: NamedNode =
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#action")
.unwrap();
pub static ref RESULT: NamedNode =
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#result")
.unwrap();
}
}
mod qt {
use lazy_static::lazy_static;
use oxigraph::model::NamedNode;
lazy_static! {
pub static ref QUERY: NamedNode =
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-query#query")
.unwrap();
pub static ref DATA: NamedNode =
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-query#data").unwrap();
pub static ref GRAPH_DATA: NamedNode =
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-query#graphData")
.unwrap();
pub static ref SERVICE_DATA: NamedNode =
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-query#serviceData")
.unwrap();
pub static ref ENDPOINT: NamedNode =
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-query#endpoint")
.unwrap();
}
}
struct Test {
kind: String,
query: String,
}
struct TestManifest {
graph: MemoryStore,
tests_to_do: Vec<Term>,
manifests_to_do: Vec<String>,
}
impl TestManifest {
fn new(url: impl Into<String>) -> TestManifest {
Self {
graph: MemoryStore::new(),
tests_to_do: Vec::default(),
manifests_to_do: vec![url.into()],
}
}
}
impl Iterator for TestManifest {
type Item = Result<Test>;
fn next(&mut self) -> Option<Result<Test>> {
match self.tests_to_do.pop() {
Some(Term::NamedNode(test_node)) => {
let test_subject = NamedOrBlankNode::from(test_node.clone());
let kind =
match object_for_subject_predicate(&self.graph, &test_subject, &rdf::TYPE) {
Some(Term::NamedNode(c)) => match c.as_str().split('#').last() {
Some(k) => k.to_string(),
None => return self.next(), //We ignore the test
},
_ => return self.next(), //We ignore the test
};
let query =
match object_for_subject_predicate(&self.graph, &test_subject, &*mf::ACTION) {
Some(Term::NamedNode(n)) => n.as_str().to_owned(),
Some(Term::BlankNode(n)) => {
let n = n.clone().into();
match object_for_subject_predicate(&self.graph, &n, &qt::QUERY) {
Some(Term::NamedNode(q)) => q.as_str().to_owned(),
Some(_) => return Some(Err(Error::msg("invalid query"))),
None => return Some(Err(Error::msg("query not found"))),
}
}
Some(_) => return Some(Err(Error::msg("invalid action"))),
None => {
return Some(Err(Error::msg(format!(
"action not found for test {}",
test_subject
))));
}
};
Some(Ok(Test { kind, query }))
}
Some(_) => Some(Err(Error::msg("invalid test list"))),
None => {
match self.manifests_to_do.pop() {
Some(url) => {
let manifest =
NamedOrBlankNode::from(NamedNode::parse(url.clone()).unwrap());
if let Err(e) = load_graph_to_store(&url, &self.graph, None) {
return Some(Err(e));
}
// New manifests
match object_for_subject_predicate(&self.graph, &manifest, &*mf::INCLUDE) {
Some(Term::BlankNode(list)) => {
self.manifests_to_do.extend(
RdfListIterator::iter(&self.graph, list.clone().into())
.filter_map(|m| match m {
Term::NamedNode(nm) => Some(nm.into_string()),
_ => None,
}),
);
}
Some(_) => return Some(Err(Error::msg("invalid tests list"))),
None => (),
}
// New tests
match object_for_subject_predicate(&self.graph, &manifest, &*mf::ENTRIES) {
Some(Term::BlankNode(list)) => {
self.tests_to_do.extend(RdfListIterator::iter(
&self.graph,
list.clone().into(),
));
}
Some(term) => {
return Some(Err(Error::msg(format!(
"Invalid tests list. Got term {}",
term
))));
}
None => (),
}
}
None => return None,
}
self.next()
}
}
}
}
struct RdfListIterator<'a> {
graph: &'a MemoryStore,
current_node: Option<NamedOrBlankNode>,
}
impl<'a> RdfListIterator<'a> {
fn iter(graph: &'a MemoryStore, root: NamedOrBlankNode) -> RdfListIterator<'a> {
RdfListIterator {
graph,
current_node: Some(root),
}
}
}
impl<'a> Iterator for RdfListIterator<'a> {
type Item = Term;
fn next(&mut self) -> Option<Term> {
match self.current_node.clone() {
Some(current) => {
let result = object_for_subject_predicate(&self.graph, &current, &rdf::FIRST);
self.current_node =
match object_for_subject_predicate(&self.graph, &current, &rdf::REST) {
Some(Term::NamedNode(ref n)) if *n == *rdf::NIL => None,
Some(Term::NamedNode(n)) => Some(n.into()),
Some(Term::BlankNode(n)) => Some(n.into()),
_ => None,
};
result
}
None => None,
}
}
}
fn object_for_subject_predicate(
store: &MemoryStore,
subject: &NamedOrBlankNode,
predicate: &NamedNode,
) -> Option<Term> {
objects_for_subject_predicate(store, subject, predicate).next()
}
fn objects_for_subject_predicate(
store: &MemoryStore,
subject: &NamedOrBlankNode,
predicate: &NamedNode,
) -> impl Iterator<Item = Term> {
store
.quads_for_pattern(Some(subject), Some(predicate), None, None)
.map(|t| t.object_owned())
}

@ -69,3 +69,15 @@ impl PartialEq<NamedNode> for str {
self == other.as_str() self == other.as_str()
} }
} }
impl PartialEq<&str> for NamedNode {
fn eq(&self, other: &&str) -> bool {
self == *other
}
}
impl PartialEq<NamedNode> for &str {
fn eq(&self, other: &NamedNode) -> bool {
*self == other
}
}

@ -550,6 +550,13 @@ mod tests {
#[test] #[test]
fn format() { fn format() {
assert_eq!(format!("{}", Decimal::from(0)), "0");
assert_eq!(format!("{}", Decimal::from(1)), "1");
assert_eq!(format!("{}", Decimal::from(10)), "10");
assert_eq!(format!("{}", Decimal::from(100)), "100");
assert_eq!(format!("{}", Decimal::from(-1)), "-1");
assert_eq!(format!("{}", Decimal::from(-10)), "-10");
assert_eq!(format!("{:02}", Decimal::from(0)), "00"); assert_eq!(format!("{:02}", Decimal::from(0)), "00");
assert_eq!(format!("{:02}", Decimal::from(1)), "01"); assert_eq!(format!("{:02}", Decimal::from(1)), "01");
assert_eq!(format!("{:02}", Decimal::from(10)), "10"); assert_eq!(format!("{:02}", Decimal::from(10)), "10");

@ -6,12 +6,12 @@ use crate::store::numeric_encoder::*;
use crate::store::*; use crate::store::*;
use crate::{DatasetSyntax, GraphSyntax, Result}; use crate::{DatasetSyntax, GraphSyntax, Result};
use std::collections::hash_map::DefaultHasher; use std::collections::hash_map::DefaultHasher;
use std::collections::{BTreeSet, HashMap, HashSet}; use std::collections::{HashMap, HashSet};
use std::fmt; use std::fmt;
use std::hash::Hash; use std::hash::{BuildHasherDefault, Hash, Hasher};
use std::hash::Hasher;
use std::io::BufRead; use std::io::BufRead;
use std::iter::FromIterator; use std::iter::FromIterator;
use std::mem::size_of;
use std::sync::{Arc, RwLock, RwLockReadGuard, RwLockWriteGuard}; use std::sync::{Arc, RwLock, RwLockReadGuard, RwLockWriteGuard};
/// In-memory store. /// In-memory store.
@ -47,8 +47,10 @@ pub struct MemoryStore {
indexes: Arc<RwLock<MemoryStoreIndexes>>, indexes: Arc<RwLock<MemoryStoreIndexes>>,
} }
type TripleMap<T> = HashMap<T, HashMap<T, HashSet<T>>>; type TrivialHashMap<K, V> = HashMap<K, V, BuildHasherDefault<TrivialHasher>>;
type QuadMap<T> = HashMap<T, TripleMap<T>>; type TrivialHashSet<T> = HashSet<T, BuildHasherDefault<TrivialHasher>>;
type TripleMap<T> = TrivialHashMap<T, TrivialHashMap<T, TrivialHashSet<T>>>;
type QuadMap<T> = TrivialHashMap<T, TripleMap<T>>;
#[derive(Default)] #[derive(Default)]
struct MemoryStoreIndexes { struct MemoryStoreIndexes {
@ -282,9 +284,12 @@ impl MemoryStore {
/// Returns if the current dataset is [isomorphic](https://www.w3.org/TR/rdf11-concepts/#dfn-dataset-isomorphism) with another one. /// Returns if the current dataset is [isomorphic](https://www.w3.org/TR/rdf11-concepts/#dfn-dataset-isomorphism) with another one.
/// ///
/// Warning: This implementation worst-case complexity is in O(n!) /// It is implemented using the canonicalization approach presented in
/// [Canonical Forms for Isomorphic and Equivalent RDF Graphs: Algorithms for Leaning and Labelling Blank Nodes, Aidan Hogan, 2017](http://aidanhogan.com/docs/rdf-canonicalisation.pdf)
///
/// Warning: This implementation worst-case complexity is in O(b!) with b the number of blank node node in the input graphs.
pub fn is_isomorphic(&self, other: &Self) -> bool { pub fn is_isomorphic(&self, other: &Self) -> bool {
are_datasets_isomorphic(self, other) iso_canonicalize(self) == iso_canonicalize(other)
} }
fn indexes(&self) -> RwLockReadGuard<'_, MemoryStoreIndexes> { fn indexes(&self) -> RwLockReadGuard<'_, MemoryStoreIndexes> {
@ -768,12 +773,14 @@ fn remove_from_quad_map<T: Eq + Hash>(map1: &mut QuadMap<T>, e1: &T, e2: &T, e3:
} }
} }
fn option_set_flatten<'a, T: Clone>(i: Option<&'a HashSet<T>>) -> impl Iterator<Item = T> + 'a { fn option_set_flatten<'a, T: Clone>(
i: Option<&'a TrivialHashSet<T>>,
) -> impl Iterator<Item = T> + 'a {
i.into_iter().flat_map(|s| s.iter().cloned()) i.into_iter().flat_map(|s| s.iter().cloned())
} }
fn option_pair_map_flatten<'a, T: Copy>( fn option_pair_map_flatten<'a, T: Copy>(
i: Option<&'a HashMap<T, HashSet<T>>>, i: Option<&'a TrivialHashMap<T, TrivialHashSet<T>>>,
) -> impl Iterator<Item = (T, T)> + 'a { ) -> impl Iterator<Item = (T, T)> + 'a {
i.into_iter().flat_map(|kv| { i.into_iter().flat_map(|kv| {
kv.iter().flat_map(|(k, vs)| { kv.iter().flat_map(|(k, vs)| {
@ -937,6 +944,14 @@ impl WritableEncodedStore for MemoryTransaction<'_> {
} }
} }
impl PartialEq for MemoryStore {
fn eq(&self, other: &Self) -> bool {
self.indexes().spog == other.indexes().spog
}
}
impl Eq for MemoryStore {}
impl FromIterator<Quad> for MemoryStore { impl FromIterator<Quad> for MemoryStore {
fn from_iter<I: IntoIterator<Item = Quad>>(iter: I) -> Self { fn from_iter<I: IntoIterator<Item = Quad>>(iter: I) -> Self {
let mut store = MemoryStore::new(); let mut store = MemoryStore::new();
@ -964,257 +979,234 @@ impl fmt::Display for MemoryStore {
// Isomorphism implementation // Isomorphism implementation
fn split_hash_buckets( fn iso_canonicalize(g: &MemoryStore) -> Vec<Vec<u8>> {
bnodes_by_hash: HashMap<u64, Vec<EncodedTerm>>, let bnodes = bnodes(g);
graph: &MemoryStore, let (hash, partition) = hash_bnodes(g, bnodes.into_iter().map(|bnode| (bnode, 0)).collect());
distance: usize, distinguish(g, &hash, &partition)
) -> HashMap<u64, Vec<EncodedTerm>> { }
let mut new_bnodes_by_hash = HashMap::default();
for (hash, bnodes) in bnodes_by_hash { fn distinguish(
if bnodes.len() == 1 { g: &MemoryStore,
new_bnodes_by_hash.insert(hash, bnodes); // Nothing to improve hash: &TrivialHashMap<u128, u64>,
partition: &[(u64, Vec<u128>)],
) -> Vec<Vec<u8>> {
let b_prime = partition
.iter()
.find_map(|(_, b)| if b.len() > 1 { Some(b) } else { None });
if let Some(b_prime) = b_prime {
b_prime
.iter()
.map(|b| {
let mut hash_prime = hash.clone();
hash_prime.insert(*b, hash_tuple((hash_prime[b], 22)));
let (hash_prime_prime, partition_prime) = hash_bnodes(g, hash_prime);
distinguish(g, &hash_prime_prime, &partition_prime)
})
.fold(None, |a, b| {
Some(if let Some(a) = a {
if a <= b {
a
} else { } else {
for bnode in bnodes { b
let mut starts = vec![bnode];
for _ in 0..distance {
let mut new_starts = Vec::default();
for s in starts {
for q in graph.encoded_quads_for_subject(s) {
if q.object.is_named_node() || q.object.is_blank_node() {
new_starts.push(q.object)
}
}
for t in graph.encoded_quads_for_object(s) {
new_starts.push(t.subject);
}
} }
starts = new_starts; } else {
b
})
})
.unwrap_or_else(Vec::new)
} else {
label(g, hash)
}
}
fn hash_bnodes(
g: &MemoryStore,
mut hashes: TrivialHashMap<u128, u64>,
) -> (TrivialHashMap<u128, u64>, Vec<(u64, Vec<u128>)>) {
let mut to_hash = Vec::new();
let mut partition: TrivialHashMap<u64, Vec<u128>> =
TrivialHashMap::with_hasher(BuildHasherDefault::<TrivialHasher>::default());
let mut partition_len = 0;
loop {
//TODO: improve termination
let mut new_hashes =
TrivialHashMap::with_hasher(BuildHasherDefault::<TrivialHasher>::default());
for (b, old_hash) in &hashes {
let bnode = EncodedTerm::BlankNode { id: *b };
for q in g.encoded_quads_for_subject(bnode) {
to_hash.push((
hash_term(q.predicate, &hashes),
hash_term(q.object, &hashes),
hash_term(q.graph_name, &hashes),
0,
));
}
for q in g.encoded_quads_for_object(bnode) {
to_hash.push((
hash_term(q.subject, &hashes),
hash_term(q.predicate, &hashes),
hash_term(q.graph_name, &hashes),
1,
));
}
for q in g.encoded_quads_for_graph(bnode) {
to_hash.push((
hash_term(q.subject, &hashes),
hash_term(q.predicate, &hashes),
hash_term(q.object, &hashes),
2,
));
}
to_hash.sort();
let hash = hash_tuple((old_hash, &to_hash));
to_hash.clear();
new_hashes.insert(*b, hash);
partition.entry(hash).or_default().push(*b);
}
if partition.len() == partition_len {
let mut partition: Vec<_> = partition.into_iter().collect();
partition.sort_by(|(h1, b1), (h2, b2)| (b1.len(), h1).cmp(&(b2.len(), h2)));
return (hashes, partition);
}
hashes = new_hashes;
partition_len = partition.len();
partition.clear();
}
}
fn bnodes(g: &MemoryStore) -> TrivialHashSet<u128> {
let mut bnodes = TrivialHashSet::with_hasher(BuildHasherDefault::<TrivialHasher>::default());
for q in g.encoded_quads() {
if let EncodedTerm::BlankNode { id } = q.subject {
bnodes.insert(id);
}
if let EncodedTerm::BlankNode { id } = q.object {
bnodes.insert(id);
}
if let EncodedTerm::BlankNode { id } = q.graph_name {
bnodes.insert(id);
}
}
bnodes
}
fn label(g: &MemoryStore, hashes: &TrivialHashMap<u128, u64>) -> Vec<Vec<u8>> {
//TODO: better representation?
let mut data: Vec<_> = g
.encoded_quads()
.into_iter()
.map(|q| {
let mut buffer = Vec::with_capacity(WRITTEN_TERM_MAX_SIZE * 4);
write_spog_quad(
&mut buffer,
&EncodedQuad::new(
map_term(q.subject, hashes),
map_term(q.predicate, hashes),
map_term(q.object, hashes),
map_term(q.graph_name, hashes),
),
);
buffer
})
.collect();
data.sort();
data
} }
// We do the hashing fn map_term(term: EncodedTerm, bnodes_hash: &TrivialHashMap<u128, u64>) -> EncodedTerm {
let mut hasher = DefaultHasher::default(); if let EncodedTerm::BlankNode { id } = term {
hash.hash(&mut hasher); // We start with the previous hash EncodedTerm::BlankNode {
id: (*bnodes_hash.get(&id).unwrap()).into(),
// NB: we need to sort the triples to have the same hash
let mut po_set = BTreeSet::default();
for start in &starts {
for quad in graph.encoded_quads_for_subject(*start) {
if !quad.object.is_blank_node() {
po_set.insert(encode_term_pair(quad.predicate, quad.object));
}
} }
} else {
term
} }
for po in &po_set {
po.hash(&mut hasher);
} }
let mut sp_set = BTreeSet::default(); fn hash_term(term: EncodedTerm, bnodes_hash: &TrivialHashMap<u128, u64>) -> u64 {
for start in starts { if let EncodedTerm::BlankNode { id } = term {
for quad in graph.encoded_quads_for_object(start) { *bnodes_hash.get(&id).unwrap()
if !quad.subject.is_blank_node() { } else {
sp_set.insert(encode_term_pair(quad.subject, quad.predicate)); hash_tuple(term)
}
}
} }
for sp in &sp_set {
sp.hash(&mut hasher);
} }
new_bnodes_by_hash fn hash_tuple(v: impl Hash) -> u64 {
.entry(hasher.finish()) let mut hasher = DefaultHasher::new();
.or_insert_with(Vec::default) v.hash(&mut hasher);
.push(bnode); hasher.finish()
} }
}
} #[derive(Default)]
new_bnodes_by_hash struct TrivialHasher {
value: u64,
} }
fn encode_term_pair(t1: EncodedTerm, t2: EncodedTerm) -> Vec<u8> { #[allow(
let mut vec = Vec::with_capacity(2 * WRITTEN_TERM_MAX_SIZE); arithmetic_overflow,
write_term(&mut vec, t1); clippy::cast_sign_loss,
write_term(&mut vec, t2); clippy::cast_possible_truncation
vec )]
impl Hasher for TrivialHasher {
fn finish(&self) -> u64 {
self.value
} }
fn build_and_check_containment_from_hashes<'a>( fn write(&mut self, bytes: &[u8]) {
a_bnodes_by_hash: &mut Vec<(u64, Vec<EncodedTerm>)>, for chunk in bytes.chunks(size_of::<u64>()) {
b_bnodes_by_hash: &'a HashMap<u64, Vec<EncodedTerm>>, let mut val = [0; size_of::<u64>()];
a_to_b_mapping: &mut HashMap<EncodedTerm, EncodedTerm>, val[0..chunk.len()].copy_from_slice(chunk);
a: &'a HashSet<EncodedQuad>, self.write_u64(u64::from_le_bytes(val));
b: &'a HashSet<EncodedQuad>,
current_a_nodes: &[EncodedTerm],
current_b_nodes: &mut HashSet<EncodedTerm>,
) -> bool {
if let Some((a_node, remaining_a_node)) = current_a_nodes.split_last() {
let b_nodes = current_b_nodes.iter().cloned().collect::<Vec<_>>();
for b_node in b_nodes {
current_b_nodes.remove(&b_node);
a_to_b_mapping.insert(*a_node, b_node);
if check_is_contained_focused(a_to_b_mapping, *a_node, a, b)
&& build_and_check_containment_from_hashes(
a_bnodes_by_hash,
b_bnodes_by_hash,
a_to_b_mapping,
a,
b,
remaining_a_node,
current_b_nodes,
)
{
return true;
} }
current_b_nodes.insert(b_node);
} }
a_to_b_mapping.remove(a_node);
false
} else {
let (hash, new_a_nodes) = match a_bnodes_by_hash.pop() {
Some(v) => v,
None => return true,
};
let mut new_b_nodes = b_bnodes_by_hash fn write_u8(&mut self, i: u8) {
.get(&hash) self.write_u64(i.into());
.map_or(HashSet::default(), |v| v.iter().cloned().collect());
if new_a_nodes.len() != new_b_nodes.len() {
return false;
} }
if new_a_nodes.len() > 10 { fn write_u16(&mut self, i: u16) {
eprintln!("Too big instance, aborting"); self.write_u64(i.into());
return true; //TODO: Very very very bad
} }
if build_and_check_containment_from_hashes( fn write_u32(&mut self, i: u32) {
a_bnodes_by_hash, self.write_u64(i.into());
b_bnodes_by_hash,
a_to_b_mapping,
a,
b,
&new_a_nodes,
&mut new_b_nodes,
) {
true
} else {
a_bnodes_by_hash.push((hash, new_a_nodes));
false
}
}
} }
fn check_is_contained_focused<'a>( fn write_u64(&mut self, i: u64) {
a_to_b_mapping: &mut HashMap<EncodedTerm, EncodedTerm>, self.value ^= i;
a_bnode_focus: EncodedTerm,
a: &'a HashSet<EncodedQuad>,
b: &'a HashSet<EncodedQuad>,
) -> bool {
let ts_a = a
.iter()
.filter(|t| t.subject == a_bnode_focus)
.chain(a.iter().filter(|t| t.object == a_bnode_focus));
//TODO: these filters
for t_a in ts_a {
let subject = if t_a.subject.is_blank_node() {
if let Some(s_a) = a_to_b_mapping.get(&t_a.subject) {
*s_a
} else {
continue; // We skip for now
}
} else {
t_a.subject
};
let object = if t_a.object.is_blank_node() {
if let Some(o_a) = a_to_b_mapping.get(&t_a.object) {
*o_a
} else {
continue; // We skip for now
}
} else {
t_a.object
};
if !b.contains(&EncodedQuad::new(
subject,
t_a.predicate,
object,
t_a.graph_name, //TODO: support blank node graph names
)) {
//TODO
return false;
}
} }
true fn write_u128(&mut self, i: u128) {
self.write_u64(i as u64);
self.write_u64((i >> 64) as u64);
} }
fn graph_blank_nodes(graph: &HashSet<EncodedQuad>) -> Vec<EncodedTerm> { fn write_usize(&mut self, i: usize) {
let mut blank_nodes: HashSet<EncodedTerm> = HashSet::default(); self.write_u64(i as u64);
for t in graph { self.write_u64((i >> 64) as u64);
if t.subject.is_blank_node() {
blank_nodes.insert(t.subject);
}
if t.object.is_blank_node() {
blank_nodes.insert(t.object);
}
} }
blank_nodes.into_iter().collect()
}
fn are_datasets_isomorphic(a: &MemoryStore, b: &MemoryStore) -> bool {
/* TODO if a.len() != b.len() {
return false;
}*/
// We check containment of everything buts triples with blank nodes fn write_i8(&mut self, i: i8) {
let mut a_bnodes_triples = HashSet::default(); self.write_u8(i as u8);
for t in a.encoded_quads() {
if t.subject.is_blank_node() || t.object.is_blank_node() {
a_bnodes_triples.insert(t);
} else if !b.contains_encoded(&t) {
return false; // Triple in a not in b without blank nodes
}
} }
let mut b_bnodes_triples = HashSet::default(); fn write_i16(&mut self, i: i16) {
for t in b.encoded_quads() { self.write_u16(i as u16);
if t.subject.is_blank_node() || t.object.is_blank_node() {
b_bnodes_triples.insert(t);
} else if !a.contains_encoded(&t) {
return false; // Triple in a not in b without blank nodes
} }
}
let mut a_bnodes_by_hash = HashMap::default();
a_bnodes_by_hash.insert(0, graph_blank_nodes(&a_bnodes_triples));
let mut b_bnodes_by_hash = HashMap::default();
b_bnodes_by_hash.insert(0, graph_blank_nodes(&b_bnodes_triples));
for distance in 0..5 { fn write_i32(&mut self, i: i32) {
let max_size = a_bnodes_by_hash.values().map(Vec::len).max().unwrap_or(0); self.write_u32(i as u32);
if max_size < 2 {
break; // We only have small buckets
} }
a_bnodes_by_hash = split_hash_buckets(a_bnodes_by_hash, a, distance); fn write_i64(&mut self, i: i64) {
b_bnodes_by_hash = split_hash_buckets(b_bnodes_by_hash, b, distance); self.write_u64(i as u64);
// Hashes should have the same size
if a_bnodes_by_hash.len() != b_bnodes_by_hash.len() {
return false;
}
} }
let mut sorted_a_bnodes_by_hash: Vec<_> = a_bnodes_by_hash.into_iter().collect(); fn write_i128(&mut self, i: i128) {
sorted_a_bnodes_by_hash.sort_by(|(_, l1), (_, l2)| l1.len().cmp(&l2.len())); self.write_u128(i as u128);
}
build_and_check_containment_from_hashes( fn write_isize(&mut self, i: isize) {
&mut sorted_a_bnodes_by_hash, self.write_usize(i as usize);
&b_bnodes_by_hash, }
&mut HashMap::default(),
&a_bnodes_triples,
&b_bnodes_triples,
&[],
&mut HashSet::default(),
)
} }

@ -1 +0,0 @@
Subproject commit dc237e319e6562f2913341f6ba964ecbcbbf4499

@ -1,755 +0,0 @@
///! Integration tests based on [SPARQL 1.1 Test Cases](https://www.w3.org/2009/sparql/docs/tests/README.html)
use oxigraph::model::vocab::rdf;
use oxigraph::model::vocab::rdfs;
use oxigraph::model::*;
use oxigraph::sparql::*;
use oxigraph::*;
use rayon::prelude::*;
use std::collections::HashMap;
use std::fmt;
use std::fs::File;
use std::io::Read;
use std::io::{BufRead, BufReader};
use std::iter::once;
use std::path::PathBuf;
use std::sync::Arc;
#[test]
fn sparql_w3c_syntax_testsuite() -> Result<()> {
let manifest_10_urls =
vec!["http://www.w3.org/2001/sw/DataAccess/tests/data-r2/manifest-syntax.ttl"];
let manifest_11_urls = vec![
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/syntax-query/manifest.ttl",
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/syntax-fed/manifest.ttl",
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/construct/manifest.ttl",
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/grouping/manifest.ttl",
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/aggregates/manifest.ttl",
];
for test_result in manifest_10_urls
.into_iter()
.chain(manifest_11_urls.into_iter())
.chain(once(
"https://github.com/oxigraph/oxigraph/tests/sparql/manifest.ttl",
))
.flat_map(TestManifest::new)
{
let test = test_result.unwrap();
if test.kind == "PositiveSyntaxTest" || test.kind == "PositiveSyntaxTest11" {
match Query::parse(&read_file_to_string(&test.query)?, Some(&test.query)) {
Err(error) => panic!("Failure on {} with error: {}", test, error),
Ok(query) => {
if let Err(error) = Query::parse(&query.to_string(), None) {
assert!(
false,
"Failure to deserialize \"{}\" of {} with error: {}",
query.to_string(),
test,
error
)
}
}
}
} else if test.kind == "NegativeSyntaxTest" || test.kind == "NegativeSyntaxTest11" {
//TODO
if let Ok(result) = Query::parse(&read_file_to_string(&test.query)?, Some(&test.query))
{
eprintln!("Failure on {}. The output tree is: {}", test, result);
}
} else if test.kind != "QueryEvaluationTest" {
panic!("Not supported test: {}", test);
}
}
Ok(())
}
#[test]
fn sparql_w3c_query_evaluation_testsuite() -> Result<()> {
let manifest_10_urls = vec![
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/algebra/manifest.ttl",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/ask/manifest.ttl",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/basic/manifest.ttl",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/bnode-coreference/manifest.ttl",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/boolean-effective-value/manifest.ttl",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/bound/manifest.ttl",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/cast/manifest.ttl",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/construct/manifest.ttl",
//TODO FROM and FROM NAMED "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/construct/manifest.ttl",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/distinct/manifest.ttl",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest.ttl",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-equals/manifest.ttl",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-ops/manifest.ttl",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/graph/manifest.ttl",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/i18n/manifest.ttl",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/open-world/manifest.ttl",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/optional/manifest.ttl",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/optional-filter/manifest.ttl",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/reduced/manifest.ttl",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/regex/manifest.ttl",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/solution-seq/manifest.ttl",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/sort/manifest.ttl",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/triple-match/manifest.ttl",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/type-promotion/manifest.ttl",
];
let manifest_11_urls = vec![
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/aggregates/manifest.ttl",
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/bind/manifest.ttl",
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/bindings/manifest.ttl",
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/construct/manifest.ttl",
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/exists/manifest.ttl",
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/functions/manifest.ttl",
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/grouping/manifest.ttl",
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/negation/manifest.ttl",
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/project-expression/manifest.ttl",
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/property-path/manifest.ttl",
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/service/manifest.ttl",
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/subquery/manifest.ttl",
];
let test_blacklist = vec![
//Multiple writing of the same xsd:integer. Our system does strong normalization.
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/data-r2/distinct/manifest#distinct-1").unwrap(),
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/data-r2/distinct/manifest#distinct-9").unwrap(),
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest#dawg-str-1").unwrap(),
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest#dawg-str-2").unwrap(),
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-equals/manifest#eq-graph-1").unwrap(),
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-equals/manifest#eq-graph-2").unwrap(),
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/data-r2/open-world/manifest#open-eq-01").unwrap(),
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/data-r2/open-world/manifest#open-eq-04").unwrap(),
//Multiple writing of the same xsd:double. Our system does strong normalization.
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest#sameTerm").unwrap(),
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest#sameTerm-simple").unwrap(),
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest#sameTerm-eq").unwrap(),
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest#sameTerm-not-eq").unwrap(),
//Simple literal vs xsd:string. We apply RDF 1.1
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/data-r2/distinct/manifest#distinct-2").unwrap(),
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/data-r2/open-world/manifest#open-eq-08").unwrap(),
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/data-r2/open-world/manifest#open-eq-10").unwrap(),
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/data-r2/open-world/manifest#open-eq-11").unwrap(),
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/data-r2/open-world/manifest#open-eq-12").unwrap(),
//DATATYPE("foo"@en) returns rdf:langString in RDF 1.1
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest#dawg-datatype-2").unwrap(),
// FROM support
NamedNode::parse("http://www.w3.org/2009/sparql/docs/tests/data-sparql11/construct/manifest#constructwhere04").unwrap(),
//BNODE() scope is currently wrong
NamedNode::parse("http://www.w3.org/2009/sparql/docs/tests/data-sparql11/functions/manifest#bnode01").unwrap(),
//Property path with unbound graph name are not supported yet
NamedNode::parse("http://www.w3.org/2009/sparql/docs/tests/data-sparql11/property-path/manifest#pp35").unwrap(),
//SERVICE name from a BGP
NamedNode::parse("http://www.w3.org/2009/sparql/docs/tests/data-sparql11/service/manifest#service5").unwrap(),
// We use XSD 1.1 equality on dates
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/data-r2/open-world/manifest#date-2").unwrap(),
// We choose to simplify first the nested group patterns in OPTIONAL
NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/data-r2/optional-filter/manifest#dawg-optional-filter-005-not-simplified").unwrap(),
];
let tests: Result<Vec<_>> = manifest_10_urls
.into_iter()
.chain(manifest_11_urls.into_iter())
.flat_map(TestManifest::new)
.collect();
let failed: Vec<_> = tests?.into_par_iter().map(|test| {
if test_blacklist.contains(&test.id) {
Ok(())
} else if test.kind == "QueryEvaluationTest" {
let store = MemoryStore::new();
if let Some(data) = &test.data {
load_graph_to_store(&data, &store, None)?;
}
for graph_data in &test.graph_data {
load_graph_to_store(
&graph_data,
&store,
Some(&NamedNode::parse(graph_data)?.into()),
)?;
}
match store.prepare_query(&read_file_to_string(&test.query)?, QueryOptions::default().with_base_iri(&test.query).with_service_handler(StaticServiceHandler::new(&test.service_data)?))
{
Err(error) => Err(Error::msg(format!(
"Failure to parse query of {} with error: {}",
test, error
))),
Ok(query) => match query.exec() {
Err(error) => Err(Error::msg(format!(
"Failure to execute query of {} with error: {}",
test, error
))),
Ok(result) => {
let expected_graph =
load_sparql_query_result(test.result.as_ref().unwrap()).map_err(|e| Error::msg(format!("Error constructing expected graph for {}: {}", test, e)))?;
let with_order = expected_graph
.quads_for_pattern(None, Some(&rs::INDEX), None, None)
.next()
.is_some();
let actual_graph = to_dataset(result, with_order).map_err(|e| Error::msg(format!("Error constructing result graph for {}: {}", test, e)))?;
if actual_graph.is_isomorphic(&expected_graph) {
Ok(())
} else {
Err(Error::msg(format!("Failure on {}.\nExpected file:\n{}\nOutput file:\n{}\nParsed query:\n{}\nData:\n{}\n",
test,
expected_graph,
actual_graph,
Query::parse(&read_file_to_string(&test.query)?, Some(&test.query)).unwrap(),
store
)))
}
}
},
}
} else if test.kind != "NegativeSyntaxTest11" {
panic!("Not supported test: {}", test)
} else {
Ok(())
}
}).filter_map(|v| v.err()).map(|e| e.to_string()).collect();
assert!(
failed.is_empty(),
"{} tests failed:\n{}",
failed.len(),
failed.join("\n")
);
Ok(())
}
/// Loads the graph file at `url` into a freshly created in-memory store.
fn load_graph(url: &str) -> Result<MemoryStore> {
    let store = MemoryStore::new();
    load_graph_to_store(url, &store, None).map(|()| store)
}
fn load_graph_to_store(
url: &str,
store: &MemoryStore,
to_graph_name: Option<&NamedOrBlankNode>,
) -> Result<()> {
let syntax = if url.ends_with(".nt") {
GraphSyntax::NTriples
} else if url.ends_with(".ttl") {
GraphSyntax::Turtle
} else if url.ends_with(".rdf") {
GraphSyntax::RdfXml
} else {
return Err(Error::msg(format!(
"Serialization type not found for {}",
url
)));
};
store.load_graph(read_file(url)?, syntax, to_graph_name, Some(url))
}
/// Loads an expected SPARQL result file: XML result sets (`.srx`) are encoded
/// with the result-set vocabulary, everything else is loaded as a plain graph.
fn load_sparql_query_result(url: &str) -> Result<MemoryStore> {
    if url.ends_with(".srx") {
        let result = QueryResult::read(read_file(url)?, QueryResultSyntax::Xml)?;
        to_dataset(result, false)
    } else {
        load_graph(url)
    }
}
/// Maps a well-known testsuite URL to the matching path inside the local
/// `tests/` checkout.
fn to_relative_path(url: &str) -> Result<String> {
    // (URL prefix to recognize, URL part to rewrite, local directory) triples.
    let mappings = [
        (
            "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/",
            "http://www.w3.org/2001/sw/DataAccess/tests/",
            "rdf-tests/sparql11/",
        ),
        (
            "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/",
            "http://www.w3.org/2009/sparql/docs/tests/",
            "rdf-tests/sparql11/",
        ),
        (
            "https://github.com/oxigraph/oxigraph/tests/",
            "https://github.com/oxigraph/oxigraph/tests/",
            "oxigraph-tests/",
        ),
    ];
    for (prefix, pattern, replacement) in &mappings {
        if url.starts_with(prefix) {
            return Ok(url.replace(pattern, replacement));
        }
    }
    Err(Error::msg(format!("Not supported url for file: {}", url)))
}
/// Opens the local file mirroring the given testsuite URL as a buffered reader.
fn read_file(url: &str) -> Result<impl BufRead> {
    let relative = to_relative_path(url)?;
    let path = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
        .join("tests")
        .join(relative);
    let file = File::open(&path).map_err(|error| {
        Error::msg(format!(
            "Opening file {} failed with {}",
            path.display(),
            error,
        ))
    })?;
    Ok(BufReader::new(file))
}
/// Reads the whole local file mirroring `url` into a `String`.
fn read_file_to_string(url: &str) -> Result<String> {
    let mut content = String::new();
    read_file(url)?.read_to_string(&mut content)?;
    Ok(content)
}
/// Encodes a `QueryResult` as an RDF dataset using the DAWG result-set
/// vocabulary (`rs:`) so that actual and expected results can be compared by
/// graph isomorphism. When `with_order` is true an `rs:index` triple is added
/// per solution, making the comparison order-sensitive.
fn to_dataset(result: QueryResult<'_>, with_order: bool) -> Result<MemoryStore> {
    match result {
        // CONSTRUCT/DESCRIBE results: copy the triples into the default graph.
        QueryResult::Graph(graph) => graph.map(|t| t.map(|t| t.in_graph(None))).collect(),
        // ASK results: a single rs:boolean value on the result-set node.
        QueryResult::Boolean(value) => {
            let store = MemoryStore::new();
            let result_set = BlankNode::default();
            store.insert(Quad::new(
                result_set,
                rdf::TYPE.clone(),
                rs::RESULT_SET.clone(),
                None,
            ));
            store.insert(Quad::new(
                result_set,
                rs::BOOLEAN.clone(),
                Literal::from(value),
                None,
            ));
            Ok(store)
        }
        // SELECT results: one rs:solution per row and one rs:binding per
        // bound variable of each row.
        QueryResult::Bindings(solutions) => {
            let store = MemoryStore::new();
            let result_set = BlankNode::default();
            store.insert(Quad::new(
                result_set,
                rdf::TYPE.clone(),
                rs::RESULT_SET.clone(),
                None,
            ));
            for variable in solutions.variables() {
                store.insert(Quad::new(
                    result_set,
                    rs::RESULT_VARIABLE.clone(),
                    Literal::new_simple_literal(variable.as_str()),
                    None,
                ));
            }
            for (i, solution) in solutions.enumerate() {
                let solution = solution?;
                let solution_id = BlankNode::default();
                store.insert(Quad::new(
                    result_set,
                    rs::SOLUTION.clone(),
                    solution_id,
                    None,
                ));
                for (variable, value) in solution.iter() {
                    let binding = BlankNode::default();
                    store.insert(Quad::new(solution_id, rs::BINDING.clone(), binding, None));
                    store.insert(Quad::new(binding, rs::VALUE.clone(), value.clone(), None));
                    store.insert(Quad::new(
                        binding,
                        rs::VARIABLE.clone(),
                        Literal::new_simple_literal(variable.as_str()),
                        None,
                    ));
                }
                if with_order {
                    // rs:index is 1-based in the result-set vocabulary.
                    store.insert(Quad::new(
                        solution_id,
                        rs::INDEX.clone(),
                        Literal::from((i + 1) as i128),
                        None,
                    ));
                }
            }
            Ok(store)
        }
    }
}
/// IRIs of the DAWG SPARQL result-set vocabulary, used to encode query
/// results as RDF for isomorphism comparison.
mod rs {
    use lazy_static::lazy_static;
    use oxigraph::model::NamedNode;
    lazy_static! {
        pub static ref RESULT_SET: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/result-set#ResultSet")
                .unwrap();
        pub static ref RESULT_VARIABLE: NamedNode = NamedNode::parse(
            "http://www.w3.org/2001/sw/DataAccess/tests/result-set#resultVariable"
        )
        .unwrap();
        pub static ref SOLUTION: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/result-set#solution")
                .unwrap();
        pub static ref BINDING: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/result-set#binding")
                .unwrap();
        pub static ref VALUE: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/result-set#value")
                .unwrap();
        pub static ref VARIABLE: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/result-set#variable")
                .unwrap();
        pub static ref INDEX: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/result-set#index")
                .unwrap();
        pub static ref BOOLEAN: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/result-set#boolean")
                .unwrap();
    }
}
/// IRIs of the W3C test-manifest (`mf:`) vocabulary used by manifest files.
mod mf {
    use lazy_static::lazy_static;
    use oxigraph::model::NamedNode;
    lazy_static! {
        pub static ref INCLUDE: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#include")
                .unwrap();
        pub static ref ENTRIES: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#entries")
                .unwrap();
        pub static ref NAME: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#name")
                .unwrap();
        pub static ref ACTION: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#action")
                .unwrap();
        pub static ref RESULT: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#result")
                .unwrap();
    }
}
/// IRIs of the W3C test-query (`qt:`) vocabulary describing test actions.
mod qt {
    use lazy_static::lazy_static;
    use oxigraph::model::NamedNode;
    lazy_static! {
        pub static ref QUERY: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-query#query")
                .unwrap();
        pub static ref DATA: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-query#data").unwrap();
        pub static ref GRAPH_DATA: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-query#graphData")
                .unwrap();
        pub static ref SERVICE_DATA: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-query#serviceData")
                .unwrap();
        pub static ref ENDPOINT: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-query#endpoint")
                .unwrap();
    }
}
/// One entry of a SPARQL test manifest.
struct Test {
    // IRI identifying the test in the manifest.
    id: NamedNode,
    // Local part of the test's rdf:type IRI (e.g. "QueryEvaluationTest").
    kind: String,
    // Optional mf:name label.
    name: Option<String>,
    // Optional rdfs:comment description.
    comment: Option<String>,
    // URL of the query file (qt:query).
    query: String,
    // URL of the default graph data file (qt:data), if any.
    data: Option<String>,
    // URLs of named graph data files (qt:graphData).
    graph_data: Vec<String>,
    // (service IRI, data file URL) pairs from qt:serviceData.
    service_data: Vec<(String, String)>,
    // URL of the expected result file (mf:result), if any.
    result: Option<String>,
}
impl fmt::Display for Test {
    /// Builds a one-line human readable description of the test entry.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.kind)?;
        if let Some(name) = &self.name {
            write!(f, " named \"{}\"", name)?;
        }
        if let Some(comment) = &self.comment {
            write!(f, " with comment \"{}\"", comment)?;
        }
        write!(f, " on query {}", self.query)?;
        if let Some(data) = &self.data {
            write!(f, " with data {}", data)?;
        }
        for graph in &self.graph_data {
            write!(f, " and graph data {}", graph)?;
        }
        if let Some(result) = &self.result {
            write!(f, " and expected result {}", result)?;
        }
        Ok(())
    }
}
/// Lazy iterator over the tests declared by a (possibly recursive) manifest.
struct TestManifest {
    // Store accumulating every manifest file loaded so far.
    graph: MemoryStore,
    // Test nodes discovered but not yet emitted.
    tests_to_do: Vec<Term>,
    // Manifest URLs discovered but not yet loaded.
    manifests_to_do: Vec<String>,
}
impl TestManifest {
fn new(url: impl Into<String>) -> TestManifest {
Self {
graph: MemoryStore::new(),
tests_to_do: Vec::default(),
manifests_to_do: vec![url.into()],
}
}
}
impl Iterator for TestManifest {
    type Item = Result<Test>;
    /// Pops the next queued test node; when none are pending, loads the next
    /// manifest file and enqueues the tests and included manifests it lists.
    fn next(&mut self) -> Option<Result<Test>> {
        match self.tests_to_do.pop() {
            Some(Term::NamedNode(test_node)) => {
                let test_subject = NamedOrBlankNode::from(test_node.clone());
                // The test kind is the local part of the rdf:type IRI.
                let kind =
                    match object_for_subject_predicate(&self.graph, &test_subject, &rdf::TYPE) {
                        Some(Term::NamedNode(c)) => match c.as_str().split('#').last() {
                            Some(k) => k.to_string(),
                            None => return self.next(), //We ignore the test
                        },
                        _ => return self.next(), //We ignore the test
                    };
                let name = match object_for_subject_predicate(&self.graph, &test_subject, &mf::NAME)
                {
                    Some(Term::Literal(c)) => Some(c.value().to_string()),
                    _ => None,
                };
                let comment = match object_for_subject_predicate(
                    &self.graph,
                    &test_subject,
                    &rdfs::COMMENT,
                ) {
                    Some(Term::Literal(c)) => Some(c.value().to_string()),
                    _ => None,
                };
                // mf:action is either a plain query IRI or a blank node
                // carrying qt:query, qt:data, qt:graphData and qt:serviceData.
                let (query, data, graph_data, service_data) =
                    match object_for_subject_predicate(&self.graph, &test_subject, &*mf::ACTION) {
                        Some(Term::NamedNode(n)) => (n.as_str().to_owned(), None, vec![], vec![]),
                        Some(Term::BlankNode(n)) => {
                            let n = n.clone().into();
                            let query =
                                match object_for_subject_predicate(&self.graph, &n, &qt::QUERY) {
                                    Some(Term::NamedNode(q)) => q.as_str().to_owned(),
                                    Some(_) => return Some(Err(Error::msg("invalid query"))),
                                    None => return Some(Err(Error::msg("query not found"))),
                                };
                            let data =
                                match object_for_subject_predicate(&self.graph, &n, &qt::DATA) {
                                    Some(Term::NamedNode(q)) => Some(q.as_str().to_owned()),
                                    _ => None,
                                };
                            let graph_data =
                                objects_for_subject_predicate(&self.graph, &n, &qt::GRAPH_DATA)
                                    .filter_map(|g| match g {
                                        Term::NamedNode(q) => Some(q.as_str().to_owned()),
                                        _ => None,
                                    })
                                    .collect();
                            // Each qt:serviceData node must provide both an
                            // endpoint IRI and a data file to be kept.
                            let service_data =
                                objects_for_subject_predicate(&self.graph, &n, &qt::SERVICE_DATA)
                                    .filter_map(|g| match g {
                                        Term::NamedNode(g) => Some(g.into()),
                                        Term::BlankNode(g) => Some(g.into()),
                                        _ => None,
                                    })
                                    .filter_map(|g| {
                                        if let (
                                            Some(Term::NamedNode(endpoint)),
                                            Some(Term::NamedNode(data)),
                                        ) = (
                                            object_for_subject_predicate(
                                                &self.graph,
                                                &g,
                                                &qt::ENDPOINT,
                                            ),
                                            object_for_subject_predicate(
                                                &self.graph,
                                                &g,
                                                &qt::DATA,
                                            ),
                                        ) {
                                            Some((
                                                endpoint.as_str().to_owned(),
                                                data.as_str().to_owned(),
                                            ))
                                        } else {
                                            None
                                        }
                                    })
                                    .collect();
                            (query, data, graph_data, service_data)
                        }
                        Some(_) => return Some(Err(Error::msg("invalid action"))),
                        None => {
                            return Some(Err(Error::msg(format!(
                                "action not found for test {}",
                                test_subject
                            ))));
                        }
                    };
                let result =
                    match object_for_subject_predicate(&self.graph, &test_subject, &*mf::RESULT) {
                        Some(Term::NamedNode(n)) => Some(n.as_str().to_owned()),
                        Some(_) => return Some(Err(Error::msg("invalid result"))),
                        None => None,
                    };
                Some(Ok(Test {
                    id: test_node,
                    kind,
                    name,
                    comment,
                    query,
                    data,
                    graph_data,
                    service_data,
                    result,
                }))
            }
            Some(_) => Some(Err(Error::msg("invalid test list"))),
            None => {
                // No test pending: load the next manifest file, if any.
                match self.manifests_to_do.pop() {
                    Some(url) => {
                        let manifest =
                            NamedOrBlankNode::from(NamedNode::parse(url.clone()).unwrap());
                        if let Err(e) = load_graph_to_store(&url, &self.graph, None) {
                            return Some(Err(e));
                        }
                        // New manifests
                        match object_for_subject_predicate(&self.graph, &manifest, &*mf::INCLUDE) {
                            Some(Term::BlankNode(list)) => {
                                self.manifests_to_do.extend(
                                    RdfListIterator::iter(&self.graph, list.clone().into())
                                        .filter_map(|m| match m {
                                            Term::NamedNode(nm) => Some(nm.into_string()),
                                            _ => None,
                                        }),
                                );
                            }
                            Some(_) => return Some(Err(Error::msg("invalid tests list"))),
                            None => (),
                        }
                        // New tests
                        match object_for_subject_predicate(&self.graph, &manifest, &*mf::ENTRIES) {
                            Some(Term::BlankNode(list)) => {
                                self.tests_to_do.extend(RdfListIterator::iter(
                                    &self.graph,
                                    list.clone().into(),
                                ));
                            }
                            Some(term) => {
                                return Some(Err(Error::msg(format!(
                                    "Invalid tests list. Got term {}",
                                    term
                                ))));
                            }
                            None => (),
                        }
                    }
                    None => return None,
                }
                self.next()
            }
        }
    }
}
/// Iterator over the members of an RDF collection (rdf:first/rdf:rest list).
struct RdfListIterator<'a> {
    graph: &'a MemoryStore,
    // Current list cell; `None` once rdf:nil (or a malformed cell) is reached.
    current_node: Option<NamedOrBlankNode>,
}
impl<'a> RdfListIterator<'a> {
fn iter(graph: &'a MemoryStore, root: NamedOrBlankNode) -> RdfListIterator<'a> {
RdfListIterator {
graph,
current_node: Some(root),
}
}
}
impl<'a> Iterator for RdfListIterator<'a> {
    type Item = Term;

    /// Emits the rdf:first value of the current cell and advances to rdf:rest.
    fn next(&mut self) -> Option<Term> {
        let cell = self.current_node.take()?;
        let value = object_for_subject_predicate(&self.graph, &cell, &rdf::FIRST);
        // Stop on rdf:nil or on anything that is not a node.
        self.current_node =
            match object_for_subject_predicate(&self.graph, &cell, &rdf::REST) {
                Some(Term::NamedNode(ref n)) if *n == *rdf::NIL => None,
                Some(Term::NamedNode(n)) => Some(n.into()),
                Some(Term::BlankNode(n)) => Some(n.into()),
                _ => None,
            };
        value
    }
}
/// SERVICE handler resolving each service IRI against a fixed in-memory store.
#[derive(Clone)]
struct StaticServiceHandler {
    services: Arc<HashMap<NamedNode, MemoryStore>>,
}
impl StaticServiceHandler {
fn new(services: &[(String, String)]) -> Result<Self> {
Ok(Self {
services: Arc::new(
services
.iter()
.map(|(name, data)| {
let name = NamedNode::parse(name)?;
let store = MemoryStore::new();
load_graph_to_store(&data, &store, None)?;
Ok((name, store))
})
.collect::<Result<_>>()?,
),
})
}
}
impl ServiceHandler for StaticServiceHandler {
    /// Evaluates `graph_pattern` against the store registered for
    /// `service_name`, erroring if the service is unknown or the query does
    /// not return bindings.
    fn handle<'a>(
        &'a self,
        service_name: &NamedNode,
        graph_pattern: &'a GraphPattern,
    ) -> Result<QuerySolutionsIterator<'a>> {
        if let QueryResult::Bindings(iterator) = self
            .services
            .get(service_name)
            .ok_or_else(|| Error::msg(format!("Service {} not found", service_name)))?
            .prepare_query_from_pattern(
                &graph_pattern,
                QueryOptions::default().with_service_handler(self.clone()),
            )?
            .exec()?
        {
            //TODO: very ugly
            // Materialize every solution eagerly before rebuilding the iterator.
            let (variables, iter) = iterator.destruct();
            let collected = iter.collect::<Vec<_>>();
            Ok(QuerySolutionsIterator::new(
                variables,
                Box::new(collected.into_iter()),
            ))
        } else {
            Err(Error::msg("Expected bindings but got another QueryResult"))
        }
    }
}
/// Returns one object of the (subject, predicate, ?, any graph) pattern, if any.
fn object_for_subject_predicate(
    store: &MemoryStore,
    subject: &NamedOrBlankNode,
    predicate: &NamedNode,
) -> Option<Term> {
    let mut objects = objects_for_subject_predicate(store, subject, predicate);
    objects.next()
}
/// Iterates over every object matching (subject, predicate, ?, any graph).
fn objects_for_subject_predicate(
    store: &MemoryStore,
    subject: &NamedOrBlankNode,
    predicate: &NamedNode,
) -> impl Iterator<Item = Term> {
    let quads = store.quads_for_pattern(Some(subject), Some(predicate), None, None);
    quads.map(|quad| quad.object_owned())
}

@ -0,0 +1,24 @@
[package]
name = "oxigraph_testsuite"
version = "0.1.0"
authors = ["Tpt <thomas@pellissier-tanon.fr>"]
license = "MIT/Apache-2.0"
readme = "../README.md"
repository = "https://github.com/oxigraph/oxigraph"
description = """
Implementation of W3C testsuites for Oxigraph
"""
edition = "2018"
# Internal test crate: never publish it to crates.io.
publish = false
[dependencies]
chrono = "0.4"
lazy_static = "1"
# The crate under test.
oxigraph = { path = "../lib" }
[dev-dependencies]
criterion = "0.3"
# Criterion benchmark: the default libtest harness must be disabled.
[[bench]]
name = "sparql_query"
harness = false

@ -0,0 +1,39 @@
use criterion::{criterion_group, criterion_main, Criterion};
use oxigraph::sparql::*;
use oxigraph_testsuite::files::read_file_to_string;
use oxigraph_testsuite::manifest::TestManifest;
// Register the benchmark function and generate the benchmark entry point.
criterion_group!(sparql, sparql_w3c_syntax_bench);
criterion_main!(sparql);
/// Benchmarks the SPARQL query parser on every positive syntax test of the
/// W3C SPARQL 1.0 and 1.1 query testsuites.
fn sparql_w3c_syntax_bench(c: &mut Criterion) {
    let manifest_urls = vec![
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/manifest-syntax.ttl",
        "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/manifest-sparql11-query.ttl",
    ];
    // Collect (query text, base IRI) pairs up front so only parsing is timed.
    // filter_map replaces the previous flat_map-over-Option (clippy: flat_map_option).
    let queries: Vec<_> = TestManifest::new(manifest_urls)
        .filter_map(|test| {
            let test = test.unwrap();
            if test.kind == "http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#PositiveSyntaxTest"
                || test.kind
                    == "http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#PositiveSyntaxTest11" {
                test.action
                    .map(|query| (read_file_to_string(&query).unwrap(), query))
            } else {
                None
            }
        })
        .collect();
    c.bench_function("query parser", |b| {
        b.iter(|| {
            for (query, base) in &queries {
                Query::parse(query, Some(base)).unwrap();
            }
        })
    });
}

@ -1,3 +1,3 @@
PREFIX : <http://www.example.org> PREFIX : <http://www.example.org/>
SELECT (GROUP_CONCAT(?opt) AS ?g) WHERE { ?baseS a :ex OPTIONAL { ?baseS :opt ?opt } } SELECT (GROUP_CONCAT(?opt) AS ?g) WHERE { ?baseS a :ex OPTIONAL { ?baseS :opt ?opt } }

@ -5,7 +5,7 @@
</head> </head>
<results> <results>
<result> <result>
<binding name="opt"> <binding name="g">
<literal datatype="http://www.w3.org/2001/XMLSchema#string">value</literal> <literal datatype="http://www.w3.org/2001/XMLSchema#string">value</literal>
</binding> </binding>
</result> </result>

@ -1,4 +1,4 @@
@prefix : <http://www.example.org/> . @prefix : <http://www.example.org/> .
:a a :ex ; :s :opt "value" . :a a :ex ; :opt "value" .
:b a :ex . :b a :ex .

@ -0,0 +1 @@
Subproject commit 4dd2ac9136a10b8854396c646e91e9423d229d85

@ -0,0 +1,82 @@
use oxigraph::model::NamedOrBlankNode;
use oxigraph::{DatasetSyntax, Error, GraphSyntax, MemoryStore, Result};
use std::fs::File;
use std::io::{BufRead, BufReader, Read};
use std::path::PathBuf;
pub fn read_file(url: &str) -> Result<impl BufRead> {
let mut path = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
path.push(if url.starts_with("http://w3c.github.io/rdf-tests/") {
Ok(url.replace("http://w3c.github.io/rdf-tests/", "rdf-tests/"))
} else if url.starts_with("http://www.w3.org/2013/RDFXMLTests/") {
Ok(url.replace("http://www.w3.org/2013/RDFXMLTests/", "rdf-tests/rdf-xml/"))
} else if url.starts_with("http://www.w3.org/2001/sw/DataAccess/tests/data-r2/") {
Ok(url.replace(
"http://www.w3.org/2001/sw/DataAccess/tests/",
"rdf-tests/sparql11/",
))
} else if url.starts_with("http://www.w3.org/2009/sparql/docs/tests/data-sparql11/") {
Ok(url.replace(
"http://www.w3.org/2009/sparql/docs/tests/",
"rdf-tests/sparql11/",
))
} else if url.starts_with("https://github.com/oxigraph/oxigraph/tests/") {
Ok(url.replace(
"https://github.com/oxigraph/oxigraph/tests/",
"oxigraph-tests/",
))
} else {
Err(Error::msg(format!("Not supported url for file: {}", url)))
}?);
Ok(BufReader::new(File::open(&path)?))
}
/// Reads the whole local file mirroring `url` into a `String`.
pub fn read_file_to_string(url: &str) -> Result<String> {
    let mut content = String::new();
    read_file(url)?.read_to_string(&mut content)?;
    Ok(content)
}
/// Loads the file at `url` into `store`, picking the serialization from the
/// file extension. Graph formats honour `to_graph_name`; dataset formats
/// (N-Quads, TriG) carry their own graph names.
pub fn load_to_store(
    url: &str,
    store: &MemoryStore,
    to_graph_name: Option<&NamedOrBlankNode>,
) -> Result<()> {
    let graph_syntax = if url.ends_with(".nt") {
        Some(GraphSyntax::NTriples)
    } else if url.ends_with(".ttl") {
        Some(GraphSyntax::Turtle)
    } else if url.ends_with(".rdf") {
        Some(GraphSyntax::RdfXml)
    } else {
        None
    };
    if let Some(syntax) = graph_syntax {
        return store.load_graph(read_file(url)?, syntax, to_graph_name, Some(url));
    }
    if url.ends_with(".nq") {
        store.load_dataset(read_file(url)?, DatasetSyntax::NQuads, Some(url))
    } else if url.ends_with(".trig") {
        store.load_dataset(read_file(url)?, DatasetSyntax::TriG, Some(url))
    } else {
        Err(Error::msg(format!(
            "Serialization type not found for {}",
            url
        )))
    }
}
/// Loads the file at `url` into a freshly created in-memory store.
pub fn load_store(url: &str) -> Result<MemoryStore> {
    let store = MemoryStore::new();
    load_to_store(url, &store, None).map(|()| store)
}

@ -0,0 +1,18 @@
//! Implementation of [W3C RDF tests](http://w3c.github.io/rdf-tests/) to tests Oxigraph conformance.
#![deny(
    future_incompatible,
    nonstandard_style,
    rust_2018_idioms,
    missing_copy_implementations,
    trivial_casts,
    trivial_numeric_casts,
    unsafe_code,
    unused_qualifications
)]
// Mapping of testsuite URLs to local files plus store loading helpers.
pub mod files;
// Parsing of W3C test manifest files into `Test` values.
pub mod manifest;
// Evaluation of RDF parser (positive/negative syntax and eval) tests.
pub mod parser_evaluator;
// `TestResult` type describing one executed test.
pub mod report;
// Evaluation of SPARQL syntax and query evaluation tests.
pub mod sparql_evaluator;
// Shared IRI constants (mf:, qt:, ...) used by the other modules.
mod vocab;

@ -0,0 +1,276 @@
use crate::files::load_to_store;
use crate::vocab::*;
use oxigraph::model::vocab::*;
use oxigraph::model::*;
use oxigraph::{Error, MemoryStore, Result};
use std::fmt;
/// One entry of a W3C test manifest.
pub struct Test {
    // IRI identifying the test in the manifest.
    pub id: NamedNode,
    // rdf:type IRI of the test.
    pub kind: NamedNode,
    // Optional mf:name label.
    pub name: Option<String>,
    // Optional rdfs:comment description.
    pub comment: Option<String>,
    // URL of the action file when mf:action is a plain IRI.
    pub action: Option<String>,
    // URL of the query file (qt:query) when mf:action is a blank node.
    pub query: Option<String>,
    // URL of the default graph data file (qt:data), if any.
    pub data: Option<String>,
    // URLs of named graph data files (qt:graphData).
    pub graph_data: Vec<String>,
    // (service IRI, data file URL) pairs from qt:serviceData.
    pub service_data: Vec<(String, String)>,
    // URL of the expected result file (mf:result), if any.
    pub result: Option<String>,
}
impl fmt::Display for Test {
    /// Builds a one-line human readable description of the test entry.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.kind)?;
        if let Some(name) = &self.name {
            write!(f, " named \"{}\"", name)?;
        }
        if let Some(comment) = &self.comment {
            write!(f, " with comment \"{}\"", comment)?;
        }
        if let Some(action) = &self.action {
            write!(f, " on file \"{}\"", action)?;
        }
        if let Some(query) = &self.query {
            write!(f, " on query {}", query)?;
        }
        if let Some(data) = &self.data {
            write!(f, " with data {}", data)?;
        }
        for graph in &self.graph_data {
            write!(f, " and graph data {}", graph)?;
        }
        if let Some(result) = &self.result {
            write!(f, " and expected result {}", result)?;
        }
        Ok(())
    }
}
/// Lazy iterator over the tests declared by a set of (possibly recursive) manifests.
pub struct TestManifest {
    // Store accumulating every manifest file loaded so far.
    graph: MemoryStore,
    // Test nodes discovered but not yet emitted.
    tests_to_do: Vec<Term>,
    // Manifest URLs discovered but not yet loaded.
    manifests_to_do: Vec<String>,
}
impl TestManifest {
    /// Creates a manifest reader starting from the given manifest URLs.
    pub fn new<S: ToString>(manifest_urls: impl IntoIterator<Item = S>) -> Self {
        let manifests_to_do = manifest_urls
            .into_iter()
            .map(|url| url.to_string())
            .collect();
        Self {
            graph: MemoryStore::new(),
            tests_to_do: Vec::new(),
            manifests_to_do,
        }
    }
}
impl Iterator for TestManifest {
    type Item = Result<Test>;
    /// Pops the next queued test node; when none are pending, loads the next
    /// manifest file and enqueues the tests and included manifests it lists.
    fn next(&mut self) -> Option<Result<Test>> {
        match self.tests_to_do.pop() {
            Some(Term::NamedNode(test_node)) => {
                let test_subject = NamedOrBlankNode::from(test_node.clone());
                // The rdf:type IRI is kept whole as the test kind.
                let kind =
                    match object_for_subject_predicate(&self.graph, &test_subject, &rdf::TYPE) {
                        Some(Term::NamedNode(c)) => c,
                        _ => return self.next(), //We ignore the test
                    };
                let name = match object_for_subject_predicate(&self.graph, &test_subject, &mf::NAME)
                {
                    Some(Term::Literal(c)) => Some(c.value().to_string()),
                    _ => None,
                };
                let comment = match object_for_subject_predicate(
                    &self.graph,
                    &test_subject,
                    &rdfs::COMMENT,
                ) {
                    Some(Term::Literal(c)) => Some(c.value().to_string()),
                    _ => None,
                };
                // mf:action is either a plain file IRI or a blank node
                // carrying qt:query, qt:data, qt:graphData and qt:serviceData.
                let (action, query, data, graph_data, service_data) =
                    match object_for_subject_predicate(&self.graph, &test_subject, &*mf::ACTION) {
                        Some(Term::NamedNode(n)) => {
                            (Some(n.into_string()), None, None, vec![], vec![])
                        }
                        Some(Term::BlankNode(n)) => {
                            let n = n.into();
                            let query =
                                match object_for_subject_predicate(&self.graph, &n, &qt::QUERY) {
                                    Some(Term::NamedNode(q)) => Some(q.into_string()),
                                    _ => None,
                                };
                            let data =
                                match object_for_subject_predicate(&self.graph, &n, &qt::DATA) {
                                    Some(Term::NamedNode(q)) => Some(q.into_string()),
                                    _ => None,
                                };
                            let graph_data =
                                objects_for_subject_predicate(&self.graph, &n, &qt::GRAPH_DATA)
                                    .filter_map(|g| match g {
                                        Term::NamedNode(q) => Some(q.into_string()),
                                        _ => None,
                                    })
                                    .collect();
                            // Each qt:serviceData node must provide both an
                            // endpoint IRI and a data file to be kept.
                            let service_data =
                                objects_for_subject_predicate(&self.graph, &n, &qt::SERVICE_DATA)
                                    .filter_map(|g| match g {
                                        Term::NamedNode(g) => Some(g.into()),
                                        Term::BlankNode(g) => Some(g.into()),
                                        _ => None,
                                    })
                                    .filter_map(|g| {
                                        if let (
                                            Some(Term::NamedNode(endpoint)),
                                            Some(Term::NamedNode(data)),
                                        ) = (
                                            object_for_subject_predicate(
                                                &self.graph,
                                                &g,
                                                &qt::ENDPOINT,
                                            ),
                                            object_for_subject_predicate(
                                                &self.graph,
                                                &g,
                                                &qt::DATA,
                                            ),
                                        ) {
                                            Some((endpoint.into_string(), data.into_string()))
                                        } else {
                                            None
                                        }
                                    })
                                    .collect();
                            (None, query, data, graph_data, service_data)
                        }
                        Some(_) => return Some(Err(Error::msg("invalid action"))),
                        None => {
                            return Some(Err(Error::msg(format!(
                                "action not found for test {}",
                                test_subject
                            ))));
                        }
                    };
                let result =
                    match object_for_subject_predicate(&self.graph, &test_subject, &*mf::RESULT) {
                        Some(Term::NamedNode(n)) => Some(n.into_string()),
                        Some(_) => return Some(Err(Error::msg("invalid result"))),
                        None => None,
                    };
                Some(Ok(Test {
                    id: test_node,
                    kind,
                    name,
                    comment,
                    action,
                    query,
                    data,
                    graph_data,
                    service_data,
                    result,
                }))
            }
            Some(_) => self.next(),
            None => {
                // No test pending: load the next manifest file, if any.
                match self.manifests_to_do.pop() {
                    Some(url) => {
                        let manifest =
                            NamedOrBlankNode::from(NamedNode::parse(url.clone()).unwrap());
                        if let Err(error) = load_to_store(&url, &self.graph, None) {
                            return Some(Err(error));
                        }
                        // New manifests
                        match object_for_subject_predicate(&self.graph, &manifest, &*mf::INCLUDE) {
                            Some(Term::BlankNode(list)) => {
                                self.manifests_to_do.extend(
                                    RdfListIterator::iter(&self.graph, list.clone().into())
                                        .filter_map(|m| match m {
                                            Term::NamedNode(nm) => Some(nm.into_string()),
                                            _ => None,
                                        }),
                                );
                            }
                            Some(_) => return Some(Err(Error::msg("invalid tests list"))),
                            None => (),
                        }
                        // New tests
                        match object_for_subject_predicate(&self.graph, &manifest, &*mf::ENTRIES) {
                            Some(Term::BlankNode(list)) => {
                                self.tests_to_do.extend(RdfListIterator::iter(
                                    &self.graph,
                                    list.clone().into(),
                                ));
                            }
                            Some(term) => {
                                return Some(Err(Error::msg(format!(
                                    "Invalid tests list. Got term {}",
                                    term
                                ))));
                            }
                            None => (),
                        }
                    }
                    None => return None,
                }
                self.next()
            }
        }
    }
}
/// Iterator over the members of an RDF collection (rdf:first/rdf:rest list).
struct RdfListIterator<'a> {
    graph: &'a MemoryStore,
    // Current list cell; `None` once rdf:nil (or a malformed cell) is reached.
    current_node: Option<NamedOrBlankNode>,
}
impl<'a> RdfListIterator<'a> {
fn iter(graph: &'a MemoryStore, root: NamedOrBlankNode) -> RdfListIterator<'a> {
RdfListIterator {
graph,
current_node: Some(root),
}
}
}
impl<'a> Iterator for RdfListIterator<'a> {
    type Item = Term;

    /// Emits the rdf:first value of the current cell and advances to rdf:rest.
    fn next(&mut self) -> Option<Term> {
        let cell = self.current_node.take()?;
        let value = object_for_subject_predicate(&self.graph, &cell, &rdf::FIRST);
        // Stop on rdf:nil or on anything that is not a node.
        self.current_node =
            match object_for_subject_predicate(&self.graph, &cell, &rdf::REST) {
                Some(Term::NamedNode(ref n)) if *n == *rdf::NIL => None,
                Some(Term::NamedNode(n)) => Some(n.into()),
                Some(Term::BlankNode(n)) => Some(n.into()),
                _ => None,
            };
        value
    }
}
/// Returns one object of the (subject, predicate, ?, any graph) pattern, if any.
fn object_for_subject_predicate(
    store: &MemoryStore,
    subject: &NamedOrBlankNode,
    predicate: &NamedNode,
) -> Option<Term> {
    let mut objects = objects_for_subject_predicate(store, subject, predicate);
    objects.next()
}
/// Iterates over every object matching (subject, predicate, ?, any graph).
fn objects_for_subject_predicate(
    store: &MemoryStore,
    subject: &NamedOrBlankNode,
    predicate: &NamedNode,
) -> impl Iterator<Item = Term> {
    let quads = store.quads_for_pattern(Some(subject), Some(predicate), None, None);
    quads.map(|quad| quad.object_owned())
}

@ -0,0 +1,80 @@
use crate::files::load_store;
use crate::manifest::Test;
use crate::report::TestResult;
use chrono::Utc;
use oxigraph::{Error, Result};
/// Runs every parser test of `manifest` and collects one timestamped
/// `TestResult` per test. Stops at the first manifest reading error.
pub fn evaluate_parser_tests(
    manifest: impl Iterator<Item = Result<Test>>,
) -> Result<Vec<TestResult>> {
    let mut results = Vec::new();
    for test in manifest {
        let test = test?;
        let outcome = evaluate_parser_test(&test);
        results.push(TestResult {
            test: test.id,
            outcome,
            date: Utc::now(),
        });
    }
    Ok(results)
}
fn evaluate_parser_test(test: &Test) -> Result<()> {
let action = test
.action
.as_deref()
.ok_or_else(|| Error::msg(format!("No action found for test {}", test)))?;
if test.kind == "http://www.w3.org/ns/rdftest#TestNTriplesPositiveSyntax"
|| test.kind == "http://www.w3.org/ns/rdftest#TestNQuadsPositiveSyntax"
|| test.kind == "http://www.w3.org/ns/rdftest#TestTurtlePositiveSyntax"
|| test.kind == "http://www.w3.org/ns/rdftest#TestTrigPositiveSyntax"
{
match load_store(action) {
Ok(_) => Ok(()),
Err(e) => Err(Error::msg(format!("Parse error: {}", e))),
}
} else if test.kind == "http://www.w3.org/ns/rdftest#TestNTriplesNegativeSyntax"
|| test.kind == "http://www.w3.org/ns/rdftest#TestNQuadsNegativeSyntax"
|| test.kind == "http://www.w3.org/ns/rdftest#TestTurtleNegativeSyntax"
|| test.kind == "http://www.w3.org/ns/rdftest#TestTurtleNegativeEval"
|| test.kind == "http://www.w3.org/ns/rdftest#TestTrigNegativeSyntax"
|| test.kind == "http://www.w3.org/ns/rdftest#TestTrigNegativeEval"
|| test.kind == "http://www.w3.org/ns/rdftest#TestXMLNegativeSyntax"
{
match load_store(action) {
Ok(_) => Err(Error::msg(
"File parsed with an error even if it should not",
)),
Err(_) => Ok(()),
}
} else if test.kind == "http://www.w3.org/ns/rdftest#TestTurtleEval"
|| test.kind == "http://www.w3.org/ns/rdftest#TestTrigEval"
|| test.kind == "http://www.w3.org/ns/rdftest#TestXMLEval"
{
match load_store(action) {
Ok(actual_graph) => {
if let Some(result) = &test.result {
match load_store(result) {
Ok(expected_graph) => {
if expected_graph.is_isomorphic(&actual_graph) {
Ok(())
} else {
Err(Error::msg(format!(
"The two files are not isomorphic. Expected:\n{}\nActual:\n{}",
expected_graph, actual_graph
)))
}
}
Err(e) => Err(Error::msg(format!("Parse error on file {}: {}", action, e))),
}
} else {
Err(Error::msg("No tests result found".to_string()))
}
}
Err(e) => Err(Error::msg(format!("Parse error on file {}: {}", action, e))),
}
} else {
Err(Error::msg(format!("Unsupported test type: {}", test.kind)))
}
}

@ -0,0 +1,10 @@
use chrono::{DateTime, Utc};
use oxigraph::model::NamedNode;
use oxigraph::Result;
/// Outcome of running a single testsuite test.
#[derive(Debug)]
pub struct TestResult {
    // IRI identifying the test in its manifest
    pub test: NamedNode,
    // Ok(()) if the test passed, the failure reason otherwise
    pub outcome: Result<()>,
    // timestamp of when the test was executed
    pub date: DateTime<Utc>,
}

// ===== new file: testsuite/src/sparql_evaluator.rs =====
use crate::files::*;
use crate::manifest::*;
use crate::report::*;
use crate::vocab::*;
use chrono::Utc;
use oxigraph::model::vocab::*;
use oxigraph::model::*;
use oxigraph::sparql::*;
use oxigraph::{Error, MemoryStore, Result};
use std::collections::HashMap;
use std::fmt;
use std::str::FromStr;
use std::sync::Arc;
/// Runs every SPARQL test of the given manifest and collects a timestamped
/// `TestResult` per test.
///
/// Stops at the first manifest loading error; individual test failures are
/// recorded in the `outcome` field, not propagated.
pub fn evaluate_sparql_tests(
    manifest: impl Iterator<Item = Result<Test>>,
) -> Result<Vec<TestResult>> {
    let mut results = Vec::new();
    for test in manifest {
        let test = test?;
        let outcome = evaluate_sparql_test(&test);
        results.push(TestResult {
            test: test.id,
            outcome,
            date: Utc::now(),
        });
    }
    Ok(results)
}
fn evaluate_sparql_test(test: &Test) -> Result<()> {
if test.kind == "http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#PositiveSyntaxTest"
|| test.kind
== "http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#PositiveSyntaxTest11"
{
let query_file = test
.action
.as_deref()
.ok_or_else(|| Error::msg(format!("No action found for test {}", test)))?;
match Query::parse(&read_file_to_string(&query_file)?, Some(&query_file)) {
Err(error) => Err(Error::msg(format!(
"Not able to parse {} with error: {}",
test, error
))),
Ok(query) => match Query::parse(&query.to_string(), None) {
Ok(_) => Ok(()),
Err(error) => Err(Error::msg(format!(
"Failure to deserialize \"{}\" of {} with error: {}",
query.to_string(),
test,
error
))),
},
}
} else if test.kind
== "http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#NegativeSyntaxTest"
|| test.kind
== "http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#NegativeSyntaxTest11"
{
let query_file = test
.action
.as_deref()
.ok_or_else(|| Error::msg(format!("No action found for test {}", test)))?;
match Query::parse(&read_file_to_string(query_file)?, Some(query_file)) {
Ok(result) => Err(Error::msg(format!(
"Oxigraph parses even if it should not {}. The output tree is: {}",
test, result
))),
Err(_) => Ok(()),
}
} else if test.kind
== "http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#QueryEvaluationTest"
{
let store = MemoryStore::new();
if let Some(data) = &test.data {
load_to_store(data, &store, None)?;
}
for graph_data in &test.graph_data {
load_to_store(
&graph_data,
&store,
Some(&NamedNode::parse(graph_data)?.into()),
)?;
}
let query_file = test
.query
.as_deref()
.ok_or_else(|| Error::msg(format!("No action found for test {}", test)))?;
let options = QueryOptions::default()
.with_base_iri(query_file)
.with_service_handler(StaticServiceHandler::new(&test.service_data)?);
match store.prepare_query(&read_file_to_string(query_file)?, options) {
Err(error) => Err(Error::msg(format!(
"Failure to parse query of {} with error: {}",
test, error
))),
Ok(query) => match query.exec() {
Err(error) => Err(Error::msg(format!(
"Failure to execute query of {} with error: {}",
test, error
))),
Ok(actual_results) => {
let expected_results = load_sparql_query_result(test.result.as_ref().unwrap())
.map_err(|e| {
Error::msg(format!(
"Error constructing expected graph for {}: {}",
test, e
))
})?;
let with_order =
if let StaticQueryResults::Solutions { ordered, .. } = &expected_results {
*ordered
} else {
false
};
let actual_results =
StaticQueryResults::from_query_results(actual_results, with_order)?;
if are_query_results_isomorphic(&expected_results, &actual_results) {
Ok(())
} else {
Err(Error::msg(format!("Failure on {}.\nExpected file:\n{}\nOutput file:\n{}\nParsed query:\n{}\nData:\n{}\n",
test,
actual_results,
expected_results,
Query::parse(&read_file_to_string(query_file)?, Some(query_file)).unwrap(),
store
)))
}
}
},
}
} else {
Err(Error::msg(format!("Unsupported test type: {}", test.kind)))
}
}
/// Loads the expected result file of a SPARQL evaluation test.
///
/// `.srx` files are read as SPARQL XML results, `.srj` files as SPARQL JSON
/// results; anything else is parsed as an RDF dataset.
fn load_sparql_query_result(url: &str) -> Result<StaticQueryResults> {
    let syntax = if url.ends_with(".srx") {
        Some(QueryResultSyntax::Xml)
    } else if url.ends_with(".srj") {
        Some(QueryResultSyntax::Json)
    } else {
        None
    };
    match syntax {
        Some(syntax) => {
            StaticQueryResults::from_query_results(QueryResult::read(read_file(url)?, syntax)?, false)
        }
        None => Ok(StaticQueryResults::from_dataset(load_store(url)?)),
    }
}
/// A `ServiceHandler` that answers SERVICE calls from a fixed set of
/// pre-loaded in-memory stores, keyed by the service IRI.
#[derive(Clone)]
struct StaticServiceHandler {
    // shared so the handler can be cheaply cloned into nested QueryOptions
    services: Arc<HashMap<NamedNode, MemoryStore>>,
}
impl StaticServiceHandler {
fn new(services: &[(String, String)]) -> Result<Self> {
Ok(Self {
services: Arc::new(
services
.iter()
.map(|(name, data)| {
let name = NamedNode::parse(name)?;
let store = MemoryStore::new();
load_to_store(&data, &store, None)?;
Ok((name, store))
})
.collect::<Result<_>>()?,
),
})
}
}
impl ServiceHandler for StaticServiceHandler {
    /// Evaluates `graph_pattern` against the store registered for
    /// `service_name`, failing if the service is unknown or if the
    /// evaluation does not produce solution bindings.
    fn handle<'a>(
        &'a self,
        service_name: &NamedNode,
        graph_pattern: &'a GraphPattern,
    ) -> Result<QuerySolutionsIterator<'a>> {
        if let QueryResult::Bindings(iterator) = self
            .services
            .get(service_name)
            .ok_or_else(|| Error::msg(format!("Service {} not found", service_name)))?
            .prepare_query_from_pattern(
                &graph_pattern,
                // nested SERVICE calls are resolved by the same handler
                QueryOptions::default().with_service_handler(self.clone()),
            )?
            .exec()?
        {
            //TODO: very ugly. The solutions are materialized into a Vec,
            // presumably to detach the iterator from the prepared query's
            // lifetime — confirm before simplifying.
            let (variables, iter) = iterator.destruct();
            let collected = iter.collect::<Vec<_>>();
            Ok(QuerySolutionsIterator::new(
                variables,
                Box::new(collected.into_iter()),
            ))
        } else {
            Err(Error::msg("Expected solutions but got another QueryResult"))
        }
    }
}
/// Encodes a `QueryResult` as an RDF dataset using the DAWG result-set
/// vocabulary (`rs:ResultSet`, `rs:solution`, `rs:binding`, ...).
///
/// * Graph results are stored as-is in the default graph.
/// * Boolean results become a single `rs:boolean` statement.
/// * Solution sequences become one `rs:solution` blank node per solution;
///   when `with_order` is set each solution also gets a 1-based `rs:index`
///   so the ordering survives the encoding.
fn to_dataset(result: QueryResult<'_>, with_order: bool) -> Result<MemoryStore> {
    match result {
        QueryResult::Graph(graph) => graph.map(|t| t.map(|t| t.in_graph(None))).collect(),
        QueryResult::Boolean(value) => {
            let store = MemoryStore::new();
            // blank node standing for the result set itself
            let result_set = BlankNode::default();
            store.insert(Quad::new(
                result_set,
                rdf::TYPE.clone(),
                rs::RESULT_SET.clone(),
                None,
            ));
            store.insert(Quad::new(
                result_set,
                rs::BOOLEAN.clone(),
                Literal::from(value),
                None,
            ));
            Ok(store)
        }
        QueryResult::Bindings(solutions) => {
            let store = MemoryStore::new();
            let result_set = BlankNode::default();
            store.insert(Quad::new(
                result_set,
                rdf::TYPE.clone(),
                rs::RESULT_SET.clone(),
                None,
            ));
            // declare every projected variable on the result set
            for variable in solutions.variables() {
                store.insert(Quad::new(
                    result_set,
                    rs::RESULT_VARIABLE.clone(),
                    Literal::new_simple_literal(variable.as_str()),
                    None,
                ));
            }
            for (i, solution) in solutions.enumerate() {
                let solution = solution?;
                let solution_id = BlankNode::default();
                store.insert(Quad::new(
                    result_set,
                    rs::SOLUTION.clone(),
                    solution_id,
                    None,
                ));
                // one rs:binding per bound variable of the solution
                for (variable, value) in solution.iter() {
                    let binding = BlankNode::default();
                    store.insert(Quad::new(solution_id, rs::BINDING.clone(), binding, None));
                    store.insert(Quad::new(binding, rs::VALUE.clone(), value.clone(), None));
                    store.insert(Quad::new(
                        binding,
                        rs::VARIABLE.clone(),
                        Literal::new_simple_literal(variable.as_str()),
                        None,
                    ));
                }
                if with_order {
                    // 1-based position so ordered results can be rebuilt
                    store.insert(Quad::new(
                        solution_id,
                        rs::INDEX.clone(),
                        Literal::from((i + 1) as i128),
                        None,
                    ));
                }
            }
            Ok(store)
        }
    }
}
/// Checks whether two static query results represent the same result,
/// ignoring blank node naming and, for unordered solution sets, the order
/// of the solutions.
fn are_query_results_isomorphic(
    expected: &StaticQueryResults,
    actual: &StaticQueryResults,
) -> bool {
    match (expected, actual) {
        (
            StaticQueryResults::Solutions {
                variables: expected_variables,
                solutions: expected_solutions,
                ordered,
            },
            StaticQueryResults::Solutions {
                variables: actual_variables,
                solutions: actual_solutions,
                ..
            },
        ) => {
            if expected_variables != actual_variables {
                return false;
            }
            if *ordered {
                // Ordered results: compare solutions pairwise in sequence.
                expected_solutions
                    .iter()
                    .zip(actual_solutions)
                    .all(|(expected_solution, actual_solution)| {
                        compare_solutions(expected_solution, actual_solution)
                    })
            } else {
                // Unordered results: every expected solution must match
                // some actual solution.
                expected_solutions.iter().all(|expected_solution| {
                    actual_solutions
                        .iter()
                        .any(|actual_solution| compare_solutions(expected_solution, actual_solution))
                })
            }
        }
        (StaticQueryResults::Boolean(expected), StaticQueryResults::Boolean(actual)) => {
            expected == actual
        }
        (StaticQueryResults::Graph(expected), StaticQueryResults::Graph(actual)) => {
            expected.is_isomorphic(&actual)
        }
        // Results of different kinds are never equivalent.
        _ => false,
    }
}
/// Compares two solutions (sorted variable/value binding lists) for
/// equality, letting blank nodes of `actual` map onto the corresponding
/// terms of `expected`.
///
/// Each actual blank node is bound to the first expected term it is seen
/// with; later occurrences must match the same term. NOTE(review): the
/// mapping is only enforced in this one direction and injectivity is not
/// checked.
fn compare_solutions(expected: &[(Variable, Term)], actual: &[(Variable, Term)]) -> bool {
    // `zip` alone would silently ignore trailing bindings of the longer
    // solution, wrongly accepting solutions with extra or missing bindings.
    if expected.len() != actual.len() {
        return false;
    }
    let mut bnode_map = HashMap::new();
    expected.iter().zip(actual).all(
        move |((expected_variable, expected_value), (actual_variable, actual_value))| {
            expected_variable == actual_variable
                && expected_value
                    == if let Term::BlankNode(actual_value) = actual_value {
                        bnode_map.entry(actual_value).or_insert(expected_value)
                    } else {
                        actual_value
                    }
        },
    )
}
/// A fully materialized, comparison-friendly form of SPARQL query results.
enum StaticQueryResults {
    // CONSTRUCT/DESCRIBE result: the produced dataset
    Graph(MemoryStore),
    // SELECT result
    Solutions {
        // projected variables, sorted
        variables: Vec<Variable>,
        // one binding list per solution, each sorted by variable
        solutions: Vec<Vec<(Variable, Term)>>,
        // true when every solution carried an rs:index annotation
        ordered: bool,
    },
    // ASK result
    Boolean(bool),
}
impl fmt::Display for StaticQueryResults {
    /// Renders the results in a compact debug-friendly text form used by the
    /// test failure reports.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            StaticQueryResults::Graph(graph) => graph.fmt(f),
            StaticQueryResults::Boolean(value) => value.fmt(f),
            StaticQueryResults::Solutions {
                variables,
                solutions,
                ..
            } => {
                f.write_str("Variables:")?;
                for variable in variables {
                    write!(f, " {}", variable)?;
                }
                for solution in solutions {
                    f.write_str("\n{")?;
                    for (variable, value) in solution {
                        write!(f, "{} = {} ", variable, value)?;
                    }
                    f.write_str("}")?;
                }
                Ok(())
            }
        }
    }
}
impl StaticQueryResults {
    /// Normalizes `QueryResult` by round-tripping it through the result-set
    /// RDF encoding produced by `to_dataset`.
    fn from_query_results(
        results: QueryResult<'_>,
        with_order: bool,
    ) -> Result<StaticQueryResults> {
        Ok(Self::from_dataset(to_dataset(results, with_order)?))
    }
    /// Decodes a dataset following the DAWG result-set vocabulary back into
    /// query results. A dataset without any `rs:ResultSet` instance is
    /// treated as a plain graph result.
    fn from_dataset(dataset: MemoryStore) -> StaticQueryResults {
        if let Some(result_set) = dataset
            .quads_for_pattern(
                None,
                Some(&rdf::TYPE),
                Some(&rs::RESULT_SET.clone().into()),
                None,
            )
            .map(|q| q.subject_owned())
            .next()
        {
            if let Some(bool) = dataset
                .quads_for_pattern(Some(&result_set), Some(&rs::BOOLEAN), None, None)
                .map(|q| q.object_owned())
                .next()
            {
                // Boolean query: the result is the rs:boolean object value
                StaticQueryResults::Boolean(bool == Literal::from(true).into())
            } else {
                // Regular query: collect the declared result variables
                let mut variables: Vec<Variable> = dataset
                    .quads_for_pattern(Some(&result_set), Some(&rs::RESULT_VARIABLE), None, None)
                    .filter_map(|q| {
                        if let Term::Literal(l) = q.object_owned() {
                            Some(Variable::new(l.value()))
                        } else {
                            None
                        }
                    })
                    .collect();
                // sorted so two result sets can be compared field by field
                variables.sort();
                let mut solutions: Vec<_> = dataset
                    .quads_for_pattern(Some(&result_set), Some(&rs::SOLUTION), None, None)
                    .filter_map(|q| {
                        if let Term::BlankNode(solution) = q.object_owned() {
                            let solution = solution.into();
                            // each rs:binding blank node carries a variable
                            // name (rs:variable) and a value (rs:value)
                            let mut bindings = dataset
                                .quads_for_pattern(Some(&solution), Some(&rs::BINDING), None, None)
                                .filter_map(|q| {
                                    if let Term::BlankNode(binding) = q.object_owned() {
                                        let binding = binding.into();
                                        if let (Some(Term::Literal(variable)), Some(value)) = (
                                            dataset
                                                .quads_for_pattern(
                                                    Some(&binding),
                                                    Some(&rs::VARIABLE),
                                                    None,
                                                    None,
                                                )
                                                .map(|q| q.object_owned())
                                                .next(),
                                            dataset
                                                .quads_for_pattern(
                                                    Some(&binding),
                                                    Some(&rs::VALUE),
                                                    None,
                                                    None,
                                                )
                                                .map(|q| q.object_owned())
                                                .next(),
                                        ) {
                                            Some((Variable::new(variable.value()), value))
                                        } else {
                                            None
                                        }
                                    } else {
                                        None
                                    }
                                })
                                .collect::<Vec<_>>();
                            // sorted by variable to match `variables` ordering
                            bindings.sort_by(|(a, _), (b, _)| a.cmp(&b));
                            // optional rs:index annotation for ordered results
                            let index = dataset
                                .quads_for_pattern(Some(&solution), Some(&rs::INDEX), None, None)
                                .filter_map(|q| {
                                    if let Term::Literal(l) = q.object_owned() {
                                        u64::from_str(l.value()).ok()
                                    } else {
                                        None
                                    }
                                })
                                .next();
                            Some((bindings, index))
                        } else {
                            None
                        }
                    })
                    .collect();
                // unindexed solutions (index == None) sort first
                solutions.sort_by(|(_, index_a), (_, index_b)| index_a.cmp(index_b));
                // considered ordered only if every solution carries an index
                let ordered = solutions.iter().all(|(_, index)| index.is_some());
                StaticQueryResults::Solutions {
                    variables,
                    solutions: solutions
                        .into_iter()
                        .map(|(solution, _)| solution)
                        .collect(),
                    ordered,
                }
            }
        } else {
            // no rs:ResultSet instance: treat the dataset as a graph result
            StaticQueryResults::Graph(dataset)
        }
    }
}

// ===== new file: testsuite/src/vocab.rs =====
/// Terms of the DAWG result-set vocabulary
/// (`http://www.w3.org/2001/sw/DataAccess/tests/result-set#`), used to
/// encode SPARQL results as RDF for comparison.
pub mod rs {
    use lazy_static::lazy_static;
    use oxigraph::model::NamedNode;
    lazy_static! {
        pub static ref RESULT_SET: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/result-set#ResultSet")
                .unwrap();
        pub static ref RESULT_VARIABLE: NamedNode = NamedNode::parse(
            "http://www.w3.org/2001/sw/DataAccess/tests/result-set#resultVariable"
        )
        .unwrap();
        pub static ref SOLUTION: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/result-set#solution")
                .unwrap();
        pub static ref BINDING: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/result-set#binding")
                .unwrap();
        pub static ref VALUE: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/result-set#value")
                .unwrap();
        pub static ref VARIABLE: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/result-set#variable")
                .unwrap();
        pub static ref INDEX: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/result-set#index")
                .unwrap();
        pub static ref BOOLEAN: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/result-set#boolean")
                .unwrap();
    }
}
/// Terms of the test manifest vocabulary
/// (`http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#`).
pub mod mf {
    use lazy_static::lazy_static;
    use oxigraph::model::NamedNode;
    lazy_static! {
        pub static ref INCLUDE: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#include")
                .unwrap();
        pub static ref ENTRIES: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#entries")
                .unwrap();
        pub static ref NAME: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#name")
                .unwrap();
        pub static ref ACTION: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#action")
                .unwrap();
        pub static ref RESULT: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#result")
                .unwrap();
    }
}
/// Terms of the query-test vocabulary
/// (`http://www.w3.org/2001/sw/DataAccess/tests/test-query#`), describing
/// the inputs of a SPARQL query test.
pub mod qt {
    use lazy_static::lazy_static;
    use oxigraph::model::NamedNode;
    lazy_static! {
        pub static ref QUERY: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-query#query")
                .unwrap();
        pub static ref DATA: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-query#data").unwrap();
        pub static ref GRAPH_DATA: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-query#graphData")
                .unwrap();
        pub static ref SERVICE_DATA: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-query#serviceData")
                .unwrap();
        pub static ref ENDPOINT: NamedNode =
            NamedNode::parse("http://www.w3.org/2001/sw/DataAccess/tests/test-query#endpoint")
                .unwrap();
    }
}

@ -0,0 +1,25 @@
use oxigraph::Result;
use oxigraph_testsuite::manifest::TestManifest;
use oxigraph_testsuite::sparql_evaluator::evaluate_sparql_tests;
/// Runs every test of the given SPARQL manifests and panics with the list of
/// failing tests if any of them errors.
fn run_testsuite(manifest_urls: Vec<&str>) -> Result<()> {
    let results = evaluate_sparql_tests(TestManifest::new(manifest_urls))?;
    let errors: Vec<String> = results
        .iter()
        .filter_map(|result| {
            result
                .outcome
                .as_ref()
                .err()
                .map(|error| format!("{}: failed with error {}", result.test, error))
        })
        .collect();
    assert!(errors.is_empty(), "\n{}\n", errors.join("\n"));
    Ok(())
}
/// Runs the Oxigraph-specific SPARQL regression tests.
// NOTE(review): this URL is presumably remapped to the local
// testsuite/oxigraph-tests/sparql/manifest.ttl by the file resolver —
// confirm against files.rs.
#[test]
fn oxigraph_sparql_testsuite() -> Result<()> {
    run_testsuite(vec![
        "https://github.com/oxigraph/oxigraph/tests/sparql/manifest.ttl",
    ])
}

@ -0,0 +1,43 @@
use oxigraph::Result;
use oxigraph_testsuite::manifest::TestManifest;
use oxigraph_testsuite::parser_evaluator::evaluate_parser_tests;
/// Runs every test of the given parser manifest and panics with the list of
/// failing tests if any of them errors.
fn run_testsuite(manifest_url: &str) -> Result<()> {
    let results = evaluate_parser_tests(TestManifest::new(vec![manifest_url]))?;
    let errors: Vec<String> = results
        .iter()
        .filter_map(|result| {
            result
                .outcome
                .as_ref()
                .err()
                .map(|error| format!("{}: failed with error {}", result.test, error))
        })
        .collect();
    assert!(errors.is_empty(), "\n{}\n", errors.join("\n"));
    Ok(())
}
// W3C conformance testsuites, one per supported parser.
// NOTE(review): manifest URLs are presumably remapped to the local
// rdf-tests submodule by the file resolver — confirm against files.rs.
#[test]
fn ntriples_w3c_testsuite() -> Result<()> {
    run_testsuite("http://w3c.github.io/rdf-tests/ntriples/manifest.ttl")
}
#[test]
fn nquads_w3c_testsuite() -> Result<()> {
    run_testsuite("http://w3c.github.io/rdf-tests/nquads/manifest.ttl")
}
#[test]
fn turtle_w3c_testsuite() -> Result<()> {
    run_testsuite("http://w3c.github.io/rdf-tests/turtle/manifest.ttl")
}
#[test]
fn trig_w3c_testsuite() -> Result<()> {
    run_testsuite("http://w3c.github.io/rdf-tests/trig/manifest.ttl")
}
#[test]
fn rdf_xml_w3c_testsuite() -> Result<()> {
    run_testsuite("http://www.w3.org/2013/RDFXMLTests/manifest.ttl")
}

@ -0,0 +1,125 @@
use oxigraph::Result;
use oxigraph_testsuite::manifest::TestManifest;
use oxigraph_testsuite::sparql_evaluator::evaluate_sparql_tests;
/// Runs every test of the given SPARQL manifests, skipping the tests listed
/// in `ignored_tests`, and panics with the list of remaining failures.
fn run_testsuite(manifest_urls: Vec<&str>, ignored_tests: Vec<&str>) -> Result<()> {
    let results = evaluate_sparql_tests(TestManifest::new(manifest_urls))?;
    let errors: Vec<String> = results
        .iter()
        .filter_map(|result| match &result.outcome {
            // known failures listed in `ignored_tests` are not reported
            Err(error) if !ignored_tests.contains(&result.test.as_str()) => {
                Some(format!("{}: failed with error {}", result.test, error))
            }
            _ => None,
        })
        .collect();
    assert!(errors.is_empty(), "\n{}\n", errors.join("\n"));
    Ok(())
}
/// Runs the W3C SPARQL 1.0 (DAWG data-r2) query evaluation testsuite.
/// The blacklist documents known, deliberate deviations (RDF 1.1 semantics,
/// value normalization, XSD 1.1 comparisons).
// NOTE(review): the blacklisted syntax-sparql3/4 tests presumably come from
// manifest-syntax.ttl in the list below — confirm.
#[test]
fn sparql10_w3c_query_evaluation_testsuite() -> Result<()> {
    let manifest_urls = vec![
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/manifest-syntax.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/algebra/manifest.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/ask/manifest.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/basic/manifest.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/bnode-coreference/manifest.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/boolean-effective-value/manifest.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/bound/manifest.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/cast/manifest.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/construct/manifest.ttl",
        //TODO FROM and FROM NAMED "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/construct/manifest.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/distinct/manifest.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-equals/manifest.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-ops/manifest.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/graph/manifest.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/i18n/manifest.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/open-world/manifest.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/optional/manifest.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/optional-filter/manifest.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/reduced/manifest.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/regex/manifest.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/solution-seq/manifest.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/sort/manifest.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/triple-match/manifest.ttl",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/type-promotion/manifest.ttl",
    ];
    let test_blacklist = vec![
        //Bad SPARQL query that should be rejected by the parser
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/syntax-sparql4/manifest#syn-bad-38",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/syntax-sparql4/manifest#syn-bad-34",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/syntax-sparql3/manifest#syn-bad-26",
        //Multiple writing of the same xsd:integer. Our system does strong normalization.
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/distinct/manifest#distinct-1",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/distinct/manifest#distinct-9",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest#dawg-str-1",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest#dawg-str-2",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-equals/manifest#eq-graph-1",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-equals/manifest#eq-graph-2",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/open-world/manifest#open-eq-01",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/open-world/manifest#open-eq-04",
        //Multiple writing of the same xsd:double. Our system does strong normalization.
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest#sameTerm",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest#sameTerm-simple",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest#sameTerm-eq",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest#sameTerm-not-eq",
        //Simple literal vs xsd:string. We apply RDF 1.1
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/distinct/manifest#distinct-2",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/open-world/manifest#open-eq-08",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/open-world/manifest#open-eq-10",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/open-world/manifest#open-eq-11",
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/open-world/manifest#open-eq-12",
        //DATATYPE("foo"@en) returns rdf:langString in RDF 1.1
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/expr-builtin/manifest#dawg-datatype-2",
        // We use XSD 1.1 equality on dates
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/open-world/manifest#date-2",
        // We choose to simplify first the nested group patterns in OPTIONAL
        "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/optional-filter/manifest#dawg-optional-filter-005-not-simplified"
    ];
    run_testsuite(manifest_urls, test_blacklist)
}
/// Runs the W3C SPARQL 1.1 query evaluation testsuite.
/// The blacklist documents known failures and unimplemented features.
#[test]
fn sparql11_query_w3c_evaluation_testsuite() -> Result<()> {
    let manifest_urls =
        vec!["http://www.w3.org/2009/sparql/docs/tests/data-sparql11/manifest-sparql11-query.ttl"];
    let test_blacklist = vec![
        //Bad SPARQL query that should be rejected by the parser
        "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/aggregates/manifest#agg08",
        "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/aggregates/manifest#agg09",
        "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/aggregates/manifest#agg10",
        "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/aggregates/manifest#agg11",
        "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/aggregates/manifest#agg12",
        // Fixed: #group07 was listed twice; the duplicate entry is removed.
        "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/grouping/manifest#group06",
        "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/grouping/manifest#group07",
        "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/syntax-query/manifest#test_43",
        "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/syntax-query/manifest#test_44",
        "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/syntax-query/manifest#test_45",
        "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/syntax-query/manifest#test_60",
        "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/syntax-query/manifest#test_61a",
        "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/syntax-query/manifest#test_62a",
        "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/syntax-query/manifest#test_65",
        // SPARQL 1.1 JSON query results deserialization is not implemented yet
        "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/aggregates/manifest#agg-empty-group-count-1",
        "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/aggregates/manifest#agg-empty-group-count-2",
        // FROM support
        "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/construct/manifest#constructwhere04",
        //BNODE() scope is currently wrong
        "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/functions/manifest#bnode01",
        //Property path with unbound graph name are not supported yet
        "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/property-path/manifest#pp35",
        //SERVICE name from a BGP
        "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/service/manifest#service5"
    ];
    run_testsuite(manifest_urls, test_blacklist)
}
// (end of diff; trailing web-UI chrome "Loading… / Cancel / Save" is extraction residue)