From efd5eec65dca680a7c7dfc18b25ce85f500b2f9e Mon Sep 17 00:00:00 2001 From: Tpt Date: Thu, 7 Dec 2023 18:42:27 +0100 Subject: [PATCH] Introduces load_from_read instead of load_graph and load_dataset --- cli/src/main.rs | 34 +++-- js/src/store.rs | 20 ++- lib/benches/store.rs | 7 +- lib/oxrdfio/src/parser.rs | 6 + lib/oxrdfio/src/serializer.rs | 6 + lib/sparesults/src/parser.rs | 6 + lib/sparesults/src/serializer.rs | 6 + lib/src/store.rs | 207 ++++++++++++++++++++++++++---- lib/tests/store.rs | 32 ++--- python/src/store.rs | 34 +++-- testsuite/src/sparql_evaluator.rs | 38 +++--- 11 files changed, 280 insertions(+), 116 deletions(-) diff --git a/cli/src/main.rs b/cli/src/main.rs index 6dc7cc73..2a2f6562 100644 --- a/cli/src/main.rs +++ b/cli/src/main.rs @@ -780,16 +780,21 @@ pub fn main() -> anyhow::Result<()> { fn bulk_load( loader: &BulkLoader, - reader: impl Read, + read: impl Read, format: RdfFormat, base_iri: Option<&str>, to_graph_name: Option, ) -> anyhow::Result<()> { + let mut parser = RdfParser::from_format(format); if let Some(to_graph_name) = to_graph_name { - loader.load_graph(reader, format, to_graph_name, base_iri) - } else { - loader.load_dataset(reader, format, base_iri) - }?; + parser = parser.with_default_graph(to_graph_name); + } + if let Some(base_iri) = base_iri { + parser = parser + .with_base_iri(base_iri) + .with_context(|| format!("Invalid base IRI {base_iri}"))?; + } + loader.load_from_read(parser, read)?; Ok(()) } @@ -1646,15 +1651,16 @@ fn web_load_graph( } else { None }; + let mut parser = RdfParser::from_format(format) + .without_named_graphs() + .with_default_graph(to_graph_name.clone()); + if let Some(base_iri) = base_iri { + parser = parser.with_base_iri(base_iri).map_err(bad_request)?; + } if url_query_parameter(request, "no_transaction").is_some() { - web_bulk_loader(store, request).load_graph( - request.body_mut(), - format, - to_graph_name.clone(), - base_iri, - ) + web_bulk_loader(store, request).load_from_read(parser, request.body_mut()) } else { - store.load_graph(request.body_mut(), format, to_graph_name.clone(), base_iri) + store.load_from_read(parser, request.body_mut()) } .map_err(loader_to_http_error) } @@ -1665,9 +1671,9 @@ fn web_load_dataset( format: RdfFormat, ) -> Result<(), HttpError> { if url_query_parameter(request, "no_transaction").is_some() { - web_bulk_loader(store, request).load_dataset(request.body_mut(), format, None) + web_bulk_loader(store, request).load_from_read(format, request.body_mut()) } else { - store.load_dataset(request.body_mut(), format, None) + store.load_from_read(format, request.body_mut()) } .map_err(loader_to_http_error) } diff --git a/js/src/store.rs b/js/src/store.rs index 008f24f0..f76786a5 100644 --- a/js/src/store.rs +++ b/js/src/store.rs @@ -4,7 +4,7 @@ use crate::format_err; use crate::model::*; use crate::utils::to_err; use js_sys::{Array, Map}; -use oxigraph::io::RdfFormat; +use oxigraph::io::{RdfFormat, RdfParser}; use oxigraph::model::*; use oxigraph::sparql::QueryResults; use oxigraph::store::Store; @@ -161,18 +161,16 @@ impl JsStore { )); }; + let mut parser = RdfParser::from_format(format); if let Some(to_graph_name) = FROM_JS.with(|c| c.to_optional_term(to_graph_name))? 
{ - self.store.load_graph( - data.as_bytes(), - format, - GraphName::try_from(to_graph_name)?, - base_iri.as_deref(), - ) - } else { - self.store - .load_dataset(data.as_bytes(), format, base_iri.as_deref()) + parser = parser.with_default_graph(GraphName::try_from(to_graph_name)?); + } + if let Some(base_iri) = base_iri { + parser = parser.with_base_iri(base_iri).map_err(to_err)?; } - .map_err(to_err) + self.store + .load_from_read(parser, data.as_bytes()) + .map_err(to_err) } pub fn dump(&self, format: &str, from_graph_name: &JsValue) -> Result { diff --git a/lib/benches/store.rs b/lib/benches/store.rs index 20826041..ae56266b 100644 --- a/lib/benches/store.rs +++ b/lib/benches/store.rs @@ -3,7 +3,6 @@ use criterion::{criterion_group, criterion_main, Criterion, Throughput}; use oxhttp::model::{Method, Request, Status}; use oxigraph::io::RdfFormat; -use oxigraph::model::{GraphName, GraphNameRef}; use oxigraph::sparql::{Query, QueryResults, Update}; use oxigraph::store::Store; use rand::random; @@ -64,16 +63,14 @@ fn store_load(c: &mut Criterion) { } fn do_load(store: &Store, data: &[u8]) { - store - .load_graph(data, RdfFormat::NTriples, GraphName::DefaultGraph, None) - .unwrap(); + store.load_from_read(RdfFormat::NTriples, data).unwrap(); store.optimize().unwrap(); } fn do_bulk_load(store: &Store, data: &[u8]) { store .bulk_loader() - .load_graph(data, RdfFormat::NTriples, GraphNameRef::DefaultGraph, None) + .load_from_read(RdfFormat::NTriples, data) .unwrap(); store.optimize().unwrap(); } diff --git a/lib/oxrdfio/src/parser.rs b/lib/oxrdfio/src/parser.rs index 6258ba18..b9aaf646 100644 --- a/lib/oxrdfio/src/parser.rs +++ b/lib/oxrdfio/src/parser.rs @@ -321,6 +321,12 @@ impl RdfParser { } } +impl From for RdfParser { + fn from(format: RdfFormat) -> Self { + Self::from_format(format) + } +} + /// Parses a RDF file from a [`Read`] implementation. Can be built using [`RdfParser::parse_read`]. /// /// Reads are buffered. diff --git a/lib/oxrdfio/src/serializer.rs b/lib/oxrdfio/src/serializer.rs index 347e40f2..487b4539 100644 --- a/lib/oxrdfio/src/serializer.rs +++ b/lib/oxrdfio/src/serializer.rs @@ -158,6 +158,12 @@ impl RdfSerializer { } } +impl From for RdfSerializer { + fn from(format: RdfFormat) -> Self { + Self::from_format(format) + } +} + /// Writes quads or triples to a [`Write`] implementation. /// /// Can be built using [`RdfSerializer::serialize_to_write`]. diff --git a/lib/sparesults/src/parser.rs b/lib/sparesults/src/parser.rs index b0aad24c..f54f85d3 100644 --- a/lib/sparesults/src/parser.rs +++ b/lib/sparesults/src/parser.rs @@ -118,6 +118,12 @@ impl QueryResultsParser { } } +impl From for QueryResultsParser { + fn from(format: QueryResultsFormat) -> Self { + Self::from_format(format) + } +} + /// The reader for a given read of a results file. /// /// It is either a read boolean ([`bool`]) or a streaming reader of a set of solutions ([`FromReadSolutionsReader`]). diff --git a/lib/sparesults/src/serializer.rs b/lib/sparesults/src/serializer.rs index 95356fb8..dc0baeb9 100644 --- a/lib/sparesults/src/serializer.rs +++ b/lib/sparesults/src/serializer.rs @@ -213,6 +213,12 @@ impl QueryResultsSerializer { } } +impl From for QueryResultsSerializer { + fn from(format: QueryResultsFormat) -> Self { + Self::from_format(format) + } +} + /// Allows writing query results into a [`Write`] implementation. /// /// Could be built using a [`QueryResultsSerializer`]. 
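Note on the new `From` conversions added above: because `RdfParser`, `RdfSerializer`, `QueryResultsParser` and `QueryResultsSerializer` now implement `From` of their respective format enums, any API that accepts `impl Into<RdfParser>` can be called with either a bare format or a fully configured parser. A minimal sketch of that calling convention, using only the oxrdfio API touched by this patch (the `count_quads` helper is illustrative, not part of the crate):

```rust
use oxrdfio::{RdfFormat, RdfParser};

// Any function taking `impl Into<RdfParser>` now accepts a bare `RdfFormat`
// (via the new `From<RdfFormat> for RdfParser` impl) or a configured parser.
fn count_quads(parser: impl Into<RdfParser>, data: &[u8]) -> Result<usize, Box<dyn std::error::Error>> {
    let mut count = 0;
    for quad in parser.into().parse_read(data) {
        quad?; // surface syntax errors
        count += 1;
    }
    Ok(count)
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let data = b"<http://example.com/s> <http://example.com/p> <http://example.com/o> .";
    // Bare format, converted through the new `From` impl.
    assert_eq!(count_quads(RdfFormat::NTriples, data.as_ref())?, 1);
    // Explicitly configured parser, same call site.
    let parser = RdfParser::from_format(RdfFormat::Turtle).with_base_iri("http://example.com")?;
    assert_eq!(count_quads(parser, data.as_ref())?, 1);
    Ok(())
}
```

The `Store`, `Transaction` and `BulkLoader` `load_from_read` methods introduced below all rely on this same `impl Into<RdfParser>` pattern.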
diff --git a/lib/src/store.rs b/lib/src/store.rs index fa1a243f..e78a137b 100644 --- a/lib/src/store.rs +++ b/lib/src/store.rs @@ -445,6 +445,57 @@ impl Store { .transaction(|mut t| evaluate_update(&mut t, &update, &options)) } + /// Loads a RDF file under into the store. + /// + /// This function is atomic, quite slow and memory hungry. To get much better performances you might want to use the [`bulk_loader`](Store::bulk_loader). + /// + /// Usage example: + /// ``` + /// use oxigraph::store::Store; + /// use oxigraph::io::RdfFormat; + /// use oxigraph::model::*; + /// use oxrdfio::RdfParser; + /// + /// let store = Store::new()?; + /// + /// // insert a dataset file (former load_dataset method) + /// let file = b" ."; + /// store.load_from_read(RdfFormat::NQuads, file.as_ref())?; + /// + /// // insert a graph file (former load_graph method) + /// let file = b"<> <> <> ."; + /// store.load_from_read( + /// RdfParser::from_format(RdfFormat::Turtle) + /// .with_base_iri("http://example.com")? + /// .without_named_graphs() // No named graphs allowed in the input + /// .with_default_graph(NamedNodeRef::new("http://example.com/g2")?), // we put the file default graph inside of a named graph + /// file.as_ref() + /// )?; + /// + /// // we inspect the store contents + /// let ex = NamedNodeRef::new("http://example.com")?; + /// assert!(store.contains(QuadRef::new(ex, ex, ex, NamedNodeRef::new("http://example.com/g")?))?); + /// assert!(store.contains(QuadRef::new(ex, ex, ex, NamedNodeRef::new("http://example.com/g2")?))?); + /// # Result::<_, Box>::Ok(()) + /// ``` + pub fn load_from_read( + &self, + parser: impl Into, + read: impl Read, + ) -> Result<(), LoaderError> { + let quads = parser + .into() + .rename_blank_nodes() + .parse_read(read) + .collect::, _>>()?; + self.storage.transaction(move |mut t| { + for quad in &quads { + t.insert(quad.as_ref())?; + } + Ok(()) + }) + } + /// Loads a graph file (i.e. triples) into the store. /// /// This function is atomic, quite slow and memory hungry. To get much better performances you might want to use the [`bulk_loader`](Store::bulk_loader). @@ -466,6 +517,7 @@ impl Store { /// assert!(store.contains(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?); /// # Result::<_, Box>::Ok(()) /// ``` + #[deprecated(note = "Use Store.load_from_read instead")] pub fn load_graph( &self, read: impl Read, @@ -475,8 +527,7 @@ impl Store { ) -> Result<(), LoaderError> { let mut parser = RdfParser::from_format(format.into()) .without_named_graphs() - .with_default_graph(to_graph_name) - .rename_blank_nodes(); + .with_default_graph(to_graph_name); if let Some(base_iri) = base_iri { parser = parser .with_base_iri(base_iri) @@ -485,13 +536,7 @@ impl Store { error: e, })?; } - let quads = parser.parse_read(read).collect::, _>>()?; - self.storage.transaction(move |mut t| { - for quad in &quads { - t.insert(quad.as_ref())?; - } - Ok(()) - }) + self.load_from_read(parser, read) } /// Loads a dataset file (i.e. quads) into the store. 
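Like the methods it replaces, `Store::load_from_read` calls `rename_blank_nodes()` before inserting, so blank node labels from different load calls never collide. A small sketch of that behaviour, mirroring the `test_load_graph_generates_new_blank_nodes` test further down in this patch:

```rust
use oxigraph::io::RdfFormat;
use oxigraph::store::Store;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let store = Store::new()?;
    let file = b"_:a <http://example.com/p> <http://example.com/o> .";
    // Loading the same file twice inserts two quads: `_:a` is renamed to a
    // fresh blank node on every call, so the resulting quads differ.
    store.load_from_read(RdfFormat::NTriples, file.as_ref())?;
    store.load_from_read(RdfFormat::NTriples, file.as_ref())?;
    assert_eq!(store.len()?, 2);
    Ok(())
}
```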
@@ -515,13 +560,14 @@ impl Store { /// assert!(store.contains(QuadRef::new(ex, ex, ex, ex))?); /// # Result::<_, Box>::Ok(()) /// ``` + #[deprecated(note = "Use Store.load_from_read instead")] pub fn load_dataset( &self, read: impl Read, format: impl Into, base_iri: Option<&str>, ) -> Result<(), LoaderError> { - let mut parser = RdfParser::from_format(format.into()).rename_blank_nodes(); + let mut parser = RdfParser::from_format(format.into()); if let Some(base_iri) = base_iri { parser = parser .with_base_iri(base_iri) @@ -530,13 +576,7 @@ impl Store { error: e, })?; } - let quads = parser.parse_read(read).collect::, _>>()?; - self.storage.transaction(move |mut t| { - for quad in &quads { - t.insert(quad.as_ref())?; - } - Ok(()) - }) + self.load_from_read(parser, read) } /// Adds a quad to this store. @@ -1062,6 +1102,53 @@ impl<'a> Transaction<'a> { ) } + /// Loads a RDF file into the store. + /// + /// This function is atomic, quite slow and memory hungry. To get much better performances you might want to use the [`bulk_loader`](Store::bulk_loader). + /// + /// Usage example: + /// ``` + /// use oxigraph::store::Store; + /// use oxigraph::io::RdfFormat; + /// use oxigraph::model::*; + /// use oxrdfio::RdfParser; + /// + /// let store = Store::new()?; + /// + /// // insert a dataset file (former load_dataset method) + /// let file = b" ."; + /// store.transaction(|mut t| t.load_from_read(RdfFormat::NQuads, file.as_ref()))?; + /// + /// // insert a graph file (former load_graph method) + /// let file = b"<> <> <> ."; + /// store.transaction(|mut t| + /// t.load_from_read( + /// RdfParser::from_format(RdfFormat::Turtle) + /// .with_base_iri("http://example.com") + /// .unwrap() + /// .without_named_graphs() // No named graphs allowed in the input + /// .with_default_graph(NamedNodeRef::new("http://example.com/g2").unwrap()), // we put the file default graph inside of a named graph + /// file.as_ref() + /// ) + /// )?; + /// + /// // we inspect the store contents + /// let ex = NamedNodeRef::new("http://example.com")?; + /// assert!(store.contains(QuadRef::new(ex, ex, ex, NamedNodeRef::new("http://example.com/g")?))?); + /// assert!(store.contains(QuadRef::new(ex, ex, ex, NamedNodeRef::new("http://example.com/g2")?))?); + /// # Result::<_, Box>::Ok(()) + /// ``` + pub fn load_from_read( + &mut self, + parser: impl Into, + read: impl Read, + ) -> Result<(), LoaderError> { + for quad in parser.into().rename_blank_nodes().parse_read(read) { + self.insert(quad?.as_ref())?; + } + Ok(()) + } + /// Loads a graph file (i.e. triples) into the store. /// /// Usage example: @@ -1083,6 +1170,7 @@ impl<'a> Transaction<'a> { /// assert!(store.contains(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?); /// # Result::<_,oxigraph::store::LoaderError>::Ok(()) /// ``` + #[deprecated(note = "Use Transaction.load_from_read instead")] pub fn load_graph( &mut self, read: impl Read, @@ -1092,8 +1180,7 @@ impl<'a> Transaction<'a> { ) -> Result<(), LoaderError> { let mut parser = RdfParser::from_format(format.into()) .without_named_graphs() - .with_default_graph(to_graph_name) - .rename_blank_nodes(); + .with_default_graph(to_graph_name); if let Some(base_iri) = base_iri { parser = parser .with_base_iri(base_iri) @@ -1102,10 +1189,7 @@ impl<'a> Transaction<'a> { error: e, })?; } - for quad in parser.parse_read(read) { - self.writer.insert(quad?.as_ref())?; - } - Ok(()) + self.load_from_read(parser, read) } /// Loads a dataset file (i.e. quads) into the store. 
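Because `Transaction::load_from_read` streams quads into the open transaction, several inputs can be loaded atomically in one closure: if any of them fails, none of them is committed. A hedged sketch of that pattern (the file contents are illustrative):

```rust
use oxigraph::io::RdfFormat;
use oxigraph::store::Store;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let store = Store::new()?;
    let triples = b"<http://example.com/s> <http://example.com/p> <http://example.com/o> .";
    let quads = b"<http://example.com/s> <http://example.com/p> <http://example.com/o> <http://example.com/g> .";

    // Both loads run inside the same transaction: if the second input failed
    // to parse, the first one would be rolled back as well.
    store.transaction(|mut t| {
        t.load_from_read(RdfFormat::NTriples, triples.as_ref())?;
        t.load_from_read(RdfFormat::NQuads, quads.as_ref())
    })?;

    assert_eq!(store.len()?, 2);
    Ok(())
}
```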
@@ -1129,13 +1213,14 @@ impl<'a> Transaction<'a> { /// assert!(store.contains(QuadRef::new(ex, ex, ex, ex))?); /// # Result::<_,oxigraph::store::LoaderError>::Ok(()) /// ``` + #[deprecated(note = "Use Transaction.load_from_read instead")] pub fn load_dataset( &mut self, read: impl Read, format: impl Into, base_iri: Option<&str>, ) -> Result<(), LoaderError> { - let mut parser = RdfParser::from_format(format.into()).rename_blank_nodes(); + let mut parser = RdfParser::from_format(format.into()); if let Some(base_iri) = base_iri { parser = parser .with_base_iri(base_iri) @@ -1144,10 +1229,7 @@ impl<'a> Transaction<'a> { error: e, })?; } - for quad in parser.parse_read(read) { - self.writer.insert(quad?.as_ref())?; - } - Ok(()) + self.load_from_read(parser, read) } /// Adds a quad to this store. @@ -1448,6 +1530,73 @@ impl BulkLoader { self } + /// Loads a file using the bulk loader. + /// + /// This function is optimized for large dataset loading speed. For small files, [`Store::load_dataset`] might be more convenient. + /// + ///
<div class="warning">This method is not atomic.
+ /// If the parsing fails in the middle of the file, only a part of it may be written to the store.
+ /// Results might get weird if you delete data during the loading process.</div>
+ ///
+ /// <div class="warning">This method is optimized for speed. See [the struct](BulkLoader) documentation for more details.</div>
+ /// + /// Usage example: + /// Usage example: + /// ``` + /// use oxigraph::store::Store; + /// use oxigraph::io::RdfFormat; + /// use oxigraph::model::*; + /// use oxrdfio::RdfParser; + /// + /// let store = Store::new()?; + /// + /// // insert a dataset file (former load_dataset method) + /// let file = b" ."; + /// store.bulk_loader().load_from_read(RdfFormat::NQuads, file.as_ref())?; + /// + /// // insert a graph file (former load_graph method) + /// let file = b"<> <> <> ."; + /// store.bulk_loader().load_from_read( + /// RdfParser::from_format(RdfFormat::Turtle) + /// .with_base_iri("http://example.com")? + /// .without_named_graphs() // No named graphs allowed in the input + /// .with_default_graph(NamedNodeRef::new("http://example.com/g2")?), // we put the file default graph inside of a named graph + /// file.as_ref() + /// )?; + /// + /// // we inspect the store contents + /// let ex = NamedNodeRef::new("http://example.com")?; + /// assert!(store.contains(QuadRef::new(ex, ex, ex, NamedNodeRef::new("http://example.com/g")?))?); + /// assert!(store.contains(QuadRef::new(ex, ex, ex, NamedNodeRef::new("http://example.com/g2")?))?); + /// # Result::<_, Box>::Ok(()) + /// ``` + pub fn load_from_read( + &self, + parser: impl Into, + read: impl Read, + ) -> Result<(), LoaderError> { + self.load_ok_quads( + parser + .into() + .rename_blank_nodes() + .parse_read(read) + .filter_map(|r| match r { + Ok(q) => Some(Ok(q)), + Err(e) => { + if let Some(callback) = &self.on_parse_error { + if let Err(e) = callback(e) { + Some(Err(e)) + } else { + None + } + } else { + Some(Err(e)) + } + } + }), + ) + } + /// Loads a dataset file using the bulk loader. /// /// This function is optimized for large dataset loading speed. For small files, [`Store::load_dataset`] might be more convenient. 
@@ -1475,6 +1624,7 @@ impl BulkLoader { /// assert!(store.contains(QuadRef::new(ex, ex, ex, ex))?); /// # Result::<_, Box>::Ok(()) /// ``` + #[deprecated(note = "Use BulkLoader.load_from_read instead")] pub fn load_dataset( &self, read: impl Read, @@ -1533,6 +1683,7 @@ impl BulkLoader { /// assert!(store.contains(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?); /// # Result::<_, Box>::Ok(()) /// ``` + #[deprecated(note = "Use BulkLoader.load_from_read instead")] pub fn load_graph( &self, read: impl Read, diff --git a/lib/tests/store.rs b/lib/tests/store.rs index c9e67bfb..070a5351 100644 --- a/lib/tests/store.rs +++ b/lib/tests/store.rs @@ -110,12 +110,7 @@ fn quads(graph_name: impl Into>) -> Vec> #[test] fn test_load_graph() -> Result<(), Box> { let store = Store::new()?; - store.load_graph( - DATA.as_bytes(), - RdfFormat::Turtle, - GraphNameRef::DefaultGraph, - None, - )?; + store.load_from_read(RdfFormat::Turtle, DATA.as_bytes())?; for q in quads(GraphNameRef::DefaultGraph) { assert!(store.contains(q)?); } @@ -127,12 +122,9 @@ fn test_load_graph() -> Result<(), Box> { #[cfg(not(target_family = "wasm"))] fn test_bulk_load_graph() -> Result<(), Box> { let store = Store::new()?; - store.bulk_loader().load_graph( - DATA.as_bytes(), - RdfFormat::Turtle, - GraphName::DefaultGraph, - None, - )?; + store + .bulk_loader() + .load_from_read(RdfFormat::Turtle, DATA.as_bytes())?; for q in quads(GraphNameRef::DefaultGraph) { assert!(store.contains(q)?); } @@ -144,11 +136,9 @@ fn test_bulk_load_graph() -> Result<(), Box> { #[cfg(not(target_family = "wasm"))] fn test_bulk_load_graph_lenient() -> Result<(), Box> { let store = Store::new()?; - store.bulk_loader().on_parse_error(|_| Ok(())).load_graph( - b" .\n .".as_slice(), + store.bulk_loader().on_parse_error(|_| Ok(())).load_from_read( RdfFormat::NTriples, - GraphName::DefaultGraph, - None, + b" .\n .".as_slice(), )?; assert_eq!(store.len()?, 1); assert!(store.contains(QuadRef::new( @@ -164,7 +154,7 @@ fn test_bulk_load_graph_lenient() -> Result<(), Box> { #[test] fn test_load_dataset() -> Result<(), Box> { let store = Store::new()?; - store.load_dataset(GRAPH_DATA.as_bytes(), RdfFormat::TriG, None)?; + store.load_from_read(RdfFormat::TriG, GRAPH_DATA.as_bytes())?; for q in quads(NamedNodeRef::new_unchecked( "http://www.wikidata.org/wiki/Special:EntityData/Q90", )) { @@ -180,7 +170,7 @@ fn test_bulk_load_dataset() -> Result<(), Box> { let store = Store::new()?; store .bulk_loader() - .load_dataset(GRAPH_DATA.as_bytes(), RdfFormat::TriG, None)?; + .load_from_read(RdfFormat::TriG, GRAPH_DATA.as_bytes())?; let graph_name = NamedNodeRef::new_unchecked("http://www.wikidata.org/wiki/Special:EntityData/Q90"); for q in quads(graph_name) { @@ -195,11 +185,9 @@ fn test_bulk_load_dataset() -> Result<(), Box> { fn test_load_graph_generates_new_blank_nodes() -> Result<(), Box> { let store = Store::new()?; for _ in 0..2 { - store.load_graph( - "_:a .".as_bytes(), + store.load_from_read( RdfFormat::NTriples, - GraphName::DefaultGraph, - None, + "_:a .".as_bytes(), )?; } assert_eq!(store.len()?, 2); diff --git a/python/src/store.rs b/python/src/store.rs index 910247df..6068296d 100644 --- a/python/src/store.rs +++ b/python/src/store.rs @@ -6,6 +6,7 @@ use crate::io::{ }; use crate::model::*; use crate::sparql::*; +use oxigraph::io::RdfParser; use oxigraph::model::{GraphName, GraphNameRef}; use oxigraph::sparql::Update; use oxigraph::store::{self, LoaderError, SerializerError, StorageError, Store}; @@ -399,13 +400,18 @@ impl PyStore { let input = 
PyReadable::from_args(&path, input, py)?; let format = lookup_rdf_format(format, path.as_deref())?; py.allow_threads(|| { + let mut parser = RdfParser::from_format(format); + if let Some(base_iri) = base_iri { + parser = parser + .with_base_iri(base_iri) + .map_err(|e| PyValueError::new_err(e.to_string()))?; + } if let Some(to_graph_name) = to_graph_name { - self.inner - .load_graph(input, format, to_graph_name, base_iri) - } else { - self.inner.load_dataset(input, format, base_iri) + parser = parser.with_default_graph(to_graph_name); } - .map_err(|e| map_loader_error(e, path)) + self.inner + .load_from_read(parser, input) + .map_err(|e| map_loader_error(e, path)) }) } @@ -466,16 +472,18 @@ impl PyStore { let input = PyReadable::from_args(&path, input, py)?; let format = lookup_rdf_format(format, path.as_deref())?; py.allow_threads(|| { + let mut parser = RdfParser::from_format(format); + if let Some(base_iri) = base_iri { + parser = parser + .with_base_iri(base_iri) + .map_err(|e| PyValueError::new_err(e.to_string()))?; + } if let Some(to_graph_name) = to_graph_name { - self.inner - .bulk_loader() - .load_graph(input, format, to_graph_name, base_iri) - } else { - self.inner - .bulk_loader() - .load_dataset(input, format, base_iri) + parser = parser.with_default_graph(to_graph_name); } - .map_err(|e| map_loader_error(e, path)) + self.inner + .load_from_read(parser, input) + .map_err(|e| map_loader_error(e, path)) }) } diff --git a/testsuite/src/sparql_evaluator.rs b/testsuite/src/sparql_evaluator.rs index 383be3e2..df9d471e 100644 --- a/testsuite/src/sparql_evaluator.rs +++ b/testsuite/src/sparql_evaluator.rs @@ -4,6 +4,7 @@ use crate::manifest::*; use crate::report::{dataset_diff, format_diff}; use crate::vocab::*; use anyhow::{bail, ensure, Context, Error, Result}; +use oxigraph::io::RdfParser; use oxigraph::model::vocab::*; use oxigraph::model::*; use oxigraph::sparql::results::QueryResultsFormat; @@ -129,10 +130,10 @@ fn evaluate_negative_result_syntax_test(test: &Test, format: QueryResultsFormat) fn evaluate_evaluation_test(test: &Test) -> Result<()> { let store = get_store()?; if let Some(data) = &test.data { - load_dataset_to_store(data, &store)?; + load_to_store(data, &store, GraphName::DefaultGraph)?; } for (name, value) in &test.graph_data { - load_graph_to_store(value, &store, name.clone())?; + load_to_store(value, &store, name.clone())?; } let query_file = test.query.as_deref().context("No action found")?; let options = QueryOptions::default() @@ -150,13 +151,13 @@ fn evaluate_evaluation_test(test: &Test) -> Result<()> { let GraphName::NamedNode(graph_name) = graph_name else { bail!("Invalid FROM in query {query}"); }; - load_graph_to_store(graph_name.as_str(), &store, graph_name.as_ref())?; + load_to_store(graph_name.as_str(), &store, graph_name.as_ref())?; } for graph_name in query.dataset().available_named_graphs().unwrap_or(&[]) { let NamedOrBlankNode::NamedNode(graph_name) = graph_name else { bail!("Invalid FROM NAMED in query {query}"); }; - load_graph_to_store(graph_name.as_str(), &store, graph_name.as_ref())?; + load_to_store(graph_name.as_str(), &store, graph_name.as_ref())?; } } @@ -210,18 +211,18 @@ fn evaluate_negative_update_syntax_test(test: &Test) -> Result<()> { fn evaluate_update_evaluation_test(test: &Test) -> Result<()> { let store = get_store()?; if let Some(data) = &test.data { - load_dataset_to_store(data, &store)?; + load_to_store(data, &store, GraphName::DefaultGraph)?; } for (name, value) in &test.graph_data { - load_graph_to_store(value, &store, 
name.clone())?; + load_to_store(value, &store, name.clone())?; } let result_store = get_store()?; if let Some(data) = &test.result { - load_dataset_to_store(data, &result_store)?; + load_to_store(data, &result_store, GraphName::DefaultGraph)?; } for (name, value) in &test.result_graph_data { - load_graph_to_store(value, &result_store, name.clone())?; + load_to_store(value, &result_store, name.clone())?; } let update_file = test.update.as_deref().context("No action found")?; @@ -271,7 +272,7 @@ impl StaticServiceHandler { .map(|(name, data)| { let name = NamedNode::new(name)?; let store = get_store()?; - load_dataset_to_store(data, &store)?; + load_to_store(data, &store, GraphName::DefaultGraph)?; Ok((name, store)) }) .collect::>()?, @@ -643,25 +644,16 @@ fn solutions_to_string(solutions: Vec>, ordered: bool) -> lines.join("\n") } -fn load_graph_to_store( - url: &str, - store: &Store, - to_graph_name: impl Into, -) -> Result<()> { - store.load_graph( +fn load_to_store(url: &str, store: &Store, to_graph_name: impl Into) -> Result<()> { + store.load_from_read( + RdfParser::from_format(guess_rdf_format(url)?) + .with_base_iri(url)? + .with_default_graph(to_graph_name), read_file(url)?, - guess_rdf_format(url)?, - to_graph_name, - Some(url), )?; Ok(()) } -fn load_dataset_to_store(url: &str, store: &Store) -> Result<()> { - store.load_dataset(read_file(url)?, guess_rdf_format(url)?, Some(url))?; - Ok(()) -} - fn evaluate_query_optimization_test(test: &Test) -> Result<()> { let action = test.action.as_deref().context("No action found")?; let actual = (&Optimizer::optimize_graph_pattern(
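The testsuite change above collapses `load_graph_to_store` and `load_dataset_to_store` into a single `load_to_store` helper, and the same collapse applies to application code. A hedged before/after sketch of the migration, mirroring the examples in the new documentation (the data and graph names are illustrative):

```rust
use oxigraph::io::{RdfFormat, RdfParser};
use oxigraph::model::NamedNodeRef;
use oxigraph::store::Store;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let store = Store::new()?;

    // Former `load_dataset(read, format, base_iri)`:
    // pass the format (or a parser carrying a base IRI) directly.
    let quads = b"<http://example.com/s> <http://example.com/p> <http://example.com/o> <http://example.com/g> .";
    store.load_from_read(RdfFormat::NQuads, quads.as_ref())?;

    // Former `load_graph(read, format, to_graph_name, base_iri)`:
    // the graph restriction and target graph move onto the parser.
    let triples = b"<s> <p> <o> .";
    store.load_from_read(
        RdfParser::from_format(RdfFormat::Turtle)
            .with_base_iri("http://example.com/")? // replaces the old `base_iri` argument
            .without_named_graphs() // reject named graphs in a triples-only input
            .with_default_graph(NamedNodeRef::new("http://example.com/g2")?),
        triples.as_ref(),
    )?;

    assert_eq!(store.len()?, 2);
    Ok(())
}
```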