diff --git a/Cargo.lock b/Cargo.lock index 652ddf86..3d54e27c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1033,7 +1033,7 @@ dependencies = [ [[package]] name = "oxigraph" -version = "0.4.0-alpha.2" +version = "0.4.0-alpha.3-dev" dependencies = [ "codspeed-criterion-compat", "digest", @@ -1063,7 +1063,7 @@ dependencies = [ [[package]] name = "oxigraph-cli" -version = "0.4.0-alpha.2" +version = "0.4.0-alpha.3-dev" dependencies = [ "anyhow", "assert_cmd", @@ -1082,7 +1082,7 @@ dependencies = [ [[package]] name = "oxigraph-js" -version = "0.4.0-alpha.2" +version = "0.4.0-alpha.3-dev" dependencies = [ "console_error_panic_hook", "js-sys", @@ -1129,7 +1129,7 @@ dependencies = [ [[package]] name = "oxrdfio" -version = "0.1.0-alpha.1" +version = "0.1.0-alpha.2-dev" dependencies = [ "oxrdf", "oxrdfxml", @@ -1150,7 +1150,7 @@ dependencies = [ [[package]] name = "oxrocksdb-sys" -version = "0.4.0-alpha.2" +version = "0.4.0-alpha.3-dev" dependencies = [ "bindgen", "cc", @@ -1167,7 +1167,7 @@ dependencies = [ [[package]] name = "oxttl" -version = "0.1.0-alpha.1" +version = "0.1.0-alpha.2-dev" dependencies = [ "memchr", "oxilangtag", @@ -1403,7 +1403,7 @@ dependencies = [ [[package]] name = "pyoxigraph" -version = "0.4.0-alpha.2" +version = "0.4.0-alpha.3-dev" dependencies = [ "oxigraph", "pyo3", diff --git a/README.md b/README.md index e035c11b..8f9b7022 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,8 @@ [![Released API docs](https://docs.rs/oxigraph/badge.svg)](https://docs.rs/oxigraph) [![PyPI](https://img.shields.io/pypi/v/pyoxigraph)](https://pypi.org/project/pyoxigraph/) [![npm](https://img.shields.io/npm/v/oxigraph)](https://www.npmjs.com/package/oxigraph) -[![actions status](https://github.com/oxigraph/oxigraph/workflows/build/badge.svg)](https://github.com/oxigraph/oxigraph/actions) +[![tests status](https://github.com/oxigraph/oxigraph/actions/workflows/tests.yml/badge.svg)](https://github.com/oxigraph/oxigraph/actions) +[![artifacts status](https://github.com/oxigraph/oxigraph/actions/workflows/artifacts.yml/badge.svg)](https://github.com/oxigraph/oxigraph/actions) [![dependency status](https://deps.rs/repo/github/oxigraph/oxigraph/status.svg)](https://deps.rs/repo/github/oxigraph/oxigraph) [![Gitter](https://badges.gitter.im/oxigraph/community.svg)](https://gitter.im/oxigraph/community) [![Twitter URL](https://img.shields.io/twitter/url?style=social&url=https%3A%2F%2Ftwitter.com%2Foxigraph)](https://twitter.com/oxigraph) diff --git a/cli/Cargo.toml b/cli/Cargo.toml index 7cea59b8..befeb158 100644 --- a/cli/Cargo.toml +++ b/cli/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "oxigraph-cli" -version = "0.4.0-alpha.2" +version = "0.4.0-alpha.3-dev" authors = ["Tpt "] license = "MIT OR Apache-2.0" readme = "README.md" @@ -29,7 +29,7 @@ rustls-webpki = ["oxigraph/http-client-rustls-webpki"] anyhow = "1.0.72" oxhttp = { version = "0.2.0-alpha.3", features = ["flate2"] } clap = { version = "4.0", features = ["derive"] } -oxigraph = { version = "0.4.0-alpha.2", path = "../lib" } +oxigraph = { version = "0.4.0-alpha.3-dev", path = "../lib" } rand = "0.8" url = "2.4" oxiri = "0.2.3-alpha.1" diff --git a/cli/src/main.rs b/cli/src/main.rs index b0f7a6fa..9e9a0dbd 100644 --- a/cli/src/main.rs +++ b/cli/src/main.rs @@ -2354,7 +2354,7 @@ mod tests { .build(); ServerTest::new()?.test_body( request, - "{\"head\":{\"vars\":[\"s\",\"p\",\"o\"]},\"results\":{\"bindings\":[]}}", + r#"{"head":{"vars":["s","p","o"]},"results":{"bindings":[]}}"#, ) } @@ -2369,7 +2369,7 @@ mod tests { .build(); 
ServerTest::new()?.test_body( request, - "{\"head\":{\"vars\":[\"s\",\"p\",\"o\"]},\"results\":{\"bindings\":[]}}", + r#"{"head":{"vars":["s","p","o"]},"results":{"bindings":[]}}"#, ) } @@ -2387,7 +2387,7 @@ mod tests { .build(); ServerTest::new()?.test_body( request, - "{\"head\":{\"vars\":[\"s\",\"p\",\"o\"]},\"results\":{\"bindings\":[]}}", + r#"{"head":{"vars":["s","p","o"]},"results":{"bindings":[]}}"#, ) } @@ -2414,7 +2414,7 @@ mod tests { .build(); ServerTest::new()?.test_body( request, - "{\"head\":{\"vars\":[\"s\",\"p\",\"o\"]},\"results\":{\"bindings\":[]}}", + r#"{"head":{"vars":["s","p","o"]},"results":{"bindings":[]}}"#, ) } @@ -2429,7 +2429,7 @@ mod tests { .build(); ServerTest::new()?.test_body( request, - "{\"head\":{\"vars\":[\"s\",\"p\",\"o\"]},\"results\":{\"bindings\":[]}}", + r#"{"head":{"vars":["s","p","o"]},"results":{"bindings":[]}}"#, ) } #[test] @@ -2679,16 +2679,16 @@ mod tests { let request = Request::builder(Method::PUT, "http://localhost/store/person/1.ttl".parse()?) .with_header(HeaderName::CONTENT_TYPE, "text/turtle; charset=utf-8")? .with_body( - " + r#" @prefix foaf: . @prefix v: . a foaf:Person; foaf:businessCard [ a v:VCard; - v:fn \"John Doe\" + v:fn "John Doe" ]. -", +"#, ); server.test_status(request, Status::CREATED)?; @@ -2717,16 +2717,16 @@ mod tests { let request = Request::builder(Method::PUT, "http://localhost/store/person/1.ttl".parse()?) .with_header(HeaderName::CONTENT_TYPE, "text/turtle; charset=utf-8")? .with_body( - " + r#" @prefix foaf: . @prefix v: . a foaf:Person; foaf:businessCard [ a v:VCard; - v:fn \"Jane Doe\" + v:fn "Jane Doe" ]. -", +"#, ); server.test_status(request, Status::NO_CONTENT)?; @@ -2740,16 +2740,16 @@ mod tests { let request = Request::builder(Method::PUT, "http://localhost/store?default".parse()?) .with_header(HeaderName::CONTENT_TYPE, "text/turtle; charset=utf-8")? .with_body( - " + r#" @prefix foaf: . @prefix v: . [] a foaf:Person; foaf:businessCard [ a v:VCard; - v:given-name \"Alice\" + v:given-name "Alice" ] . -", +"#, ); server.test_status(request, Status::NO_CONTENT)?; // The default graph always exists in Oxigraph @@ -2781,16 +2781,16 @@ mod tests { let request = Request::builder(Method::PUT, "http://localhost/store/person/2.ttl".parse()?) .with_header(HeaderName::CONTENT_TYPE, "text/turtle; charset=utf-8")? .with_body( - " + r#" @prefix foaf: . @prefix v: . [] a foaf:Person; foaf:businessCard [ a v:VCard; - v:given-name \"Alice\" + v:given-name "Alice" ] . -", +"#, ); server.test_status(request, Status::NO_CONTENT)?; @@ -2839,16 +2839,16 @@ mod tests { let request = Request::builder(Method::POST, "http://localhost/store".parse()?) .with_header(HeaderName::CONTENT_TYPE, "text/turtle; charset=utf-8")? .with_body( - " + r#" @prefix foaf: . @prefix v: . [] a foaf:Person; foaf:businessCard [ a v:VCard; - v:given-name \"Alice\" + v:given-name "Alice" ] . 
-", +"#, ); let response = server.exec(request); assert_eq!(response.status(), Status::CREATED); diff --git a/js/Cargo.toml b/js/Cargo.toml index 6169004f..8caba72e 100644 --- a/js/Cargo.toml +++ b/js/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "oxigraph-js" -version = "0.4.0-alpha.2" +version = "0.4.0-alpha.3-dev" authors = ["Tpt "] license = "MIT OR Apache-2.0" readme = "README.md" diff --git a/js/src/model.rs b/js/src/model.rs index 323b5978..dd8a972c 100644 --- a/js/src/model.rs +++ b/js/src/model.rs @@ -1,9 +1,4 @@ -#![allow( - dead_code, - clippy::inherent_to_string, - clippy::unused_self, - clippy::use_self -)] +#![allow(dead_code, clippy::inherent_to_string, clippy::unused_self)] use crate::format_err; use crate::utils::to_err; @@ -564,7 +559,7 @@ impl From for JsTerm { impl TryFrom for NamedNode { type Error = JsValue; - fn try_from(value: JsTerm) -> Result { + fn try_from(value: JsTerm) -> Result { match value { JsTerm::NamedNode(node) => Ok(node.into()), JsTerm::BlankNode(node) => Err(format_err!( @@ -588,7 +583,7 @@ impl TryFrom for NamedNode { impl TryFrom for NamedOrBlankNode { type Error = JsValue; - fn try_from(value: JsTerm) -> Result { + fn try_from(value: JsTerm) -> Result { match value { JsTerm::NamedNode(node) => Ok(node.into()), JsTerm::BlankNode(node) => Ok(node.into()), @@ -614,7 +609,7 @@ impl TryFrom for NamedOrBlankNode { impl TryFrom for Subject { type Error = JsValue; - fn try_from(value: JsTerm) -> Result { + fn try_from(value: JsTerm) -> Result { match value { JsTerm::NamedNode(node) => Ok(node.into()), JsTerm::BlankNode(node) => Ok(node.into()), @@ -637,7 +632,7 @@ impl TryFrom for Subject { impl TryFrom for Term { type Error = JsValue; - fn try_from(value: JsTerm) -> Result { + fn try_from(value: JsTerm) -> Result { match value { JsTerm::NamedNode(node) => Ok(node.into()), JsTerm::BlankNode(node) => Ok(node.into()), @@ -657,7 +652,7 @@ impl TryFrom for Term { impl TryFrom for GraphName { type Error = JsValue; - fn try_from(value: JsTerm) -> Result { + fn try_from(value: JsTerm) -> Result { match value { JsTerm::NamedNode(node) => Ok(node.into()), JsTerm::BlankNode(node) => Ok(node.into()), diff --git a/js/src/store.rs b/js/src/store.rs index bb0af9e7..0b4b51b8 100644 --- a/js/src/store.rs +++ b/js/src/store.rs @@ -1,5 +1,3 @@ -#![allow(clippy::use_self)] - use crate::format_err; use crate::model::*; use crate::utils::to_err; diff --git a/lib/Cargo.toml b/lib/Cargo.toml index 944ecc62..8465ce8f 100644 --- a/lib/Cargo.toml +++ b/lib/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "oxigraph" -version = "0.4.0-alpha.2" +version = "0.4.0-alpha.3-dev" authors = ["Tpt "] license = "MIT OR Apache-2.0" readme = "README.md" @@ -33,7 +33,7 @@ md-5 = "0.10" oxilangtag = "0.1" oxiri = "0.2.3-alpha.1" oxrdf = { version = "0.2.0-alpha.1", path = "oxrdf", features = ["rdf-star", "oxsdatatypes"] } -oxrdfio = { version = "0.1.0-alpha.1", path = "oxrdfio", features = ["rdf-star"] } +oxrdfio = { version = "0.1.0-alpha.2-dev", path = "oxrdfio", features = ["rdf-star"] } oxsdatatypes = { version = "0.2.0-alpha.1", path = "oxsdatatypes" } rand = "0.8" regex = "1.7" @@ -46,7 +46,7 @@ sparopt = { version = "0.1.0-alpha.1", path = "sparopt", features = ["rdf-star", [target.'cfg(not(target_family = "wasm"))'.dependencies] libc = "0.2.147" -oxrocksdb-sys = { version = "0.4.0-alpha.2", path = "../oxrocksdb-sys" } +oxrocksdb-sys = { version = "0.4.0-alpha.3-dev", path = "../oxrocksdb-sys" } oxhttp = { version = "0.2.0-alpha.3", optional = true } [target.'cfg(all(target_family = "wasm", 
target_os = "unknown"))'.dependencies] diff --git a/lib/oxrdf/src/dataset.rs b/lib/oxrdf/src/dataset.rs index f240a33a..0c1fcbd7 100644 --- a/lib/oxrdf/src/dataset.rs +++ b/lib/oxrdf/src/dataset.rs @@ -927,7 +927,7 @@ impl<'a> IntoIterator for &'a Dataset { type Item = QuadRef<'a>; type IntoIter = Iter<'a>; - fn into_iter(self) -> Iter<'a> { + fn into_iter(self) -> Self::IntoIter { self.iter() } } @@ -1285,7 +1285,7 @@ impl<'a> IntoIterator for GraphView<'a> { type Item = TripleRef<'a>; type IntoIter = GraphViewIter<'a>; - fn into_iter(self) -> GraphViewIter<'a> { + fn into_iter(self) -> Self::IntoIter { self.iter() } } @@ -1294,7 +1294,7 @@ impl<'a, 'b> IntoIterator for &'b GraphView<'a> { type Item = TripleRef<'a>; type IntoIter = GraphViewIter<'a>; - fn into_iter(self) -> GraphViewIter<'a> { + fn into_iter(self) -> Self::IntoIter { self.iter() } } @@ -1496,7 +1496,7 @@ impl<'a> IntoIterator for &'a GraphViewMut<'a> { type Item = TripleRef<'a>; type IntoIter = GraphViewIter<'a>; - fn into_iter(self) -> GraphViewIter<'a> { + fn into_iter(self) -> Self::IntoIter { self.iter() } } @@ -1527,7 +1527,7 @@ pub struct Iter<'a> { impl<'a> Iterator for Iter<'a> { type Item = QuadRef<'a>; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { self.inner .next() .map(|(s, p, o, g)| self.dataset.decode_spog((s, p, o, g))) @@ -1551,7 +1551,7 @@ pub struct GraphViewIter<'a> { impl<'a> Iterator for GraphViewIter<'a> { type Item = TripleRef<'a>; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { self.inner .next() .map(|(_, s, p, o)| self.dataset.decode_spo((s, p, o))) diff --git a/lib/oxrdf/src/graph.rs b/lib/oxrdf/src/graph.rs index 980e3ebf..3077e5de 100644 --- a/lib/oxrdf/src/graph.rs +++ b/lib/oxrdf/src/graph.rs @@ -229,7 +229,7 @@ impl<'a> IntoIterator for &'a Graph { type Item = TripleRef<'a>; type IntoIter = Iter<'a>; - fn into_iter(self) -> Iter<'a> { + fn into_iter(self) -> Self::IntoIter { self.iter() } } @@ -276,7 +276,7 @@ pub struct Iter<'a> { impl<'a> Iterator for Iter<'a> { type Item = TripleRef<'a>; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { self.inner.next() } } diff --git a/lib/oxrdf/src/interning.rs b/lib/oxrdf/src/interning.rs index ef436149..45152b67 100644 --- a/lib/oxrdf/src/interning.rs +++ b/lib/oxrdf/src/interning.rs @@ -510,8 +510,8 @@ struct IdentityHasherBuilder; impl BuildHasher for IdentityHasherBuilder { type Hasher = IdentityHasher; - fn build_hasher(&self) -> IdentityHasher { - IdentityHasher::default() + fn build_hasher(&self) -> Self::Hasher { + Self::Hasher::default() } } diff --git a/lib/oxrdf/src/literal.rs b/lib/oxrdf/src/literal.rs index 976b863d..3f2727ca 100644 --- a/lib/oxrdf/src/literal.rs +++ b/lib/oxrdf/src/literal.rs @@ -24,12 +24,12 @@ use std::option::Option; /// ); /// /// assert_eq!( -/// "\"1999-01-01\"^^", +/// r#""1999-01-01"^^"#, /// Literal::new_typed_literal("1999-01-01", xsd::DATE).to_string() /// ); /// /// assert_eq!( -/// "\"foo\"@en", +/// r#""foo"@en"#, /// Literal::new_language_tagged_literal("foo", "en")?.to_string() /// ); /// # Result::<(), LanguageTagParseError>::Ok(()) @@ -436,7 +436,7 @@ impl From for Literal { /// ); /// /// assert_eq!( -/// "\"1999-01-01\"^^", +/// r#""1999-01-01"^^"#, /// LiteralRef::new_typed_literal("1999-01-01", xsd::DATE).to_string() /// ); /// ``` diff --git a/lib/oxrdf/src/parser.rs b/lib/oxrdf/src/parser.rs index 4d95072f..326868f5 100644 --- a/lib/oxrdf/src/parser.rs +++ b/lib/oxrdf/src/parser.rs @@ -25,10 +25,10 @@ impl FromStr for NamedNode { /// /// 
assert_eq!(NamedNode::from_str("").unwrap(), NamedNode::new("http://example.com").unwrap()) /// ``` - fn from_str(s: &str) -> Result { + fn from_str(s: &str) -> Result { let (term, left) = read_named_node(s)?; if !left.is_empty() { - return Err(TermParseError::msg( + return Err(Self::Err::msg( "Named node serialization should end with a >", )); } @@ -47,10 +47,10 @@ impl FromStr for BlankNode { /// /// assert_eq!(BlankNode::from_str("_:ex").unwrap(), BlankNode::new("ex").unwrap()) /// ``` - fn from_str(s: &str) -> Result { + fn from_str(s: &str) -> Result { let (term, left) = read_blank_node(s)?; if !left.is_empty() { - return Err(TermParseError::msg( + return Err(Self::Err::msg( "Blank node serialization should not contain whitespaces", )); } @@ -75,10 +75,10 @@ impl FromStr for Literal { /// assert_eq!(Literal::from_str("-122.23").unwrap(), Literal::new_typed_literal("-122.23", xsd::DECIMAL)); /// assert_eq!(Literal::from_str("-122e+1").unwrap(), Literal::new_typed_literal("-122e+1", xsd::DOUBLE)); /// ``` - fn from_str(s: &str) -> Result { + fn from_str(s: &str) -> Result { let (term, left) = read_literal(s)?; if !left.is_empty() { - return Err(TermParseError::msg("Invalid literal serialization")); + return Err(Self::Err::msg("Invalid literal serialization")); } Ok(term) } @@ -100,10 +100,10 @@ impl FromStr for Term { /// Literal::new_simple_literal("o") /// ).into()); /// ``` - fn from_str(s: &str) -> Result { + fn from_str(s: &str) -> Result { let (term, left) = read_term(s, 0)?; if !left.is_empty() { - return Err(TermParseError::msg("Invalid term serialization")); + return Err(Self::Err::msg("Invalid term serialization")); } Ok(term) } @@ -120,13 +120,13 @@ impl FromStr for Variable { /// /// assert_eq!(Variable::from_str("$foo").unwrap(), Variable::new("foo").unwrap()) /// ``` - fn from_str(s: &str) -> Result { + fn from_str(s: &str) -> Result { if !s.starts_with('?') && !s.starts_with('$') { - return Err(TermParseError::msg( + return Err(Self::Err::msg( "Variable serialization should start with ? or $", )); } - Self::new(&s[1..]).map_err(|error| TermParseError { + Self::new(&s[1..]).map_err(|error| Self::Err { kind: TermParseErrorKind::Variable { value: s.to_owned(), error, diff --git a/lib/oxrdf/src/vocab.rs b/lib/oxrdf/src/vocab.rs index a0c3bd5c..56110c4a 100644 --- a/lib/oxrdf/src/vocab.rs +++ b/lib/oxrdf/src/vocab.rs @@ -231,3 +231,12 @@ pub mod xsd { pub const YEAR_MONTH_DURATION: NamedNodeRef<'_> = NamedNodeRef::new_unchecked("http://www.w3.org/2001/XMLSchema#yearMonthDuration"); } + +pub mod geosparql { + //! [GeoSpatial](https://opengeospatial.github.io/ogc-geosparql/) vocabulary. 
+ use crate::named_node::NamedNodeRef; + + /// Geospatial datatype like `"Point({longitude} {latitude})"^^geo:wktLiteral` + pub const WKT_LITERAL: NamedNodeRef<'_> = + NamedNodeRef::new_unchecked("http://www.opengis.net/ont/geosparql#wktLiteral"); +} diff --git a/lib/oxrdfio/Cargo.toml b/lib/oxrdfio/Cargo.toml index 6ded48d6..c2930046 100644 --- a/lib/oxrdfio/Cargo.toml +++ b/lib/oxrdfio/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "oxrdfio" -version = "0.1.0-alpha.1" +version = "0.1.0-alpha.2-dev" authors = ["Tpt "] license = "MIT OR Apache-2.0" readme = "README.md" @@ -21,7 +21,7 @@ rdf-star = ["oxrdf/rdf-star", "oxttl/rdf-star"] [dependencies] oxrdf = { version = "0.2.0-alpha.1", path = "../oxrdf" } oxrdfxml = { version = "0.1.0-alpha.1", path = "../oxrdfxml" } -oxttl = { version = "0.1.0-alpha.1", path = "../oxttl" } +oxttl = { version = "0.1.0-alpha.2-dev", path = "../oxttl" } tokio = { version = "1.29", optional = true, features = ["io-util"] } [dev-dependencies] diff --git a/lib/oxrdfio/src/error.rs b/lib/oxrdfio/src/error.rs index 235ba1b7..3b4691f9 100644 --- a/lib/oxrdfio/src/error.rs +++ b/lib/oxrdfio/src/error.rs @@ -42,7 +42,7 @@ impl Error for ParseError { impl From for SyntaxError { #[inline] fn from(error: oxttl::SyntaxError) -> Self { - SyntaxError { + Self { inner: SyntaxErrorKind::Turtle(error), } } @@ -61,7 +61,7 @@ impl From for ParseError { impl From for SyntaxError { #[inline] fn from(error: oxrdfxml::SyntaxError) -> Self { - SyntaxError { + Self { inner: SyntaxErrorKind::RdfXml(error), } } @@ -166,7 +166,7 @@ impl From for io::Error { match error.inner { SyntaxErrorKind::Turtle(error) => error.into(), SyntaxErrorKind::RdfXml(error) => error.into(), - SyntaxErrorKind::Msg { msg } => io::Error::new(io::ErrorKind::InvalidData, msg), + SyntaxErrorKind::Msg { msg } => Self::new(io::ErrorKind::InvalidData, msg), } } } diff --git a/lib/oxrdfio/src/parser.rs b/lib/oxrdfio/src/parser.rs index eb60e05e..d5bf196d 100644 --- a/lib/oxrdfio/src/parser.rs +++ b/lib/oxrdfio/src/parser.rs @@ -382,7 +382,7 @@ enum FromReadQuadReaderKind { impl Iterator for FromReadQuadReader { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { Some(match &mut self.parser { FromReadQuadReaderKind::N3(parser) => match parser.next()? { Ok(quad) => self.mapper.map_n3_quad(quad), diff --git a/lib/oxrdfxml/README.md b/lib/oxrdfxml/README.md index 8575d19b..29ebb4cf 100644 --- a/lib/oxrdfxml/README.md +++ b/lib/oxrdfxml/README.md @@ -12,31 +12,33 @@ OxRdfXml is a parser and serializer for [RDF/XML](https://www.w3.org/TR/rdf-synt The entry points of this library are the two [`RdfXmlParser`] and [`RdfXmlSerializer`] structs. 
Usage example counting the number of people in a RDF/XML file: + ```rust use oxrdf::{NamedNodeRef, vocab::rdf}; use oxrdfxml::RdfXmlParser; -let file = b" - - - +fn main() { + let file = br#" + + + Foo - -"; - -let schema_person = NamedNodeRef::new("http://schema.org/Person").unwrap(); -let mut count = 0; -for triple in RdfXmlParser::new().parse_read(file.as_ref()) { - let triple = triple.unwrap(); - if triple.predicate == rdf::TYPE && triple.object == schema_person.into() { - count += 1; + +"#; + + let schema_person = NamedNodeRef::new("http://schema.org/Person").unwrap(); + let mut count = 0; + for triple in RdfXmlParser::new().parse_read(file.as_ref()) { + let triple = triple.unwrap(); + if triple.predicate == rdf::TYPE && triple.object == schema_person.into() { + count += 1; + } } + assert_eq!(2, count); } -assert_eq!(2, count); ``` - ## License This project is licensed under either of diff --git a/lib/oxrdfxml/src/parser.rs b/lib/oxrdfxml/src/parser.rs index ba0b27a1..3c6b23ff 100644 --- a/lib/oxrdfxml/src/parser.rs +++ b/lib/oxrdfxml/src/parser.rs @@ -29,14 +29,14 @@ use tokio::io::{AsyncRead, BufReader as AsyncBufReader}; /// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxrdfxml::RdfXmlParser; /// -/// let file = b" -/// -/// -/// +/// let file = br#" +/// +/// +/// /// Foo /// -/// -/// "; +/// +/// "#; /// /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let mut count = 0; @@ -87,14 +87,14 @@ impl RdfXmlParser { /// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxrdfxml::RdfXmlParser; /// - /// let file = b" - /// - /// - /// + /// let file = br#" + /// + /// + /// /// Foo /// - /// - /// "; + /// + /// "#; /// /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let mut count = 0; @@ -124,14 +124,14 @@ impl RdfXmlParser { /// /// # #[tokio::main(flavor = "current_thread")] /// # async fn main() -> Result<(), oxrdfxml::ParseError> { - /// let file = b" - /// - /// - /// + /// let file = br#" + /// + /// + /// /// Foo /// - /// - /// "; + /// + /// "#; /// /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); /// let mut count = 0; @@ -182,14 +182,14 @@ impl RdfXmlParser { /// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxrdfxml::RdfXmlParser; /// -/// let file = b" -/// -/// -/// +/// let file = br#" +/// +/// +/// /// Foo /// -/// -/// "; +/// +/// "#; /// /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let mut count = 0; @@ -212,7 +212,7 @@ pub struct FromReadRdfXmlReader { impl Iterator for FromReadRdfXmlReader { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { loop { if let Some(triple) = self.results.pop() { return Some(Ok(triple)); @@ -251,14 +251,14 @@ impl FromReadRdfXmlReader { /// /// # #[tokio::main(flavor = "current_thread")] /// # async fn main() -> Result<(), oxrdfxml::ParseError> { -/// let file = b" -/// -/// -/// +/// let file = br#" +/// +/// +/// /// Foo /// -/// -/// "; +/// +/// "#; /// /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); /// let mut count = 0; @@ -399,23 +399,23 @@ enum RdfXmlState { impl RdfXmlState { fn base_iri(&self) -> Option<&Iri> { match self { - RdfXmlState::Doc { base_iri, .. } - | RdfXmlState::Rdf { base_iri, .. } - | RdfXmlState::NodeElt { base_iri, .. } - | RdfXmlState::PropertyElt { base_iri, .. } - | RdfXmlState::ParseTypeCollectionPropertyElt { base_iri, .. } - | RdfXmlState::ParseTypeLiteralPropertyElt { base_iri, .. 
} => base_iri.as_ref(), + Self::Doc { base_iri, .. } + | Self::Rdf { base_iri, .. } + | Self::NodeElt { base_iri, .. } + | Self::PropertyElt { base_iri, .. } + | Self::ParseTypeCollectionPropertyElt { base_iri, .. } + | Self::ParseTypeLiteralPropertyElt { base_iri, .. } => base_iri.as_ref(), } } fn language(&self) -> Option<&String> { match self { - RdfXmlState::Doc { .. } => None, - RdfXmlState::Rdf { language, .. } - | RdfXmlState::NodeElt { language, .. } - | RdfXmlState::PropertyElt { language, .. } - | RdfXmlState::ParseTypeCollectionPropertyElt { language, .. } - | RdfXmlState::ParseTypeLiteralPropertyElt { language, .. } => language.as_ref(), + Self::Doc { .. } => None, + Self::Rdf { language, .. } + | Self::NodeElt { language, .. } + | Self::PropertyElt { language, .. } + | Self::ParseTypeCollectionPropertyElt { language, .. } + | Self::ParseTypeLiteralPropertyElt { language, .. } => language.as_ref(), } } } diff --git a/lib/oxsdatatypes/README.md b/lib/oxsdatatypes/README.md index c7ae8e55..1c3b2c39 100644 --- a/lib/oxsdatatypes/README.md +++ b/lib/oxsdatatypes/README.md @@ -38,7 +38,7 @@ The `DateTime::now()` function needs special OS support. Currently: - If the `custom-now` feature is enabled, a function computing `now` must be set: ```rust - use oxsdatatypes::Duration; + use oxsdatatypes::Duration; #[no_mangle] fn custom_ox_now() -> Duration { diff --git a/lib/oxsdatatypes/src/boolean.rs b/lib/oxsdatatypes/src/boolean.rs index 688af076..ad231c47 100644 --- a/lib/oxsdatatypes/src/boolean.rs +++ b/lib/oxsdatatypes/src/boolean.rs @@ -66,7 +66,7 @@ impl FromStr for Boolean { type Err = ParseBoolError; #[inline] - fn from_str(input: &str) -> Result { + fn from_str(input: &str) -> Result { Ok(match input { "true" | "1" => true, "false" | "0" => false, diff --git a/lib/oxsdatatypes/src/date_time.rs b/lib/oxsdatatypes/src/date_time.rs index 3c21cf43..d2405692 100644 --- a/lib/oxsdatatypes/src/date_time.rs +++ b/lib/oxsdatatypes/src/date_time.rs @@ -256,7 +256,7 @@ impl TryFrom for DateTime { type Error = DateTimeOverflowError; #[inline] - fn try_from(date: Date) -> Result { + fn try_from(date: Date) -> Result { Self::new( date.year(), date.month(), @@ -272,7 +272,7 @@ impl TryFrom for DateTime { impl FromStr for DateTime { type Err = ParseDateTimeError; - fn from_str(input: &str) -> Result { + fn from_str(input: &str) -> Result { ensure_complete(input, date_time_lexical_rep) } } @@ -528,7 +528,7 @@ impl From for Time { impl FromStr for Time { type Err = ParseDateTimeError; - fn from_str(input: &str) -> Result { + fn from_str(input: &str) -> Result { ensure_complete(input, time_lexical_rep) } } @@ -762,7 +762,7 @@ impl TryFrom for Date { type Error = DateTimeOverflowError; #[inline] - fn try_from(date_time: DateTime) -> Result { + fn try_from(date_time: DateTime) -> Result { Self::new( date_time.year(), date_time.month(), @@ -775,7 +775,7 @@ impl TryFrom for Date { impl FromStr for Date { type Err = ParseDateTimeError; - fn from_str(input: &str) -> Result { + fn from_str(input: &str) -> Result { ensure_complete(input, date_lexical_rep) } } @@ -896,7 +896,7 @@ impl TryFrom for GYearMonth { type Error = DateTimeOverflowError; #[inline] - fn try_from(date_time: DateTime) -> Result { + fn try_from(date_time: DateTime) -> Result { Self::new( date_time.year(), date_time.month(), @@ -917,7 +917,7 @@ impl From for GYearMonth { impl FromStr for GYearMonth { type Err = ParseDateTimeError; - fn from_str(input: &str) -> Result { + fn from_str(input: &str) -> Result { ensure_complete(input, 
g_year_month_lexical_rep) } } @@ -1031,7 +1031,7 @@ impl TryFrom for GYear { type Error = DateTimeOverflowError; #[inline] - fn try_from(date_time: DateTime) -> Result { + fn try_from(date_time: DateTime) -> Result { Self::new(date_time.year(), date_time.timezone_offset()) } } @@ -1041,7 +1041,7 @@ impl TryFrom for GYear { type Error = DateTimeOverflowError; #[inline] - fn try_from(date: Date) -> Result { + fn try_from(date: Date) -> Result { Self::new(date.year(), date.timezone_offset()) } } @@ -1050,7 +1050,7 @@ impl TryFrom for GYear { type Error = DateTimeOverflowError; #[inline] - fn try_from(year_month: GYearMonth) -> Result { + fn try_from(year_month: GYearMonth) -> Result { Self::new(year_month.year(), year_month.timezone_offset()) } } @@ -1058,7 +1058,7 @@ impl TryFrom for GYear { impl FromStr for GYear { type Err = ParseDateTimeError; - fn from_str(input: &str) -> Result { + fn from_str(input: &str) -> Result { ensure_complete(input, g_year_lexical_rep) } } @@ -1186,7 +1186,7 @@ impl From for GMonthDay { impl FromStr for GMonthDay { type Err = ParseDateTimeError; - fn from_str(input: &str) -> Result { + fn from_str(input: &str) -> Result { ensure_complete(input, g_month_day_lexical_rep) } } @@ -1315,7 +1315,7 @@ impl From for GMonth { impl FromStr for GMonth { type Err = ParseDateTimeError; - fn from_str(input: &str) -> Result { + fn from_str(input: &str) -> Result { ensure_complete(input, g_month_lexical_rep) } } @@ -1436,7 +1436,7 @@ impl From for GDay { impl FromStr for GDay { type Err = ParseDateTimeError; - fn from_str(input: &str) -> Result { + fn from_str(input: &str) -> Result { ensure_complete(input, g_day_lexical_rep) } } @@ -1499,18 +1499,18 @@ impl TryFrom for TimezoneOffset { type Error = InvalidTimezoneError; #[inline] - fn try_from(value: DayTimeDuration) -> Result { + fn try_from(value: DayTimeDuration) -> Result { let offset_in_minutes = value.minutes() + value.hours() * 60; let result = Self::new( offset_in_minutes .try_into() - .map_err(|_| InvalidTimezoneError { offset_in_minutes })?, + .map_err(|_| Self::Error { offset_in_minutes })?, )?; if DayTimeDuration::from(result) == value { Ok(result) } else { // The value is not an integral number of minutes or overflow problems - Err(InvalidTimezoneError { offset_in_minutes }) + Err(Self::Error { offset_in_minutes }) } } } @@ -1519,9 +1519,9 @@ impl TryFrom for TimezoneOffset { type Error = InvalidTimezoneError; #[inline] - fn try_from(value: Duration) -> Result { + fn try_from(value: Duration) -> Result { DayTimeDuration::try_from(value) - .map_err(|_| InvalidTimezoneError { + .map_err(|_| Self::Error { offset_in_minutes: 0, })? 
.try_into() @@ -2426,7 +2426,7 @@ impl Error for DateTimeOverflowError {} impl From for ParseDateTimeError { fn from(error: DateTimeOverflowError) -> Self { - ParseDateTimeError { + Self { kind: ParseDateTimeErrorKind::Overflow(error), } } diff --git a/lib/oxsdatatypes/src/decimal.rs b/lib/oxsdatatypes/src/decimal.rs index bb2090fd..6a59105e 100644 --- a/lib/oxsdatatypes/src/decimal.rs +++ b/lib/oxsdatatypes/src/decimal.rs @@ -361,7 +361,7 @@ impl TryFrom for Decimal { type Error = TooLargeForDecimalError; #[inline] - fn try_from(value: i128) -> Result { + fn try_from(value: i128) -> Result { Ok(Self { value: value .checked_mul(DECIMAL_PART_POW) @@ -374,7 +374,7 @@ impl TryFrom for Decimal { type Error = TooLargeForDecimalError; #[inline] - fn try_from(value: u128) -> Result { + fn try_from(value: u128) -> Result { Ok(Self { value: i128::try_from(value) .map_err(|_| TooLargeForDecimalError)? @@ -395,7 +395,7 @@ impl TryFrom for Decimal { type Error = TooLargeForDecimalError; #[inline] - fn try_from(value: Float) -> Result { + fn try_from(value: Float) -> Result { Double::from(value).try_into() } } @@ -405,7 +405,7 @@ impl TryFrom for Decimal { #[inline] #[allow(clippy::cast_precision_loss, clippy::cast_possible_truncation)] - fn try_from(value: Double) -> Result { + fn try_from(value: Double) -> Result { let shifted = f64::from(value) * (DECIMAL_PART_POW as f64); if (i128::MIN as f64) <= shifted && shifted <= (i128::MAX as f64) { Ok(Self { @@ -448,7 +448,7 @@ impl TryFrom for Integer { type Error = TooLargeForIntegerError; #[inline] - fn try_from(value: Decimal) -> Result { + fn try_from(value: Decimal) -> Result { Ok(i64::try_from( value .value @@ -464,7 +464,7 @@ impl FromStr for Decimal { type Err = ParseDecimalError; /// Parses decimals lexical mapping - fn from_str(input: &str) -> Result { + fn from_str(input: &str) -> Result { // (\+|-)?([0-9]+(\.[0-9]*)?|\.[0-9]+) let input = input.as_bytes(); if input.is_empty() { diff --git a/lib/oxsdatatypes/src/double.rs b/lib/oxsdatatypes/src/double.rs index a9d19ac5..1a399019 100644 --- a/lib/oxsdatatypes/src/double.rs +++ b/lib/oxsdatatypes/src/double.rs @@ -189,7 +189,7 @@ impl FromStr for Double { type Err = ParseFloatError; #[inline] - fn from_str(input: &str) -> Result { + fn from_str(input: &str) -> Result { Ok(f64::from_str(input)?.into()) } } diff --git a/lib/oxsdatatypes/src/duration.rs b/lib/oxsdatatypes/src/duration.rs index 1cf33ffb..93dac7d6 100644 --- a/lib/oxsdatatypes/src/duration.rs +++ b/lib/oxsdatatypes/src/duration.rs @@ -176,7 +176,7 @@ impl TryFrom for Duration { type Error = DurationOverflowError; #[inline] - fn try_from(value: StdDuration) -> Result { + fn try_from(value: StdDuration) -> Result { Ok(DayTimeDuration::try_from(value)?.into()) } } @@ -184,10 +184,10 @@ impl TryFrom for Duration { impl FromStr for Duration { type Err = ParseDurationError; - fn from_str(input: &str) -> Result { + fn from_str(input: &str) -> Result { let parts = ensure_complete(input, duration_parts)?; if parts.year_month.is_none() && parts.day_time.is_none() { - return Err(ParseDurationError::msg("Empty duration")); + return Err(Self::Err::msg("Empty duration")); } Ok(Self::new( parts.year_month.unwrap_or(0), @@ -394,7 +394,7 @@ impl TryFrom for YearMonthDuration { type Error = DurationOverflowError; #[inline] - fn try_from(value: Duration) -> Result { + fn try_from(value: Duration) -> Result { if value.day_time == DayTimeDuration::default() { Ok(value.year_month) } else { @@ -406,16 +406,18 @@ impl TryFrom for YearMonthDuration { impl 
FromStr for YearMonthDuration { type Err = ParseDurationError; - fn from_str(input: &str) -> Result { + fn from_str(input: &str) -> Result { let parts = ensure_complete(input, duration_parts)?; if parts.day_time.is_some() { - return Err(ParseDurationError::msg( + return Err(Self::Err::msg( "There must not be any day or time component in a yearMonthDuration", )); } - Ok(Self::new(parts.year_month.ok_or( - ParseDurationError::msg("No year and month values found"), - )?)) + Ok(Self::new( + parts + .year_month + .ok_or(Self::Err::msg("No year and month values found"))?, + )) } } @@ -580,7 +582,7 @@ impl TryFrom for DayTimeDuration { type Error = DurationOverflowError; #[inline] - fn try_from(value: Duration) -> Result { + fn try_from(value: Duration) -> Result { if value.year_month == YearMonthDuration::default() { Ok(value.day_time) } else { @@ -593,7 +595,7 @@ impl TryFrom for DayTimeDuration { type Error = DurationOverflowError; #[inline] - fn try_from(value: StdDuration) -> Result { + fn try_from(value: StdDuration) -> Result { Ok(Self { seconds: Decimal::new( i128::try_from(value.as_nanos()).map_err(|_| DurationOverflowError)?, @@ -608,7 +610,7 @@ impl TryFrom for StdDuration { type Error = DurationOverflowError; #[inline] - fn try_from(value: DayTimeDuration) -> Result { + fn try_from(value: DayTimeDuration) -> Result { if value.seconds.is_negative() { return Err(DurationOverflowError); } @@ -621,7 +623,7 @@ impl TryFrom for StdDuration { .ok_or(DurationOverflowError)? .checked_floor() .ok_or(DurationOverflowError)?; - Ok(StdDuration::new( + Ok(Self::new( secs.as_i128() .try_into() .map_err(|_| DurationOverflowError)?, @@ -636,16 +638,18 @@ impl TryFrom for StdDuration { impl FromStr for DayTimeDuration { type Err = ParseDurationError; - fn from_str(input: &str) -> Result { + fn from_str(input: &str) -> Result { let parts = ensure_complete(input, duration_parts)?; if parts.year_month.is_some() { - return Err(ParseDurationError::msg( + return Err(Self::Err::msg( "There must not be any year or month component in a dayTimeDuration", )); } - Ok(Self::new(parts.day_time.ok_or(ParseDurationError::msg( - "No day or time values found", - ))?)) + Ok(Self::new( + parts + .day_time + .ok_or(Self::Err::msg("No day or time values found"))?, + )) } } @@ -973,7 +977,7 @@ impl fmt::Display for DurationOverflowError { impl Error for DurationOverflowError {} -/// The year-month and the day-time components of a [`Duration\] have an opposite sign. +/// The year-month and the day-time components of a [`Duration`] have an opposite sign. 
#[derive(Debug, Clone, Copy)] pub struct OppositeSignInDurationComponentsError; diff --git a/lib/oxsdatatypes/src/float.rs b/lib/oxsdatatypes/src/float.rs index af4c66f7..bc0aab75 100644 --- a/lib/oxsdatatypes/src/float.rs +++ b/lib/oxsdatatypes/src/float.rs @@ -179,7 +179,7 @@ impl FromStr for Float { type Err = ParseFloatError; #[inline] - fn from_str(input: &str) -> Result { + fn from_str(input: &str) -> Result { Ok(f32::from_str(input)?.into()) } } diff --git a/lib/oxsdatatypes/src/integer.rs b/lib/oxsdatatypes/src/integer.rs index 376deea6..f2b8506f 100644 --- a/lib/oxsdatatypes/src/integer.rs +++ b/lib/oxsdatatypes/src/integer.rs @@ -228,7 +228,7 @@ impl FromStr for Integer { type Err = ParseIntError; #[inline] - fn from_str(input: &str) -> Result { + fn from_str(input: &str) -> Result { Ok(i64::from_str(input)?.into()) } } @@ -244,7 +244,7 @@ impl TryFrom for Integer { type Error = TooLargeForIntegerError; #[inline] - fn try_from(value: Float) -> Result { + fn try_from(value: Float) -> Result { Decimal::try_from(value) .map_err(|_| TooLargeForIntegerError)? .try_into() @@ -255,7 +255,7 @@ impl TryFrom for Integer { type Error = TooLargeForIntegerError; #[inline] - fn try_from(value: Double) -> Result { + fn try_from(value: Double) -> Result { Decimal::try_from(value) .map_err(|_| TooLargeForIntegerError)? .try_into() diff --git a/lib/oxttl/Cargo.toml b/lib/oxttl/Cargo.toml index c19cdffe..7f7428f6 100644 --- a/lib/oxttl/Cargo.toml +++ b/lib/oxttl/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "oxttl" -version = "0.1.0-alpha.1" +version = "0.1.0-alpha.2-dev" authors = ["Tpt "] license = "MIT OR Apache-2.0" readme = "README.md" diff --git a/lib/oxttl/src/lexer.rs b/lib/oxttl/src/lexer.rs index ef95938e..d4eb024f 100644 --- a/lib/oxttl/src/lexer.rs +++ b/lib/oxttl/src/lexer.rs @@ -56,7 +56,7 @@ impl TokenRecognizer for N3Lexer { &mut self, data: &'a [u8], is_ending: bool, - options: &N3LexerOptions, + options: &Self::Options, ) -> Option<(usize, Result, TokenRecognizerError>)> { match *data.first()? { b'<' => match *data.get(1)? { @@ -354,6 +354,7 @@ impl N3Lexer { let mut buffer = None; // Buffer if there are some escaped characters let mut position_that_is_already_in_buffer = 0; let mut might_be_invalid_iri = false; + let mut ends_with_unescaped_dot = 0; loop { if let Some(r) = Self::recognize_unicode_char(&data[i..], i) { match r { @@ -369,6 +370,7 @@ impl N3Lexer { ).into()))); } i += 1; + ends_with_unescaped_dot = 0; } else if c == '\\' { i += 1; let a = char::from(*data.get(i)?); @@ -416,6 +418,7 @@ impl N3Lexer { buffer.push(a); i += 1; position_that_is_already_in_buffer = i; + ends_with_unescaped_dot = 0; } else if i == 0 { if !(Self::is_possible_pn_chars_u(c) || c == ':' || c.is_ascii_digit()) { @@ -427,13 +430,17 @@ impl N3Lexer { || c == ':'; } i += consumed; - } else if Self::is_possible_pn_chars(c) || c == ':' || c == '.' { + } else if Self::is_possible_pn_chars(c) || c == ':' { if !self.unchecked { might_be_invalid_iri |= Self::is_possible_pn_chars_base_but_not_valid_iri(c) || c == ':'; } i += consumed; + ends_with_unescaped_dot = 0; + } else if c == '.' 
{ + i += consumed; + ends_with_unescaped_dot += 1; } else { let buffer = if let Some(mut buffer) = buffer { buffer.push_str( @@ -445,22 +452,20 @@ impl N3Lexer { Err(e) => return Some((i, Err(e))), }, ); - // We do not include the last dot - while buffer.ends_with('.') { + // We do not include the last dots + for _ in 0..ends_with_unescaped_dot { buffer.pop(); - i -= 1; } + i -= ends_with_unescaped_dot; Cow::Owned(buffer) } else { let mut data = match str_from_utf8(&data[..i], 0..i) { Ok(data) => data, Err(e) => return Some((i, Err(e))), }; - // We do not include the last dot - while let Some(d) = data.strip_suffix('.') { - data = d; - i -= 1; - } + // We do not include the last dots + data = &data[..data.len() - ends_with_unescaped_dot]; + i -= ends_with_unescaped_dot; Cow::Borrowed(data) }; return Some((i, Ok((buffer, might_be_invalid_iri)))); diff --git a/lib/oxttl/src/line_formats.rs b/lib/oxttl/src/line_formats.rs index fc48cd53..e7d39e09 100644 --- a/lib/oxttl/src/line_formats.rs +++ b/lib/oxttl/src/line_formats.rs @@ -274,7 +274,7 @@ impl NQuadsRecognizer { true, Some(b"#"), ), - NQuadsRecognizer { + Self { stack: vec![NQuadsState::ExpectSubject], subjects: Vec::new(), predicates: Vec::new(), diff --git a/lib/oxttl/src/n3.rs b/lib/oxttl/src/n3.rs index 0642d416..a1c23f25 100644 --- a/lib/oxttl/src/n3.rs +++ b/lib/oxttl/src/n3.rs @@ -184,12 +184,12 @@ impl From for N3Quad { /// use oxrdf::{NamedNode, vocab::rdf}; /// use oxttl::n3::{N3Parser, N3Term}; /// -/// let file = b"@base . +/// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; -/// schema:name \"Foo\" . +/// schema:name "Foo" . /// a schema:Person ; -/// schema:name \"Bar\" ."; +/// schema:name "Bar" ."#; /// /// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned()); /// let schema_person = N3Term::NamedNode(NamedNode::new("http://schema.org/Person")?); @@ -253,12 +253,12 @@ impl N3Parser { /// use oxrdf::NamedNode; /// use oxttl::n3::{N3Parser, N3Term}; /// - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; - /// schema:name \"Foo\" . + /// schema:name "Foo" . /// a schema:Person ; - /// schema:name \"Bar\" ."; + /// schema:name "Bar" ."#; /// /// let rdf_type = N3Term::NamedNode(NamedNode::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?); /// let schema_person = N3Term::NamedNode(NamedNode::new("http://schema.org/Person")?); @@ -287,12 +287,12 @@ impl N3Parser { /// /// # #[tokio::main(flavor = "current_thread")] /// # async fn main() -> Result<(), oxttl::ParseError> { - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; - /// schema:name \"Foo\" . + /// schema:name "Foo" . /// a schema:Person ; - /// schema:name \"Bar\" ."; + /// schema:name "Bar" ."#; /// /// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned()); /// let schema_person = N3Term::NamedNode(NamedNode::new_unchecked("http://schema.org/Person")); @@ -369,12 +369,12 @@ impl N3Parser { /// use oxrdf::{NamedNode, vocab::rdf}; /// use oxttl::n3::{N3Parser, N3Term}; /// -/// let file = b"@base . +/// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; -/// schema:name \"Foo\" . +/// schema:name "Foo" . 
/// a schema:Person ; -/// schema:name \"Bar\" ."; +/// schema:name "Bar" ."#; /// /// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned()); /// let schema_person = N3Term::NamedNode(NamedNode::new("http://schema.org/Person")?); @@ -403,10 +403,10 @@ impl FromReadN3Reader { /// ``` /// use oxttl::N3Parser; /// - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; - /// schema:name \"Foo\" ."; + /// schema:name "Foo" ."#; /// /// let mut reader = N3Parser::new().parse_read(file.as_ref()); /// assert!(reader.prefixes().is_empty()); // No prefix at the beginning @@ -424,10 +424,10 @@ impl FromReadN3Reader { /// ``` /// use oxttl::N3Parser; /// - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; - /// schema:name \"Foo\" ."; + /// schema:name "Foo" ."#; /// /// let mut reader = N3Parser::new().parse_read(file.as_ref()); /// assert!(reader.base_iri().is_none()); // No base at the beginning because none has been given to the parser. @@ -450,7 +450,7 @@ impl FromReadN3Reader { impl Iterator for FromReadN3Reader { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { self.inner.next() } } @@ -464,12 +464,12 @@ impl Iterator for FromReadN3Reader { /// /// # #[tokio::main(flavor = "current_thread")] /// # async fn main() -> Result<(), oxttl::ParseError> { -/// let file = b"@base . +/// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; -/// schema:name \"Foo\" . +/// schema:name "Foo" . /// a schema:Person ; -/// schema:name \"Bar\" ."; +/// schema:name "Bar" ."#; /// /// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned()); /// let schema_person = N3Term::NamedNode(NamedNode::new_unchecked("http://schema.org/Person")); @@ -509,10 +509,10 @@ impl FromTokioAsyncReadN3Reader { /// /// # #[tokio::main(flavor = "current_thread")] /// # async fn main() -> Result<(), oxttl::ParseError> { - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; - /// schema:name \"Foo\" ."; + /// schema:name "Foo" ."#; /// /// let mut reader = N3Parser::new().parse_tokio_async_read(file.as_ref()); /// assert!(reader.prefixes().is_empty()); // No prefix at the beginning @@ -533,10 +533,10 @@ impl FromTokioAsyncReadN3Reader { /// /// # #[tokio::main(flavor = "current_thread")] /// # async fn main() -> Result<(), oxttl::ParseError> { - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; - /// schema:name \"Foo\" ."; + /// schema:name "Foo" ."#; /// /// let mut reader = N3Parser::new().parse_tokio_async_read(file.as_ref()); /// assert!(reader.base_iri().is_none()); // No base IRI at the beginning @@ -633,10 +633,10 @@ impl LowLevelN3Reader { /// ``` /// use oxttl::N3Parser; /// - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; - /// schema:name \"Foo\" ."; + /// schema:name "Foo" ."#; /// /// let mut reader = N3Parser::new().parse(); /// reader.extend_from_slice(file); @@ -655,10 +655,10 @@ impl LowLevelN3Reader { /// ``` /// use oxttl::N3Parser; /// - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . 
/// a schema:Person ; - /// schema:name \"Foo\" ."; + /// schema:name "Foo" ."#; /// /// let mut reader = N3Parser::new().parse(); /// reader.extend_from_slice(file); @@ -1214,7 +1214,7 @@ impl N3Recognizer { true, Some(b"#"), ), - N3Recognizer { + Self { stack: vec![N3State::N3Doc], terms: Vec::new(), predicates: Vec::new(), diff --git a/lib/oxttl/src/nquads.rs b/lib/oxttl/src/nquads.rs index 9be1843b..f5108828 100644 --- a/lib/oxttl/src/nquads.rs +++ b/lib/oxttl/src/nquads.rs @@ -19,10 +19,10 @@ use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt}; /// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxttl::NQuadsParser; /// -/// let file = b" . -/// \"Foo\" . +/// let file = br#" . +/// "Foo" . /// . -/// \"Bar\" ."; +/// "Bar" ."#; /// /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let mut count = 0; @@ -76,10 +76,10 @@ impl NQuadsParser { /// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxttl::NQuadsParser; /// - /// let file = b" . - /// \"Foo\" . + /// let file = br#" . + /// "Foo" . /// . - /// \"Bar\" ."; + /// "Bar" ."#; /// /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let mut count = 0; @@ -107,10 +107,10 @@ impl NQuadsParser { /// /// # #[tokio::main(flavor = "current_thread")] /// # async fn main() -> Result<(), oxttl::ParseError> { - /// let file = b" . - /// \"Foo\" . + /// let file = br#" . + /// "Foo" . /// . - /// \"Bar\" ."; + /// "Bar" ."#; /// /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); /// let mut count = 0; @@ -191,10 +191,10 @@ impl NQuadsParser { /// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxttl::NQuadsParser; /// -/// let file = b" . -/// \"Foo\" . +/// let file = br#" . +/// "Foo" . /// . -/// \"Bar\" ."; +/// "Bar" ."#; /// /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let mut count = 0; @@ -215,7 +215,7 @@ pub struct FromReadNQuadsReader { impl Iterator for FromReadNQuadsReader { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { self.inner.next() } } @@ -229,10 +229,10 @@ impl Iterator for FromReadNQuadsReader { /// /// # #[tokio::main(flavor = "current_thread")] /// # async fn main() -> Result<(), oxttl::ParseError> { -/// let file = b" . -/// \"Foo\" . +/// let file = br#" . +/// "Foo" . /// . -/// \"Bar\" ."; +/// "Bar" ."#; /// /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); /// let mut count = 0; diff --git a/lib/oxttl/src/ntriples.rs b/lib/oxttl/src/ntriples.rs index 995643bc..4e0f7d7c 100644 --- a/lib/oxttl/src/ntriples.rs +++ b/lib/oxttl/src/ntriples.rs @@ -19,10 +19,10 @@ use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt}; /// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxttl::NTriplesParser; /// -/// let file = b" . -/// \"Foo\" . +/// let file = br#" . +/// "Foo" . /// . -/// \"Bar\" ."; +/// "Bar" ."#; /// /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let mut count = 0; @@ -76,10 +76,10 @@ impl NTriplesParser { /// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxttl::NTriplesParser; /// - /// let file = b" . - /// \"Foo\" . + /// let file = br#" . + /// "Foo" . /// . - /// \"Bar\" ."; + /// "Bar" ."#; /// /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let mut count = 0; @@ -107,10 +107,10 @@ impl NTriplesParser { /// /// # #[tokio::main(flavor = "current_thread")] /// # async fn main() -> Result<(), oxttl::ParseError> { - /// let file = b" . - /// \"Foo\" . + /// let file = br#" . 
+ /// "Foo" . /// . - /// \"Bar\" ."; + /// "Bar" ."#; /// /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); /// let mut count = 0; @@ -191,10 +191,10 @@ impl NTriplesParser { /// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxttl::NTriplesParser; /// -/// let file = b" . -/// \"Foo\" . +/// let file = br#" . +/// "Foo" . /// . -/// \"Bar\" ."; +/// "Bar" ."#; /// /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let mut count = 0; @@ -215,7 +215,7 @@ pub struct FromReadNTriplesReader { impl Iterator for FromReadNTriplesReader { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { Some(self.inner.next()?.map(Into::into)) } } @@ -229,10 +229,10 @@ impl Iterator for FromReadNTriplesReader { /// /// # #[tokio::main(flavor = "current_thread")] /// # async fn main() -> Result<(), oxttl::ParseError> { -/// let file = b" . -/// \"Foo\" . +/// let file = br#" . +/// "Foo" . /// . -/// \"Bar\" ."; +/// "Bar" ."#; /// /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); /// let mut count = 0; @@ -565,7 +565,7 @@ mod tests { fn unchecked_parsing() { let triples = NTriplesParser::new() .unchecked() - .parse_read(" \"baz\"@toolonglangtag .".as_bytes()) + .parse_read(r#" "baz"@toolonglangtag ."#.as_bytes()) .collect::, _>>() .unwrap(); assert_eq!( diff --git a/lib/oxttl/src/terse.rs b/lib/oxttl/src/terse.rs index bc092c1f..86fad434 100644 --- a/lib/oxttl/src/terse.rs +++ b/lib/oxttl/src/terse.rs @@ -844,7 +844,7 @@ impl TriGRecognizer { true, Some(b"#"), ), - TriGRecognizer { + Self { stack: vec![TriGState::TriGDoc], cur_subject: Vec::new(), cur_predicate: Vec::new(), diff --git a/lib/oxttl/src/toolkit/error.rs b/lib/oxttl/src/toolkit/error.rs index df50b950..e279dab4 100644 --- a/lib/oxttl/src/toolkit/error.rs +++ b/lib/oxttl/src/toolkit/error.rs @@ -72,7 +72,7 @@ impl Error for SyntaxError {} impl From for io::Error { #[inline] fn from(error: SyntaxError) -> Self { - io::Error::new(io::ErrorKind::InvalidData, error) + Self::new(io::ErrorKind::InvalidData, error) } } diff --git a/lib/oxttl/src/trig.rs b/lib/oxttl/src/trig.rs index 70d3edb6..0dad7fd7 100644 --- a/lib/oxttl/src/trig.rs +++ b/lib/oxttl/src/trig.rs @@ -22,12 +22,12 @@ use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt}; /// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxttl::TriGParser; /// -/// let file = b"@base . +/// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; -/// schema:name \"Foo\" . +/// schema:name "Foo" . /// a schema:Person ; -/// schema:name \"Bar\" ."; +/// schema:name "Bar" ."#; /// /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let mut count = 0; @@ -100,12 +100,12 @@ impl TriGParser { /// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxttl::TriGParser; /// - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; - /// schema:name \"Foo\" . + /// schema:name "Foo" . /// a schema:Person ; - /// schema:name \"Bar\" ."; + /// schema:name "Bar" ."#; /// /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let mut count = 0; @@ -133,12 +133,12 @@ impl TriGParser { /// /// # #[tokio::main(flavor = "current_thread")] /// # async fn main() -> Result<(), oxttl::ParseError> { - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; - /// schema:name \"Foo\" . + /// schema:name "Foo" . 
/// a schema:Person ; - /// schema:name \"Bar\" ."; + /// schema:name "Bar" ."#; /// /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); /// let mut count = 0; @@ -220,12 +220,12 @@ impl TriGParser { /// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxttl::TriGParser; /// -/// let file = b"@base . +/// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; -/// schema:name \"Foo\" . +/// schema:name "Foo" . /// a schema:Person ; -/// schema:name \"Bar\" ."; +/// schema:name "Bar" ."#; /// /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let mut count = 0; @@ -253,10 +253,10 @@ impl FromReadTriGReader { /// ``` /// use oxttl::TriGParser; /// - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; - /// schema:name \"Foo\" ."; + /// schema:name "Foo" ."#; /// /// let mut reader = TriGParser::new().parse_read(file.as_ref()); /// assert!(reader.prefixes().is_empty()); // No prefix at the beginning @@ -274,10 +274,10 @@ impl FromReadTriGReader { /// ``` /// use oxttl::TriGParser; /// - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; - /// schema:name \"Foo\" ."; + /// schema:name "Foo" ."#; /// /// let mut reader = TriGParser::new().parse_read(file.as_ref()); /// assert!(reader.base_iri().is_none()); // No base at the beginning because none has been given to the parser. @@ -300,7 +300,7 @@ impl FromReadTriGReader { impl Iterator for FromReadTriGReader { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { self.inner.next() } } @@ -314,12 +314,12 @@ impl Iterator for FromReadTriGReader { /// /// # #[tokio::main(flavor = "current_thread")] /// # async fn main() -> Result<(), oxttl::ParseError> { -/// let file = b"@base . +/// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; -/// schema:name \"Foo\" . +/// schema:name "Foo" . /// a schema:Person ; -/// schema:name \"Bar\" ."; +/// schema:name "Bar" ."#; /// /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); /// let mut count = 0; @@ -358,10 +358,10 @@ impl FromTokioAsyncReadTriGReader { /// /// # #[tokio::main(flavor = "current_thread")] /// # async fn main() -> Result<(), oxttl::ParseError> { - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; - /// schema:name \"Foo\" ."; + /// schema:name "Foo" ."#; /// /// let mut reader = TriGParser::new().parse_tokio_async_read(file.as_ref()); /// assert!(reader.prefixes().is_empty()); // No prefix at the beginning @@ -382,10 +382,10 @@ impl FromTokioAsyncReadTriGReader { /// /// # #[tokio::main(flavor = "current_thread")] /// # async fn main() -> Result<(), oxttl::ParseError> { - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; - /// schema:name \"Foo\" ."; + /// schema:name "Foo" ."#; /// /// let mut reader = TriGParser::new().parse_tokio_async_read(file.as_ref()); /// assert!(reader.base_iri().is_none()); // No base IRI at the beginning @@ -481,10 +481,10 @@ impl LowLevelTriGReader { /// ``` /// use oxttl::TriGParser; /// - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . 
/// a schema:Person ; - /// schema:name \"Foo\" ."; + /// schema:name "Foo" ."#; /// /// let mut reader = TriGParser::new().parse(); /// reader.extend_from_slice(file); @@ -503,10 +503,10 @@ impl LowLevelTriGReader { /// ``` /// use oxttl::TriGParser; /// - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; - /// schema:name \"Foo\" ."; + /// schema:name "Foo" ."#; /// /// let mut reader = TriGParser::new().parse(); /// reader.extend_from_slice(file); diff --git a/lib/oxttl/src/turtle.rs b/lib/oxttl/src/turtle.rs index 0e225611..542afd27 100644 --- a/lib/oxttl/src/turtle.rs +++ b/lib/oxttl/src/turtle.rs @@ -24,12 +24,12 @@ use tokio::io::{AsyncRead, AsyncWrite}; /// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxttl::TurtleParser; /// -/// let file = b"@base . +/// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; -/// schema:name \"Foo\" . +/// schema:name "Foo" . /// a schema:Person ; -/// schema:name \"Bar\" ."; +/// schema:name "Bar" ."#; /// /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let mut count = 0; @@ -102,12 +102,12 @@ impl TurtleParser { /// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxttl::TurtleParser; /// - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; - /// schema:name \"Foo\" . + /// schema:name "Foo" . /// a schema:Person ; - /// schema:name \"Bar\" ."; + /// schema:name "Bar" ."#; /// /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let mut count = 0; @@ -135,12 +135,12 @@ impl TurtleParser { /// /// # #[tokio::main(flavor = "current_thread")] /// # async fn main() -> Result<(), oxttl::ParseError> { - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; - /// schema:name \"Foo\" . + /// schema:name "Foo" . /// a schema:Person ; - /// schema:name \"Bar\" ."; + /// schema:name "Bar" ."#; /// /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); /// let mut count = 0; @@ -222,12 +222,12 @@ impl TurtleParser { /// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxttl::TurtleParser; /// -/// let file = b"@base . +/// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; -/// schema:name \"Foo\" . +/// schema:name "Foo" . /// a schema:Person ; -/// schema:name \"Bar\" ."; +/// schema:name "Bar" ."#; /// /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let mut count = 0; @@ -255,10 +255,10 @@ impl FromReadTurtleReader { /// ``` /// use oxttl::TurtleParser; /// - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; - /// schema:name \"Foo\" ."; + /// schema:name "Foo" ."#; /// /// let mut reader = TurtleParser::new().parse_read(file.as_ref()); /// assert!(reader.prefixes().is_empty()); // No prefix at the beginning @@ -276,10 +276,10 @@ impl FromReadTurtleReader { /// ``` /// use oxttl::TurtleParser; /// - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; - /// schema:name \"Foo\" ."; + /// schema:name "Foo" ."#; /// /// let mut reader = TurtleParser::new().parse_read(file.as_ref()); /// assert!(reader.base_iri().is_none()); // No base at the beginning because none has been given to the parser. 
@@ -302,7 +302,7 @@ impl FromReadTurtleReader { impl Iterator for FromReadTurtleReader { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { Some(self.inner.next()?.map(Into::into)) } } @@ -316,12 +316,12 @@ impl Iterator for FromReadTurtleReader { /// /// # #[tokio::main(flavor = "current_thread")] /// # async fn main() -> Result<(), oxttl::ParseError> { -/// let file = b"@base . +/// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; -/// schema:name \"Foo\" . +/// schema:name "Foo" . /// a schema:Person ; -/// schema:name \"Bar\" ."; +/// schema:name "Bar" ."#; /// /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); /// let mut count = 0; @@ -360,10 +360,10 @@ impl FromTokioAsyncReadTurtleReader { /// /// # #[tokio::main(flavor = "current_thread")] /// # async fn main() -> Result<(), oxttl::ParseError> { - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; - /// schema:name \"Foo\" ."; + /// schema:name "Foo" ."#; /// /// let mut reader = TurtleParser::new().parse_tokio_async_read(file.as_ref()); /// assert!(reader.prefixes().is_empty()); // No prefix at the beginning @@ -384,10 +384,10 @@ impl FromTokioAsyncReadTurtleReader { /// /// # #[tokio::main(flavor = "current_thread")] /// # async fn main() -> Result<(), oxttl::ParseError> { - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; - /// schema:name \"Foo\" ."; + /// schema:name "Foo" ."#; /// /// let mut reader = TurtleParser::new().parse_tokio_async_read(file.as_ref()); /// assert!(reader.base_iri().is_none()); // No base IRI at the beginning @@ -483,10 +483,10 @@ impl LowLevelTurtleReader { /// ``` /// use oxttl::TurtleParser; /// - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; - /// schema:name \"Foo\" ."; + /// schema:name "Foo" ."#; /// /// let mut reader = TurtleParser::new().parse(); /// reader.extend_from_slice(file); @@ -505,10 +505,10 @@ impl LowLevelTurtleReader { /// ``` /// use oxttl::TurtleParser; /// - /// let file = b"@base . + /// let file = br#"@base . /// @prefix schema: . /// a schema:Person ; - /// schema:name \"Foo\" ."; + /// schema:name "Foo" ."#; /// /// let mut reader = TurtleParser::new().parse(); /// reader.extend_from_slice(file); diff --git a/lib/sparesults/src/parser.rs b/lib/sparesults/src/parser.rs index 0a826e96..a00d014c 100644 --- a/lib/sparesults/src/parser.rs +++ b/lib/sparesults/src/parser.rs @@ -22,11 +22,11 @@ use std::sync::Arc; /// /// let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Json); /// // boolean -/// if let FromReadQueryResultsReader::Boolean(v) = json_parser.parse_read(b"{\"boolean\":true}".as_slice())? { +/// if let FromReadQueryResultsReader::Boolean(v) = json_parser.parse_read(br#"{"boolean":true}"#.as_slice())? { /// assert_eq!(v, true); /// } /// // solutions -/// if let FromReadQueryResultsReader::Solutions(solutions) = json_parser.parse_read(b"{\"head\":{\"vars\":[\"foo\",\"bar\"]},\"results\":{\"bindings\":[{\"foo\":{\"type\":\"literal\",\"value\":\"test\"}}]}}".as_slice())? { +/// if let FromReadQueryResultsReader::Solutions(solutions) = json_parser.parse_read(br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}}]}}"#.as_slice())? 
{ /// assert_eq!(solutions.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]); /// for solution in solutions { /// assert_eq!(solution?.iter().collect::>(), vec![(&Variable::new_unchecked("foo"), &Literal::from("test").into())]); @@ -57,12 +57,12 @@ impl QueryResultsParser { /// let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Xml); /// /// // boolean - /// if let FromReadQueryResultsReader::Boolean(v) = json_parser.parse_read(b"true".as_slice())? { + /// if let FromReadQueryResultsReader::Boolean(v) = json_parser.parse_read(br#"true"#.as_slice())? { /// assert_eq!(v, true); /// } /// /// // solutions - /// if let FromReadQueryResultsReader::Solutions(solutions) = json_parser.parse_read(b"test".as_slice())? { + /// if let FromReadQueryResultsReader::Solutions(solutions) = json_parser.parse_read(br#"test"#.as_slice())? { /// assert_eq!(solutions.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]); /// for solution in solutions { /// assert_eq!(solution?.iter().collect::>(), vec![(&Variable::new_unchecked("foo"), &Literal::from("test").into())]); @@ -164,7 +164,7 @@ pub enum FromReadQueryResultsReader { /// use oxrdf::{Literal, Variable}; /// /// let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Json); -/// if let FromReadQueryResultsReader::Solutions(solutions) = json_parser.parse_read(b"{\"head\":{\"vars\":[\"foo\",\"bar\"]},\"results\":{\"bindings\":[{\"foo\":{\"type\":\"literal\",\"value\":\"test\"}}]}}".as_slice())? { +/// if let FromReadQueryResultsReader::Solutions(solutions) = json_parser.parse_read(br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}}]}}"#.as_slice())? { /// assert_eq!(solutions.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]); /// for solution in solutions { /// assert_eq!(solution?.iter().collect::>(), vec![(&Variable::new_unchecked("foo"), &Literal::from("test").into())]); @@ -206,7 +206,7 @@ impl FromReadSolutionsReader { impl Iterator for FromReadSolutionsReader { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { Some( match &mut self.solutions { SolutionsReaderKind::Xml(reader) => reader.read_next(), diff --git a/lib/sparesults/src/serializer.rs b/lib/sparesults/src/serializer.rs index 9a4ba143..13c21628 100644 --- a/lib/sparesults/src/serializer.rs +++ b/lib/sparesults/src/serializer.rs @@ -35,14 +35,14 @@ use tokio::io::AsyncWrite; /// // boolean /// let mut buffer = Vec::new(); /// json_serializer.serialize_boolean_to_write(&mut buffer, true)?; -/// assert_eq!(buffer, b"{\"head\":{},\"boolean\":true}"); +/// assert_eq!(buffer, br#"{"head":{},"boolean":true}"#); /// /// // solutions /// let mut buffer = Vec::new(); /// let mut writer = json_serializer.serialize_solutions_to_write(&mut buffer, vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")])?; /// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test"))))?; /// writer.finish()?; -/// assert_eq!(buffer, b"{\"head\":{\"vars\":[\"foo\",\"bar\"]},\"results\":{\"bindings\":[{\"foo\":{\"type\":\"literal\",\"value\":\"test\"}}]}}"); +/// assert_eq!(buffer, br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}}]}}"#); /// # std::io::Result::Ok(()) /// ``` pub struct QueryResultsSerializer { @@ -65,7 +65,7 @@ impl QueryResultsSerializer { /// let xml_serializer = 
QueryResultsSerializer::from_format(QueryResultsFormat::Xml); /// let mut buffer = Vec::new(); /// xml_serializer.serialize_boolean_to_write(&mut buffer, true)?; - /// assert_eq!(buffer, b"true"); + /// assert_eq!(buffer, br#"true"#); /// # std::io::Result::Ok(()) /// ``` pub fn serialize_boolean_to_write(&self, write: W, value: bool) -> io::Result { @@ -89,7 +89,7 @@ impl QueryResultsSerializer { /// let json_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Json); /// let mut buffer = Vec::new(); /// json_serializer.serialize_boolean_to_tokio_async_write(&mut buffer, false).await?; - /// assert_eq!(buffer, b"{\"head\":{},\"boolean\":false}"); + /// assert_eq!(buffer, br#"{"head":{},"boolean":false}"#); /// # Ok(()) /// # } /// ``` @@ -134,7 +134,7 @@ impl QueryResultsSerializer { /// let mut writer = xml_serializer.serialize_solutions_to_write(&mut buffer, vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")])?; /// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test"))))?; /// writer.finish()?; - /// assert_eq!(buffer, b"test"); + /// assert_eq!(buffer, br#"test"#); /// # std::io::Result::Ok(()) /// ``` pub fn serialize_solutions_to_write( @@ -183,7 +183,7 @@ impl QueryResultsSerializer { /// let mut writer = json_serializer.serialize_solutions_to_tokio_async_write(&mut buffer, vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]).await?; /// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test")))).await?; /// writer.finish().await?; - /// assert_eq!(buffer, b"{\"head\":{\"vars\":[\"foo\",\"bar\"]},\"results\":{\"bindings\":[{\"foo\":{\"type\":\"literal\",\"value\":\"test\"}}]}}"); + /// assert_eq!(buffer, br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}}]}}"#); /// # Ok(()) /// # } /// ``` @@ -280,7 +280,7 @@ impl ToWriteSolutionsWriter { /// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test"))))?; /// writer.write(&QuerySolution::from((vec![Variable::new_unchecked("bar")], vec![Some(Literal::from("test").into())])))?; /// writer.finish()?; - /// assert_eq!(buffer, b"{\"head\":{\"vars\":[\"foo\",\"bar\"]},\"results\":{\"bindings\":[{\"foo\":{\"type\":\"literal\",\"value\":\"test\"}},{\"bar\":{\"type\":\"literal\",\"value\":\"test\"}}]}}"); + /// assert_eq!(buffer, br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}},{"bar":{"type":"literal","value":"test"}}]}}"#); /// # std::io::Result::Ok(()) /// ``` pub fn write<'a>( @@ -368,7 +368,7 @@ impl ToTokioAsyncWriteSolutionsWriter { /// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test")))).await?; /// writer.write(&QuerySolution::from((vec![Variable::new_unchecked("bar")], vec![Some(Literal::from("test").into())]))).await?; /// writer.finish().await?; - /// assert_eq!(buffer, b"{\"head\":{\"vars\":[\"foo\",\"bar\"]},\"results\":{\"bindings\":[{\"foo\":{\"type\":\"literal\",\"value\":\"test\"}},{\"bar\":{\"type\":\"literal\",\"value\":\"test\"}}]}}"); + /// assert_eq!(buffer, br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}},{"bar":{"type":"literal","value":"test"}}]}}"#); /// # Ok(()) /// # } /// ``` diff --git a/lib/sparesults/src/solution.rs b/lib/sparesults/src/solution.rs index 3990ab79..0d81adc2 100644 --- a/lib/sparesults/src/solution.rs +++ b/lib/sparesults/src/solution.rs @@ -130,7 +130,7 @@ impl<'a> IntoIterator for &'a QuerySolution { 
type IntoIter = Iter<'a>; #[inline] - fn into_iter(self) -> Iter<'a> { + fn into_iter(self) -> Self::IntoIter { Iter { inner: self.variables.iter().zip(&self.values), } @@ -142,7 +142,7 @@ impl Index for QuerySolution { #[allow(clippy::panic)] #[inline] - fn index(&self, index: usize) -> &Term { + fn index(&self, index: usize) -> &Self::Output { self.get(index) .unwrap_or_else(|| panic!("The column {index} is not set in this solution")) } @@ -153,7 +153,7 @@ impl Index<&str> for QuerySolution { #[allow(clippy::panic)] #[inline] - fn index(&self, index: &str) -> &Term { + fn index(&self, index: &str) -> &Self::Output { self.get(index) .unwrap_or_else(|| panic!("The variable ?{index} is not set in this solution")) } @@ -164,7 +164,7 @@ impl Index> for QuerySolution { #[allow(clippy::panic)] #[inline] - fn index(&self, index: VariableRef<'_>) -> &Term { + fn index(&self, index: VariableRef<'_>) -> &Self::Output { self.get(index) .unwrap_or_else(|| panic!("The variable {index} is not set in this solution")) } @@ -173,7 +173,7 @@ impl Index for QuerySolution { type Output = Term; #[inline] - fn index(&self, index: Variable) -> &Term { + fn index(&self, index: Variable) -> &Self::Output { self.index(index.as_ref()) } } @@ -182,7 +182,7 @@ impl Index<&Variable> for QuerySolution { type Output = Term; #[inline] - fn index(&self, index: &Variable) -> &Term { + fn index(&self, index: &Variable) -> &Self::Output { self.index(index.as_ref()) } } @@ -228,7 +228,7 @@ impl<'a> Iterator for Iter<'a> { type Item = (&'a Variable, &'a Term); #[inline] - fn next(&mut self) -> Option<(&'a Variable, &'a Term)> { + fn next(&mut self) -> Option { for (variable, value) in &mut self.inner { if let Some(value) = value { return Some((variable, value)); diff --git a/lib/spargebra/src/query.rs b/lib/spargebra/src/query.rs index 6cb7e57b..8716de73 100644 --- a/lib/spargebra/src/query.rs +++ b/lib/spargebra/src/query.rs @@ -275,7 +275,7 @@ impl fmt::Display for Query { impl FromStr for Query { type Err = ParseError; - fn from_str(query: &str) -> Result { + fn from_str(query: &str) -> Result { Self::parse(query, None) } } @@ -283,7 +283,7 @@ impl FromStr for Query { impl<'a> TryFrom<&'a str> for Query { type Error = ParseError; - fn try_from(query: &str) -> Result { + fn try_from(query: &str) -> Result { Self::from_str(query) } } @@ -291,7 +291,7 @@ impl<'a> TryFrom<&'a str> for Query { impl<'a> TryFrom<&'a String> for Query { type Error = ParseError; - fn try_from(query: &String) -> Result { + fn try_from(query: &String) -> Result { Self::from_str(query) } } diff --git a/lib/spargebra/src/term.rs b/lib/spargebra/src/term.rs index 6a33294c..362b3959 100644 --- a/lib/spargebra/src/term.rs +++ b/lib/spargebra/src/term.rs @@ -48,7 +48,7 @@ impl TryFrom for GroundSubject { type Error = (); #[inline] - fn try_from(subject: Subject) -> Result { + fn try_from(subject: Subject) -> Result { match subject { Subject::NamedNode(t) => Ok(t.into()), Subject::BlankNode(_) => Err(()), @@ -62,7 +62,7 @@ impl TryFrom for GroundSubject { type Error = (); #[inline] - fn try_from(term: GroundTerm) -> Result { + fn try_from(term: GroundTerm) -> Result { match term { GroundTerm::NamedNode(t) => Ok(t.into()), GroundTerm::Literal(_) => Err(()), @@ -125,7 +125,7 @@ impl TryFrom for GroundTerm { type Error = (); #[inline] - fn try_from(term: Term) -> Result { + fn try_from(term: Term) -> Result { match term { Term::NamedNode(t) => Ok(t.into()), Term::BlankNode(_) => Err(()), @@ -171,7 +171,7 @@ impl TryFrom for GroundTriple { type Error = (); 
#[inline] - fn try_from(triple: Triple) -> Result { + fn try_from(triple: Triple) -> Result { Ok(Self { subject: triple.subject.try_into()?, predicate: triple.predicate, @@ -221,7 +221,7 @@ impl TryFrom for GraphName { type Error = (); #[inline] - fn try_from(pattern: GraphNamePattern) -> Result { + fn try_from(pattern: GraphNamePattern) -> Result { match pattern { GraphNamePattern::NamedNode(t) => Ok(t.into()), GraphNamePattern::DefaultGraph => Ok(Self::DefaultGraph), @@ -295,7 +295,7 @@ impl TryFrom for Quad { type Error = (); #[inline] - fn try_from(quad: QuadPattern) -> Result { + fn try_from(quad: QuadPattern) -> Result { Ok(Self { subject: quad.subject.try_into()?, predicate: quad.predicate.try_into()?, @@ -370,7 +370,7 @@ impl TryFrom for GroundQuad { type Error = (); #[inline] - fn try_from(quad: Quad) -> Result { + fn try_from(quad: Quad) -> Result { Ok(Self { subject: quad.subject.try_into()?, predicate: quad.predicate, @@ -425,7 +425,7 @@ impl TryFrom for NamedNode { type Error = (); #[inline] - fn try_from(pattern: NamedNodePattern) -> Result { + fn try_from(pattern: NamedNodePattern) -> Result { match pattern { NamedNodePattern::NamedNode(t) => Ok(t), NamedNodePattern::Variable(_) => Err(()), @@ -559,7 +559,7 @@ impl TryFrom for Subject { type Error = (); #[inline] - fn try_from(term: TermPattern) -> Result { + fn try_from(term: TermPattern) -> Result { match term { TermPattern::NamedNode(t) => Ok(t.into()), TermPattern::BlankNode(t) => Ok(t.into()), @@ -574,7 +574,7 @@ impl TryFrom for Term { type Error = (); #[inline] - fn try_from(pattern: TermPattern) -> Result { + fn try_from(pattern: TermPattern) -> Result { match pattern { TermPattern::NamedNode(t) => Ok(t.into()), TermPattern::BlankNode(t) => Ok(t.into()), @@ -686,7 +686,7 @@ impl TryFrom for GroundTermPattern { type Error = (); #[inline] - fn try_from(pattern: TermPattern) -> Result { + fn try_from(pattern: TermPattern) -> Result { Ok(match pattern { TermPattern::NamedNode(named_node) => named_node.into(), TermPattern::BlankNode(_) => return Err(()), @@ -828,7 +828,7 @@ impl TryFrom for Triple { type Error = (); #[inline] - fn try_from(triple: TriplePattern) -> Result { + fn try_from(triple: TriplePattern) -> Result { Ok(Self { subject: triple.subject.try_into()?, predicate: triple.predicate.try_into()?, @@ -1000,7 +1000,7 @@ impl TryFrom for GroundQuadPattern { type Error = (); #[inline] - fn try_from(pattern: QuadPattern) -> Result { + fn try_from(pattern: QuadPattern) -> Result { Ok(Self { subject: pattern.subject.try_into()?, predicate: pattern.predicate, diff --git a/lib/spargebra/src/update.rs b/lib/spargebra/src/update.rs index e73b234e..e2a2c653 100644 --- a/lib/spargebra/src/update.rs +++ b/lib/spargebra/src/update.rs @@ -70,7 +70,7 @@ impl fmt::Display for Update { impl FromStr for Update { type Err = ParseError; - fn from_str(update: &str) -> Result { + fn from_str(update: &str) -> Result { Self::parse(update, None) } } @@ -78,7 +78,7 @@ impl FromStr for Update { impl<'a> TryFrom<&'a str> for Update { type Error = ParseError; - fn try_from(update: &str) -> Result { + fn try_from(update: &str) -> Result { Self::from_str(update) } } @@ -86,7 +86,7 @@ impl<'a> TryFrom<&'a str> for Update { impl<'a> TryFrom<&'a String> for Update { type Error = ParseError; - fn try_from(update: &String) -> Result { + fn try_from(update: &String) -> Result { Self::from_str(update) } } diff --git a/lib/sparopt/src/algebra.rs b/lib/sparopt/src/algebra.rs index e5cb0952..51ecf6fa 100644 --- a/lib/sparopt/src/algebra.rs +++ 
b/lib/sparopt/src/algebra.rs @@ -364,25 +364,25 @@ impl Expression { fn returns_boolean(&self) -> bool { match self { - Expression::Or(_) - | Expression::And(_) - | Expression::Equal(_, _) - | Expression::SameTerm(_, _) - | Expression::Greater(_, _) - | Expression::GreaterOrEqual(_, _) - | Expression::Less(_, _) - | Expression::LessOrEqual(_, _) - | Expression::Not(_) - | Expression::Exists(_) - | Expression::Bound(_) - | Expression::FunctionCall( + Self::Or(_) + | Self::And(_) + | Self::Equal(_, _) + | Self::SameTerm(_, _) + | Self::Greater(_, _) + | Self::GreaterOrEqual(_, _) + | Self::Less(_, _) + | Self::LessOrEqual(_, _) + | Self::Not(_) + | Self::Exists(_) + | Self::Bound(_) + | Self::FunctionCall( Function::IsBlank | Function::IsIri | Function::IsLiteral | Function::IsNumeric, _, ) => true, #[cfg(feature = "rdf-star")] - Expression::FunctionCall(Function::IsTriple, _) => true, - Expression::Literal(literal) => literal.datatype() == xsd::BOOLEAN, - Expression::If(_, a, b) => a.returns_boolean() && b.returns_boolean(), + Self::FunctionCall(Function::IsTriple, _) => true, + Self::Literal(literal) => literal.datatype() == xsd::BOOLEAN, + Self::If(_, a, b) => a.returns_boolean() && b.returns_boolean(), _ => false, } } @@ -847,7 +847,7 @@ impl GraphPattern { } } if all.is_empty() { - GraphPattern::empty() + Self::empty() } else { Self::Union { inner: order_vec(all), diff --git a/lib/src/io/format.rs b/lib/src/io/format.rs index 89ba37f9..b07c1709 100644 --- a/lib/src/io/format.rs +++ b/lib/src/io/format.rs @@ -258,7 +258,7 @@ impl TryFrom for GraphFormat { /// Attempts to find a graph format that is a subset of this [`DatasetFormat`]. #[inline] - fn try_from(value: DatasetFormat) -> Result { + fn try_from(value: DatasetFormat) -> Result { match value { DatasetFormat::NQuads => Ok(Self::NTriples), DatasetFormat::TriG => Ok(Self::Turtle), @@ -271,7 +271,7 @@ impl TryFrom for DatasetFormat { /// Attempts to find a dataset format that is a superset of this [`GraphFormat`]. 
#[inline] - fn try_from(value: GraphFormat) -> Result { + fn try_from(value: GraphFormat) -> Result { match value { GraphFormat::NTriples => Ok(Self::NQuads), GraphFormat::Turtle => Ok(Self::TriG), diff --git a/lib/src/io/read.rs b/lib/src/io/read.rs index 33065615..3400b8e2 100644 --- a/lib/src/io/read.rs +++ b/lib/src/io/read.rs @@ -95,7 +95,7 @@ pub struct TripleReader { impl Iterator for TripleReader { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { Some(self.parser.next()?.map(Into::into).map_err(Into::into)) } } @@ -184,7 +184,7 @@ pub struct QuadReader { impl Iterator for QuadReader { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { Some(self.parser.next()?.map_err(Into::into)) } } diff --git a/lib/src/sparql/algebra.rs b/lib/src/sparql/algebra.rs index d83241c6..b046de80 100644 --- a/lib/src/sparql/algebra.rs +++ b/lib/src/sparql/algebra.rs @@ -65,7 +65,7 @@ impl fmt::Display for Query { impl FromStr for Query { type Err = spargebra::ParseError; - fn from_str(query: &str) -> Result { + fn from_str(query: &str) -> Result { Self::parse(query, None) } } @@ -73,7 +73,7 @@ impl FromStr for Query { impl TryFrom<&str> for Query { type Error = spargebra::ParseError; - fn try_from(query: &str) -> Result { + fn try_from(query: &str) -> Result { Self::from_str(query) } } @@ -81,7 +81,7 @@ impl TryFrom<&str> for Query { impl TryFrom<&String> for Query { type Error = spargebra::ParseError; - fn try_from(query: &String) -> Result { + fn try_from(query: &String) -> Result { Self::from_str(query) } } @@ -158,7 +158,7 @@ impl fmt::Display for Update { impl FromStr for Update { type Err = spargebra::ParseError; - fn from_str(update: &str) -> Result { + fn from_str(update: &str) -> Result { Self::parse(update, None) } } @@ -166,7 +166,7 @@ impl FromStr for Update { impl TryFrom<&str> for Update { type Error = spargebra::ParseError; - fn try_from(update: &str) -> Result { + fn try_from(update: &str) -> Result { Self::from_str(update) } } @@ -174,7 +174,7 @@ impl TryFrom<&str> for Update { impl TryFrom<&String> for Update { type Error = spargebra::ParseError; - fn try_from(update: &String) -> Result { + fn try_from(update: &String) -> Result { Self::from_str(update) } } diff --git a/lib/src/sparql/eval.rs b/lib/src/sparql/eval.rs index 9ae02da1..25c4b3cf 100644 --- a/lib/src/sparql/eval.rs +++ b/lib/src/sparql/eval.rs @@ -3892,9 +3892,9 @@ impl TupleSelector { fn get_pattern_value(&self, tuple: &EncodedTuple) -> Option { match self { - TupleSelector::Constant(c) => Some(c.clone()), - TupleSelector::Variable(v) => tuple.get(*v).cloned(), - TupleSelector::TriplePattern(triple) => Some( + Self::Constant(c) => Some(c.clone()), + Self::Variable(v) => tuple.get(*v).cloned(), + Self::TriplePattern(triple) => Some( EncodedTriple { subject: triple.subject.get_pattern_value(tuple)?, predicate: triple.predicate.get_pattern_value(tuple)?, @@ -4732,7 +4732,7 @@ struct CartesianProductJoinIterator { impl Iterator for CartesianProductJoinIterator { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { loop { if let Some(result) = self.buffered_results.pop() { return Some(result); @@ -4767,7 +4767,7 @@ struct HashJoinIterator { impl Iterator for HashJoinIterator { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { loop { if let Some(result) = self.buffered_results.pop() { return Some(result); @@ -4806,7 +4806,7 @@ struct HashLeftJoinIterator { impl Iterator for 
HashLeftJoinIterator { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { loop { if let Some(result) = self.buffered_results.pop() { return Some(result); @@ -4854,7 +4854,7 @@ struct ForLoopLeftJoinIterator { impl Iterator for ForLoopLeftJoinIterator { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { if let Some(tuple) = self.current_right.next() { return Some(tuple); } @@ -4881,7 +4881,7 @@ struct UnionIterator { impl Iterator for UnionIterator { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { loop { if let Some(tuple) = self.current_iterator.next() { return Some(tuple); @@ -4903,7 +4903,7 @@ struct ConsecutiveDeduplication { impl Iterator for ConsecutiveDeduplication { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { // Basic idea. We buffer the previous result and we only emit it when we kow the next one or it's the end loop { if let Some(next) = self.inner.next() { @@ -4944,7 +4944,7 @@ struct ConstructIterator { impl Iterator for ConstructIterator { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { loop { if let Some(result) = self.buffered_results.pop() { return Some(result); @@ -5046,7 +5046,7 @@ struct DescribeIterator { impl Iterator for DescribeIterator { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { loop { if let Some(quad) = self.quads.next() { return Some(match quad { @@ -5097,7 +5097,7 @@ impl, I2: Iterator> Iterator { type Item = (Option, Option); - fn next(&mut self) -> Option<(Option, Option)> { + fn next(&mut self) -> Option { match (self.a.next(), self.b.next()) { (None, None) => None, r => Some(r), @@ -5220,7 +5220,7 @@ impl< { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { loop { if let Some(current) = &mut self.current { if let Some(next) = current.next() { @@ -5629,7 +5629,7 @@ struct StatsIterator { impl Iterator for StatsIterator { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { let start = Timer::now(); let result = self.inner.next(); self.stats.exec_duration.set( diff --git a/lib/src/sparql/model.rs b/lib/src/sparql/model.rs index 326e7603..ca42db16 100644 --- a/lib/src/sparql/model.rs +++ b/lib/src/sparql/model.rs @@ -43,7 +43,7 @@ impl QueryResults { /// /// let mut results = Vec::new(); /// store.query("SELECT ?s WHERE { ?s ?p ?o }")?.write(&mut results, QueryResultsFormat::Json)?; - /// assert_eq!(results, "{\"head\":{\"vars\":[\"s\"]},\"results\":{\"bindings\":[{\"s\":{\"type\":\"uri\",\"value\":\"http://example.com\"}}]}}".as_bytes()); + /// assert_eq!(results, r#"{"head":{"vars":["s"]},"results":{"bindings":[{"s":{"type":"uri","value":"http://example.com"}}]}}"#.as_bytes()); /// # Result::<_,Box>::Ok(()) /// ``` pub fn write( @@ -221,7 +221,7 @@ impl Iterator for QuerySolutionIter { type Item = Result; #[inline] - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { self.iter.next() } @@ -253,7 +253,7 @@ impl Iterator for QueryTripleIter { type Item = Result; #[inline] - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { self.iter.next() } diff --git a/lib/src/sparql/results.rs b/lib/src/sparql/results.rs index bbafe70d..88aff947 100644 --- a/lib/src/sparql/results.rs +++ b/lib/src/sparql/results.rs @@ -30,13 +30,13 @@ //! //! // Let's test with a boolean //! assert_eq!( -//! 
convert_json_to_tsv(b"{\"boolean\":true}".as_slice()).unwrap(), +//! convert_json_to_tsv(br#"{"boolean":true}"#.as_slice()).unwrap(), //! b"true" //! ); //! //! // And with a set of solutions //! assert_eq!( -//! convert_json_to_tsv(b"{\"head\":{\"vars\":[\"foo\",\"bar\"]},\"results\":{\"bindings\":[{\"foo\":{\"type\":\"literal\",\"value\":\"test\"}}]}}".as_slice()).unwrap(), +//! convert_json_to_tsv(br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}}]}}"#.as_slice()).unwrap(), //! b"?foo\t?bar\n\"test\"\t\n" //! ); //! ``` diff --git a/lib/src/sparql/service.rs b/lib/src/sparql/service.rs index 4db172e4..562c1896 100644 --- a/lib/src/sparql/service.rs +++ b/lib/src/sparql/service.rs @@ -24,7 +24,7 @@ use std::time::Duration; /// impl ServiceHandler for TestServiceHandler { /// type Error = EvaluationError; /// -/// fn handle(&self,service_name: NamedNode, query: Query) -> Result { +/// fn handle(&self, service_name: NamedNode, query: Query) -> Result { /// if service_name == "http://example.com/service" { /// self.store.query(query) /// } else { @@ -61,7 +61,7 @@ pub struct EmptyServiceHandler; impl ServiceHandler for EmptyServiceHandler { type Error = EvaluationError; - fn handle(&self, name: NamedNode, _: Query) -> Result { + fn handle(&self, name: NamedNode, _: Query) -> Result { Err(EvaluationError::UnsupportedService(name)) } } @@ -79,11 +79,7 @@ impl ErrorConversionServiceHandler { impl ServiceHandler for ErrorConversionServiceHandler { type Error = EvaluationError; - fn handle( - &self, - service_name: NamedNode, - query: Query, - ) -> Result { + fn handle(&self, service_name: NamedNode, query: Query) -> Result { self.handler .handle(service_name, query) .map_err(|e| EvaluationError::Service(Box::new(e))) @@ -105,11 +101,7 @@ impl SimpleServiceHandler { impl ServiceHandler for SimpleServiceHandler { type Error = EvaluationError; - fn handle( - &self, - service_name: NamedNode, - query: Query, - ) -> Result { + fn handle(&self, service_name: NamedNode, query: Query) -> Result { let (content_type, body) = self .client .post( diff --git a/lib/src/storage/backend/rocksdb.rs b/lib/src/storage/backend/rocksdb.rs index acac6585..670963ba 100644 --- a/lib/src/storage/backend/rocksdb.rs +++ b/lib/src/storage/backend/rocksdb.rs @@ -1157,7 +1157,7 @@ impl Drop for PinnableSlice { impl Deref for PinnableSlice { type Target = [u8]; - fn deref(&self) -> &[u8] { + fn deref(&self) -> &Self::Target { unsafe { let mut len = 0; let val = rocksdb_pinnableslice_value(self.0, &mut len); @@ -1200,7 +1200,7 @@ impl Drop for Buffer { impl Deref for Buffer { type Target = [u8]; - fn deref(&self) -> &[u8] { + fn deref(&self) -> &Self::Target { unsafe { slice::from_raw_parts(self.base, self.len) } } } diff --git a/lib/src/storage/error.rs b/lib/src/storage/error.rs index 8c874d77..89895349 100644 --- a/lib/src/storage/error.rs +++ b/lib/src/storage/error.rs @@ -179,7 +179,7 @@ impl From for io::Error { LoaderError::Storage(error) => error.into(), LoaderError::Parsing(error) => error.into(), LoaderError::InvalidBaseIri { .. 
} => { - io::Error::new(io::ErrorKind::InvalidInput, error.to_string()) + Self::new(io::ErrorKind::InvalidInput, error.to_string()) } } } @@ -242,7 +242,7 @@ impl From for io::Error { SerializerError::Storage(error) => error.into(), SerializerError::Io(error) => error, SerializerError::DatasetFormatExpected(_) => { - io::Error::new(io::ErrorKind::InvalidInput, error.to_string()) + Self::new(io::ErrorKind::InvalidInput, error.to_string()) } } } diff --git a/lib/src/storage/mod.rs b/lib/src/storage/mod.rs index f592aeb9..9dd38ee1 100644 --- a/lib/src/storage/mod.rs +++ b/lib/src/storage/mod.rs @@ -814,7 +814,7 @@ impl ChainedDecodingQuadIterator { impl Iterator for ChainedDecodingQuadIterator { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { if let Some(result) = self.first.next() { Some(result) } else if let Some(second) = self.second.as_mut() { @@ -833,7 +833,7 @@ pub struct DecodingQuadIterator { impl Iterator for DecodingQuadIterator { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { if let Err(e) = self.iter.status() { return Some(Err(e)); } @@ -850,7 +850,7 @@ pub struct DecodingGraphIterator { impl Iterator for DecodingGraphIterator { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { if let Err(e) = self.iter.status() { return Some(Err(e)); } diff --git a/lib/src/storage/small_string.rs b/lib/src/storage/small_string.rs index be836c4d..c2862ff4 100644 --- a/lib/src/storage/small_string.rs +++ b/lib/src/storage/small_string.rs @@ -65,7 +65,7 @@ impl Deref for SmallString { type Target = str; #[inline] - fn deref(&self) -> &str { + fn deref(&self) -> &Self::Target { self.as_str() } } @@ -146,17 +146,17 @@ impl FromStr for SmallString { type Err = BadSmallStringError; #[inline] - fn from_str(value: &str) -> Result { + fn from_str(value: &str) -> Result { if value.len() <= 15 { let mut inner = [0; 16]; inner[..value.len()].copy_from_slice(value.as_bytes()); inner[15] = value .len() .try_into() - .map_err(|_| BadSmallStringError::TooLong(value.len()))?; + .map_err(|_| Self::Err::TooLong(value.len()))?; Ok(Self { inner }) } else { - Err(BadSmallStringError::TooLong(value.len())) + Err(Self::Err::TooLong(value.len())) } } } @@ -165,7 +165,7 @@ impl<'a> TryFrom<&'a str> for SmallString { type Error = BadSmallStringError; #[inline] - fn try_from(value: &'a str) -> Result { + fn try_from(value: &'a str) -> Result { Self::from_str(value) } } diff --git a/lib/src/store.rs b/lib/src/store.rs index 9b448141..95a25260 100644 --- a/lib/src/store.rs +++ b/lib/src/store.rs @@ -1471,7 +1471,7 @@ pub struct QuadIter { impl Iterator for QuadIter { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { Some(match self.iter.next()? { Ok(quad) => self.reader.decode_quad(&quad), Err(error) => Err(error), @@ -1488,7 +1488,7 @@ pub struct GraphNameIter { impl Iterator for GraphNameIter { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { Some( self.iter .next()? 
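Two mechanical refactors recur throughout the Rust hunks above: doc examples move from backslash-escaped byte strings to raw `br#"..."#` literals, and trait impls stop repeating concrete types in their method signatures in favour of the associated types they already declare (`Self::Item`, `Self::Target`, `Self::Output`, `Self::Err`, and so on). A minimal sketch of both patterns, using a made-up `Countdown` iterator and `Name` wrapper rather than any Oxigraph type:

use std::ops::Deref;

// Hypothetical example types, not taken from the Oxigraph sources.
struct Countdown(u32);

impl Iterator for Countdown {
    type Item = u32;

    // Before this kind of change the signature repeated the concrete type:
    // fn next(&mut self) -> Option<u32>
    fn next(&mut self) -> Option<Self::Item> {
        if self.0 == 0 {
            None
        } else {
            self.0 -= 1;
            Some(self.0)
        }
    }
}

struct Name(String);

impl Deref for Name {
    type Target = str;

    // Same idea for Deref: `&Self::Target` instead of `&str`.
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

fn main() {
    // Raw byte-string literals: both sides are the same bytes, but the raw
    // form stays readable because the inner quotes need no escaping.
    assert_eq!(
        b"{\"head\":{},\"boolean\":true}".as_slice(),
        br#"{"head":{},"boolean":true}"#.as_slice()
    );

    assert_eq!(Countdown(3).collect::<Vec<_>>(), vec![2, 1, 0]);
    assert_eq!(&*Name("oxigraph".into()), "oxigraph");
}

Leaning on the associated type keeps these signatures correct if the item or target type ever changes, which is presumably the motivation for the sweep above.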
diff --git a/lints/test_debian_compatibility.py b/lints/test_debian_compatibility.py index ef00126f..2e81a79b 100644 --- a/lints/test_debian_compatibility.py +++ b/lints/test_debian_compatibility.py @@ -1,11 +1,20 @@ import json import subprocess from pathlib import Path +from time import sleep +from urllib.error import HTTPError from urllib.request import urlopen TARGET_DEBIAN_VERSIONS = ["sid"] IGNORE_PACKAGES = {"oxigraph-js", "oxigraph-testsuite", "pyoxigraph", "sparql-smith"} -ALLOWED_MISSING_PACKAGES = {"codspeed-criterion-compat", "escargot", "json-event-parser", "oxhttp", "oxiri", "quick-xml"} +ALLOWED_MISSING_PACKAGES = { + "codspeed-criterion-compat", + "escargot", + "json-event-parser", + "oxhttp", + "oxiri", + "quick-xml", +} base_path = Path(__file__).parent.parent @@ -25,6 +34,23 @@ def parse_version(version): return tuple(int(e) for e in version.split("-")[0].split(".")) +def fetch_debian_package_desc(debian_name): + url = f"https://sources.debian.org/api/src/{debian_name}/" + for i in range(0, 10): + try: + with urlopen(url) as response: + return json.loads(response.read().decode()) + except HTTPError as e: + if e.code / 100 == 5: + wait = 2**i + print(f"Error {e} from {url}, retrying after {wait}s") + sleep(wait) + else: + print(f"Failed to fetch debian name {debian_name} from {url}: {e}") + raise e + raise Exception(f"Failed to fetch {url}") + + for package_id in cargo_metadata["workspace_default_members"]: package = package_by_id[package_id] if package["name"] in IGNORE_PACKAGES: @@ -37,11 +63,9 @@ for package_id in cargo_metadata["workspace_default_members"]: continue candidate_debian_name = f"rust-{dependency['name'].replace('_', '-')}" if dependency["name"] not in debian_cache: - with urlopen( - f"https://sources.debian.org/api/src/{candidate_debian_name}/" - ) as response: - debian_package = json.loads(response.read().decode()) - debian_cache[candidate_debian_name] = debian_package + debian_cache[candidate_debian_name] = fetch_debian_package_desc( + candidate_debian_name + ) debian_package = debian_cache[candidate_debian_name] if "error" in debian_package: errors.add(f"No Debian package found for {dependency['name']}") diff --git a/oxrocksdb-sys/Cargo.toml b/oxrocksdb-sys/Cargo.toml index 1b0196a3..892243cc 100644 --- a/oxrocksdb-sys/Cargo.toml +++ b/oxrocksdb-sys/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "oxrocksdb-sys" -version = "0.4.0-alpha.2" +version = "0.4.0-alpha.3-dev" authors = ["Tpt "] license = "GPL-2.0 OR Apache-2.0" repository = "https://github.com/oxigraph/oxigraph/tree/main/oxrocksdb-sys" diff --git a/python/Cargo.toml b/python/Cargo.toml index 3f386aa9..3cacf9bc 100644 --- a/python/Cargo.toml +++ b/python/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "pyoxigraph" -version = "0.4.0-alpha.2" +version = "0.4.0-alpha.3-dev" authors = ["Tpt "] license = "MIT OR Apache-2.0" readme = "README.md" diff --git a/testsuite/oxigraph-tests/parser/escaped_trailing_dot.nq b/testsuite/oxigraph-tests/parser/escaped_trailing_dot.nq new file mode 100644 index 00000000..d0a345c1 --- /dev/null +++ b/testsuite/oxigraph-tests/parser/escaped_trailing_dot.nq @@ -0,0 +1 @@ + . diff --git a/testsuite/oxigraph-tests/parser/escaped_trailing_dot.nt b/testsuite/oxigraph-tests/parser/escaped_trailing_dot.nt new file mode 100644 index 00000000..d0a345c1 --- /dev/null +++ b/testsuite/oxigraph-tests/parser/escaped_trailing_dot.nt @@ -0,0 +1 @@ + . 
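The lint-script change above wraps the call to the Debian sources API in a retry loop: responses with a 5xx status are retried up to ten times with an exponentially growing sleep, and any other error is re-raised immediately. A rough sketch of the same idea, written in Rust to match the rest of this section rather than the Python of the actual change; the `fetch_with_backoff` helper, its `fetch` closure, and the `(status, message)` error shape are stand-ins, not a real API:

use std::{thread::sleep, time::Duration};

// Retry a fallible fetch, backing off exponentially on server-side (5xx) errors.
fn fetch_with_backoff<T>(
    mut fetch: impl FnMut() -> Result<T, (u16, String)>,
) -> Result<T, String> {
    for attempt in 0..10u32 {
        match fetch() {
            Ok(value) => return Ok(value),
            // Integer division: any 5xx status counts as transient.
            Err((status, message)) if status / 100 == 5 => {
                let wait = 2u64.pow(attempt);
                eprintln!("Error {message} (HTTP {status}), retrying after {wait}s");
                sleep(Duration::from_secs(wait));
            }
            Err((_, message)) => return Err(message),
        }
    }
    Err("too many failed attempts".into())
}

fn main() {
    // The first two simulated calls fail with 503, so this sleeps roughly 3s
    // (1s + 2s) before the third call succeeds.
    let mut calls = 0;
    let result = fetch_with_backoff(|| {
        calls += 1;
        if calls < 3 {
            Err((503, "service unavailable".to_owned()))
        } else {
            Ok("package description")
        }
    });
    assert_eq!(result, Ok("package description"));
}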
diff --git a/testsuite/oxigraph-tests/parser/escaped_trailing_dot.trig b/testsuite/oxigraph-tests/parser/escaped_trailing_dot.trig new file mode 100644 index 00000000..9017defa --- /dev/null +++ b/testsuite/oxigraph-tests/parser/escaped_trailing_dot.trig @@ -0,0 +1,2 @@ +@prefix ex: . +ex:s ex:p ex:o\. . diff --git a/testsuite/oxigraph-tests/parser/escaped_trailing_dot.ttl b/testsuite/oxigraph-tests/parser/escaped_trailing_dot.ttl new file mode 100644 index 00000000..9017defa --- /dev/null +++ b/testsuite/oxigraph-tests/parser/escaped_trailing_dot.ttl @@ -0,0 +1,2 @@ +@prefix ex: . +ex:s ex:p ex:o\. . diff --git a/testsuite/oxigraph-tests/parser/manifest.ttl b/testsuite/oxigraph-tests/parser/manifest.ttl index ec90b2bf..af3e80b5 100644 --- a/testsuite/oxigraph-tests/parser/manifest.ttl +++ b/testsuite/oxigraph-tests/parser/manifest.ttl @@ -18,6 +18,8 @@ <#keyword_vs_prefix_ttl> <#keyword_vs_prefix_trig> <#at_keywords_as_lang_tag> + <#escaped_trailing_dot_ttl> + <#escaped_trailing_dot_trig> ) . <#no_end_line_jump> @@ -88,3 +90,15 @@ mf:name "usage of at keywords as language tags" ; mf:action ; mf:result . + +<#escaped_trailing_dot_ttl> + rdf:type rdft:TestTurtleEval ; + mf:name "escaped dot at the end of a local name" ; + mf:action ; + mf:result . + +<#escaped_trailing_dot_trig> + rdf:type rdft:TestTrigEval ; + mf:name "escaped dot at the end of a local name" ; + mf:action ; + mf:result . diff --git a/testsuite/src/manifest.rs b/testsuite/src/manifest.rs index 00a0f6a6..97ee0ea7 100644 --- a/testsuite/src/manifest.rs +++ b/testsuite/src/manifest.rs @@ -58,7 +58,7 @@ pub struct TestManifest { impl Iterator for TestManifest { type Item = Result; - fn next(&mut self) -> Option> { + fn next(&mut self) -> Option { loop { if let Some(next) = self.next_test().transpose() { return Some(next); @@ -355,7 +355,7 @@ impl<'a> RdfListIterator<'a> { impl<'a> Iterator for RdfListIterator<'a> { type Item = Term; - fn next(&mut self) -> Option { + fn next(&mut self) -> Option { match self.current_node { Some(current) => { let result = self diff --git a/testsuite/src/sparql_evaluator.rs b/testsuite/src/sparql_evaluator.rs index df9d471e..844158d9 100644 --- a/testsuite/src/sparql_evaluator.rs +++ b/testsuite/src/sparql_evaluator.rs @@ -727,7 +727,7 @@ impl Drop for StoreRef { impl Deref for StoreRef { type Target = Store; - fn deref(&self) -> &Store { + fn deref(&self) -> &Self::Target { &self.store } }
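The new `escaped_trailing_dot` cases test a Turtle/TriG local name that ends with an escaped dot (`ex:s ex:p ex:o\. .`), which must expand to an IRI whose last character is a literal dot rather than being read as the statement terminator. A small sketch of what they exercise, using the `TurtleParser::parse_read` API shown in the doc comments above; the `http://example.com/` namespace is an assumption, since the IRIs inside the quoted test files did not survive extraction:

use oxttl::TurtleParser;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Same shape as testsuite/oxigraph-tests/parser/escaped_trailing_dot.ttl,
    // with an assumed ex: prefix IRI.
    let file = br#"@prefix ex: <http://example.com/> .
ex:s ex:p ex:o\. ."#;

    for triple in TurtleParser::new().parse_read(file.as_ref()) {
        let triple = triple?;
        // The escaped dot is part of the local name, so the object IRI ends in ".".
        assert_eq!(triple.object.to_string(), "<http://example.com/o.>");
    }
    Ok(())
}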