diff --git a/cli/src/main.rs b/cli/src/main.rs
index 9e9a0dbd..cabf08c4 100644
--- a/cli/src/main.rs
+++ b/cli/src/main.rs
@@ -469,7 +469,7 @@ pub fn main() -> anyhow::Result<()> {
                 file.display(),
                 error
             )
-            //TODO: hard fail
+            // TODO: hard fail
         }
     })
 }
@@ -1845,7 +1845,8 @@ mod tests {
     use super::*;
    use anyhow::Result;
     use assert_cmd::Command;
-    use assert_fs::{prelude::*, NamedTempFile, TempDir};
+    use assert_fs::prelude::*;
+    use assert_fs::{NamedTempFile, TempDir};
     use flate2::write::GzEncoder;
     use flate2::Compression;
     use oxhttp::model::Method;
diff --git a/lib/oxrdf/src/blank_node.rs b/lib/oxrdf/src/blank_node.rs
index e813dd24..9603cd30 100644
--- a/lib/oxrdf/src/blank_node.rs
+++ b/lib/oxrdf/src/blank_node.rs
@@ -1,8 +1,7 @@
 use rand::random;
 use std::error::Error;
-use std::fmt;
 use std::io::Write;
-use std::str;
+use std::{fmt, str};
 
 /// An owned RDF [blank node](https://www.w3.org/TR/rdf11-concepts/#dfn-blank-node).
 ///
 /// ```
 /// use oxrdf::BlankNode;
 ///
-/// assert_eq!(
-///     "_:a122",
-///     BlankNode::new("a122")?.to_string()
-/// );
+/// assert_eq!("_:a122", BlankNode::new("a122")?.to_string());
 /// # Result::<_,oxrdf::BlankNodeIdParseError>::Ok(())
 /// ```
 #[derive(Eq, PartialEq, Debug, Clone, Hash)]
@@ -36,7 +32,7 @@ impl BlankNode {
     /// The blank node identifier must be valid according to N-Triples, Turtle, and SPARQL grammars.
     ///
     /// In most cases, it is much more convenient to create a blank node using [`BlankNode::default()`]
-    ///that creates a random ID that could be easily inlined by Oxigraph stores.
+    /// that creates a random ID that could be easily inlined by Oxigraph stores.
     pub fn new(id: impl Into) -> Result {
         let id = id.into();
         validate_blank_node_identifier(&id)?;
@@ -133,10 +129,7 @@ impl Default for BlankNode {
 /// ```
 /// use oxrdf::BlankNodeRef;
 ///
-/// assert_eq!(
-///     "_:a122",
-///     BlankNodeRef::new("a122")?.to_string()
-/// );
+/// assert_eq!("_:a122", BlankNodeRef::new("a122")?.to_string());
 /// # Result::<_,oxrdf::BlankNodeIdParseError>::Ok(())
 /// ```
 #[derive(Eq, PartialEq, Debug, Clone, Copy, Hash)]
@@ -192,7 +185,10 @@ impl<'a> BlankNodeRef<'a> {
     /// ```
     /// use oxrdf::BlankNode;
     ///
-    /// assert_eq!(BlankNode::new_from_unique_id(128).as_ref().unique_id(), Some(128));
+    /// assert_eq!(
+    ///     BlankNode::new_from_unique_id(128).as_ref().unique_id(),
+    ///     Some(128)
+    /// );
     /// assert_eq!(BlankNode::new("foo")?.as_ref().unique_id(), None);
     /// # Result::<_,oxrdf::BlankNodeIdParseError>::Ok(())
     /// ```
diff --git a/lib/oxrdf/src/dataset.rs b/lib/oxrdf/src/dataset.rs
index 0c1fcbd7..8412a8aa 100644
--- a/lib/oxrdf/src/dataset.rs
+++ b/lib/oxrdf/src/dataset.rs
@@ -20,19 +20,20 @@
 //! assert_eq!(vec![TripleRef::new(ex, ex, ex)], results);
 //!
 //! // Print
-//! assert_eq!(dataset.to_string(), " .\n");
+//! assert_eq!(
+//!     dataset.to_string(),
+//!     " .\n"
+//! );
 //! # Result::<_,Box>::Ok(())
 //! ```
 //!
 //! See also [`Graph`] if you only care about plain triples.
 use crate::interning::*;
-use crate::SubjectRef;
 use crate::*;
 use std::cmp::min;
 use std::collections::hash_map::DefaultHasher;
-use std::collections::BTreeSet;
-use std::collections::{HashMap, HashSet};
+use std::collections::{BTreeSet, HashMap, HashSet};
 use std::fmt;
 use std::hash::{Hash, Hasher};
@@ -924,8 +925,8 @@ impl PartialEq for Dataset {
 impl Eq for Dataset {}
 
 impl<'a> IntoIterator for &'a Dataset {
-    type Item = QuadRef<'a>;
     type IntoIter = Iter<'a>;
+    type Item = QuadRef<'a>;
 
     fn into_iter(self) -> Self::IntoIter {
         self.iter()
@@ -1282,8 +1283,8 @@ impl<'a> GraphView<'a> {
 }
 
 impl<'a> IntoIterator for GraphView<'a> {
-    type Item = TripleRef<'a>;
     type IntoIter = GraphViewIter<'a>;
+    type Item = TripleRef<'a>;
 
     fn into_iter(self) -> Self::IntoIter {
         self.iter()
@@ -1291,8 +1292,8 @@ impl<'a> IntoIterator for GraphView<'a> {
 
 impl<'a, 'b> IntoIterator for &'b GraphView<'a> {
-    type Item = TripleRef<'a>;
     type IntoIter = GraphViewIter<'a>;
+    type Item = TripleRef<'a>;
 
     fn into_iter(self) -> Self::IntoIter {
         self.iter()
@@ -1493,8 +1494,8 @@ impl<'a, 'b, T: Into>> Extend for GraphViewMut<'a> {
 }
 
 impl<'a> IntoIterator for &'a GraphViewMut<'a> {
-    type Item = TripleRef<'a>;
     type IntoIter = GraphViewIter<'a>;
+    type Item = TripleRef<'a>;
 
     fn into_iter(self) -> Self::IntoIter {
         self.iter()
diff --git a/lib/oxrdf/src/graph.rs b/lib/oxrdf/src/graph.rs
index 3077e5de..33f67132 100644
--- a/lib/oxrdf/src/graph.rs
+++ b/lib/oxrdf/src/graph.rs
@@ -16,7 +16,10 @@
 //! assert_eq!(vec![triple], results);
 //!
 //! // Print
-//! assert_eq!(graph.to_string(), " .\n");
+//! assert_eq!(
+//!     graph.to_string(),
+//!     " .\n"
+//! );
 //! # Result::<_,Box>::Ok(())
 //! ```
 //!
@@ -226,8 +229,8 @@ impl PartialEq for Graph {
 impl Eq for Graph {}
 
 impl<'a> IntoIterator for &'a Graph {
-    type Item = TripleRef<'a>;
     type IntoIter = Iter<'a>;
+    type Item = TripleRef<'a>;
 
     fn into_iter(self) -> Self::IntoIter {
         self.iter()
diff --git a/lib/oxrdf/src/literal.rs b/lib/oxrdf/src/literal.rs
index 3f2727ca..0872fab5 100644
--- a/lib/oxrdf/src/literal.rs
+++ b/lib/oxrdf/src/literal.rs
@@ -1,6 +1,5 @@
 use crate::named_node::NamedNode;
-use crate::vocab::rdf;
-use crate::vocab::xsd;
+use crate::vocab::{rdf, xsd};
 use crate::NamedNodeRef;
 use oxilangtag::{LanguageTag, LanguageTagParseError};
 #[cfg(feature = "oxsdatatypes")]
@@ -15,8 +14,8 @@ use std::option::Option;
 /// The default string formatter is returning an N-Triples, Turtle, and SPARQL compatible representation:
 /// ```
 /// # use oxilangtag::LanguageTagParseError;
-/// use oxrdf::Literal;
 /// use oxrdf::vocab::xsd;
+/// use oxrdf::Literal;
 ///
 /// assert_eq!(
 ///     "\"foo\\nbar\"",
@@ -427,8 +426,8 @@ impl From for Literal {
 ///
 /// The default string formatter is returning an N-Triples, Turtle, and SPARQL compatible representation:
 /// ```
-/// use oxrdf::LiteralRef;
 /// use oxrdf::vocab::xsd;
+/// use oxrdf::LiteralRef;
 ///
 /// assert_eq!(
 ///     "\"foo\\nbar\"",
diff --git a/lib/oxrdf/src/parser.rs b/lib/oxrdf/src/parser.rs
index 326868f5..1794540d 100644
--- a/lib/oxrdf/src/parser.rs
+++ b/lib/oxrdf/src/parser.rs
@@ -5,10 +5,9 @@ use crate::{
 };
 #[cfg(feature = "rdf-star")]
 use crate::{Subject, Triple};
-use std::char;
 use std::error::Error;
-use std::fmt;
 use std::str::{Chars, FromStr};
+use std::{char, fmt};
 
 /// This limit is set in order to avoid stack overflow error when parsing nested triples due to too many recursive calls.
 /// The actual limit value is a wet finger compromise between not failing to parse valid files and avoiding to trigger stack overflow errors.
@@ -23,7 +22,10 @@ impl FromStr for NamedNode { /// use oxrdf::NamedNode; /// use std::str::FromStr; /// - /// assert_eq!(NamedNode::from_str("").unwrap(), NamedNode::new("http://example.com").unwrap()) + /// assert_eq!( + /// NamedNode::from_str("").unwrap(), + /// NamedNode::new("http://example.com").unwrap() + /// ) /// ``` fn from_str(s: &str) -> Result { let (term, left) = read_named_node(s)?; @@ -45,7 +47,10 @@ impl FromStr for BlankNode { /// use oxrdf::BlankNode; /// use std::str::FromStr; /// - /// assert_eq!(BlankNode::from_str("_:ex").unwrap(), BlankNode::new("ex").unwrap()) + /// assert_eq!( + /// BlankNode::from_str("_:ex").unwrap(), + /// BlankNode::new("ex").unwrap() + /// ) /// ``` fn from_str(s: &str) -> Result { let (term, left) = read_blank_node(s)?; @@ -64,16 +69,41 @@ impl FromStr for Literal { /// Parses a literal from its NTriples or Turtle serialization /// /// ``` - /// use oxrdf::{Literal, NamedNode, vocab::xsd}; + /// use oxrdf::vocab::xsd; + /// use oxrdf::{Literal, NamedNode}; /// use std::str::FromStr; /// - /// assert_eq!(Literal::from_str("\"ex\\n\"").unwrap(), Literal::new_simple_literal("ex\n")); - /// assert_eq!(Literal::from_str("\"ex\"@en").unwrap(), Literal::new_language_tagged_literal("ex", "en").unwrap()); - /// assert_eq!(Literal::from_str("\"2020\"^^").unwrap(), Literal::new_typed_literal("2020", NamedNode::new("http://www.w3.org/2001/XMLSchema#gYear").unwrap())); - /// assert_eq!(Literal::from_str("true").unwrap(), Literal::new_typed_literal("true", xsd::BOOLEAN)); - /// assert_eq!(Literal::from_str("+122").unwrap(), Literal::new_typed_literal("+122", xsd::INTEGER)); - /// assert_eq!(Literal::from_str("-122.23").unwrap(), Literal::new_typed_literal("-122.23", xsd::DECIMAL)); - /// assert_eq!(Literal::from_str("-122e+1").unwrap(), Literal::new_typed_literal("-122e+1", xsd::DOUBLE)); + /// assert_eq!( + /// Literal::from_str("\"ex\\n\"").unwrap(), + /// Literal::new_simple_literal("ex\n") + /// ); + /// assert_eq!( + /// Literal::from_str("\"ex\"@en").unwrap(), + /// Literal::new_language_tagged_literal("ex", "en").unwrap() + /// ); + /// assert_eq!( + /// Literal::from_str("\"2020\"^^").unwrap(), + /// Literal::new_typed_literal( + /// "2020", + /// NamedNode::new("http://www.w3.org/2001/XMLSchema#gYear").unwrap() + /// ) + /// ); + /// assert_eq!( + /// Literal::from_str("true").unwrap(), + /// Literal::new_typed_literal("true", xsd::BOOLEAN) + /// ); + /// assert_eq!( + /// Literal::from_str("+122").unwrap(), + /// Literal::new_typed_literal("+122", xsd::INTEGER) + /// ); + /// assert_eq!( + /// Literal::from_str("-122.23").unwrap(), + /// Literal::new_typed_literal("-122.23", xsd::DECIMAL) + /// ); + /// assert_eq!( + /// Literal::from_str("-122e+1").unwrap(), + /// Literal::new_typed_literal("-122e+1", xsd::DOUBLE) + /// ); /// ``` fn from_str(s: &str) -> Result { let (term, left) = read_literal(s)?; @@ -93,12 +123,19 @@ impl FromStr for Term { /// use oxrdf::*; /// use std::str::FromStr; /// - /// assert_eq!(Term::from_str("\"ex\"").unwrap(), Literal::new_simple_literal("ex").into()); - /// assert_eq!(Term::from_str("<< _:s \"o\" >>").unwrap(), Triple::new( - /// BlankNode::new("s").unwrap(), - /// NamedNode::new("http://example.com/p").unwrap(), - /// Literal::new_simple_literal("o") - /// ).into()); + /// assert_eq!( + /// Term::from_str("\"ex\"").unwrap(), + /// Literal::new_simple_literal("ex").into() + /// ); + /// assert_eq!( + /// Term::from_str("<< _:s \"o\" >>").unwrap(), + /// Triple::new( + /// BlankNode::new("s").unwrap(), + /// 
NamedNode::new("http://example.com/p").unwrap(), + /// Literal::new_simple_literal("o") + /// ) + /// .into() + /// ); /// ``` fn from_str(s: &str) -> Result { let (term, left) = read_term(s, 0)?; @@ -118,7 +155,10 @@ impl FromStr for Variable { /// use oxrdf::Variable; /// use std::str::FromStr; /// - /// assert_eq!(Variable::from_str("$foo").unwrap(), Variable::new("foo").unwrap()) + /// assert_eq!( + /// Variable::from_str("$foo").unwrap(), + /// Variable::new("foo").unwrap() + /// ) /// ``` fn from_str(s: &str) -> Result { if !s.starts_with('?') && !s.starts_with('$') { diff --git a/lib/oxrdf/src/triple.rs b/lib/oxrdf/src/triple.rs index 850b1375..813982d0 100644 --- a/lib/oxrdf/src/triple.rs +++ b/lib/oxrdf/src/triple.rs @@ -698,7 +698,7 @@ impl<'a> From> for Term { /// /// The default string formatter is returning an N-Triples, Turtle, and SPARQL compatible representation: /// ``` -/// use oxrdf::{Triple, NamedNode}; +/// use oxrdf::{NamedNode, Triple}; /// /// assert_eq!( /// " ", @@ -706,7 +706,8 @@ impl<'a> From> for Term { /// subject: NamedNode::new("http://example.com/s")?.into(), /// predicate: NamedNode::new("http://example.com/p")?, /// object: NamedNode::new("http://example.com/o")?.into(), -/// }.to_string() +/// } +/// .to_string() /// ); /// # Result::<_,oxrdf::IriParseError>::Ok(()) /// ``` @@ -769,7 +770,7 @@ impl fmt::Display for Triple { /// /// The default string formatter is returning an N-Triples, Turtle, and SPARQL compatible representation: /// ``` -/// use oxrdf::{TripleRef, NamedNodeRef}; +/// use oxrdf::{NamedNodeRef, TripleRef}; /// /// assert_eq!( /// " ", @@ -777,7 +778,8 @@ impl fmt::Display for Triple { /// subject: NamedNodeRef::new("http://example.com/s")?.into(), /// predicate: NamedNodeRef::new("http://example.com/p")?, /// object: NamedNodeRef::new("http://example.com/o")?.into(), -/// }.to_string() +/// } +/// .to_string() /// ); /// # Result::<_,oxrdf::IriParseError>::Ok(()) /// ``` diff --git a/lib/oxrdf/src/variable.rs b/lib/oxrdf/src/variable.rs index 8bde4d6e..044c73e7 100644 --- a/lib/oxrdf/src/variable.rs +++ b/lib/oxrdf/src/variable.rs @@ -8,10 +8,7 @@ use std::fmt; /// ``` /// use oxrdf::{Variable, VariableNameParseError}; /// -/// assert_eq!( -/// "?foo", -/// Variable::new("foo")?.to_string() -/// ); +/// assert_eq!("?foo", Variable::new("foo")?.to_string()); /// # Result::<_,VariableNameParseError>::Ok(()) /// ``` #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Hash)] @@ -67,12 +64,9 @@ impl fmt::Display for Variable { /// /// The default string formatter is returning a SPARQL compatible representation: /// ``` -/// use oxrdf::{VariableRef, VariableNameParseError}; +/// use oxrdf::{VariableNameParseError, VariableRef}; /// -/// assert_eq!( -/// "?foo", -/// VariableRef::new("foo")?.to_string() -/// ); +/// assert_eq!("?foo", VariableRef::new("foo")?.to_string()); /// # Result::<_,VariableNameParseError>::Ok(()) /// ``` #[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Copy, Hash)] diff --git a/lib/oxrdfio/src/format.rs b/lib/oxrdfio/src/format.rs index cb03a3eb..1cc6aa12 100644 --- a/lib/oxrdfio/src/format.rs +++ b/lib/oxrdfio/src/format.rs @@ -26,7 +26,10 @@ impl RdfFormat { /// ``` /// use oxrdfio::RdfFormat; /// - /// assert_eq!(RdfFormat::NTriples.iri(), "http://www.w3.org/ns/formats/N-Triples") + /// assert_eq!( + /// RdfFormat::NTriples.iri(), + /// "http://www.w3.org/ns/formats/N-Triples" + /// ) /// ``` #[inline] pub const fn iri(self) -> &'static str { @@ -136,7 +139,10 @@ impl RdfFormat { /// ``` /// use 
oxrdfio::RdfFormat; /// - /// assert_eq!(RdfFormat::from_media_type("text/turtle; charset=utf-8"), Some(RdfFormat::Turtle)) + /// assert_eq!( + /// RdfFormat::from_media_type("text/turtle; charset=utf-8"), + /// Some(RdfFormat::Turtle) + /// ) /// ``` #[inline] pub fn from_media_type(media_type: &str) -> Option { diff --git a/lib/oxrdfio/src/parser.rs b/lib/oxrdfio/src/parser.rs index d5bf196d..0f6d11ac 100644 --- a/lib/oxrdfio/src/parser.rs +++ b/lib/oxrdfio/src/parser.rs @@ -48,7 +48,9 @@ use tokio::io::AsyncRead; /// let file = " ."; /// /// let parser = RdfParser::from_format(RdfFormat::NTriples); -/// let quads = parser.parse_read(file.as_bytes()).collect::,_>>()?; +/// let quads = parser +/// .parse_read(file.as_bytes()) +/// .collect::, _>>()?; /// /// assert_eq!(quads.len(), 1); /// assert_eq!(quads[0].subject.to_string(), ""); @@ -129,9 +131,12 @@ impl RdfParser { /// The format the parser uses. /// /// ``` - /// use oxrdfio::{RdfParser, RdfFormat}; + /// use oxrdfio::{RdfFormat, RdfParser}; /// - /// assert_eq!(RdfParser::from_format(RdfFormat::Turtle).format(), RdfFormat::Turtle); + /// assert_eq!( + /// RdfParser::from_format(RdfFormat::Turtle).format(), + /// RdfFormat::Turtle + /// ); /// ``` pub fn format(&self) -> RdfFormat { match &self.inner { @@ -152,7 +157,9 @@ impl RdfParser { /// let file = "

."; /// /// let parser = RdfParser::from_format(RdfFormat::Turtle).with_base_iri("http://example.com")?; - /// let quads = parser.parse_read(file.as_bytes()).collect::,_>>()?; + /// let quads = parser + /// .parse_read(file.as_bytes()) + /// .collect::, _>>()?; /// /// assert_eq!(quads.len(), 1); /// assert_eq!(quads[0].subject.to_string(), ""); @@ -179,8 +186,11 @@ impl RdfParser { /// /// let file = " ."; /// - /// let parser = RdfParser::from_format(RdfFormat::Turtle).with_default_graph(NamedNode::new("http://example.com/g")?); - /// let quads = parser.parse_read(file.as_bytes()).collect::,_>>()?; + /// let parser = RdfParser::from_format(RdfFormat::Turtle) + /// .with_default_graph(NamedNode::new("http://example.com/g")?); + /// let quads = parser + /// .parse_read(file.as_bytes()) + /// .collect::, _>>()?; /// /// assert_eq!(quads.len(), 1); /// assert_eq!(quads[0].graph_name.to_string(), ""); @@ -221,10 +231,12 @@ impl RdfParser { /// /// let result1 = RdfParser::from_format(RdfFormat::NQuads) /// .rename_blank_nodes() - /// .parse_read(file.as_bytes()).collect::,_>>()?; + /// .parse_read(file.as_bytes()) + /// .collect::, _>>()?; /// let result2 = RdfParser::from_format(RdfFormat::NQuads) /// .rename_blank_nodes() - /// .parse_read(file.as_bytes()).collect::,_>>()?; + /// .parse_read(file.as_bytes()) + /// .collect::, _>>()?; /// assert_ne!(result1, result2); /// # Result::<_,Box>::Ok(()) /// ``` @@ -262,7 +274,9 @@ impl RdfParser { /// let file = " ."; /// /// let parser = RdfParser::from_format(RdfFormat::NTriples); - /// let quads = parser.parse_read(file.as_bytes()).collect::,_>>()?; + /// let quads = parser + /// .parse_read(file.as_bytes()) + /// .collect::, _>>()?; /// /// assert_eq!(quads.len(), 1); /// assert_eq!(quads[0].subject.to_string(), ""); @@ -358,7 +372,9 @@ impl From for RdfParser { /// let file = " ."; /// /// let parser = RdfParser::from_format(RdfFormat::NTriples); -/// let quads = parser.parse_read(file.as_bytes()).collect::,_>>()?; +/// let quads = parser +/// .parse_read(file.as_bytes()) +/// .collect::, _>>()?; /// /// assert_eq!(quads.len(), 1); /// assert_eq!(quads[0].subject.to_string(), ""); diff --git a/lib/oxrdfio/src/serializer.rs b/lib/oxrdfio/src/serializer.rs index cd132cd5..7abf7696 100644 --- a/lib/oxrdfio/src/serializer.rs +++ b/lib/oxrdfio/src/serializer.rs @@ -63,9 +63,12 @@ impl RdfSerializer { /// The format the serializer serializes to. 
/// /// ``` - /// use oxrdfio::{RdfSerializer, RdfFormat}; + /// use oxrdfio::{RdfFormat, RdfSerializer}; /// - /// assert_eq!(RdfSerializer::from_format(RdfFormat::Turtle).format(), RdfFormat::Turtle); + /// assert_eq!( + /// RdfSerializer::from_format(RdfFormat::Turtle).format(), + /// RdfFormat::Turtle + /// ); /// ``` pub fn format(&self) -> RdfFormat { self.format diff --git a/lib/oxrdfxml/src/parser.rs b/lib/oxrdfxml/src/parser.rs index 3c6b23ff..70ca91aa 100644 --- a/lib/oxrdfxml/src/parser.rs +++ b/lib/oxrdfxml/src/parser.rs @@ -26,7 +26,8 @@ use tokio::io::{AsyncRead, BufReader as AsyncBufReader}; /// /// Count the number of people: /// ``` -/// use oxrdf::{NamedNodeRef, vocab::rdf}; +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNodeRef; /// use oxrdfxml::RdfXmlParser; /// /// let file = br#" @@ -84,7 +85,8 @@ impl RdfXmlParser { /// /// Count the number of people: /// ``` - /// use oxrdf::{NamedNodeRef, vocab::rdf}; + /// use oxrdf::vocab::rdf; + /// use oxrdf::NamedNodeRef; /// use oxrdfxml::RdfXmlParser; /// /// let file = br#" @@ -119,7 +121,8 @@ impl RdfXmlParser { /// /// Count the number of people: /// ``` - /// use oxrdf::{NamedNodeRef, vocab::rdf}; + /// use oxrdf::vocab::rdf; + /// use oxrdf::NamedNodeRef; /// use oxrdfxml::RdfXmlParser; /// /// # #[tokio::main(flavor = "current_thread")] @@ -179,7 +182,8 @@ impl RdfXmlParser { /// /// Count the number of people: /// ``` -/// use oxrdf::{NamedNodeRef, vocab::rdf}; +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNodeRef; /// use oxrdfxml::RdfXmlParser; /// /// let file = br#" @@ -246,8 +250,9 @@ impl FromReadRdfXmlReader { /// /// Count the number of people: /// ``` -/// use oxrdf::{NamedNodeRef, vocab::rdf}; -/// use oxrdfxml::RdfXmlParser; +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNodeRef; +/// use oxrdfxml::RdfXmlParser; /// /// # #[tokio::main(flavor = "current_thread")] /// # async fn main() -> Result<(), oxrdfxml::ParseError> { @@ -368,7 +373,7 @@ enum RdfXmlState { li_counter: u64, }, PropertyElt { - //Resource, Literal or Empty property element + // Resource, Literal or Empty property element iri: NamedNode, base_iri: Option>, language: Option, @@ -392,7 +397,7 @@ enum RdfXmlState { subject: Subject, writer: Writer>, id_attr: Option, - emit: bool, //false for parseTypeOtherPropertyElt support + emit: bool, // false for parseTypeOtherPropertyElt support }, } @@ -523,7 +528,7 @@ impl RdfXmlReader { PropertyElt { subject: Subject }, } - //Literal case + // Literal case if let Some(RdfXmlState::ParseTypeLiteralPropertyElt { writer, .. }) = self.state.last_mut() { let mut clean_event = BytesStart::new( @@ -542,7 +547,7 @@ impl RdfXmlReader { let tag_name = self.resolve_tag_name(event.name())?; - //We read attributes + // We read attributes let (mut language, mut base_iri) = if let Some(current_state) = self.state.last() { ( current_state.language().cloned(), @@ -652,7 +657,7 @@ impl RdfXmlReader { } } - //Parsing with the base URI + // Parsing with the base URI let id_attr = match id_attr { Some(iri) => { let iri = self.resolve_iri(&base_iri, iri)?; @@ -855,7 +860,7 @@ impl RdfXmlReader { event: &BytesEnd<'_>, results: &mut Vec, ) -> Result<(), ParseError> { - //Literal case + // Literal case if self.in_literal_depth > 0 { if let Some(RdfXmlState::ParseTypeLiteralPropertyElt { writer, .. 
}) = self.state.last_mut() diff --git a/lib/oxsdatatypes/src/date_time.rs b/lib/oxsdatatypes/src/date_time.rs index d2405692..127990df 100644 --- a/lib/oxsdatatypes/src/date_time.rs +++ b/lib/oxsdatatypes/src/date_time.rs @@ -17,6 +17,13 @@ pub struct DateTime { } impl DateTime { + pub const MAX: Self = Self { + timestamp: Timestamp::MAX, + }; + pub const MIN: Self = Self { + timestamp: Timestamp::MIN, + }; + #[inline] pub(super) fn new( year: i64, @@ -241,14 +248,6 @@ impl DateTime { pub fn is_identical_with(self, other: Self) -> bool { self.timestamp.is_identical_with(other.timestamp) } - - pub const MIN: Self = Self { - timestamp: Timestamp::MIN, - }; - - pub const MAX: Self = Self { - timestamp: Timestamp::MAX, - }; } /// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes). @@ -317,6 +316,21 @@ pub struct Time { } impl Time { + #[cfg(test)] + const MAX: Self = Self { + timestamp: Timestamp { + value: Decimal::new_from_i128_unchecked(62_230_255_200), + timezone_offset: Some(TimezoneOffset::MIN), + }, + }; + #[cfg(test)] + const MIN: Self = Self { + timestamp: Timestamp { + value: Decimal::new_from_i128_unchecked(62_230_154_400), + timezone_offset: Some(TimezoneOffset::MAX), + }, + }; + #[inline] fn new( mut hour: u8, @@ -493,22 +507,6 @@ impl Time { pub fn is_identical_with(self, other: Self) -> bool { self.timestamp.is_identical_with(other.timestamp) } - - #[cfg(test)] - const MIN: Self = Self { - timestamp: Timestamp { - value: Decimal::new_from_i128_unchecked(62_230_154_400), - timezone_offset: Some(TimezoneOffset::MAX), - }, - }; - - #[cfg(test)] - const MAX: Self = Self { - timestamp: Timestamp { - value: Decimal::new_from_i128_unchecked(62_230_255_200), - timezone_offset: Some(TimezoneOffset::MIN), - }, - }; } /// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes). @@ -566,6 +564,19 @@ pub struct Date { } impl Date { + pub const MAX: Self = Self { + timestamp: Timestamp { + value: Decimal::new_from_i128_unchecked(170_141_183_460_469_216_800), + timezone_offset: Some(TimezoneOffset::MAX), + }, + }; + pub const MIN: Self = Self { + timestamp: Timestamp { + value: Decimal::new_from_i128_unchecked(-170_141_183_460_469_216_800), + timezone_offset: Some(TimezoneOffset::MIN), + }, + }; + #[inline] fn new( year: i64, @@ -742,19 +753,6 @@ impl Date { pub fn is_identical_with(self, other: Self) -> bool { self.timestamp.is_identical_with(other.timestamp) } - - pub const MIN: Self = Self { - timestamp: Timestamp { - value: Decimal::new_from_i128_unchecked(-170_141_183_460_469_216_800), - timezone_offset: Some(TimezoneOffset::MIN), - }, - }; - pub const MAX: Self = Self { - timestamp: Timestamp { - value: Decimal::new_from_i128_unchecked(170_141_183_460_469_216_800), - timezone_offset: Some(TimezoneOffset::MAX), - }, - }; } /// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes). 
@@ -805,6 +803,19 @@ pub struct GYearMonth { } impl GYearMonth { + pub const MAX: Self = Self { + timestamp: Timestamp { + value: Decimal::new_from_i128_unchecked(170_141_183_460_469_216_800), + timezone_offset: Some(TimezoneOffset::MAX), + }, + }; + pub const MIN: Self = Self { + timestamp: Timestamp { + value: Decimal::new_from_i128_unchecked(-170_141_183_460_466_970_400), + timezone_offset: Some(TimezoneOffset::MIN), + }, + }; + #[inline] fn new( year: i64, @@ -876,19 +887,6 @@ impl GYearMonth { pub fn is_identical_with(self, other: Self) -> bool { self.timestamp.is_identical_with(other.timestamp) } - - pub const MIN: Self = Self { - timestamp: Timestamp { - value: Decimal::new_from_i128_unchecked(-170_141_183_460_466_970_400), - timezone_offset: Some(TimezoneOffset::MIN), - }, - }; - pub const MAX: Self = Self { - timestamp: Timestamp { - value: Decimal::new_from_i128_unchecked(170_141_183_460_469_216_800), - timezone_offset: Some(TimezoneOffset::MAX), - }, - }; } /// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes). @@ -947,6 +945,19 @@ pub struct GYear { } impl GYear { + pub const MAX: Self = Self { + timestamp: Timestamp { + value: Decimal::new_from_i128_unchecked(170_141_183_460_461_440_800), + timezone_offset: Some(TimezoneOffset::MAX), + }, + }; + pub const MIN: Self = Self { + timestamp: Timestamp { + value: Decimal::new_from_i128_unchecked(-170_141_183_460_461_700_000), + timezone_offset: Some(TimezoneOffset::MIN), + }, + }; + #[inline] fn new( year: i64, @@ -1011,19 +1022,6 @@ impl GYear { pub fn is_identical_with(self, other: Self) -> bool { self.timestamp.is_identical_with(other.timestamp) } - - pub const MIN: Self = Self { - timestamp: Timestamp { - value: Decimal::new_from_i128_unchecked(-170_141_183_460_461_700_000), - timezone_offset: Some(TimezoneOffset::MIN), - }, - }; - pub const MAX: Self = Self { - timestamp: Timestamp { - value: Decimal::new_from_i128_unchecked(170_141_183_460_461_440_800), - timezone_offset: Some(TimezoneOffset::MAX), - }, - }; } /// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes). 
@@ -1461,6 +1459,10 @@ pub struct TimezoneOffset { } impl TimezoneOffset { + pub const MAX: Self = Self { offset: 14 * 60 }; + pub const MIN: Self = Self { offset: -14 * 60 }; + pub const UTC: Self = Self { offset: 0 }; + /// From offset in minute with respect to UTC #[inline] pub fn new(offset_in_minutes: i16) -> Result { @@ -1489,10 +1491,6 @@ impl TimezoneOffset { pub fn to_be_bytes(self) -> [u8; 2] { self.offset.to_be_bytes() } - - pub const MIN: Self = Self { offset: -14 * 60 }; - pub const UTC: Self = Self { offset: 0 }; - pub const MAX: Self = Self { offset: 14 * 60 }; } impl TryFrom for TimezoneOffset { @@ -1576,7 +1574,7 @@ impl PartialEq for Timestamp { fn eq(&self, other: &Self) -> bool { match (self.timezone_offset, other.timezone_offset) { (Some(_), Some(_)) | (None, None) => self.value.eq(&other.value), - _ => false, //TODO: implicit timezone + _ => false, // TODO: implicit timezone } } } @@ -1622,6 +1620,15 @@ impl Hash for Timestamp { } impl Timestamp { + pub const MAX: Self = Self { + value: Decimal::MAX, + timezone_offset: Some(TimezoneOffset::MAX), + }; + pub const MIN: Self = Self { + value: Decimal::MIN, + timezone_offset: Some(TimezoneOffset::MIN), + }; + #[inline] fn new(props: &DateTimeSevenPropertyModel) -> Result { Ok(Self { @@ -1790,7 +1797,7 @@ impl Timestamp { (Some(_), Some(_)) | (None, None) => { Some(DayTimeDuration::new(self.value.checked_sub(rhs.value)?)) } - _ => None, //TODO: implicit timezone + _ => None, // TODO: implicit timezone } } @@ -1816,13 +1823,13 @@ impl Timestamp { Self { value: self .value - .checked_add(i64::from(from_timezone.offset) * 60)?, // We keep the literal value + .checked_add(i64::from(from_timezone.offset) * 60)?, /* We keep the literal value */ timezone_offset: None, } } } else if let Some(to_timezone) = timezone_offset { Self { - value: self.value.checked_sub(i64::from(to_timezone.offset) * 60)?, // We keep the literal value + value: self.value.checked_sub(i64::from(to_timezone.offset) * 60)?, /* We keep the literal value */ timezone_offset: Some(to_timezone), } } else { @@ -1851,16 +1858,6 @@ impl Timestamp { pub fn is_identical_with(self, other: Self) -> bool { self.value == other.value && self.timezone_offset == other.timezone_offset } - - pub const MIN: Self = Self { - value: Decimal::MIN, - timezone_offset: Some(TimezoneOffset::MIN), - }; - - pub const MAX: Self = Self { - value: Decimal::MAX, - timezone_offset: Some(TimezoneOffset::MAX), - }; } #[cfg(feature = "custom-now")] @@ -1960,7 +1957,7 @@ fn normalize_second( mi: i64, se: Decimal, ) -> Option<(i64, u8, u8, u8, u8, Decimal)> { - let mi = mi.checked_add(i64::try_from(se.as_i128().checked_div(60)?).ok()?)?; //TODO: good idea? + let mi = mi.checked_add(i64::try_from(se.as_i128().checked_div(60)?).ok()?)?; // TODO: good idea? 
let se = se.checked_rem(60)?; let (yr, mo, da, hr, mi) = normalize_minute(yr, mo, da, hr, mi)?; Some((yr, mo, da, hr, mi, se)) diff --git a/lib/oxsdatatypes/src/decimal.rs b/lib/oxsdatatypes/src/decimal.rs index 6a59105e..0082ca8a 100644 --- a/lib/oxsdatatypes/src/decimal.rs +++ b/lib/oxsdatatypes/src/decimal.rs @@ -19,6 +19,11 @@ pub struct Decimal { } impl Decimal { + pub const MAX: Self = Self { value: i128::MAX }; + pub const MIN: Self = Self { value: i128::MIN }; + #[cfg(test)] + pub const STEP: Self = Self { value: 1 }; + /// Constructs the decimal i / 10^n #[inline] pub const fn new(i: i128, n: u32) -> Result { @@ -260,13 +265,6 @@ impl Decimal { pub(super) const fn as_i128(self) -> i128 { self.value / DECIMAL_PART_POW } - - pub const MIN: Self = Self { value: i128::MIN }; - - pub const MAX: Self = Self { value: i128::MAX }; - - #[cfg(test)] - pub const STEP: Self = Self { value: 1 }; } impl From for Decimal { @@ -499,7 +497,7 @@ impl FromStr for Decimal { } input = &input[1..]; if input.is_empty() && !with_before_dot { - //We only have a dot + // We only have a dot return Err(PARSE_UNEXPECTED_END); } while input.last() == Some(&b'0') { @@ -520,11 +518,11 @@ impl FromStr for Decimal { } } if exp == 0 { - //Underflow + // Underflow return Err(PARSE_UNDERFLOW); } } else if !with_before_dot { - //It's empty + // It's empty return Err(PARSE_UNEXPECTED_END); } diff --git a/lib/oxsdatatypes/src/double.rs b/lib/oxsdatatypes/src/double.rs index 1a399019..3b58858f 100644 --- a/lib/oxsdatatypes/src/double.rs +++ b/lib/oxsdatatypes/src/double.rs @@ -17,6 +17,16 @@ pub struct Double { } impl Double { + pub const INFINITY: Self = Self { + value: f64::INFINITY, + }; + pub const MAX: Self = Self { value: f64::MAX }; + pub const MIN: Self = Self { value: f64::MIN }; + pub const NAN: Self = Self { value: f64::NAN }; + pub const NEG_INFINITY: Self = Self { + value: f64::NEG_INFINITY, + }; + #[inline] #[must_use] pub fn from_be_bytes(bytes: [u8; 8]) -> Self { @@ -77,20 +87,6 @@ impl Double { pub fn is_identical_with(self, other: Self) -> bool { self.value.to_bits() == other.value.to_bits() } - - pub const MIN: Self = Self { value: f64::MIN }; - - pub const MAX: Self = Self { value: f64::MAX }; - - pub const INFINITY: Self = Self { - value: f64::INFINITY, - }; - - pub const NEG_INFINITY: Self = Self { - value: f64::NEG_INFINITY, - }; - - pub const NAN: Self = Self { value: f64::NAN }; } impl From for f64 { diff --git a/lib/oxsdatatypes/src/duration.rs b/lib/oxsdatatypes/src/duration.rs index 93dac7d6..efb92b62 100644 --- a/lib/oxsdatatypes/src/duration.rs +++ b/lib/oxsdatatypes/src/duration.rs @@ -15,6 +15,15 @@ pub struct Duration { } impl Duration { + pub const MAX: Self = Self { + year_month: YearMonthDuration::MAX, + day_time: DayTimeDuration::MAX, + }; + pub const MIN: Self = Self { + year_month: YearMonthDuration::MIN, + day_time: DayTimeDuration::MIN, + }; + #[inline] pub fn new( months: impl Into, @@ -160,16 +169,6 @@ impl Duration { pub fn is_identical_with(self, other: Self) -> bool { self == other } - - pub const MIN: Self = Self { - year_month: YearMonthDuration::MIN, - day_time: DayTimeDuration::MIN, - }; - - pub const MAX: Self = Self { - year_month: YearMonthDuration::MAX, - day_time: DayTimeDuration::MAX, - }; } impl TryFrom for Duration { @@ -301,6 +300,9 @@ pub struct YearMonthDuration { } impl YearMonthDuration { + pub const MAX: Self = Self { months: i64::MAX }; + pub const MIN: Self = Self { months: i64::MIN }; + #[inline] pub fn new(months: impl Into) -> Self { Self { @@ -374,10 
+376,6 @@ impl YearMonthDuration { pub fn is_identical_with(self, other: Self) -> bool { self == other } - - pub const MIN: Self = Self { months: i64::MIN }; - - pub const MAX: Self = Self { months: i64::MAX }; } impl From for Duration { @@ -469,6 +467,13 @@ pub struct DayTimeDuration { } impl DayTimeDuration { + pub const MAX: Self = Self { + seconds: Decimal::MAX, + }; + pub const MIN: Self = Self { + seconds: Decimal::MIN, + }; + #[inline] pub fn new(seconds: impl Into) -> Self { Self { @@ -558,14 +563,6 @@ impl DayTimeDuration { pub fn is_identical_with(self, other: Self) -> bool { self == other } - - pub const MIN: Self = Self { - seconds: Decimal::MIN, - }; - - pub const MAX: Self = Self { - seconds: Decimal::MAX, - }; } impl From for Duration { diff --git a/lib/oxsdatatypes/src/float.rs b/lib/oxsdatatypes/src/float.rs index bc0aab75..c4d08d6c 100644 --- a/lib/oxsdatatypes/src/float.rs +++ b/lib/oxsdatatypes/src/float.rs @@ -17,6 +17,16 @@ pub struct Float { } impl Float { + pub const INFINITY: Self = Self { + value: f32::INFINITY, + }; + pub const MAX: Self = Self { value: f32::MAX }; + pub const MIN: Self = Self { value: f32::MIN }; + pub const NAN: Self = Self { value: f32::NAN }; + pub const NEG_INFINITY: Self = Self { + value: f32::NEG_INFINITY, + }; + #[inline] #[must_use] pub fn from_be_bytes(bytes: [u8; 4]) -> Self { @@ -77,20 +87,6 @@ impl Float { pub fn is_identical_with(self, other: Self) -> bool { self.value.to_bits() == other.value.to_bits() } - - pub const MIN: Self = Self { value: f32::MIN }; - - pub const MAX: Self = Self { value: f32::MAX }; - - pub const INFINITY: Self = Self { - value: f32::INFINITY, - }; - - pub const NEG_INFINITY: Self = Self { - value: f32::NEG_INFINITY, - }; - - pub const NAN: Self = Self { value: f32::NAN }; } impl From for f32 { diff --git a/lib/oxsdatatypes/src/integer.rs b/lib/oxsdatatypes/src/integer.rs index f2b8506f..e76ae62e 100644 --- a/lib/oxsdatatypes/src/integer.rs +++ b/lib/oxsdatatypes/src/integer.rs @@ -14,6 +14,9 @@ pub struct Integer { } impl Integer { + pub const MAX: Self = Self { value: i64::MAX }; + pub const MIN: Self = Self { value: i64::MIN }; + #[inline] #[must_use] pub fn from_be_bytes(bytes: [u8; 8]) -> Self { @@ -134,10 +137,6 @@ impl Integer { pub fn is_identical_with(self, other: Self) -> bool { self == other } - - pub const MIN: Self = Self { value: i64::MIN }; - - pub const MAX: Self = Self { value: i64::MAX }; } impl From for Integer { diff --git a/lib/oxttl/src/lexer.rs b/lib/oxttl/src/lexer.rs index d4eb024f..3fb62845 100644 --- a/lib/oxttl/src/lexer.rs +++ b/lib/oxttl/src/lexer.rs @@ -49,8 +49,8 @@ pub struct N3Lexer { // TODO: simplify by not giving is_end and fail with an "unexpected eof" is none is returned when is_end=true? 
impl TokenRecognizer for N3Lexer { - type Token<'a> = N3Token<'a>; type Options = N3LexerOptions; + type Token<'a> = N3Token<'a>; fn recognize_next_token<'a>( &mut self, @@ -790,7 +790,7 @@ impl N3Lexer { format!("Unexpected escape character '\\{}'", char::from(c)), ) .into()), - )), //TODO: read until end of string + )), // TODO: read until end of string } } diff --git a/lib/oxttl/src/line_formats.rs b/lib/oxttl/src/line_formats.rs index e7d39e09..5932f7a2 100644 --- a/lib/oxttl/src/line_formats.rs +++ b/lib/oxttl/src/line_formats.rs @@ -39,9 +39,9 @@ enum NQuadsState { } impl RuleRecognizer for NQuadsRecognizer { - type TokenRecognizer = N3Lexer; - type Output = Quad; type Context = NQuadsRecognizerContext; + type Output = Quad; + type TokenRecognizer = N3Lexer; fn error_recovery_state(mut self) -> Self { self.stack.clear(); @@ -251,7 +251,7 @@ impl RuleRecognizer for NQuadsRecognizer { self.emit_quad(results, GraphName::DefaultGraph); errors.push("Triples should be followed by a dot".into()) } - _ => errors.push("Unexpected end".into()), //TODO + _ => errors.push("Unexpected end".into()), // TODO } } diff --git a/lib/oxttl/src/n3.rs b/lib/oxttl/src/n3.rs index a1c23f25..263db936 100644 --- a/lib/oxttl/src/n3.rs +++ b/lib/oxttl/src/n3.rs @@ -181,7 +181,8 @@ impl From for N3Quad { /// /// Count the number of people: /// ``` -/// use oxrdf::{NamedNode, vocab::rdf}; +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNode; /// use oxttl::n3::{N3Parser, N3Term}; /// /// let file = br#"@base . @@ -260,7 +261,9 @@ impl N3Parser { /// a schema:Person ; /// schema:name "Bar" ."#; /// - /// let rdf_type = N3Term::NamedNode(NamedNode::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?); + /// let rdf_type = N3Term::NamedNode(NamedNode::new( + /// "http://www.w3.org/1999/02/22-rdf-syntax-ns#type", + /// )?); /// let schema_person = N3Term::NamedNode(NamedNode::new("http://schema.org/Person")?); /// let mut count = 0; /// for triple in N3Parser::new().parse_read(file.as_ref()) { @@ -282,7 +285,8 @@ impl N3Parser { /// /// Count the number of people: /// ``` - /// use oxrdf::{NamedNode, vocab::rdf}; + /// use oxrdf::vocab::rdf; + /// use oxrdf::NamedNode; /// use oxttl::n3::{N3Parser, N3Term}; /// /// # #[tokio::main(flavor = "current_thread")] @@ -322,14 +326,16 @@ impl N3Parser { /// /// Count the number of people: /// ``` - /// use oxrdf::{NamedNode, vocab::rdf}; + /// use oxrdf::vocab::rdf; + /// use oxrdf::NamedNode; /// use oxttl::n3::{N3Parser, N3Term}; /// - /// let file: [&[u8]; 5] = [b"@base ", + /// let file: [&[u8]; 5] = [ + /// b"@base ", /// b". @prefix schema: .", /// b" a schema:Person", /// b" ; schema:name \"Foo\" . ", - /// b" a schema:Person ; schema:name \"Bar\" ." + /// b" a schema:Person ; schema:name \"Bar\" .", /// ]; /// /// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned()); @@ -340,7 +346,7 @@ impl N3Parser { /// while !parser.is_end() { /// // We feed more data to the parser /// if let Some(chunk) = file_chunks.next() { - /// parser.extend_from_slice(chunk); + /// parser.extend_from_slice(chunk); /// } else { /// parser.end(); // It's finished /// } @@ -366,7 +372,8 @@ impl N3Parser { /// /// Count the number of people: /// ``` -/// use oxrdf::{NamedNode, vocab::rdf}; +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNode; /// use oxttl::n3::{N3Parser, N3Term}; /// /// let file = br#"@base . 
@@ -459,7 +466,8 @@ impl Iterator for FromReadN3Reader { /// /// Count the number of people: /// ``` -/// use oxrdf::{NamedNode, vocab::rdf}; +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNode; /// use oxttl::n3::{N3Parser, N3Term}; /// /// # #[tokio::main(flavor = "current_thread")] @@ -561,14 +569,16 @@ impl FromTokioAsyncReadN3Reader { /// /// Count the number of people: /// ``` -/// use oxrdf::{NamedNode, vocab::rdf}; +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNode; /// use oxttl::n3::{N3Parser, N3Term}; /// -/// let file: [&[u8]; 5] = [b"@base ", +/// let file: [&[u8]; 5] = [ +/// b"@base ", /// b". @prefix schema: .", /// b" a schema:Person", /// b" ; schema:name \"Foo\" . ", -/// b" a schema:Person ; schema:name \"Bar\" ." +/// b" a schema:Person ; schema:name \"Bar\" .", /// ]; /// /// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned()); @@ -579,7 +589,7 @@ impl FromTokioAsyncReadN3Reader { /// while !parser.is_end() { /// // We feed more data to the parser /// if let Some(chunk) = file_chunks.next() { -/// parser.extend_from_slice(chunk); +/// parser.extend_from_slice(chunk); /// } else { /// parser.end(); // It's finished /// } @@ -697,9 +707,9 @@ struct N3RecognizerContext { } impl RuleRecognizer for N3Recognizer { - type TokenRecognizer = N3Lexer; - type Output = N3Quad; type Context = N3RecognizerContext; + type Output = N3Quad; + type TokenRecognizer = N3Lexer; fn error_recovery_state(mut self) -> Self { self.stack.clear(); @@ -1191,7 +1201,7 @@ impl RuleRecognizer for N3Recognizer { ) { match &*self.stack { [] | [N3State::N3Doc] => (), - _ => errors.push("Unexpected end".into()), //TODO + _ => errors.push("Unexpected end".into()), // TODO } } diff --git a/lib/oxttl/src/terse.rs b/lib/oxttl/src/terse.rs index 86fad434..818524f6 100644 --- a/lib/oxttl/src/terse.rs +++ b/lib/oxttl/src/terse.rs @@ -4,12 +4,10 @@ use crate::lexer::{resolve_local_name, N3Lexer, N3LexerMode, N3LexerOptions, N3T use crate::toolkit::{Lexer, Parser, RuleRecognizer, RuleRecognizerError}; use crate::{MAX_BUFFER_SIZE, MIN_BUFFER_SIZE}; use oxiri::Iri; +use oxrdf::vocab::{rdf, xsd}; #[cfg(feature = "rdf-star")] use oxrdf::Triple; -use oxrdf::{ - vocab::{rdf, xsd}, - BlankNode, GraphName, Literal, NamedNode, NamedOrBlankNode, Quad, Subject, Term, -}; +use oxrdf::{BlankNode, GraphName, Literal, NamedNode, NamedOrBlankNode, Quad, Subject, Term}; use std::collections::HashMap; pub struct TriGRecognizer { @@ -30,9 +28,9 @@ pub struct TriGRecognizerContext { } impl RuleRecognizer for TriGRecognizer { - type TokenRecognizer = N3Lexer; - type Output = Quad; type Context = TriGRecognizerContext; + type Output = Quad; + type TokenRecognizer = N3Lexer; fn error_recovery_state(mut self) -> Self { self.stack.clear(); @@ -784,7 +782,7 @@ impl RuleRecognizer for TriGRecognizer { } } } else if token == N3Token::Punctuation(".") || token == N3Token::Punctuation("}") { - //TODO: be smarter depending if we are in '{' or not + // TODO: be smarter depending if we are in '{' or not self.stack.push(TriGState::TriGDoc); self } else { @@ -819,7 +817,7 @@ impl RuleRecognizer for TriGRecognizer { self.emit_quad(results); errors.push("Triples should be followed by a dot".into()) } - _ => errors.push("Unexpected end".into()), //TODO + _ => errors.push("Unexpected end".into()), // TODO } } diff --git a/lib/oxttl/src/toolkit/lexer.rs b/lib/oxttl/src/toolkit/lexer.rs index 0f7373c2..2406dfb1 100644 --- a/lib/oxttl/src/toolkit/lexer.rs +++ b/lib/oxttl/src/toolkit/lexer.rs @@ -366,7 +366,7 @@ impl Lexer { _ => return 
Some(()), } i += 1; - //TODO: SIMD + // TODO: SIMD } } else { for c in &self.data[self.position.buffer_offset..] { @@ -376,7 +376,7 @@ impl Lexer { } else { return Some(()); } - //TODO: SIMD + // TODO: SIMD } } Some(()) diff --git a/lib/oxttl/src/trig.rs b/lib/oxttl/src/trig.rs index 0dad7fd7..5a7cdb4a 100644 --- a/lib/oxttl/src/trig.rs +++ b/lib/oxttl/src/trig.rs @@ -6,7 +6,8 @@ use crate::terse::TriGRecognizer; use crate::toolkit::FromTokioAsyncReadIterator; use crate::toolkit::{FromReadIterator, ParseError, Parser, SyntaxError}; use oxiri::{Iri, IriParseError}; -use oxrdf::{vocab::xsd, GraphName, NamedNode, Quad, QuadRef, Subject, TermRef}; +use oxrdf::vocab::xsd; +use oxrdf::{GraphName, NamedNode, Quad, QuadRef, Subject, TermRef}; use std::collections::HashMap; use std::fmt; use std::io::{self, Read, Write}; @@ -19,7 +20,8 @@ use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt}; /// /// Count the number of people: /// ``` -/// use oxrdf::{NamedNodeRef, vocab::rdf}; +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNodeRef; /// use oxttl::TriGParser; /// /// let file = br#"@base . @@ -97,7 +99,8 @@ impl TriGParser { /// /// Count the number of people: /// ``` - /// use oxrdf::{NamedNodeRef, vocab::rdf}; + /// use oxrdf::vocab::rdf; + /// use oxrdf::NamedNodeRef; /// use oxttl::TriGParser; /// /// let file = br#"@base . @@ -128,7 +131,8 @@ impl TriGParser { /// /// Count the number of people: /// ``` - /// use oxrdf::{NamedNodeRef, vocab::rdf}; + /// use oxrdf::vocab::rdf; + /// use oxrdf::NamedNodeRef; /// use oxttl::TriGParser; /// /// # #[tokio::main(flavor = "current_thread")] @@ -167,14 +171,16 @@ impl TriGParser { /// /// Count the number of people: /// ``` - /// use oxrdf::{NamedNodeRef, vocab::rdf}; + /// use oxrdf::vocab::rdf; + /// use oxrdf::NamedNodeRef; /// use oxttl::TriGParser; /// - /// let file: [&[u8]; 5] = [b"@base ", + /// let file: [&[u8]; 5] = [ + /// b"@base ", /// b". @prefix schema: .", /// b" a schema:Person", /// b" ; schema:name \"Foo\" . ", - /// b" a schema:Person ; schema:name \"Bar\" ." + /// b" a schema:Person ; schema:name \"Bar\" .", /// ]; /// /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; @@ -184,7 +190,7 @@ impl TriGParser { /// while !parser.is_end() { /// // We feed more data to the parser /// if let Some(chunk) = file_chunks.next() { - /// parser.extend_from_slice(chunk); + /// parser.extend_from_slice(chunk); /// } else { /// parser.end(); // It's finished /// } @@ -217,7 +223,8 @@ impl TriGParser { /// /// Count the number of people: /// ``` -/// use oxrdf::{NamedNodeRef, vocab::rdf}; +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNodeRef; /// use oxttl::TriGParser; /// /// let file = br#"@base . @@ -309,7 +316,8 @@ impl Iterator for FromReadTriGReader { /// /// Count the number of people: /// ``` -/// use oxrdf::{NamedNodeRef, vocab::rdf}; +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNodeRef; /// use oxttl::TriGParser; /// /// # #[tokio::main(flavor = "current_thread")] @@ -410,14 +418,16 @@ impl FromTokioAsyncReadTriGReader { /// /// Count the number of people: /// ``` -/// use oxrdf::{NamedNodeRef, vocab::rdf}; +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNodeRef; /// use oxttl::TriGParser; /// -/// let file: [&[u8]; 5] = [b"@base ", +/// let file: [&[u8]; 5] = [ +/// b"@base ", /// b". @prefix schema: .", /// b" a schema:Person", /// b" ; schema:name \"Foo\" . ", -/// b" a schema:Person ; schema:name \"Bar\" ." 
+/// b" a schema:Person ; schema:name \"Bar\" .", /// ]; /// /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; @@ -427,7 +437,7 @@ impl FromTokioAsyncReadTriGReader { /// while !parser.is_end() { /// // We feed more data to the parser /// if let Some(chunk) = file_chunks.next() { -/// parser.extend_from_slice(chunk); +/// parser.extend_from_slice(chunk); /// } else { /// parser.end(); // It's finished /// } diff --git a/lib/oxttl/src/turtle.rs b/lib/oxttl/src/turtle.rs index 542afd27..5a2b67a2 100644 --- a/lib/oxttl/src/turtle.rs +++ b/lib/oxttl/src/turtle.rs @@ -21,7 +21,8 @@ use tokio::io::{AsyncRead, AsyncWrite}; /// /// Count the number of people: /// ``` -/// use oxrdf::{NamedNodeRef, vocab::rdf}; +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNodeRef; /// use oxttl::TurtleParser; /// /// let file = br#"@base . @@ -99,7 +100,8 @@ impl TurtleParser { /// /// Count the number of people: /// ``` - /// use oxrdf::{NamedNodeRef, vocab::rdf}; + /// use oxrdf::vocab::rdf; + /// use oxrdf::NamedNodeRef; /// use oxttl::TurtleParser; /// /// let file = br#"@base . @@ -130,7 +132,8 @@ impl TurtleParser { /// /// Count the number of people: /// ``` - /// use oxrdf::{NamedNodeRef, vocab::rdf}; + /// use oxrdf::vocab::rdf; + /// use oxrdf::NamedNodeRef; /// use oxttl::TurtleParser; /// /// # #[tokio::main(flavor = "current_thread")] @@ -169,14 +172,16 @@ impl TurtleParser { /// /// Count the number of people: /// ``` - /// use oxrdf::{NamedNodeRef, vocab::rdf}; + /// use oxrdf::vocab::rdf; + /// use oxrdf::NamedNodeRef; /// use oxttl::TurtleParser; /// - /// let file: [&[u8]; 5] = [b"@base ", + /// let file: [&[u8]; 5] = [ + /// b"@base ", /// b". @prefix schema: .", /// b" a schema:Person", /// b" ; schema:name \"Foo\" . ", - /// b" a schema:Person ; schema:name \"Bar\" ." + /// b" a schema:Person ; schema:name \"Bar\" .", /// ]; /// /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; @@ -186,7 +191,7 @@ impl TurtleParser { /// while !parser.is_end() { /// // We feed more data to the parser /// if let Some(chunk) = file_chunks.next() { - /// parser.extend_from_slice(chunk); + /// parser.extend_from_slice(chunk); /// } else { /// parser.end(); // It's finished /// } @@ -219,7 +224,8 @@ impl TurtleParser { /// /// Count the number of people: /// ``` -/// use oxrdf::{NamedNodeRef, vocab::rdf}; +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNodeRef; /// use oxttl::TurtleParser; /// /// let file = br#"@base . @@ -311,7 +317,8 @@ impl Iterator for FromReadTurtleReader { /// /// Count the number of people: /// ``` -/// use oxrdf::{NamedNodeRef, vocab::rdf}; +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNodeRef; /// use oxttl::TurtleParser; /// /// # #[tokio::main(flavor = "current_thread")] @@ -412,14 +419,16 @@ impl FromTokioAsyncReadTurtleReader { /// /// Count the number of people: /// ``` -/// use oxrdf::{NamedNodeRef, vocab::rdf}; +/// use oxrdf::vocab::rdf; +/// use oxrdf::NamedNodeRef; /// use oxttl::TurtleParser; /// -/// let file: [&[u8]; 5] = [b"@base ", +/// let file: [&[u8]; 5] = [ +/// b"@base ", /// b". @prefix schema: .", /// b" a schema:Person", /// b" ; schema:name \"Foo\" . ", -/// b" a schema:Person ; schema:name \"Bar\" ." 
+/// b" a schema:Person ; schema:name \"Bar\" .", /// ]; /// /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; @@ -429,7 +438,7 @@ impl FromTokioAsyncReadTurtleReader { /// while !parser.is_end() { /// // We feed more data to the parser /// if let Some(chunk) = file_chunks.next() { -/// parser.extend_from_slice(chunk); +/// parser.extend_from_slice(chunk); /// } else { /// parser.end(); // It's finished /// } diff --git a/lib/sparesults/src/csv.rs b/lib/sparesults/src/csv.rs index 985092b4..7cf6059e 100644 --- a/lib/sparesults/src/csv.rs +++ b/lib/sparesults/src/csv.rs @@ -2,8 +2,8 @@ use crate::error::{ParseError, SyntaxError, SyntaxErrorKind, TextPosition}; use memchr::memchr; -use oxrdf::Variable; -use oxrdf::{vocab::xsd, *}; +use oxrdf::vocab::xsd; +use oxrdf::*; use std::io::{self, Read, Write}; use std::str::{self, FromStr}; #[cfg(feature = "async-tokio")] diff --git a/lib/sparesults/src/format.rs b/lib/sparesults/src/format.rs index e7eba74a..982ff11f 100644 --- a/lib/sparesults/src/format.rs +++ b/lib/sparesults/src/format.rs @@ -20,7 +20,10 @@ impl QueryResultsFormat { /// ``` /// use sparesults::QueryResultsFormat; /// - /// assert_eq!(QueryResultsFormat::Json.iri(), "http://www.w3.org/ns/formats/SPARQL_Results_JSON") + /// assert_eq!( + /// QueryResultsFormat::Json.iri(), + /// "http://www.w3.org/ns/formats/SPARQL_Results_JSON" + /// ) /// ``` #[inline] pub fn iri(self) -> &'static str { @@ -31,12 +34,16 @@ impl QueryResultsFormat { Self::Tsv => "http://www.w3.org/ns/formats/SPARQL_Results_TSV", } } + /// The format [IANA media type](https://tools.ietf.org/html/rfc2046). /// /// ``` /// use sparesults::QueryResultsFormat; /// - /// assert_eq!(QueryResultsFormat::Json.media_type(), "application/sparql-results+json") + /// assert_eq!( + /// QueryResultsFormat::Json.media_type(), + /// "application/sparql-results+json" + /// ) /// ``` #[inline] pub fn media_type(self) -> &'static str { @@ -91,7 +98,10 @@ impl QueryResultsFormat { /// ``` /// use sparesults::QueryResultsFormat; /// - /// assert_eq!(QueryResultsFormat::from_media_type("application/sparql-results+json; charset=utf-8"), Some(QueryResultsFormat::Json)) + /// assert_eq!( + /// QueryResultsFormat::from_media_type("application/sparql-results+json; charset=utf-8"), + /// Some(QueryResultsFormat::Json) + /// ) /// ``` #[inline] pub fn from_media_type(media_type: &str) -> Option { @@ -134,7 +144,10 @@ impl QueryResultsFormat { /// ``` /// use sparesults::QueryResultsFormat; /// - /// assert_eq!(QueryResultsFormat::from_extension("json"), Some(QueryResultsFormat::Json)) + /// assert_eq!( + /// QueryResultsFormat::from_extension("json"), + /// Some(QueryResultsFormat::Json) + /// ) /// ``` #[inline] pub fn from_extension(extension: &str) -> Option { diff --git a/lib/sparesults/src/json.rs b/lib/sparesults/src/json.rs index 85b03fcd..2e63fc81 100644 --- a/lib/sparesults/src/json.rs +++ b/lib/sparesults/src/json.rs @@ -5,7 +5,6 @@ use crate::error::{ParseError, SyntaxError}; use json_event_parser::ToTokioAsyncWriteJsonWriter; use json_event_parser::{FromReadJsonReader, JsonEvent, ToWriteJsonWriter}; use oxrdf::vocab::rdf; -use oxrdf::Variable; use oxrdf::*; use std::collections::BTreeMap; use std::io::{self, Read, Write}; @@ -522,7 +521,7 @@ fn read_value( JsonEvent::EndObject => { if let Some(s) = state { if s == State::Value { - state = None; //End of triple + state = None; // End of triple } else { return Err( SyntaxError::msg("Term description values should be string").into() diff --git 
a/lib/sparesults/src/parser.rs b/lib/sparesults/src/parser.rs index a00d014c..3332335b 100644 --- a/lib/sparesults/src/parser.rs +++ b/lib/sparesults/src/parser.rs @@ -130,8 +130,8 @@ impl From for QueryResultsParser { /// /// Example in TSV (the API is the same for JSON and XML): /// ``` -/// use sparesults::{QueryResultsFormat, QueryResultsParser, FromReadQueryResultsReader}; /// use oxrdf::{Literal, Variable}; +/// use sparesults::{FromReadQueryResultsReader, QueryResultsFormat, QueryResultsParser}; /// /// let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Tsv); /// @@ -141,10 +141,24 @@ impl From for QueryResultsParser { /// } /// /// // solutions -/// if let FromReadQueryResultsReader::Solutions(solutions) = json_parser.parse_read(b"?foo\t?bar\n\"test\"\t".as_slice())? { -/// assert_eq!(solutions.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]); +/// if let FromReadQueryResultsReader::Solutions(solutions) = +/// json_parser.parse_read(b"?foo\t?bar\n\"test\"\t".as_slice())? +/// { +/// assert_eq!( +/// solutions.variables(), +/// &[ +/// Variable::new_unchecked("foo"), +/// Variable::new_unchecked("bar") +/// ] +/// ); /// for solution in solutions { -/// assert_eq!(solution?.iter().collect::>(), vec![(&Variable::new_unchecked("foo"), &Literal::from("test").into())]); +/// assert_eq!( +/// solution?.iter().collect::>(), +/// vec![( +/// &Variable::new_unchecked("foo"), +/// &Literal::from("test").into() +/// )] +/// ); /// } /// } /// # Result::<(),sparesults::ParseError>::Ok(()) @@ -188,12 +202,20 @@ impl FromReadSolutionsReader { /// /// Example in TSV (the API is the same for JSON and XML): /// ``` - /// use sparesults::{QueryResultsFormat, QueryResultsParser, FromReadQueryResultsReader}; /// use oxrdf::Variable; + /// use sparesults::{FromReadQueryResultsReader, QueryResultsFormat, QueryResultsParser}; /// /// let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Tsv); - /// if let FromReadQueryResultsReader::Solutions(solutions) = json_parser.parse_read(b"?foo\t?bar\n\"ex1\"\t\"ex2\"".as_slice())? { - /// assert_eq!(solutions.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]); + /// if let FromReadQueryResultsReader::Solutions(solutions) = + /// json_parser.parse_read(b"?foo\t?bar\n\"ex1\"\t\"ex2\"".as_slice())? 
+ /// { + /// assert_eq!( + /// solutions.variables(), + /// &[ + /// Variable::new_unchecked("foo"), + /// Variable::new_unchecked("bar") + /// ] + /// ); /// } /// # Result::<(),sparesults::ParseError>::Ok(()) /// ``` diff --git a/lib/sparesults/src/serializer.rs b/lib/sparesults/src/serializer.rs index 13c21628..1d4a02b1 100644 --- a/lib/sparesults/src/serializer.rs +++ b/lib/sparesults/src/serializer.rs @@ -241,14 +241,23 @@ impl From for QueryResultsSerializer { /// /// Example in TSV (the API is the same for JSON, XML and CSV): /// ``` -/// use sparesults::{QueryResultsFormat, QueryResultsSerializer}; /// use oxrdf::{LiteralRef, Variable, VariableRef}; +/// use sparesults::{QueryResultsFormat, QueryResultsSerializer}; /// use std::iter::once; /// /// let tsv_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Tsv); /// let mut buffer = Vec::new(); -/// let mut writer = tsv_serializer.serialize_solutions_to_write(&mut buffer, vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")])?; -/// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test"))))?; +/// let mut writer = tsv_serializer.serialize_solutions_to_write( +/// &mut buffer, +/// vec![ +/// Variable::new_unchecked("foo"), +/// Variable::new_unchecked("bar"), +/// ], +/// )?; +/// writer.write(once(( +/// VariableRef::new_unchecked("foo"), +/// LiteralRef::from("test"), +/// )))?; /// writer.finish()?; /// assert_eq!(buffer, b"?foo\t?bar\n\"test\"\t\n"); /// # std::io::Result::Ok(()) @@ -321,16 +330,29 @@ impl ToWriteSolutionsWriter { /// /// Example in TSV (the API is the same for JSON, CSV and XML): /// ``` -/// use sparesults::{QueryResultsFormat, QueryResultsSerializer}; /// use oxrdf::{LiteralRef, Variable, VariableRef}; +/// use sparesults::{QueryResultsFormat, QueryResultsSerializer}; /// use std::iter::once; /// /// # #[tokio::main(flavor = "current_thread")] /// # async fn main() -> std::io::Result<()> { /// let tsv_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Tsv); /// let mut buffer = Vec::new(); -/// let mut writer = tsv_serializer.serialize_solutions_to_tokio_async_write(&mut buffer, vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]).await?; -/// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test")))).await?; +/// let mut writer = tsv_serializer +/// .serialize_solutions_to_tokio_async_write( +/// &mut buffer, +/// vec![ +/// Variable::new_unchecked("foo"), +/// Variable::new_unchecked("bar"), +/// ], +/// ) +/// .await?; +/// writer +/// .write(once(( +/// VariableRef::new_unchecked("foo"), +/// LiteralRef::from("test"), +/// ))) +/// .await?; /// writer.finish().await?; /// assert_eq!(buffer, b"?foo\t?bar\n\"test\"\t\n"); /// # Ok(()) diff --git a/lib/sparesults/src/solution.rs b/lib/sparesults/src/solution.rs index 0d81adc2..826a9eea 100644 --- a/lib/sparesults/src/solution.rs +++ b/lib/sparesults/src/solution.rs @@ -44,10 +44,16 @@ impl QuerySolution { /// It is also the number of columns in the solutions table. 
/// /// ``` + /// use oxrdf::{Literal, Variable}; /// use sparesults::QuerySolution; - /// use oxrdf::{Variable, Literal}; /// - /// let solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![Some(Literal::from(1).into()), None])); + /// let solution = QuerySolution::from(( + /// vec![ + /// Variable::new_unchecked("foo"), + /// Variable::new_unchecked("bar"), + /// ], + /// vec![Some(Literal::from(1).into()), None], + /// )); /// assert_eq!(solution.len(), 2); /// ``` #[inline] @@ -58,13 +64,25 @@ impl QuerySolution { /// Is there any variable bound in the table? /// /// ``` + /// use oxrdf::{Literal, Variable}; /// use sparesults::QuerySolution; - /// use oxrdf::{Variable, Literal}; /// - /// let solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![Some(Literal::from(1).into()), None])); + /// let solution = QuerySolution::from(( + /// vec![ + /// Variable::new_unchecked("foo"), + /// Variable::new_unchecked("bar"), + /// ], + /// vec![Some(Literal::from(1).into()), None], + /// )); /// assert!(!solution.is_empty()); /// - /// let empty_solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![None, None])); + /// let empty_solution = QuerySolution::from(( + /// vec![ + /// Variable::new_unchecked("foo"), + /// Variable::new_unchecked("bar"), + /// ], + /// vec![None, None], + /// )); /// assert!(empty_solution.is_empty()); /// ``` #[inline] @@ -75,11 +93,20 @@ impl QuerySolution { /// Returns an iterator over bound variables. /// /// ``` + /// use oxrdf::{Literal, Variable}; /// use sparesults::QuerySolution; - /// use oxrdf::{Variable, Literal}; /// - /// let solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![Some(Literal::from(1).into()), None])); - /// assert_eq!(solution.iter().collect::>(), vec![(&Variable::new_unchecked("foo"), &Literal::from(1).into())]); + /// let solution = QuerySolution::from(( + /// vec![ + /// Variable::new_unchecked("foo"), + /// Variable::new_unchecked("bar"), + /// ], + /// vec![Some(Literal::from(1).into()), None], + /// )); + /// assert_eq!( + /// solution.iter().collect::>(), + /// vec![(&Variable::new_unchecked("foo"), &Literal::from(1).into())] + /// ); /// ``` #[inline] pub fn iter(&self) -> impl Iterator { @@ -89,10 +116,16 @@ impl QuerySolution { /// Returns the ordered slice of variable values. /// /// ``` + /// use oxrdf::{Literal, Variable}; /// use sparesults::QuerySolution; - /// use oxrdf::{Variable, Literal}; /// - /// let solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![Some(Literal::from(1).into()), None])); + /// let solution = QuerySolution::from(( + /// vec![ + /// Variable::new_unchecked("foo"), + /// Variable::new_unchecked("bar"), + /// ], + /// vec![Some(Literal::from(1).into()), None], + /// )); /// assert_eq!(solution.values(), &[Some(Literal::from(1).into()), None]); /// ``` #[inline] @@ -103,11 +136,23 @@ impl QuerySolution { /// Returns the ordered slice of the solution variables, bound or not. 
/// /// ``` + /// use oxrdf::{Literal, Variable}; /// use sparesults::QuerySolution; - /// use oxrdf::{Variable, Literal}; /// - /// let solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![Some(Literal::from(1).into()), None])); - /// assert_eq!(solution.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]); + /// let solution = QuerySolution::from(( + /// vec![ + /// Variable::new_unchecked("foo"), + /// Variable::new_unchecked("bar"), + /// ], + /// vec![Some(Literal::from(1).into()), None], + /// )); + /// assert_eq!( + /// solution.variables(), + /// &[ + /// Variable::new_unchecked("foo"), + /// Variable::new_unchecked("bar") + /// ] + /// ); /// ``` #[inline] pub fn variables(&self) -> &[Variable] { @@ -126,8 +171,8 @@ impl>, S: Into>>> From<(V, S)> for Quer } impl<'a> IntoIterator for &'a QuerySolution { - type Item = (&'a Variable, &'a Term); type IntoIter = Iter<'a>; + type Item = (&'a Variable, &'a Term); #[inline] fn into_iter(self) -> Self::IntoIter { @@ -214,11 +259,20 @@ impl fmt::Debug for QuerySolution { /// An iterator over [`QuerySolution`] bound variables. /// /// ``` +/// use oxrdf::{Literal, Variable}; /// use sparesults::QuerySolution; -/// use oxrdf::{Variable, Literal}; /// -/// let solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![Some(Literal::from(1).into()), None])); -/// assert_eq!(solution.iter().collect::>(), vec![(&Variable::new_unchecked("foo"), &Literal::from(1).into())]); +/// let solution = QuerySolution::from(( +/// vec![ +/// Variable::new_unchecked("foo"), +/// Variable::new_unchecked("bar"), +/// ], +/// vec![Some(Literal::from(1).into()), None], +/// )); +/// assert_eq!( +/// solution.iter().collect::>(), +/// vec![(&Variable::new_unchecked("foo"), &Literal::from(1).into())] +/// ); /// ``` pub struct Iter<'a> { inner: Zip, std::slice::Iter<'a, Option>>, diff --git a/lib/sparesults/src/xml.rs b/lib/sparesults/src/xml.rs index 3c2d91a8..c0450fac 100644 --- a/lib/sparesults/src/xml.rs +++ b/lib/sparesults/src/xml.rs @@ -2,7 +2,6 @@ use crate::error::{ParseError, SyntaxError}; use oxrdf::vocab::rdf; -use oxrdf::Variable; use oxrdf::*; use quick_xml::events::{BytesDecl, BytesEnd, BytesStart, BytesText, Event}; use quick_xml::{Reader, Writer}; @@ -245,7 +244,7 @@ impl XmlQueryResultsReader { let mut variables = Vec::default(); let mut state = State::Start; - //Read header + // Read header loop { buffer.clear(); let event = reader.read_event_into(&mut buffer)?; @@ -553,7 +552,7 @@ impl XmlSolutionsReader { } State::BNode => { if term.is_none() { - //We default to a random bnode + // We default to a random bnode term = Some(BlankNode::default().into()) } state = self @@ -563,7 +562,7 @@ impl XmlSolutionsReader { } State::Literal => { if term.is_none() { - //We default to the empty literal + // We default to the empty literal term = Some(build_literal("", lang.take(), datatype.take())?.into()) } state = self diff --git a/lib/spargebra/src/parser.rs b/lib/spargebra/src/parser.rs index 65a251e5..03a71932 100644 --- a/lib/spargebra/src/parser.rs +++ b/lib/spargebra/src/parser.rs @@ -365,7 +365,7 @@ enum PartialGraphPattern { } fn new_join(l: GraphPattern, r: GraphPattern) -> GraphPattern { - //Avoid to output empty BGPs + // Avoid to output empty BGPs if let GraphPattern::Bgp { patterns: pl } = &l { if pl.is_empty() { return r; @@ -449,7 +449,7 @@ fn build_select( let mut p = r#where; let mut with_aggregate = false; - 
//GROUP BY + // GROUP BY let aggregates = state.aggregates.pop().unwrap_or_default(); if group.is_none() && !aggregates.is_empty() { group = Some((vec![], vec![])); @@ -471,7 +471,7 @@ fn build_select( with_aggregate = true; } - //HAVING + // HAVING if let Some(expr) = having { p = GraphPattern::Filter { expr, @@ -479,12 +479,12 @@ fn build_select( }; } - //VALUES + // VALUES if let Some(data) = values { p = new_join(p, data); } - //SELECT + // SELECT let mut pv = Vec::new(); let with_project = match select.variables { SelectionVariables::Explicit(sel_items) => { @@ -533,7 +533,7 @@ fn build_select( if with_aggregate { return Err("SELECT * is not authorized with GROUP BY"); } - //TODO: is it really useful to do a projection? + // TODO: is it really useful to do a projection? p.on_in_scope_variable(|v| { if !pv.contains(v) { pv.push(v.clone()); @@ -547,7 +547,7 @@ fn build_select( let mut m = p; - //ORDER BY + // ORDER BY if let Some(expression) = order_by { m = GraphPattern::OrderBy { inner: Box::new(m), @@ -555,7 +555,7 @@ fn build_select( }; } - //PROJECT + // PROJECT if with_project { m = GraphPattern::Project { inner: Box::new(m), @@ -568,7 +568,7 @@ fn build_select( SelectionOption::Default => (), } - //OFFSET LIMIT + // OFFSET LIMIT if let Some((start, length)) = offset_limit { m = GraphPattern::Slice { inner: Box::new(m), diff --git a/lib/spargebra/src/query.rs b/lib/spargebra/src/query.rs index 8716de73..5739b7b8 100644 --- a/lib/spargebra/src/query.rs +++ b/lib/spargebra/src/query.rs @@ -13,7 +13,10 @@ use std::str::FromStr; /// let query_str = "SELECT ?s ?p ?o WHERE { ?s ?p ?o . }"; /// let query = Query::parse(query_str, None)?; /// assert_eq!(query.to_string(), query_str); -/// assert_eq!(query.to_sse(), "(project (?s ?p ?o) (bgp (triple ?s ?p ?o)))"); +/// assert_eq!( +/// query.to_sse(), +/// "(project (?s ?p ?o) (bgp (triple ?s ?p ?o)))" +/// ); /// # Ok::<_, spargebra::ParseError>(()) /// ``` #[derive(Eq, PartialEq, Debug, Clone, Hash)] diff --git a/lib/spargebra/src/term.rs b/lib/spargebra/src/term.rs index 362b3959..ba5fb8e3 100644 --- a/lib/spargebra/src/term.rs +++ b/lib/spargebra/src/term.rs @@ -141,7 +141,7 @@ impl TryFrom for GroundTerm { /// The default string formatter is returning a N-Quads representation. 
/// /// ``` -/// use spargebra::term::{NamedNode, GroundTriple}; +/// use spargebra::term::{GroundTriple, NamedNode}; /// /// assert_eq!( /// " ", @@ -149,7 +149,8 @@ impl TryFrom for GroundTerm { /// subject: NamedNode::new("http://example.com/s")?.into(), /// predicate: NamedNode::new("http://example.com/p")?, /// object: NamedNode::new("http://example.com/o")?.into(), -/// }.to_string() +/// } +/// .to_string() /// ); /// # Result::<_,oxrdf::IriParseError>::Ok(()) /// ``` diff --git a/lib/sparopt/src/algebra.rs b/lib/sparopt/src/algebra.rs index 51ecf6fa..b9bb30f7 100644 --- a/lib/sparopt/src/algebra.rs +++ b/lib/sparopt/src/algebra.rs @@ -197,10 +197,10 @@ impl Expression { xsd::BOOLEAN => match literal.value() { "true" | "1" => Some(true), "false" | "0" => Some(false), - _ => None, //TODO + _ => None, // TODO }, xsd::STRING => Some(!literal.value().is_empty()), - _ => None, //TODO + _ => None, // TODO } } else { None diff --git a/lib/sparopt/src/optimizer.rs b/lib/sparopt/src/optimizer.rs index 5dc9d404..facc5b0c 100644 --- a/lib/sparopt/src/optimizer.rs +++ b/lib/sparopt/src/optimizer.rs @@ -102,7 +102,7 @@ impl Optimizer { let expression = Self::normalize_expression(expression, &inner_types); let expression_type = infer_expression_type(&expression, &inner_types); if expression_type == VariableType::UNDEF { - //TODO: valid? + // TODO: valid? inner } else { GraphPattern::extend(inner, variable, expression) @@ -397,7 +397,7 @@ impl Optimizer { expression, variable, } => { - //TODO: handle the case where the filter overrides an expression variable (should not happen in SPARQL but allowed in the algebra) + // TODO: handle the case where the filter overrides an expression variable (should not happen in SPARQL but allowed in the algebra) let mut inner_filters = Vec::new(); let mut final_filters = Vec::new(); for filter in filters { @@ -735,7 +735,7 @@ fn is_fit_for_for_loop_join( global_input_types: &VariableTypes, entry_types: &VariableTypes, ) -> bool { - //TODO: think more about it + // TODO: think more about it match pattern { GraphPattern::Values { .. } | GraphPattern::QuadPattern { .. } diff --git a/lib/sparopt/src/type_inference.rs b/lib/sparopt/src/type_inference.rs index 161ba58a..d53b63e4 100644 --- a/lib/sparopt/src/type_inference.rs +++ b/lib/sparopt/src/type_inference.rs @@ -49,7 +49,7 @@ pub fn infer_graph_pattern_types( infer_graph_pattern_types(right, infer_graph_pattern_types(left, types)) } GraphPattern::LeftJoin { left, right, .. 
} => { - let mut right_types = infer_graph_pattern_types(right, types.clone()); //TODO: expression + let mut right_types = infer_graph_pattern_types(right, types.clone()); // TODO: expression for t in right_types.inner.values_mut() { t.undef = true; // Right might be unset } @@ -352,24 +352,14 @@ pub struct VariableType { } impl VariableType { - pub const UNDEF: Self = Self { + const ANY: Self = Self { undef: true, - named_node: false, - blank_node: false, - literal: false, - #[cfg(feature = "rdf-star")] - triple: false, - }; - - const NAMED_NODE: Self = Self { - undef: false, named_node: true, - blank_node: false, - literal: false, + blank_node: true, + literal: true, #[cfg(feature = "rdf-star")] - triple: false, + triple: true, }; - const BLANK_NODE: Self = Self { undef: false, named_node: false, @@ -378,7 +368,6 @@ impl VariableType { #[cfg(feature = "rdf-star")] triple: false, }; - const LITERAL: Self = Self { undef: false, named_node: false, @@ -387,16 +376,14 @@ impl VariableType { #[cfg(feature = "rdf-star")] triple: false, }; - - #[cfg(feature = "rdf-star")] - const TRIPLE: Self = Self { + const NAMED_NODE: Self = Self { undef: false, - named_node: false, + named_node: true, blank_node: false, literal: false, - triple: true, + #[cfg(feature = "rdf-star")] + triple: false, }; - const SUBJECT: Self = Self { undef: false, named_node: true, @@ -405,7 +392,6 @@ impl VariableType { #[cfg(feature = "rdf-star")] triple: true, }; - const TERM: Self = Self { undef: false, named_node: true, @@ -414,14 +400,21 @@ impl VariableType { #[cfg(feature = "rdf-star")] triple: true, }; - - const ANY: Self = Self { + #[cfg(feature = "rdf-star")] + const TRIPLE: Self = Self { + undef: false, + named_node: false, + blank_node: false, + literal: false, + triple: true, + }; + pub const UNDEF: Self = Self { undef: true, - named_node: true, - blank_node: true, - literal: true, + named_node: false, + blank_node: false, + literal: false, #[cfg(feature = "rdf-star")] - triple: true, + triple: false, }; } diff --git a/lib/sparql-smith/src/lib.rs b/lib/sparql-smith/src/lib.rs index 01ca45e5..3bc99c0b 100644 --- a/lib/sparql-smith/src/lib.rs +++ b/lib/sparql-smith/src/lib.rs @@ -44,7 +44,7 @@ struct QueryContent { #[derive(Arbitrary)] enum QueryVariant { Select(SelectQuery), - //TODO: Other variants! + // TODO: Other variants! } impl<'a> Arbitrary<'a> for Query { @@ -246,7 +246,7 @@ impl fmt::Display for GroupCondition { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::BuiltInCall(c) => write!(f, "{c}"), - //Self::FunctionCall(c) => write!(f, "{}", c), + // Self::FunctionCall(c) => write!(f, "{}", c), Self::Projection(e, v) => { if let Some(v) = v { write!(f, "({e} AS {v})") @@ -705,7 +705,7 @@ impl fmt::Display for Constraint { match self { Self::BrackettedExpression(e) => write!(f, "{e}"), Self::BuiltInCall(c) => write!(f, "{c}"), - //Self::FunctionCall(c) => write!(f, "{}", c), + // Self::FunctionCall(c) => write!(f, "{}", c), } } } @@ -1530,7 +1530,7 @@ enum BuiltInCall { IsLiteral(Box), IsNumeric(Box), Exists(ExistsFunc), - NotExists(NotExistsFunc), //TODO: Other functions + NotExists(NotExistsFunc), // TODO: Other functions } impl fmt::Display for BuiltInCall { @@ -1585,15 +1585,15 @@ impl fmt::Display for NotExistsFunc { struct IriOrFunction { // [128] iriOrFunction ::= iri ArgList? 
iri: Iri, - //TODO args: Option, + // TODO args: Option, } impl fmt::Display for IriOrFunction { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.iri)?; - /*if let Some(args) = &self.args { - write!(f, "{}", args)?; - }*/ + // if let Some(args) = &self.args { + // write!(f, "{}", args)?; + // } Ok(()) } } diff --git a/lib/src/io/format.rs b/lib/src/io/format.rs index b07c1709..08b61d8a 100644 --- a/lib/src/io/format.rs +++ b/lib/src/io/format.rs @@ -23,7 +23,10 @@ impl GraphFormat { /// ``` /// use oxigraph::io::GraphFormat; /// - /// assert_eq!(GraphFormat::NTriples.iri(), "http://www.w3.org/ns/formats/N-Triples") + /// assert_eq!( + /// GraphFormat::NTriples.iri(), + /// "http://www.w3.org/ns/formats/N-Triples" + /// ) /// ``` #[inline] pub fn iri(self) -> &'static str { @@ -65,6 +68,7 @@ impl GraphFormat { Self::RdfXml => "rdf", } } + /// Looks for a known format from a media type. /// /// It supports some media type aliases. @@ -74,7 +78,10 @@ impl GraphFormat { /// ``` /// use oxigraph::io::GraphFormat; /// - /// assert_eq!(GraphFormat::from_media_type("text/turtle; charset=utf-8"), Some(GraphFormat::Turtle)) + /// assert_eq!( + /// GraphFormat::from_media_type("text/turtle; charset=utf-8"), + /// Some(GraphFormat::Turtle) + /// ) /// ``` #[inline] pub fn from_media_type(media_type: &str) -> Option { @@ -94,7 +101,10 @@ impl GraphFormat { /// ``` /// use oxigraph::io::GraphFormat; /// - /// assert_eq!(GraphFormat::from_extension("nt"), Some(GraphFormat::NTriples)) + /// assert_eq!( + /// GraphFormat::from_extension("nt"), + /// Some(GraphFormat::NTriples) + /// ) /// ``` #[inline] pub fn from_extension(extension: &str) -> Option { @@ -151,7 +161,10 @@ impl DatasetFormat { /// ``` /// use oxigraph::io::DatasetFormat; /// - /// assert_eq!(DatasetFormat::NQuads.iri(), "http://www.w3.org/ns/formats/N-Quads") + /// assert_eq!( + /// DatasetFormat::NQuads.iri(), + /// "http://www.w3.org/ns/formats/N-Quads" + /// ) /// ``` #[inline] pub fn iri(self) -> &'static str { @@ -190,6 +203,7 @@ impl DatasetFormat { Self::TriG => "trig", } } + /// Looks for a known format from a media type. /// /// It supports some media type aliases. @@ -198,7 +212,10 @@ impl DatasetFormat { /// ``` /// use oxigraph::io::DatasetFormat; /// - /// assert_eq!(DatasetFormat::from_media_type("application/n-quads; charset=utf-8"), Some(DatasetFormat::NQuads)) + /// assert_eq!( + /// DatasetFormat::from_media_type("application/n-quads; charset=utf-8"), + /// Some(DatasetFormat::NQuads) + /// ) /// ``` #[inline] pub fn from_media_type(media_type: &str) -> Option { @@ -217,7 +234,10 @@ impl DatasetFormat { /// ``` /// use oxigraph::io::DatasetFormat; /// - /// assert_eq!(DatasetFormat::from_extension("nq"), Some(DatasetFormat::NQuads)) + /// assert_eq!( + /// DatasetFormat::from_extension("nq"), + /// Some(DatasetFormat::NQuads) + /// ) /// ``` #[inline] pub fn from_extension(extension: &str) -> Option { diff --git a/lib/src/io/read.rs b/lib/src/io/read.rs index 3400b8e2..841b166a 100644 --- a/lib/src/io/read.rs +++ b/lib/src/io/read.rs @@ -21,7 +21,9 @@ use std::io::Read; /// let file = " ."; /// /// let parser = GraphParser::from_format(GraphFormat::NTriples); -/// let triples = parser.read_triples(file.as_bytes()).collect::,_>>()?; +/// let triples = parser +/// .read_triples(file.as_bytes()) +/// .collect::, _>>()?; /// /// assert_eq!(triples.len(), 1); /// assert_eq!(triples[0].subject.to_string(), ""); @@ -50,8 +52,11 @@ impl GraphParser { /// /// let file = "

."; /// - /// let parser = GraphParser::from_format(GraphFormat::Turtle).with_base_iri("http://example.com")?; - /// let triples = parser.read_triples(file.as_bytes()).collect::,_>>()?; + /// let parser = + /// GraphParser::from_format(GraphFormat::Turtle).with_base_iri("http://example.com")?; + /// let triples = parser + /// .read_triples(file.as_bytes()) + /// .collect::, _>>()?; /// /// assert_eq!(triples.len(), 1); /// assert_eq!(triples[0].subject.to_string(), ""); @@ -81,7 +86,9 @@ impl GraphParser { /// let file = " ."; /// /// let parser = GraphParser::from_format(GraphFormat::NTriples); -/// let triples = parser.read_triples(file.as_bytes()).collect::,_>>()?; +/// let triples = parser +/// .read_triples(file.as_bytes()) +/// .collect::, _>>()?; /// /// assert_eq!(triples.len(), 1); /// assert_eq!(triples[0].subject.to_string(), ""); @@ -139,8 +146,11 @@ impl DatasetParser { /// /// let file = " {

}"; /// - /// let parser = DatasetParser::from_format(DatasetFormat::TriG).with_base_iri("http://example.com")?; - /// let triples = parser.read_quads(file.as_bytes()).collect::,_>>()?; + /// let parser = + /// DatasetParser::from_format(DatasetFormat::TriG).with_base_iri("http://example.com")?; + /// let triples = parser + /// .read_quads(file.as_bytes()) + /// .collect::, _>>()?; /// /// assert_eq!(triples.len(), 1); /// assert_eq!(triples[0].subject.to_string(), ""); diff --git a/lib/src/io/write.rs b/lib/src/io/write.rs index 7a9007c0..7f27cd9f 100644 --- a/lib/src/io/write.rs +++ b/lib/src/io/write.rs @@ -21,13 +21,16 @@ use std::io::{self, Write}; /// let mut buffer = Vec::new(); /// let mut writer = GraphSerializer::from_format(GraphFormat::NTriples).triple_writer(&mut buffer); /// writer.write(&Triple { -/// subject: NamedNode::new("http://example.com/s")?.into(), -/// predicate: NamedNode::new("http://example.com/p")?, -/// object: NamedNode::new("http://example.com/o")?.into() +/// subject: NamedNode::new("http://example.com/s")?.into(), +/// predicate: NamedNode::new("http://example.com/p")?, +/// object: NamedNode::new("http://example.com/o")?.into(), /// })?; /// writer.finish()?; /// -/// assert_eq!(buffer.as_slice(), " .\n".as_bytes()); +/// assert_eq!( +/// buffer.as_slice(), +/// " .\n".as_bytes() +/// ); /// # Result::<_,Box>::Ok(()) /// ``` #[deprecated(note = "use RdfSerializer instead", since = "0.4.0")] @@ -66,13 +69,16 @@ impl GraphSerializer { /// let mut buffer = Vec::new(); /// let mut writer = GraphSerializer::from_format(GraphFormat::NTriples).triple_writer(&mut buffer); /// writer.write(&Triple { -/// subject: NamedNode::new("http://example.com/s")?.into(), -/// predicate: NamedNode::new("http://example.com/p")?, -/// object: NamedNode::new("http://example.com/o")?.into() +/// subject: NamedNode::new("http://example.com/s")?.into(), +/// predicate: NamedNode::new("http://example.com/p")?, +/// object: NamedNode::new("http://example.com/o")?.into(), /// })?; /// writer.finish()?; /// -/// assert_eq!(buffer.as_slice(), " .\n".as_bytes()); +/// assert_eq!( +/// buffer.as_slice(), +/// " .\n".as_bytes() +/// ); /// # Result::<_,Box>::Ok(()) /// ``` #[must_use] diff --git a/lib/src/sparql/algebra.rs b/lib/src/sparql/algebra.rs index b046de80..819a9bd9 100644 --- a/lib/src/sparql/algebra.rs +++ b/lib/src/sparql/algebra.rs @@ -23,7 +23,10 @@ use std::str::FromStr; /// // We edit the query dataset specification /// let default = vec![NamedNode::new("http://example.com")?.into()]; /// query.dataset_mut().set_default_graph(default.clone()); -/// assert_eq!(query.dataset().default_graph_graphs(), Some(default.as_slice())); +/// assert_eq!( +/// query.dataset().default_graph_graphs(), +/// Some(default.as_slice()) +/// ); /// # Ok::<_, Box>(()) /// ``` #[derive(Eq, PartialEq, Debug, Clone, Hash)] @@ -58,7 +61,7 @@ impl Query { impl fmt::Display for Query { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - self.inner.fmt(f) //TODO: override + self.inner.fmt(f) // TODO: override } } @@ -217,8 +220,15 @@ impl QueryDataset { /// ``` /// use oxigraph::sparql::Query; /// - /// assert!(Query::parse("SELECT ?s ?p ?o WHERE { ?s ?p ?o . }", None)?.dataset().is_default_dataset()); - /// assert!(!Query::parse("SELECT ?s ?p ?o FROM WHERE { ?s ?p ?o . }", None)?.dataset().is_default_dataset()); + /// assert!(Query::parse("SELECT ?s ?p ?o WHERE { ?s ?p ?o . }", None)? 
+ /// .dataset() + /// .is_default_dataset()); + /// assert!(!Query::parse( + /// "SELECT ?s ?p ?o FROM WHERE { ?s ?p ?o . }", + /// None + /// )? + /// .dataset() + /// .is_default_dataset()); /// /// # Ok::<_, Box>(()) /// ``` @@ -250,7 +260,10 @@ impl QueryDataset { /// let mut query = Query::parse("SELECT ?s ?p ?o WHERE { ?s ?p ?o . }", None)?; /// let default = vec![NamedNode::new("http://example.com")?.into()]; /// query.dataset_mut().set_default_graph(default.clone()); - /// assert_eq!(query.dataset().default_graph_graphs(), Some(default.as_slice())); + /// assert_eq!( + /// query.dataset().default_graph_graphs(), + /// Some(default.as_slice()) + /// ); /// /// # Ok::<_, Box>(()) /// ``` @@ -271,8 +284,13 @@ impl QueryDataset { /// /// let mut query = Query::parse("SELECT ?s ?p ?o WHERE { ?s ?p ?o . }", None)?; /// let named = vec![NamedNode::new("http://example.com")?.into()]; - /// query.dataset_mut().set_available_named_graphs(named.clone()); - /// assert_eq!(query.dataset().available_named_graphs(), Some(named.as_slice())); + /// query + /// .dataset_mut() + /// .set_available_named_graphs(named.clone()); + /// assert_eq!( + /// query.dataset().available_named_graphs(), + /// Some(named.as_slice()) + /// ); /// /// # Ok::<_, Box>(()) /// ``` diff --git a/lib/src/sparql/error.rs b/lib/src/sparql/error.rs index 4728efb7..43234d67 100644 --- a/lib/src/sparql/error.rs +++ b/lib/src/sparql/error.rs @@ -5,8 +5,7 @@ use crate::sparql::ParseError; use crate::storage::StorageError; use std::convert::Infallible; use std::error::Error; -use std::fmt; -use std::io; +use std::{fmt, io}; /// A SPARQL evaluation error. #[derive(Debug)] diff --git a/lib/src/sparql/eval.rs b/lib/src/sparql/eval.rs index 25c4b3cf..19c6884d 100644 --- a/lib/src/sparql/eval.rs +++ b/lib/src/sparql/eval.rs @@ -33,8 +33,7 @@ use std::cmp::Ordering; use std::collections::hash_map::DefaultHasher; use std::collections::{HashMap, HashSet}; use std::hash::{Hash, Hasher}; -use std::iter::Iterator; -use std::iter::{empty, once}; +use std::iter::{empty, once, Iterator}; use std::rc::Rc; use std::sync::Arc; use std::{fmt, io, str}; @@ -112,8 +111,8 @@ impl EncodedTuple { } impl IntoIterator for EncodedTuple { - type Item = Option; type IntoIter = std::vec::IntoIter>; + type Item = Option; fn into_iter(self) -> Self::IntoIter { self.inner.into_iter() @@ -1010,7 +1009,7 @@ impl SimpleEvaluator { } }) .for_each(|tuple| { - //TODO avoid copy for key? + // TODO avoid copy for key? let key = key_variables .iter() .map(|v| tuple.get(*v).cloned()) @@ -3127,7 +3126,7 @@ pub(super) fn compile_pattern(pattern: &str, flags: Option<&str>) -> Option { regex_builder.ignore_whitespace(true); } - _ => (), //TODO: implement q + _ => (), // TODO: implement q } } } @@ -3632,7 +3631,7 @@ fn compare_str_str_id(dataset: &DatasetView, a: &str, b: &StrHash) -> Option Option { - //TODO: optimize? + // TODO: optimize? match value { EncodedTerm::NamedNode { .. } | EncodedTerm::SmallBlankNode { .. } @@ -4387,6 +4386,7 @@ impl PathEvaluator { } } } + fn eval_to_in_unknown_graph( &self, path: &PropertyPath, @@ -4968,7 +4968,7 @@ impl Iterator for ConstructIterator { )); } } - self.bnodes.clear(); //We do not reuse old bnodes + self.bnodes.clear(); // We do not reuse old bnodes } } } @@ -5300,7 +5300,7 @@ impl Accumulator for SumAccumulator { if let Some(sum) = &self.sum { if let Some(operands) = element.and_then(|e| NumericBinaryOperands::new(sum.clone(), e)) { - //TODO: unify with addition? + // TODO: unify with addition? 
self.sum = match operands { NumericBinaryOperands::Float(v1, v2) => Some((v1 + v2).into()), NumericBinaryOperands::Double(v1, v2) => Some((v1 + v2).into()), @@ -5343,8 +5343,8 @@ impl Accumulator for AvgAccumulator { if self.count == 0 { Some(0.into()) } else { - //TODO: deduplicate? - //TODO: duration? + // TODO: deduplicate? + // TODO: duration? let count = Integer::from(self.count); match sum { EncodedTerm::FloatLiteral(sum) => Some((sum / Float::from(count)).into()), @@ -5584,6 +5584,7 @@ impl EncodedTupleSet { len: 0, } } + fn insert(&mut self, tuple: EncodedTuple) { self.map .entry(self.tuple_key(&tuple)) diff --git a/lib/src/sparql/mod.rs b/lib/src/sparql/mod.rs index f48ea908..ec84467f 100644 --- a/lib/src/sparql/mod.rs +++ b/lib/src/sparql/mod.rs @@ -144,13 +144,13 @@ pub(crate) fn evaluate_query( /// /// Usage example disabling the federated query support: /// ``` -/// use oxigraph::store::Store; /// use oxigraph::sparql::QueryOptions; +/// use oxigraph::store::Store; /// /// let store = Store::new()?; /// store.query_opt( /// "SELECT * WHERE { SERVICE {} }", -/// QueryOptions::default().without_service_handler() +/// QueryOptions::default().without_service_handler(), /// )?; /// # Result::<_,Box>::Ok(()) /// ``` @@ -209,9 +209,9 @@ impl QueryOptions { /// /// Example with a function serializing terms to N-Triples: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::model::*; /// use oxigraph::sparql::{QueryOptions, QueryResults}; + /// use oxigraph::store::Store; /// /// let store = Store::new()?; /// @@ -219,10 +219,13 @@ impl QueryOptions { /// "SELECT ((1) AS ?nt) WHERE {}", /// QueryOptions::default().with_custom_function( /// NamedNode::new("http://www.w3.org/ns/formats/N-Triples")?, - /// |args| args.get(0).map(|t| Literal::from(t.to_string()).into()) - /// ) + /// |args| args.get(0).map(|t| Literal::from(t.to_string()).into()), + /// ), /// )? { - /// assert_eq!(solutions.next().unwrap()?.get("nt"), Some(&Literal::from("\"1\"^^").into())); + /// assert_eq!( + /// solutions.next().unwrap()?.get("nt"), + /// Some(&Literal::from("\"1\"^^").into()) + /// ); /// } /// # Result::<_,Box>::Ok(()) /// ``` diff --git a/lib/src/sparql/model.rs b/lib/src/sparql/model.rs index ca42db16..59c38afd 100644 --- a/lib/src/sparql/model.rs +++ b/lib/src/sparql/model.rs @@ -104,17 +104,24 @@ impl QueryResults { /// This method fails if it is called on the `Solution` or `Boolean` results. /// /// ``` - /// use oxigraph::store::Store; /// use oxigraph::io::RdfFormat; /// use oxigraph::model::*; + /// use oxigraph::store::Store; /// /// let graph = " .\n"; /// /// let store = Store::new()?; - /// store.load_graph(graph.as_bytes(), RdfFormat::NTriples, GraphName::DefaultGraph, None)?; + /// store.load_graph( + /// graph.as_bytes(), + /// RdfFormat::NTriples, + /// GraphName::DefaultGraph, + /// None, + /// )?; /// /// let mut results = Vec::new(); - /// store.query("CONSTRUCT WHERE { ?s ?p ?o }")?.write_graph(&mut results, RdfFormat::NTriples)?; + /// store + /// .query("CONSTRUCT WHERE { ?s ?p ?o }")? + /// .write_graph(&mut results, RdfFormat::NTriples)?; /// assert_eq!(results, graph.as_bytes()); /// # Result::<_,Box>::Ok(()) /// ``` @@ -159,8 +166,8 @@ impl From> for QueryResults { /// An iterator over [`QuerySolution`]s. /// /// ``` -/// use oxigraph::store::Store; /// use oxigraph::sparql::QueryResults; +/// use oxigraph::store::Store; /// /// let store = Store::new()?; /// if let QueryResults::Solutions(solutions) = store.query("SELECT ?s WHERE { ?s ?p ?o }")? 
{ @@ -193,12 +200,15 @@ impl QuerySolutionIter { /// The variables used in the solutions. /// /// ``` - /// use oxigraph::store::Store; /// use oxigraph::sparql::{QueryResults, Variable}; + /// use oxigraph::store::Store; /// /// let store = Store::new()?; /// if let QueryResults::Solutions(solutions) = store.query("SELECT ?s ?o WHERE { ?s ?p ?o }")? { - /// assert_eq!(solutions.variables(), &[Variable::new("s")?, Variable::new("o")?]); + /// assert_eq!( + /// solutions.variables(), + /// &[Variable::new("s")?, Variable::new("o")?] + /// ); /// } /// # Result::<_,Box>::Ok(()) /// ``` @@ -234,8 +244,8 @@ impl Iterator for QuerySolutionIter { /// An iterator over the triples that compose a graph solution. /// /// ``` -/// use oxigraph::store::Store; /// use oxigraph::sparql::QueryResults; +/// use oxigraph::store::Store; /// /// let store = Store::new()?; /// if let QueryResults::Graph(triples) = store.query("CONSTRUCT WHERE { ?s ?p ?o }")? { diff --git a/lib/src/sparql/service.rs b/lib/src/sparql/service.rs index 562c1896..e3dd5602 100644 --- a/lib/src/sparql/service.rs +++ b/lib/src/sparql/service.rs @@ -13,18 +13,22 @@ use std::time::Duration; /// before evaluating a SPARQL query that uses SERVICE calls. /// /// ``` -/// use oxigraph::store::Store; /// use oxigraph::model::*; -/// use oxigraph::sparql::{QueryOptions, QueryResults, ServiceHandler, Query, EvaluationError}; +/// use oxigraph::sparql::{EvaluationError, Query, QueryOptions, QueryResults, ServiceHandler}; +/// use oxigraph::store::Store; /// /// struct TestServiceHandler { -/// store: Store +/// store: Store, /// } /// /// impl ServiceHandler for TestServiceHandler { /// type Error = EvaluationError; /// -/// fn handle(&self, service_name: NamedNode, query: Query) -> Result { +/// fn handle( +/// &self, +/// service_name: NamedNode, +/// query: Query, +/// ) -> Result { /// if service_name == "http://example.com/service" { /// self.store.query(query) /// } else { @@ -35,14 +39,16 @@ use std::time::Duration; /// /// let store = Store::new()?; /// let service = TestServiceHandler { -/// store: Store::new()? +/// store: Store::new()?, /// }; /// let ex = NamedNodeRef::new("http://example.com")?; -/// service.store.insert(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?; +/// service +/// .store +/// .insert(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?; /// /// if let QueryResults::Solutions(mut solutions) = store.query_opt( /// "SELECT ?s WHERE { SERVICE { ?s ?p ?o } }", -/// QueryOptions::default().with_service_handler(service) +/// QueryOptions::default().with_service_handler(service), /// )? 
{ /// assert_eq!(solutions.next().unwrap()?.get("s"), Some(&ex.into())); /// } diff --git a/lib/src/sparql/update.rs b/lib/src/sparql/update.rs index 2e318c71..967de822 100644 --- a/lib/src/sparql/update.rs +++ b/lib/src/sparql/update.rs @@ -139,7 +139,7 @@ impl<'a, 'b: 'a> SimpleUpdateEvaluator<'a, 'b> { let mut bnodes = HashMap::new(); let (eval, _) = evaluator.graph_pattern_evaluator(&pattern, &mut variables); let tuples = - eval(EncodedTuple::with_capacity(variables.len())).collect::, _>>()?; //TODO: would be much better to stream + eval(EncodedTuple::with_capacity(variables.len())).collect::, _>>()?; // TODO: would be much better to stream for tuple in tuples { for quad in delete { if let Some(quad) = diff --git a/lib/src/storage/backend/rocksdb.rs b/lib/src/storage/backend/rocksdb.rs index 670963ba..fed85421 100644 --- a/lib/src/storage/backend/rocksdb.rs +++ b/lib/src/storage/backend/rocksdb.rs @@ -19,16 +19,14 @@ use std::collections::HashMap; use std::env::temp_dir; use std::error::Error; use std::ffi::{CStr, CString}; -use std::fmt; use std::fs::remove_dir_all; -use std::io; use std::marker::PhantomData; use std::ops::Deref; use std::path::{Path, PathBuf}; use std::rc::{Rc, Weak}; use std::sync::{Arc, OnceLock}; use std::thread::{available_parallelism, yield_now}; -use std::{ptr, slice}; +use std::{fmt, io, ptr, slice}; macro_rules! ffi_result { ( $($function:ident)::*( $arg1:expr $(, $arg:expr)* $(,)? ) ) => {{ @@ -711,7 +709,7 @@ impl Db { column_family: &ColumnFamily, key: &[u8], ) -> Result { - Ok(self.get(column_family, key)?.is_some()) //TODO: optimize + Ok(self.get(column_family, key)?.is_some()) // TODO: optimize } pub fn insert( @@ -970,7 +968,7 @@ impl Reader { column_family: &ColumnFamily, key: &[u8], ) -> Result { - Ok(self.get(column_family, key)?.is_some()) //TODO: optimize + Ok(self.get(column_family, key)?.is_some()) // TODO: optimize } #[allow(clippy::iter_not_returning_iterator)] @@ -983,7 +981,7 @@ impl Reader { column_family: &ColumnFamily, prefix: &[u8], ) -> Result { - //We generate the upper bound + // We generate the upper bound let upper_bound = { let mut bound = prefix.to_vec(); let mut found = false; @@ -1101,7 +1099,7 @@ impl Transaction<'_> { column_family: &ColumnFamily, key: &[u8], ) -> Result { - Ok(self.get_for_update(column_family, key)?.is_some()) //TODO: optimize + Ok(self.get_for_update(column_family, key)?.is_some()) // TODO: optimize } pub fn insert( @@ -1228,7 +1226,7 @@ pub struct Iter { is_currently_valid: bool, _upper_bound: Option>, _reader: Reader, // needed to ensure that DB still lives while iter is used - options: *mut rocksdb_readoptions_t, // needed to ensure that options still lives while iter is used + options: *mut rocksdb_readoptions_t, /* needed to ensure that options still lives while iter is used */ } impl Drop for Iter { diff --git a/lib/src/storage/error.rs b/lib/src/storage/error.rs index 89895349..05076e6e 100644 --- a/lib/src/storage/error.rs +++ b/lib/src/storage/error.rs @@ -1,8 +1,7 @@ use crate::io::{ParseError, RdfFormat}; use oxiri::IriParseError; use std::error::Error; -use std::fmt; -use std::io; +use std::{fmt, io}; /// An error related to storage operations (reads, writes...). 
#[derive(Debug)] diff --git a/lib/src/storage/mod.rs b/lib/src/storage/mod.rs index 9dd38ee1..ce02449e 100644 --- a/lib/src/storage/mod.rs +++ b/lib/src/storage/mod.rs @@ -579,7 +579,7 @@ impl StorageReader { pub fn named_graphs(&self) -> DecodingGraphIterator { DecodingGraphIterator { - iter: self.reader.iter(&self.storage.graphs_cf).unwrap(), //TODO: propagate error? + iter: self.reader.iter(&self.storage.graphs_cf).unwrap(), // TODO: propagate error? } } @@ -786,7 +786,7 @@ impl StorageReader { #[cfg(target_family = "wasm")] #[allow(clippy::unused_self, clippy::unnecessary_wraps)] pub fn validate(&self) -> Result<(), StorageError> { - Ok(()) //TODO + Ok(()) // TODO } } diff --git a/lib/src/storage/numeric_encoder.rs b/lib/src/storage/numeric_encoder.rs index fd0d7544..a81b76e5 100644 --- a/lib/src/storage/numeric_encoder.rs +++ b/lib/src/storage/numeric_encoder.rs @@ -6,8 +6,7 @@ use crate::storage::small_string::SmallString; use oxsdatatypes::*; use siphasher::sip128::{Hasher128, SipHasher24}; use std::fmt::Debug; -use std::hash::Hash; -use std::hash::Hasher; +use std::hash::{Hash, Hasher}; use std::str; use std::sync::Arc; diff --git a/lib/src/storage/small_string.rs b/lib/src/storage/small_string.rs index c2862ff4..fcd9b227 100644 --- a/lib/src/storage/small_string.rs +++ b/lib/src/storage/small_string.rs @@ -1,11 +1,10 @@ use std::borrow::Borrow; use std::cmp::Ordering; use std::error::Error; -use std::fmt; use std::hash::{Hash, Hasher}; use std::ops::Deref; -use std::str; use std::str::{FromStr, Utf8Error}; +use std::{fmt, str}; /// A small inline string #[derive(Clone, Copy, Default)] diff --git a/lib/src/store.rs b/lib/src/store.rs index 95a25260..5b5a1640 100644 --- a/lib/src/store.rs +++ b/lib/src/store.rs @@ -4,9 +4,9 @@ //! //! Usage example: //! ``` -//! use oxigraph::store::Store; -//! use oxigraph::sparql::QueryResults; //! use oxigraph::model::*; +//! use oxigraph::sparql::QueryResults; +//! use oxigraph::store::Store; //! //! let store = Store::new()?; //! @@ -16,7 +16,7 @@ //! store.insert(&quad)?; //! //! // quad filter -//! let results: Result,_> = store.quads_for_pattern(None, None, None, None).collect(); +//! let results: Result, _> = store.quads_for_pattern(None, None, None, None).collect(); //! assert_eq!(vec![quad], results?); //! //! // SPARQL query @@ -56,9 +56,9 @@ use std::{fmt, str}; /// /// Usage example: /// ``` -/// use oxigraph::store::Store; -/// use oxigraph::sparql::QueryResults; /// use oxigraph::model::*; +/// use oxigraph::sparql::QueryResults; +/// use oxigraph::store::Store; /// # use std::fs::remove_dir_all; /// /// # { @@ -70,7 +70,7 @@ use std::{fmt, str}; /// store.insert(&quad)?; /// /// // quad filter -/// let results: Result,_> = store.quads_for_pattern(None, None, None, None).collect(); +/// let results: Result, _> = store.quads_for_pattern(None, None, None, None).collect(); /// assert_eq!(vec![quad], results?); /// /// // SPARQL query @@ -160,9 +160,9 @@ impl Store { /// /// Usage example: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::model::*; /// use oxigraph::sparql::QueryResults; + /// use oxigraph::store::Store; /// /// let store = Store::new()?; /// @@ -171,8 +171,11 @@ impl Store { /// store.insert(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?; /// /// // SPARQL query - /// if let QueryResults::Solutions(mut solutions) = store.query("SELECT ?s WHERE { ?s ?p ?o }")? 
{ - /// assert_eq!(solutions.next().unwrap()?.get("s"), Some(&ex.into_owned().into())); + /// if let QueryResults::Solutions(mut solutions) = store.query("SELECT ?s WHERE { ?s ?p ?o }")? { + /// assert_eq!( + /// solutions.next().unwrap()?.get("s"), + /// Some(&ex.into_owned().into()) + /// ); /// } /// # Result::<_, Box>::Ok(()) /// ``` @@ -187,19 +190,22 @@ impl Store { /// /// Usage example with a custom function serializing terms to N-Triples: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::model::*; /// use oxigraph::sparql::{QueryOptions, QueryResults}; + /// use oxigraph::store::Store; /// /// let store = Store::new()?; /// if let QueryResults::Solutions(mut solutions) = store.query_opt( /// "SELECT ((1) AS ?nt) WHERE {}", /// QueryOptions::default().with_custom_function( /// NamedNode::new("http://www.w3.org/ns/formats/N-Triples")?, - /// |args| args.get(0).map(|t| Literal::from(t.to_string()).into()) - /// ) + /// |args| args.get(0).map(|t| Literal::from(t.to_string()).into()), + /// ), /// )? { - /// assert_eq!(solutions.next().unwrap()?.get("nt"), Some(&Literal::from("\"1\"^^").into())); + /// assert_eq!( + /// solutions.next().unwrap()?.get("nt"), + /// Some(&Literal::from("\"1\"^^").into()) + /// ); /// } /// # Result::<_, Box>::Ok(()) /// ``` @@ -219,14 +225,17 @@ impl Store { /// /// Usage example serialising the explanation with statistics in JSON: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::sparql::{QueryOptions, QueryResults}; + /// use oxigraph::store::Store; /// /// let store = Store::new()?; - /// if let (Ok(QueryResults::Solutions(solutions)), explanation) = store.explain_query_opt("SELECT ?s WHERE { VALUES ?s { 1 2 3 } }", QueryOptions::default(), true)? { + /// if let (Ok(QueryResults::Solutions(solutions)), explanation) = store.explain_query_opt( + /// "SELECT ?s WHERE { VALUES ?s { 1 2 3 } }", + /// QueryOptions::default(), + /// true, + /// )? 
{ /// // We make sure to have read all the solutions - /// for _ in solutions { - /// } + /// for _ in solutions {} /// let mut buf = Vec::new(); /// explanation.write_in_json(&mut buf)?; /// } @@ -245,8 +254,8 @@ impl Store { /// /// Usage example: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::model::*; + /// use oxigraph::store::Store; /// /// let store = Store::new()?; /// @@ -256,7 +265,9 @@ impl Store { /// store.insert(&quad)?; /// /// // quad filter by object - /// let results = store.quads_for_pattern(None, None, Some((&ex).into()), None).collect::,_>>()?; + /// let results = store + /// .quads_for_pattern(None, None, Some((&ex).into()), None) + /// .collect::, _>>()?; /// assert_eq!(vec![quad], results); /// # Result::<_, Box>::Ok(()) /// ``` @@ -283,8 +294,8 @@ impl Store { /// /// Usage example: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::model::*; + /// use oxigraph::store::Store; /// /// let store = Store::new()?; /// @@ -294,7 +305,7 @@ impl Store { /// store.insert(&quad)?; /// /// // quad filter by object - /// let results = store.iter().collect::,_>>()?; + /// let results = store.iter().collect::, _>>()?; /// assert_eq!(vec![quad], results); /// # Result::<_, Box>::Ok(()) /// ``` @@ -306,8 +317,8 @@ impl Store { /// /// Usage example: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::model::*; + /// use oxigraph::store::Store; /// /// let ex = NamedNodeRef::new("http://example.com")?; /// let quad = QuadRef::new(ex, ex, ex, ex); @@ -330,13 +341,13 @@ impl Store { /// /// Usage example: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::model::*; + /// use oxigraph::store::Store; /// /// let ex = NamedNodeRef::new("http://example.com")?; /// let store = Store::new()?; /// store.insert(QuadRef::new(ex, ex, ex, ex))?; - /// store.insert(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?; + /// store.insert(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?; /// assert_eq!(2, store.len()?); /// # Result::<_, Box>::Ok(()) /// ``` @@ -348,8 +359,8 @@ impl Store { /// /// Usage example: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::model::*; + /// use oxigraph::store::Store; /// /// let store = Store::new()?; /// assert!(store.is_empty()?); @@ -371,8 +382,8 @@ impl Store { /// /// Usage example: /// ``` - /// use oxigraph::store::{StorageError, Store}; /// use oxigraph::model::*; + /// use oxigraph::store::{StorageError, Store}; /// /// let store = Store::new()?; /// let a = NamedNodeRef::new("http://example.com/a")?; @@ -399,13 +410,14 @@ impl Store { /// /// Usage example: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::model::*; + /// use oxigraph::store::Store; /// /// let store = Store::new()?; /// /// // insertion - /// store.update("INSERT DATA { }")?; + /// store + /// .update("INSERT DATA { }")?; /// /// // we inspect the store contents /// let ex = NamedNodeRef::new("http://example.com")?; @@ -504,15 +516,20 @@ impl Store { /// /// Usage example: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::io::RdfFormat; /// use oxigraph::model::*; + /// use oxigraph::store::Store; /// /// let store = Store::new()?; /// /// // insertion /// let file = b" ."; - /// store.load_graph(file.as_ref(), RdfFormat::NTriples, GraphName::DefaultGraph, None)?; + /// store.load_graph( + /// file.as_ref(), + /// RdfFormat::NTriples, + /// GraphName::DefaultGraph, + /// None, + /// )?; /// /// // we inspect the store contents /// let ex = NamedNodeRef::new("http://example.com")?; @@ -547,14 +564,15 @@ 
impl Store { /// /// Usage example: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::io::RdfFormat; /// use oxigraph::model::*; + /// use oxigraph::store::Store; /// /// let store = Store::new()?; /// /// // insertion - /// let file = b" ."; + /// let file = + /// b" ."; /// store.load_dataset(file.as_ref(), RdfFormat::NQuads, None)?; /// /// // we inspect the store contents @@ -587,8 +605,8 @@ impl Store { /// /// Usage example: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::model::*; + /// use oxigraph::store::Store; /// /// let ex = NamedNodeRef::new("http://example.com")?; /// let quad = QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph); @@ -624,8 +642,8 @@ impl Store { /// /// Usage example: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::model::*; + /// use oxigraph::store::Store; /// /// let ex = NamedNodeRef::new("http://example.com")?; /// let quad = QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph); @@ -646,10 +664,12 @@ impl Store { /// Dumps the store into a file. /// /// ``` - /// use oxigraph::store::Store; /// use oxigraph::io::RdfFormat; + /// use oxigraph::store::Store; /// - /// let file = " .\n".as_bytes(); + /// let file = + /// " .\n" + /// .as_bytes(); /// /// let store = Store::new()?; /// store.load_from_read(RdfFormat::NQuads, file)?; @@ -678,9 +698,9 @@ impl Store { /// /// Usage example: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::io::RdfFormat; /// use oxigraph::model::*; + /// use oxigraph::store::Store; /// /// let file = " .\n".as_bytes(); /// @@ -709,9 +729,9 @@ impl Store { /// /// Usage example: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::io::RdfFormat; /// use oxigraph::model::*; + /// use oxigraph::store::Store; /// /// let file = " .\n".as_bytes(); /// @@ -736,10 +756,12 @@ impl Store { /// Dumps the store into a file. /// /// ``` - /// use oxigraph::store::Store; /// use oxigraph::io::RdfFormat; + /// use oxigraph::store::Store; /// - /// let file = " .\n".as_bytes(); + /// let file = + /// " .\n" + /// .as_bytes(); /// /// let store = Store::new()?; /// store.load_from_read(RdfFormat::NQuads, file)?; @@ -761,14 +783,17 @@ impl Store { /// /// Usage example: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::model::*; + /// use oxigraph::store::Store; /// /// let ex = NamedNode::new("http://example.com")?; /// let store = Store::new()?; /// store.insert(QuadRef::new(&ex, &ex, &ex, &ex))?; /// store.insert(QuadRef::new(&ex, &ex, &ex, GraphNameRef::DefaultGraph))?; - /// assert_eq!(vec![NamedOrBlankNode::from(ex)], store.named_graphs().collect::,_>>()?); + /// assert_eq!( + /// vec![NamedOrBlankNode::from(ex)], + /// store.named_graphs().collect::, _>>()? 
+ /// ); /// # Result::<_, Box>::Ok(()) /// ``` pub fn named_graphs(&self) -> GraphNameIter { @@ -783,8 +808,8 @@ impl Store { /// /// Usage example: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::model::{NamedNode, QuadRef}; + /// use oxigraph::store::Store; /// /// let ex = NamedNode::new("http://example.com")?; /// let store = Store::new()?; @@ -806,14 +831,17 @@ impl Store { /// /// Usage example: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::model::NamedNodeRef; + /// use oxigraph::store::Store; /// /// let ex = NamedNodeRef::new("http://example.com")?; /// let store = Store::new()?; /// store.insert_named_graph(ex)?; /// - /// assert_eq!(store.named_graphs().collect::,_>>()?, vec![ex.into_owned().into()]); + /// assert_eq!( + /// store.named_graphs().collect::, _>>()?, + /// vec![ex.into_owned().into()] + /// ); /// # Result::<_, Box>::Ok(()) /// ``` pub fn insert_named_graph<'a>( @@ -828,8 +856,8 @@ impl Store { /// /// Usage example: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::model::{NamedNodeRef, QuadRef}; + /// use oxigraph::store::Store; /// /// let ex = NamedNodeRef::new("http://example.com")?; /// let quad = QuadRef::new(ex, ex, ex, ex); @@ -856,8 +884,8 @@ impl Store { /// /// Usage example: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::model::{NamedNodeRef, QuadRef}; + /// use oxigraph::store::Store; /// /// let ex = NamedNodeRef::new("http://example.com")?; /// let quad = QuadRef::new(ex, ex, ex, ex); @@ -882,13 +910,13 @@ impl Store { /// /// Usage example: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::model::*; + /// use oxigraph::store::Store; /// /// let ex = NamedNodeRef::new("http://example.com")?; /// let store = Store::new()?; /// store.insert(QuadRef::new(ex, ex, ex, ex))?; - /// store.insert(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?; + /// store.insert(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?; /// assert_eq!(2, store.len()?); /// /// store.clear()?; @@ -944,15 +972,18 @@ impl Store { /// /// Usage example: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::io::RdfFormat; /// use oxigraph::model::*; + /// use oxigraph::store::Store; /// /// let store = Store::new()?; /// /// // quads file insertion - /// let file = b" ."; - /// store.bulk_loader().load_from_read(RdfFormat::NQuads, file.as_ref())?; + /// let file = + /// b" ."; + /// store + /// .bulk_loader() + /// .load_from_read(RdfFormat::NQuads, file.as_ref())?; /// /// // we inspect the store contents /// let ex = NamedNodeRef::new("http://example.com")?; @@ -995,16 +1026,23 @@ impl<'a> Transaction<'a> { /// /// Usage example: /// ``` - /// use oxigraph::store::Store; /// use oxigraph::model::*; /// use oxigraph::sparql::{EvaluationError, QueryResults}; + /// use oxigraph::store::Store; /// /// let store = Store::new()?; /// store.transaction(|mut transaction| { - /// if let QueryResults::Solutions(solutions) = transaction.query("SELECT ?s WHERE { ?s ?p ?o }")? { + /// if let QueryResults::Solutions(solutions) = + /// transaction.query("SELECT ?s WHERE { ?s ?p ?o }")? 
+    ///     {
     ///         for solution in solutions {
-    ///             if let Some(Term::NamedNode(s)) = solution?.get("s") {
-    ///                 transaction.insert(QuadRef::new(s, vocab::rdf::TYPE, NamedNodeRef::new_unchecked("http://example.com"), GraphNameRef::DefaultGraph))?;
+    ///             if let Some(Term::NamedNode(s)) = solution?.get("s") {
+    ///                 transaction.insert(QuadRef::new(
+    ///                     s,
+    ///                     vocab::rdf::TYPE,
+    ///                     NamedNodeRef::new_unchecked("http://example.com"),
+    ///                     GraphNameRef::DefaultGraph,
+    ///                 ))?;
     ///             }
     ///         }
     ///     }
@@ -1023,9 +1061,9 @@ impl<'a> Transaction<'a> {
     ///
     /// Usage example with a custom function serializing terms to N-Triples:
     /// ```
-    /// use oxigraph::store::Store;
     /// use oxigraph::model::*;
     /// use oxigraph::sparql::{EvaluationError, QueryOptions, QueryResults};
+    /// use oxigraph::store::Store;
     ///
     /// let store = Store::new()?;
     /// store.transaction(|mut transaction| {
@@ -1033,13 +1071,20 @@ impl<'a> Transaction<'a> {
     ///         "SELECT ?s ((?s) AS ?nt) WHERE { ?s ?p ?o }",
     ///         QueryOptions::default().with_custom_function(
     ///             NamedNode::new_unchecked("http://www.w3.org/ns/formats/N-Triples"),
-    ///             |args| args.get(0).map(|t| Literal::from(t.to_string()).into())
-    ///         )
+    ///             |args| args.get(0).map(|t| Literal::from(t.to_string()).into()),
+    ///         ),
     ///     )? {
     ///         for solution in solutions {
     ///             let solution = solution?;
-    ///             if let (Some(Term::NamedNode(s)), Some(nt)) = (solution.get("s"), solution.get("nt")) {
-    ///                 transaction.insert(QuadRef::new(s, NamedNodeRef::new_unchecked("http://example.com/n-triples-representation"), nt, GraphNameRef::DefaultGraph))?;
+    ///             if let (Some(Term::NamedNode(s)), Some(nt)) =
+    ///                 (solution.get("s"), solution.get("nt"))
+    ///             {
+    ///                 transaction.insert(QuadRef::new(
+    ///                     s,
+    ///                     NamedNodeRef::new_unchecked("http://example.com/n-triples-representation"),
+    ///                     nt,
+    ///                     GraphNameRef::DefaultGraph,
+    ///                 ))?;
     ///             }
     ///         }
     ///     }
@@ -1060,8 +1105,8 @@ impl<'a> Transaction<'a> {
     ///
     /// Usage example:
     /// ```
-    /// use oxigraph::store::{StorageError, Store};
     /// use oxigraph::model::*;
+    /// use oxigraph::store::{StorageError, Store};
     ///
     /// let store = Store::new()?;
     /// let a = NamedNodeRef::new("http://example.com/a")?;
@@ -1123,14 +1168,16 @@ impl<'a> Transaction<'a> {
     ///
     /// Usage example:
     /// ```
-    /// use oxigraph::store::Store;
     /// use oxigraph::model::*;
     /// use oxigraph::sparql::EvaluationError;
+    /// use oxigraph::store::Store;
     ///
     /// let store = Store::new()?;
     /// store.transaction(|mut transaction| {
     ///     // insertion
-    ///     transaction.update("INSERT DATA { }")?;
+    ///     transaction.update(
+    ///         "INSERT DATA { }",
+    ///     )?;
     ///
     ///     // we inspect the store contents
     ///     let ex = NamedNodeRef::new_unchecked("http://example.com");
@@ -1210,16 +1257,21 @@ impl<'a> Transaction<'a> {
     ///
     /// Usage example:
     /// ```
-    /// use oxigraph::store::Store;
     /// use oxigraph::io::RdfFormat;
     /// use oxigraph::model::*;
+    /// use oxigraph::store::Store;
     ///
     /// let store = Store::new()?;
     ///
     /// // insertion
     /// let file = b" .";
     /// store.transaction(|mut transaction| {
-    ///     transaction.load_graph(file.as_ref(), RdfFormat::NTriples, GraphName::DefaultGraph, None)
+    ///     transaction.load_graph(
+    ///         file.as_ref(),
+    ///         RdfFormat::NTriples,
+    ///         GraphName::DefaultGraph,
+    ///         None,
+    ///     )
     /// })?;
     ///
     /// // we inspect the store contents
@@ -1253,14 +1305,15 @@ impl<'a> Transaction<'a> {
     ///
     /// Usage example:
     /// ```
-    /// use oxigraph::store::Store;
     /// use oxigraph::io::RdfFormat;
     /// use oxigraph::model::*;
+    /// use oxigraph::store::Store;
     ///
     /// let store = Store::new()?;
     ///
     /// // insertion
-    /// let file = b" .";
+    /// let file =
+    ///     b" .";
     /// store.transaction(|mut transaction| {
     ///     transaction.load_dataset(file.as_ref(), RdfFormat::NQuads, None)
     /// })?;
@@ -1295,16 +1348,14 @@ impl<'a> Transaction<'a> {
     ///
     /// Usage example:
     /// ```
-    /// use oxigraph::store::Store;
     /// use oxigraph::model::*;
+    /// use oxigraph::store::Store;
     ///
     /// let ex = NamedNodeRef::new_unchecked("http://example.com");
     /// let quad = QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph);
     ///
     /// let store = Store::new()?;
-    /// store.transaction(|mut transaction| {
-    ///     transaction.insert(quad)
-    /// })?;
+    /// store.transaction(|mut transaction| transaction.insert(quad))?;
     /// assert!(store.contains(quad)?);
     /// # Result::<_,oxigraph::store::StorageError>::Ok(())
     /// ```
@@ -1329,8 +1380,8 @@ impl<'a> Transaction<'a> {
     ///
     /// Usage example:
     /// ```
-    /// use oxigraph::store::Store;
     /// use oxigraph::model::*;
+    /// use oxigraph::store::Store;
     ///
     /// let ex = NamedNodeRef::new_unchecked("http://example.com");
     /// let quad = QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph);
@@ -1371,15 +1422,16 @@ impl<'a> Transaction<'a> {
     ///
     /// Usage example:
     /// ```
-    /// use oxigraph::store::Store;
     /// use oxigraph::model::NamedNodeRef;
+    /// use oxigraph::store::Store;
     ///
     /// let ex = NamedNodeRef::new_unchecked("http://example.com");
     /// let store = Store::new()?;
-    /// store.transaction(|mut transaction| {
-    ///     transaction.insert_named_graph(ex)
-    /// })?;
-    /// assert_eq!(store.named_graphs().collect::,_>>()?, vec![ex.into_owned().into()]);
+    /// store.transaction(|mut transaction| transaction.insert_named_graph(ex))?;
+    /// assert_eq!(
+    ///     store.named_graphs().collect::, _>>()?,
+    ///     vec![ex.into_owned().into()]
+    /// );
     /// # Result::<_,oxigraph::store::StorageError>::Ok(())
     /// ```
     pub fn insert_named_graph<'b>(
@@ -1393,8 +1445,8 @@ impl<'a> Transaction<'a> {
     ///
     /// Usage example:
     /// ```
-    /// use oxigraph::store::Store;
     /// use oxigraph::model::{NamedNodeRef, QuadRef};
+    /// use oxigraph::store::Store;
     ///
     /// let ex = NamedNodeRef::new_unchecked("http://example.com");
     /// let quad = QuadRef::new(ex, ex, ex, ex);
@@ -1420,8 +1472,8 @@ impl<'a> Transaction<'a> {
     ///
     /// Usage example:
     /// ```
-    /// use oxigraph::store::Store;
     /// use oxigraph::model::{NamedNodeRef, QuadRef};
+    /// use oxigraph::store::Store;
     ///
     /// let ex = NamedNodeRef::new_unchecked("http://example.com");
     /// let quad = QuadRef::new(ex, ex, ex, ex);
@@ -1445,8 +1497,8 @@ impl<'a> Transaction<'a> {
     ///
     /// Usage example:
     /// ```
-    /// use oxigraph::store::Store;
     /// use oxigraph::model::*;
+    /// use oxigraph::store::Store;
     ///
     /// let ex = NamedNodeRef::new_unchecked("http://example.com");
     /// let store = Store::new()?;
@@ -1518,15 +1570,18 @@ impl Iterator for GraphNameIter {
 ///
 /// Usage example with loading a dataset:
 /// ```
-/// use oxigraph::store::Store;
 /// use oxigraph::io::RdfFormat;
 /// use oxigraph::model::*;
+/// use oxigraph::store::Store;
 ///
 /// let store = Store::new()?;
 ///
 /// // quads file insertion
-/// let file = b" .";
-/// store.bulk_loader().load_from_read(RdfFormat::NQuads, file.as_ref())?;
+/// let file =
+///     b" .";
+/// store
+///     .bulk_loader()
+///     .load_from_read(RdfFormat::NQuads, file.as_ref())?;
 ///
 /// // we inspect the store contents
 /// let ex = NamedNodeRef::new("http://example.com")?;
@@ -1684,15 +1739,18 @@ impl BulkLoader {
     ///
     /// Usage example:
     /// ```
-    /// use oxigraph::store::Store;
     /// use oxigraph::io::RdfFormat;
     /// use oxigraph::model::*;
+    /// use oxigraph::store::Store;
     ///
     /// let store = Store::new()?;
     ///
     /// // insertion
-    /// let file = b" .";
-    /// store.bulk_loader().load_dataset(file.as_ref(), RdfFormat::NQuads, None)?;
+    /// let file =
+    ///     b" .";
+    /// store
+    ///     .bulk_loader()
+    ///     .load_dataset(file.as_ref(), RdfFormat::NQuads, None)?;
     ///
     /// // we inspect the store contents
     /// let ex = NamedNodeRef::new("http://example.com")?;
@@ -1745,15 +1803,20 @@ impl BulkLoader {
     ///
     /// Usage example:
     /// ```
-    /// use oxigraph::store::Store;
     /// use oxigraph::io::RdfFormat;
     /// use oxigraph::model::*;
+    /// use oxigraph::store::Store;
     ///
     /// let store = Store::new()?;
     ///
     /// // insertion
     /// let file = b" .";
-    /// store.bulk_loader().load_graph(file.as_ref(), RdfFormat::NTriples, GraphName::DefaultGraph, None)?;
+    /// store.bulk_loader().load_graph(
+    ///     file.as_ref(),
+    ///     RdfFormat::NTriples,
+    ///     GraphName::DefaultGraph,
+    ///     None,
+    /// )?;
     ///
     /// // we inspect the store contents
     /// let ex = NamedNodeRef::new("http://example.com")?;
diff --git a/python/src/io.rs b/python/src/io.rs
index 8258fda2..bf3a4383 100644
--- a/python/src/io.rs
+++ b/python/src/io.rs
@@ -199,31 +199,26 @@ impl PyRdfFormat {
     const N3: Self = Self {
         inner: RdfFormat::N3,
     };
-
     /// `N-Quads `_
     #[classattr]
     const N_QUADS: Self = Self {
         inner: RdfFormat::NQuads,
     };
-
     /// `N-Triples `_
     #[classattr]
     const N_TRIPLES: Self = Self {
         inner: RdfFormat::NTriples,
     };
-
     /// `RDF/XML `_
     #[classattr]
     const RDF_XML: Self = Self {
         inner: RdfFormat::RdfXml,
     };
-
     /// `TriG `_
     #[classattr]
     const TRIG: Self = Self {
         inner: RdfFormat::TriG,
     };
-
     /// `Turtle `_
     #[classattr]
     const TURTLE: Self = Self {
diff --git a/python/src/model.rs b/python/src/model.rs
index cf673d65..5933013f 100644
--- a/python/src/model.rs
+++ b/python/src/model.rs
@@ -6,8 +6,7 @@ use pyo3::prelude::*;
 use pyo3::types::{PyDict, PyTuple};
 use pyo3::PyTypeInfo;
 use std::collections::hash_map::DefaultHasher;
-use std::hash::Hash;
-use std::hash::Hasher;
+use std::hash::{Hash, Hasher};
 use std::vec::IntoIter;
 
 /// An RDF `node identified by an IRI `_.
@@ -345,7 +344,6 @@ impl PyLiteral {
     /// >>> Literal('example', language='en').language
     /// 'en'
     /// >>> Literal('example').language
-    ///
     #[getter]
     fn language(&self) -> Option<&str> {
         self.inner.language()
diff --git a/python/src/sparql.rs b/python/src/sparql.rs
index c79a6b7e..383a7413 100644
--- a/python/src/sparql.rs
+++ b/python/src/sparql.rs
@@ -515,29 +515,26 @@ pub struct PyQueryResultsFormat {
 
 #[pymethods]
 impl PyQueryResultsFormat {
-    /// `SPARQL Query Results XML Format `_
+    /// `SPARQL Query Results CSV Format `_
     #[classattr]
-    const XML: Self = Self {
-        inner: QueryResultsFormat::Xml,
+    const CSV: Self = Self {
+        inner: QueryResultsFormat::Csv,
     };
-
     /// `SPARQL Query Results JSON Format `_
     #[classattr]
     const JSON: Self = Self {
         inner: QueryResultsFormat::Json,
     };
-
-    /// `SPARQL Query Results CSV Format `_
-    #[classattr]
-    const CSV: Self = Self {
-        inner: QueryResultsFormat::Csv,
-    };
-
     /// `SPARQL Query Results TSV Format `_
     #[classattr]
     const TSV: Self = Self {
         inner: QueryResultsFormat::Tsv,
     };
+    /// `SPARQL Query Results XML Format `_
+    #[classattr]
+    const XML: Self = Self {
+        inner: QueryResultsFormat::Xml,
+    };
 
     /// :return: the format canonical IRI according to the `Unique URIs for file formats registry `_.
     /// :rtype: str
diff --git a/rustfmt.toml b/rustfmt.toml
index d51f7320..5fce0595 100644
--- a/rustfmt.toml
+++ b/rustfmt.toml
@@ -1,3 +1,11 @@
+force_explicit_abi = true
+format_code_in_doc_comments = true
+format_macro_matchers = true
+imports_granularity = "Module"
 newline_style = "Unix"
+normalize_comments = true
+normalize_doc_attributes = true
+reorder_impl_items = true
+group_imports = "One"
 use_field_init_shorthand = true
-use_try_shorthand = true
\ No newline at end of file
+use_try_shorthand = true
diff --git a/testsuite/src/parser_evaluator.rs b/testsuite/src/parser_evaluator.rs
index 0d3a22e6..830607b0 100644
--- a/testsuite/src/parser_evaluator.rs
+++ b/testsuite/src/parser_evaluator.rs
@@ -77,11 +77,11 @@ pub fn register_parser_tests(evaluator: &mut TestEvaluator) {
     );
     evaluator.register(
         "https://w3c.github.io/rdf-canon/tests/vocab#RDFC10NegativeEvalTest",
-        |_| Ok(()), //TODO: not a proper implementation
+        |_| Ok(()), // TODO: not a proper implementation
     );
     evaluator.register(
         "https://w3c.github.io/rdf-canon/tests/vocab#RDFC10MapTest",
-        |_| Ok(()), //TODO: not a proper implementation
+        |_| Ok(()), // TODO: not a proper implementation
     );
     evaluator.register(
         "https://github.com/oxigraph/oxigraph/tests#TestNTripleRecovery",
diff --git a/testsuite/tests/sparql.rs b/testsuite/tests/sparql.rs
index eafb80fd..6d2a6f7d 100644
--- a/testsuite/tests/sparql.rs
+++ b/testsuite/tests/sparql.rs
@@ -8,7 +8,7 @@ fn sparql10_w3c_query_syntax_testsuite() -> Result<()> {
     check_testsuite(
         "https://w3c.github.io/rdf-tests/sparql/sparql10/manifest-syntax.ttl",
         &[
-            "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/syntax-sparql3/manifest#syn-bad-26", // tokenizer
+            "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/syntax-sparql3/manifest#syn-bad-26", /* tokenizer */
         ],
     )
 }
@@ -53,9 +53,9 @@ fn sparql11_query_w3c_evaluation_testsuite() -> Result<()> {
     check_testsuite(
         "https://w3c.github.io/rdf-tests/sparql/sparql11/manifest-sparql11-query.ttl",
         &[
-            //BNODE() scope is currently wrong
+            // BNODE() scope is currently wrong
             "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/functions/manifest#bnode01",
-            //SERVICE name from a BGP
+            // SERVICE name from a BGP
             "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/service/manifest#service5",
         ],
     )