From d838d55f02c412c186c7b53be4dea9fff92d7f92 Mon Sep 17 00:00:00 2001
From: Tpt
Date: Thu, 18 Jan 2024 17:20:18 +0100
Subject: [PATCH] Uses nightly rustfmt on imports and comments
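
Formats the workspace with a nightly rustfmt so that the unstable import and
comment options apply. As a rough sketch, the kind of nightly-only options this
relies on looks like the following (the authoritative list is the rustfmt.toml
change in this diff; the exact set and values here are an assumption):

    # rustfmt.toml - unstable options, need `cargo +nightly fmt`
    format_code_in_doc_comments = true # also reformat examples in doc comments
    group_imports = "StdExternalCrate" # std / external / crate import groups
    imports_granularity = "Module"     # merge `use` items per module
    normalize_comments = true          # normalize comment style
    reorder_impl_items = true          # consts and associated types before methods
    wrap_comments = true               # wrap long comments at max_width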
---
cli/src/main.rs | 5 +-
lib/oxrdf/src/blank_node.rs | 20 +--
lib/oxrdf/src/dataset.rs | 17 +-
lib/oxrdf/src/graph.rs | 7 +-
lib/oxrdf/src/literal.rs | 7 +-
lib/oxrdf/src/parser.rs | 78 +++++++---
lib/oxrdf/src/triple.rs | 10 +-
lib/oxrdf/src/variable.rs | 12 +-
lib/oxrdfio/src/format.rs | 10 +-
lib/oxrdfio/src/parser.rs | 36 +++--
lib/oxrdfio/src/serializer.rs | 7 +-
lib/oxrdfxml/src/parser.rs | 29 ++--
lib/oxsdatatypes/src/date_time.rs | 161 ++++++++++---------
lib/oxsdatatypes/src/decimal.rs | 18 +--
lib/oxsdatatypes/src/double.rs | 24 ++-
lib/oxsdatatypes/src/duration.rs | 41 +++--
lib/oxsdatatypes/src/float.rs | 24 ++-
lib/oxsdatatypes/src/integer.rs | 7 +-
lib/oxttl/src/lexer.rs | 4 +-
lib/oxttl/src/line_formats.rs | 6 +-
lib/oxttl/src/n3.rs | 42 +++--
lib/oxttl/src/terse.rs | 14 +-
lib/oxttl/src/toolkit/lexer.rs | 4 +-
lib/oxttl/src/trig.rs | 38 +++--
lib/oxttl/src/turtle.rs | 35 +++--
lib/sparesults/src/csv.rs | 4 +-
lib/sparesults/src/format.rs | 21 ++-
lib/sparesults/src/json.rs | 3 +-
lib/sparesults/src/parser.rs | 36 ++++-
lib/sparesults/src/serializer.rs | 34 +++-
lib/sparesults/src/solution.rs | 88 +++++++++--
lib/sparesults/src/xml.rs | 7 +-
lib/spargebra/src/parser.rs | 18 +--
lib/spargebra/src/query.rs | 5 +-
lib/spargebra/src/term.rs | 5 +-
lib/sparopt/src/algebra.rs | 4 +-
lib/sparopt/src/optimizer.rs | 6 +-
lib/sparopt/src/type_inference.rs | 51 +++---
lib/sparql-smith/src/lib.rs | 16 +-
lib/src/io/format.rs | 32 +++-
lib/src/io/read.rs | 22 ++-
lib/src/io/write.rs | 22 ++-
lib/src/sparql/algebra.rs | 32 +++-
lib/src/sparql/error.rs | 3 +-
lib/src/sparql/eval.rs | 21 +--
lib/src/sparql/mod.rs | 15 +-
lib/src/sparql/model.rs | 24 ++-
lib/src/sparql/service.rs | 20 ++-
lib/src/sparql/update.rs | 2 +-
lib/src/storage/backend/rocksdb.rs | 14 +-
lib/src/storage/error.rs | 3 +-
lib/src/storage/mod.rs | 4 +-
lib/src/storage/numeric_encoder.rs | 3 +-
lib/src/storage/small_string.rs | 3 +-
lib/src/store.rs | 241 ++++++++++++++++++-----------
python/src/io.rs | 5 -
python/src/model.rs | 4 +-
python/src/sparql.rs | 19 +--
rustfmt.toml | 10 +-
testsuite/src/parser_evaluator.rs | 4 +-
testsuite/tests/sparql.rs | 6 +-
61 files changed, 887 insertions(+), 576 deletions(-)
diff --git a/cli/src/main.rs b/cli/src/main.rs
index 9e9a0dbd..cabf08c4 100644
--- a/cli/src/main.rs
+++ b/cli/src/main.rs
@@ -469,7 +469,7 @@ pub fn main() -> anyhow::Result<()> {
file.display(),
error
)
- //TODO: hard fail
+ // TODO: hard fail
}
})
}
@@ -1845,7 +1845,8 @@ mod tests {
use super::*;
use anyhow::Result;
use assert_cmd::Command;
- use assert_fs::{prelude::*, NamedTempFile, TempDir};
+ use assert_fs::prelude::*;
+ use assert_fs::{NamedTempFile, TempDir};
use flate2::write::GzEncoder;
use flate2::Compression;
use oxhttp::model::Method;
diff --git a/lib/oxrdf/src/blank_node.rs b/lib/oxrdf/src/blank_node.rs
index e813dd24..9603cd30 100644
--- a/lib/oxrdf/src/blank_node.rs
+++ b/lib/oxrdf/src/blank_node.rs
@@ -1,8 +1,7 @@
use rand::random;
use std::error::Error;
-use std::fmt;
use std::io::Write;
-use std::str;
+use std::{fmt, str};
/// An owned RDF [blank node](https://www.w3.org/TR/rdf11-concepts/#dfn-blank-node).
///
@@ -15,10 +14,7 @@ use std::str;
/// ```
/// use oxrdf::BlankNode;
///
-/// assert_eq!(
-/// "_:a122",
-/// BlankNode::new("a122")?.to_string()
-/// );
+/// assert_eq!("_:a122", BlankNode::new("a122")?.to_string());
/// # Result::<_,oxrdf::BlankNodeIdParseError>::Ok(())
/// ```
#[derive(Eq, PartialEq, Debug, Clone, Hash)]
@@ -36,7 +32,7 @@ impl BlankNode {
/// The blank node identifier must be valid according to N-Triples, Turtle, and SPARQL grammars.
///
/// In most cases, it is much more convenient to create a blank node using [`BlankNode::default()`]
- ///that creates a random ID that could be easily inlined by Oxigraph stores.
+ /// that creates a random ID that could be easily inlined by Oxigraph stores.
pub fn new(id: impl Into<String>) -> Result<Self, BlankNodeIdParseError> {
let id = id.into();
validate_blank_node_identifier(&id)?;
@@ -133,10 +129,7 @@ impl Default for BlankNode {
/// ```
/// use oxrdf::BlankNodeRef;
///
-/// assert_eq!(
-/// "_:a122",
-/// BlankNodeRef::new("a122")?.to_string()
-/// );
+/// assert_eq!("_:a122", BlankNodeRef::new("a122")?.to_string());
/// # Result::<_,oxrdf::BlankNodeIdParseError>::Ok(())
/// ```
#[derive(Eq, PartialEq, Debug, Clone, Copy, Hash)]
@@ -192,7 +185,10 @@ impl<'a> BlankNodeRef<'a> {
/// ```
/// use oxrdf::BlankNode;
///
- /// assert_eq!(BlankNode::new_from_unique_id(128).as_ref().unique_id(), Some(128));
+ /// assert_eq!(
+ /// BlankNode::new_from_unique_id(128).as_ref().unique_id(),
+ /// Some(128)
+ /// );
/// assert_eq!(BlankNode::new("foo")?.as_ref().unique_id(), None);
/// # Result::<_,oxrdf::BlankNodeIdParseError>::Ok(())
/// ```
diff --git a/lib/oxrdf/src/dataset.rs b/lib/oxrdf/src/dataset.rs
index 0c1fcbd7..8412a8aa 100644
--- a/lib/oxrdf/src/dataset.rs
+++ b/lib/oxrdf/src/dataset.rs
@@ -20,19 +20,20 @@
//! assert_eq!(vec![TripleRef::new(ex, ex, ex)], results);
//!
//! // Print
-//! assert_eq!(dataset.to_string(), "<http://example.com> <http://example.com> <http://example.com> <http://example.com> .\n");
+//! assert_eq!(
+//! dataset.to_string(),
+//! "<http://example.com> <http://example.com> <http://example.com> <http://example.com> .\n"
+//! );
//! # Result::<_,Box<dyn std::error::Error>>::Ok(())
//! ```
//!
//! See also [`Graph`] if you only care about plain triples.
use crate::interning::*;
-use crate::SubjectRef;
use crate::*;
use std::cmp::min;
use std::collections::hash_map::DefaultHasher;
-use std::collections::BTreeSet;
-use std::collections::{HashMap, HashSet};
+use std::collections::{BTreeSet, HashMap, HashSet};
use std::fmt;
use std::hash::{Hash, Hasher};
@@ -924,8 +925,8 @@ impl PartialEq for Dataset {
impl Eq for Dataset {}
impl<'a> IntoIterator for &'a Dataset {
- type Item = QuadRef<'a>;
type IntoIter = Iter<'a>;
+ type Item = QuadRef<'a>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
@@ -1282,8 +1283,8 @@ impl<'a> GraphView<'a> {
}
impl<'a> IntoIterator for GraphView<'a> {
- type Item = TripleRef<'a>;
type IntoIter = GraphViewIter<'a>;
+ type Item = TripleRef<'a>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
@@ -1291,8 +1292,8 @@ impl<'a> IntoIterator for GraphView<'a> {
}
impl<'a, 'b> IntoIterator for &'b GraphView<'a> {
- type Item = TripleRef<'a>;
type IntoIter = GraphViewIter<'a>;
+ type Item = TripleRef<'a>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
@@ -1493,8 +1494,8 @@ impl<'a, 'b, T: Into<TripleRef<'b>>> Extend<T> for GraphViewMut<'a> {
}
impl<'a> IntoIterator for &'a GraphViewMut<'a> {
- type Item = TripleRef<'a>;
type IntoIter = GraphViewIter<'a>;
+ type Item = TripleRef<'a>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
diff --git a/lib/oxrdf/src/graph.rs b/lib/oxrdf/src/graph.rs
index 3077e5de..33f67132 100644
--- a/lib/oxrdf/src/graph.rs
+++ b/lib/oxrdf/src/graph.rs
@@ -16,7 +16,10 @@
//! assert_eq!(vec![triple], results);
//!
//! // Print
-//! assert_eq!(graph.to_string(), "<http://example.com> <http://example.com> <http://example.com> .\n");
+//! assert_eq!(
+//! graph.to_string(),
+//! "<http://example.com> <http://example.com> <http://example.com> .\n"
+//! );
//! # Result::<_,Box<dyn std::error::Error>>::Ok(())
//! ```
//!
@@ -226,8 +229,8 @@ impl PartialEq for Graph {
impl Eq for Graph {}
impl<'a> IntoIterator for &'a Graph {
- type Item = TripleRef<'a>;
type IntoIter = Iter<'a>;
+ type Item = TripleRef<'a>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
diff --git a/lib/oxrdf/src/literal.rs b/lib/oxrdf/src/literal.rs
index 3f2727ca..0872fab5 100644
--- a/lib/oxrdf/src/literal.rs
+++ b/lib/oxrdf/src/literal.rs
@@ -1,6 +1,5 @@
use crate::named_node::NamedNode;
-use crate::vocab::rdf;
-use crate::vocab::xsd;
+use crate::vocab::{rdf, xsd};
use crate::NamedNodeRef;
use oxilangtag::{LanguageTag, LanguageTagParseError};
#[cfg(feature = "oxsdatatypes")]
@@ -15,8 +14,8 @@ use std::option::Option;
/// The default string formatter is returning an N-Triples, Turtle, and SPARQL compatible representation:
/// ```
/// # use oxilangtag::LanguageTagParseError;
-/// use oxrdf::Literal;
/// use oxrdf::vocab::xsd;
+/// use oxrdf::Literal;
///
/// assert_eq!(
/// "\"foo\\nbar\"",
@@ -427,8 +426,8 @@ impl From for Literal {
///
/// The default string formatter is returning an N-Triples, Turtle, and SPARQL compatible representation:
/// ```
-/// use oxrdf::LiteralRef;
/// use oxrdf::vocab::xsd;
+/// use oxrdf::LiteralRef;
///
/// assert_eq!(
/// "\"foo\\nbar\"",
diff --git a/lib/oxrdf/src/parser.rs b/lib/oxrdf/src/parser.rs
index 326868f5..1794540d 100644
--- a/lib/oxrdf/src/parser.rs
+++ b/lib/oxrdf/src/parser.rs
@@ -5,10 +5,9 @@ use crate::{
};
#[cfg(feature = "rdf-star")]
use crate::{Subject, Triple};
-use std::char;
use std::error::Error;
-use std::fmt;
use std::str::{Chars, FromStr};
+use std::{char, fmt};
/// This limit is set in order to avoid stack overflow error when parsing nested triples due to too many recursive calls.
/// The actual limit value is a wet finger compromise between not failing to parse valid files and avoiding to trigger stack overflow errors.
@@ -23,7 +22,10 @@ impl FromStr for NamedNode {
/// use oxrdf::NamedNode;
/// use std::str::FromStr;
///
- /// assert_eq!(NamedNode::from_str("<http://example.com>").unwrap(), NamedNode::new("http://example.com").unwrap())
+ /// assert_eq!(
+ /// NamedNode::from_str("<http://example.com>").unwrap(),
+ /// NamedNode::new("http://example.com").unwrap()
+ /// )
/// ```
fn from_str(s: &str) -> Result<Self, Self::Err> {
let (term, left) = read_named_node(s)?;
@@ -45,7 +47,10 @@ impl FromStr for BlankNode {
/// use oxrdf::BlankNode;
/// use std::str::FromStr;
///
- /// assert_eq!(BlankNode::from_str("_:ex").unwrap(), BlankNode::new("ex").unwrap())
+ /// assert_eq!(
+ /// BlankNode::from_str("_:ex").unwrap(),
+ /// BlankNode::new("ex").unwrap()
+ /// )
/// ```
fn from_str(s: &str) -> Result<Self, Self::Err> {
let (term, left) = read_blank_node(s)?;
@@ -64,16 +69,41 @@ impl FromStr for Literal {
/// Parses a literal from its NTriples or Turtle serialization
///
/// ```
- /// use oxrdf::{Literal, NamedNode, vocab::xsd};
+ /// use oxrdf::vocab::xsd;
+ /// use oxrdf::{Literal, NamedNode};
/// use std::str::FromStr;
///
- /// assert_eq!(Literal::from_str("\"ex\\n\"").unwrap(), Literal::new_simple_literal("ex\n"));
- /// assert_eq!(Literal::from_str("\"ex\"@en").unwrap(), Literal::new_language_tagged_literal("ex", "en").unwrap());
- /// assert_eq!(Literal::from_str("\"2020\"^^<http://www.w3.org/2001/XMLSchema#gYear>").unwrap(), Literal::new_typed_literal("2020", NamedNode::new("http://www.w3.org/2001/XMLSchema#gYear").unwrap()));
- /// assert_eq!(Literal::from_str("true").unwrap(), Literal::new_typed_literal("true", xsd::BOOLEAN));
- /// assert_eq!(Literal::from_str("+122").unwrap(), Literal::new_typed_literal("+122", xsd::INTEGER));
- /// assert_eq!(Literal::from_str("-122.23").unwrap(), Literal::new_typed_literal("-122.23", xsd::DECIMAL));
- /// assert_eq!(Literal::from_str("-122e+1").unwrap(), Literal::new_typed_literal("-122e+1", xsd::DOUBLE));
+ /// assert_eq!(
+ /// Literal::from_str("\"ex\\n\"").unwrap(),
+ /// Literal::new_simple_literal("ex\n")
+ /// );
+ /// assert_eq!(
+ /// Literal::from_str("\"ex\"@en").unwrap(),
+ /// Literal::new_language_tagged_literal("ex", "en").unwrap()
+ /// );
+ /// assert_eq!(
+ /// Literal::from_str("\"2020\"^^<http://www.w3.org/2001/XMLSchema#gYear>").unwrap(),
+ /// Literal::new_typed_literal(
+ /// "2020",
+ /// NamedNode::new("http://www.w3.org/2001/XMLSchema#gYear").unwrap()
+ /// )
+ /// );
+ /// assert_eq!(
+ /// Literal::from_str("true").unwrap(),
+ /// Literal::new_typed_literal("true", xsd::BOOLEAN)
+ /// );
+ /// assert_eq!(
+ /// Literal::from_str("+122").unwrap(),
+ /// Literal::new_typed_literal("+122", xsd::INTEGER)
+ /// );
+ /// assert_eq!(
+ /// Literal::from_str("-122.23").unwrap(),
+ /// Literal::new_typed_literal("-122.23", xsd::DECIMAL)
+ /// );
+ /// assert_eq!(
+ /// Literal::from_str("-122e+1").unwrap(),
+ /// Literal::new_typed_literal("-122e+1", xsd::DOUBLE)
+ /// );
/// ```
fn from_str(s: &str) -> Result<Self, Self::Err> {
let (term, left) = read_literal(s)?;
@@ -93,12 +123,19 @@ impl FromStr for Term {
/// use oxrdf::*;
/// use std::str::FromStr;
///
- /// assert_eq!(Term::from_str("\"ex\"").unwrap(), Literal::new_simple_literal("ex").into());
- /// assert_eq!(Term::from_str("<< _:s <http://example.com/p> \"o\" >>").unwrap(), Triple::new(
- /// BlankNode::new("s").unwrap(),
- /// NamedNode::new("http://example.com/p").unwrap(),
- /// Literal::new_simple_literal("o")
- /// ).into());
+ /// assert_eq!(
+ /// Term::from_str("\"ex\"").unwrap(),
+ /// Literal::new_simple_literal("ex").into()
+ /// );
+ /// assert_eq!(
+ /// Term::from_str("<< _:s <http://example.com/p> \"o\" >>").unwrap(),
+ /// Triple::new(
+ /// BlankNode::new("s").unwrap(),
+ /// NamedNode::new("http://example.com/p").unwrap(),
+ /// Literal::new_simple_literal("o")
+ /// )
+ /// .into()
+ /// );
/// ```
fn from_str(s: &str) -> Result<Self, Self::Err> {
let (term, left) = read_term(s, 0)?;
@@ -118,7 +155,10 @@ impl FromStr for Variable {
/// use oxrdf::Variable;
/// use std::str::FromStr;
///
- /// assert_eq!(Variable::from_str("$foo").unwrap(), Variable::new("foo").unwrap())
+ /// assert_eq!(
+ /// Variable::from_str("$foo").unwrap(),
+ /// Variable::new("foo").unwrap()
+ /// )
/// ```
fn from_str(s: &str) -> Result<Self, Self::Err> {
if !s.starts_with('?') && !s.starts_with('$') {
diff --git a/lib/oxrdf/src/triple.rs b/lib/oxrdf/src/triple.rs
index 850b1375..813982d0 100644
--- a/lib/oxrdf/src/triple.rs
+++ b/lib/oxrdf/src/triple.rs
@@ -698,7 +698,7 @@ impl<'a> From> for Term {
///
/// The default string formatter is returning an N-Triples, Turtle, and SPARQL compatible representation:
/// ```
-/// use oxrdf::{Triple, NamedNode};
+/// use oxrdf::{NamedNode, Triple};
///
/// assert_eq!(
/// " ",
@@ -706,7 +706,8 @@ impl<'a> From> for Term {
/// subject: NamedNode::new("http://example.com/s")?.into(),
/// predicate: NamedNode::new("http://example.com/p")?,
/// object: NamedNode::new("http://example.com/o")?.into(),
-/// }.to_string()
+/// }
+/// .to_string()
/// );
/// # Result::<_,oxrdf::IriParseError>::Ok(())
/// ```
@@ -769,7 +770,7 @@ impl fmt::Display for Triple {
///
/// The default string formatter is returning an N-Triples, Turtle, and SPARQL compatible representation:
/// ```
-/// use oxrdf::{TripleRef, NamedNodeRef};
+/// use oxrdf::{NamedNodeRef, TripleRef};
///
/// assert_eq!(
/// " ",
@@ -777,7 +778,8 @@ impl fmt::Display for Triple {
/// subject: NamedNodeRef::new("http://example.com/s")?.into(),
/// predicate: NamedNodeRef::new("http://example.com/p")?,
/// object: NamedNodeRef::new("http://example.com/o")?.into(),
-/// }.to_string()
+/// }
+/// .to_string()
/// );
/// # Result::<_,oxrdf::IriParseError>::Ok(())
/// ```
diff --git a/lib/oxrdf/src/variable.rs b/lib/oxrdf/src/variable.rs
index 8bde4d6e..044c73e7 100644
--- a/lib/oxrdf/src/variable.rs
+++ b/lib/oxrdf/src/variable.rs
@@ -8,10 +8,7 @@ use std::fmt;
/// ```
/// use oxrdf::{Variable, VariableNameParseError};
///
-/// assert_eq!(
-/// "?foo",
-/// Variable::new("foo")?.to_string()
-/// );
+/// assert_eq!("?foo", Variable::new("foo")?.to_string());
/// # Result::<_,VariableNameParseError>::Ok(())
/// ```
#[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Hash)]
@@ -67,12 +64,9 @@ impl fmt::Display for Variable {
///
/// The default string formatter is returning a SPARQL compatible representation:
/// ```
-/// use oxrdf::{VariableRef, VariableNameParseError};
+/// use oxrdf::{VariableNameParseError, VariableRef};
///
-/// assert_eq!(
-/// "?foo",
-/// VariableRef::new("foo")?.to_string()
-/// );
+/// assert_eq!("?foo", VariableRef::new("foo")?.to_string());
/// # Result::<_,VariableNameParseError>::Ok(())
/// ```
#[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Copy, Hash)]
diff --git a/lib/oxrdfio/src/format.rs b/lib/oxrdfio/src/format.rs
index cb03a3eb..1cc6aa12 100644
--- a/lib/oxrdfio/src/format.rs
+++ b/lib/oxrdfio/src/format.rs
@@ -26,7 +26,10 @@ impl RdfFormat {
/// ```
/// use oxrdfio::RdfFormat;
///
- /// assert_eq!(RdfFormat::NTriples.iri(), "http://www.w3.org/ns/formats/N-Triples")
+ /// assert_eq!(
+ /// RdfFormat::NTriples.iri(),
+ /// "http://www.w3.org/ns/formats/N-Triples"
+ /// )
/// ```
#[inline]
pub const fn iri(self) -> &'static str {
@@ -136,7 +139,10 @@ impl RdfFormat {
/// ```
/// use oxrdfio::RdfFormat;
///
- /// assert_eq!(RdfFormat::from_media_type("text/turtle; charset=utf-8"), Some(RdfFormat::Turtle))
+ /// assert_eq!(
+ /// RdfFormat::from_media_type("text/turtle; charset=utf-8"),
+ /// Some(RdfFormat::Turtle)
+ /// )
/// ```
#[inline]
pub fn from_media_type(media_type: &str) -> Option {
diff --git a/lib/oxrdfio/src/parser.rs b/lib/oxrdfio/src/parser.rs
index d5bf196d..0f6d11ac 100644
--- a/lib/oxrdfio/src/parser.rs
+++ b/lib/oxrdfio/src/parser.rs
@@ -48,7 +48,9 @@ use tokio::io::AsyncRead;
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> .";
///
/// let parser = RdfParser::from_format(RdfFormat::NTriples);
-/// let quads = parser.parse_read(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
+/// let quads = parser
+/// .parse_read(file.as_bytes())
+/// .collect::<Result<Vec<_>, _>>()?;
///
/// assert_eq!(quads.len(), 1);
/// assert_eq!(quads[0].subject.to_string(), "<http://example.com/s>");
@@ -129,9 +131,12 @@ impl RdfParser {
/// The format the parser uses.
///
/// ```
- /// use oxrdfio::{RdfParser, RdfFormat};
+ /// use oxrdfio::{RdfFormat, RdfParser};
///
- /// assert_eq!(RdfParser::from_format(RdfFormat::Turtle).format(), RdfFormat::Turtle);
+ /// assert_eq!(
+ /// RdfParser::from_format(RdfFormat::Turtle).format(),
+ /// RdfFormat::Turtle
+ /// );
/// ```
pub fn format(&self) -> RdfFormat {
match &self.inner {
@@ -152,7 +157,9 @@ impl RdfParser {
/// let file = "</s> </p> </o> .";
///
/// let parser = RdfParser::from_format(RdfFormat::Turtle).with_base_iri("http://example.com")?;
- /// let quads = parser.parse_read(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
+ /// let quads = parser
+ /// .parse_read(file.as_bytes())
+ /// .collect::<Result<Vec<_>, _>>()?;
///
/// assert_eq!(quads.len(), 1);
/// assert_eq!(quads[0].subject.to_string(), "<http://example.com/s>");
@@ -179,8 +186,11 @@ impl RdfParser {
///
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> .";
///
- /// let parser = RdfParser::from_format(RdfFormat::Turtle).with_default_graph(NamedNode::new("http://example.com/g")?);
- /// let quads = parser.parse_read(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
+ /// let parser = RdfParser::from_format(RdfFormat::Turtle)
+ /// .with_default_graph(NamedNode::new("http://example.com/g")?);
+ /// let quads = parser
+ /// .parse_read(file.as_bytes())
+ /// .collect::<Result<Vec<_>, _>>()?;
///
/// assert_eq!(quads.len(), 1);
/// assert_eq!(quads[0].graph_name.to_string(), "<http://example.com/g>");
@@ -221,10 +231,12 @@ impl RdfParser {
///
/// let result1 = RdfParser::from_format(RdfFormat::NQuads)
/// .rename_blank_nodes()
- /// .parse_read(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
+ /// .parse_read(file.as_bytes())
+ /// .collect::<Result<Vec<_>, _>>()?;
/// let result2 = RdfParser::from_format(RdfFormat::NQuads)
/// .rename_blank_nodes()
- /// .parse_read(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
+ /// .parse_read(file.as_bytes())
+ /// .collect::<Result<Vec<_>, _>>()?;
/// assert_ne!(result1, result2);
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ```
@@ -262,7 +274,9 @@ impl RdfParser {
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> .";
///
/// let parser = RdfParser::from_format(RdfFormat::NTriples);
- /// let quads = parser.parse_read(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
+ /// let quads = parser
+ /// .parse_read(file.as_bytes())
+ /// .collect::<Result<Vec<_>, _>>()?;
///
/// assert_eq!(quads.len(), 1);
/// assert_eq!(quads[0].subject.to_string(), "<http://example.com/s>");
@@ -358,7 +372,9 @@ impl From for RdfParser {
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> .";
///
/// let parser = RdfParser::from_format(RdfFormat::NTriples);
-/// let quads = parser.parse_read(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
+/// let quads = parser
+/// .parse_read(file.as_bytes())
+/// .collect::<Result<Vec<_>, _>>()?;
///
/// assert_eq!(quads.len(), 1);
/// assert_eq!(quads[0].subject.to_string(), "<http://example.com/s>");
diff --git a/lib/oxrdfio/src/serializer.rs b/lib/oxrdfio/src/serializer.rs
index cd132cd5..7abf7696 100644
--- a/lib/oxrdfio/src/serializer.rs
+++ b/lib/oxrdfio/src/serializer.rs
@@ -63,9 +63,12 @@ impl RdfSerializer {
/// The format the serializer serializes to.
///
/// ```
- /// use oxrdfio::{RdfSerializer, RdfFormat};
+ /// use oxrdfio::{RdfFormat, RdfSerializer};
///
- /// assert_eq!(RdfSerializer::from_format(RdfFormat::Turtle).format(), RdfFormat::Turtle);
+ /// assert_eq!(
+ /// RdfSerializer::from_format(RdfFormat::Turtle).format(),
+ /// RdfFormat::Turtle
+ /// );
/// ```
pub fn format(&self) -> RdfFormat {
self.format
diff --git a/lib/oxrdfxml/src/parser.rs b/lib/oxrdfxml/src/parser.rs
index 3c6b23ff..70ca91aa 100644
--- a/lib/oxrdfxml/src/parser.rs
+++ b/lib/oxrdfxml/src/parser.rs
@@ -26,7 +26,8 @@ use tokio::io::{AsyncRead, BufReader as AsyncBufReader};
///
/// Count the number of people:
/// ```
-/// use oxrdf::{NamedNodeRef, vocab::rdf};
+/// use oxrdf::vocab::rdf;
+/// use oxrdf::NamedNodeRef;
/// use oxrdfxml::RdfXmlParser;
///
/// let file = br#"
@@ -84,7 +85,8 @@ impl RdfXmlParser {
///
/// Count the number of people:
/// ```
- /// use oxrdf::{NamedNodeRef, vocab::rdf};
+ /// use oxrdf::vocab::rdf;
+ /// use oxrdf::NamedNodeRef;
/// use oxrdfxml::RdfXmlParser;
///
/// let file = br#"
@@ -119,7 +121,8 @@ impl RdfXmlParser {
///
/// Count the number of people:
/// ```
- /// use oxrdf::{NamedNodeRef, vocab::rdf};
+ /// use oxrdf::vocab::rdf;
+ /// use oxrdf::NamedNodeRef;
/// use oxrdfxml::RdfXmlParser;
///
/// # #[tokio::main(flavor = "current_thread")]
@@ -179,7 +182,8 @@ impl RdfXmlParser {
///
/// Count the number of people:
/// ```
-/// use oxrdf::{NamedNodeRef, vocab::rdf};
+/// use oxrdf::vocab::rdf;
+/// use oxrdf::NamedNodeRef;
/// use oxrdfxml::RdfXmlParser;
///
/// let file = br#"
@@ -246,8 +250,9 @@ impl FromReadRdfXmlReader {
///
/// Count the number of people:
/// ```
-/// use oxrdf::{NamedNodeRef, vocab::rdf};
-/// use oxrdfxml::RdfXmlParser;
+/// use oxrdf::vocab::rdf;
+/// use oxrdf::NamedNodeRef;
+/// use oxrdfxml::RdfXmlParser;
///
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> Result<(), oxrdfxml::ParseError> {
@@ -368,7 +373,7 @@ enum RdfXmlState {
li_counter: u64,
},
PropertyElt {
- //Resource, Literal or Empty property element
+ // Resource, Literal or Empty property element
iri: NamedNode,
base_iri: Option<Iri<String>>,
language: Option<String>,
@@ -392,7 +397,7 @@ enum RdfXmlState {
subject: Subject,
writer: Writer<Vec<u8>>,
id_attr: Option<NamedNode>,
- emit: bool, //false for parseTypeOtherPropertyElt support
+ emit: bool, // false for parseTypeOtherPropertyElt support
},
}
@@ -523,7 +528,7 @@ impl RdfXmlReader {
PropertyElt { subject: Subject },
}
- //Literal case
+ // Literal case
if let Some(RdfXmlState::ParseTypeLiteralPropertyElt { writer, .. }) = self.state.last_mut()
{
let mut clean_event = BytesStart::new(
@@ -542,7 +547,7 @@ impl RdfXmlReader {
let tag_name = self.resolve_tag_name(event.name())?;
- //We read attributes
+ // We read attributes
let (mut language, mut base_iri) = if let Some(current_state) = self.state.last() {
(
current_state.language().cloned(),
@@ -652,7 +657,7 @@ impl RdfXmlReader {
}
}
- //Parsing with the base URI
+ // Parsing with the base URI
let id_attr = match id_attr {
Some(iri) => {
let iri = self.resolve_iri(&base_iri, iri)?;
@@ -855,7 +860,7 @@ impl RdfXmlReader {
event: &BytesEnd<'_>,
results: &mut Vec<Triple>,
) -> Result<(), ParseError> {
- //Literal case
+ // Literal case
if self.in_literal_depth > 0 {
if let Some(RdfXmlState::ParseTypeLiteralPropertyElt { writer, .. }) =
self.state.last_mut()
diff --git a/lib/oxsdatatypes/src/date_time.rs b/lib/oxsdatatypes/src/date_time.rs
index d2405692..127990df 100644
--- a/lib/oxsdatatypes/src/date_time.rs
+++ b/lib/oxsdatatypes/src/date_time.rs
@@ -17,6 +17,13 @@ pub struct DateTime {
}
impl DateTime {
+ pub const MAX: Self = Self {
+ timestamp: Timestamp::MAX,
+ };
+ pub const MIN: Self = Self {
+ timestamp: Timestamp::MIN,
+ };
+
#[inline]
pub(super) fn new(
year: i64,
@@ -241,14 +248,6 @@ impl DateTime {
pub fn is_identical_with(self, other: Self) -> bool {
self.timestamp.is_identical_with(other.timestamp)
}
-
- pub const MIN: Self = Self {
- timestamp: Timestamp::MIN,
- };
-
- pub const MAX: Self = Self {
- timestamp: Timestamp::MAX,
- };
}
/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes).
@@ -317,6 +316,21 @@ pub struct Time {
}
impl Time {
+ #[cfg(test)]
+ const MAX: Self = Self {
+ timestamp: Timestamp {
+ value: Decimal::new_from_i128_unchecked(62_230_255_200),
+ timezone_offset: Some(TimezoneOffset::MIN),
+ },
+ };
+ #[cfg(test)]
+ const MIN: Self = Self {
+ timestamp: Timestamp {
+ value: Decimal::new_from_i128_unchecked(62_230_154_400),
+ timezone_offset: Some(TimezoneOffset::MAX),
+ },
+ };
+
#[inline]
fn new(
mut hour: u8,
@@ -493,22 +507,6 @@ impl Time {
pub fn is_identical_with(self, other: Self) -> bool {
self.timestamp.is_identical_with(other.timestamp)
}
-
- #[cfg(test)]
- const MIN: Self = Self {
- timestamp: Timestamp {
- value: Decimal::new_from_i128_unchecked(62_230_154_400),
- timezone_offset: Some(TimezoneOffset::MAX),
- },
- };
-
- #[cfg(test)]
- const MAX: Self = Self {
- timestamp: Timestamp {
- value: Decimal::new_from_i128_unchecked(62_230_255_200),
- timezone_offset: Some(TimezoneOffset::MIN),
- },
- };
}
/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes).
@@ -566,6 +564,19 @@ pub struct Date {
}
impl Date {
+ pub const MAX: Self = Self {
+ timestamp: Timestamp {
+ value: Decimal::new_from_i128_unchecked(170_141_183_460_469_216_800),
+ timezone_offset: Some(TimezoneOffset::MAX),
+ },
+ };
+ pub const MIN: Self = Self {
+ timestamp: Timestamp {
+ value: Decimal::new_from_i128_unchecked(-170_141_183_460_469_216_800),
+ timezone_offset: Some(TimezoneOffset::MIN),
+ },
+ };
+
#[inline]
fn new(
year: i64,
@@ -742,19 +753,6 @@ impl Date {
pub fn is_identical_with(self, other: Self) -> bool {
self.timestamp.is_identical_with(other.timestamp)
}
-
- pub const MIN: Self = Self {
- timestamp: Timestamp {
- value: Decimal::new_from_i128_unchecked(-170_141_183_460_469_216_800),
- timezone_offset: Some(TimezoneOffset::MIN),
- },
- };
- pub const MAX: Self = Self {
- timestamp: Timestamp {
- value: Decimal::new_from_i128_unchecked(170_141_183_460_469_216_800),
- timezone_offset: Some(TimezoneOffset::MAX),
- },
- };
}
/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes).
@@ -805,6 +803,19 @@ pub struct GYearMonth {
}
impl GYearMonth {
+ pub const MAX: Self = Self {
+ timestamp: Timestamp {
+ value: Decimal::new_from_i128_unchecked(170_141_183_460_469_216_800),
+ timezone_offset: Some(TimezoneOffset::MAX),
+ },
+ };
+ pub const MIN: Self = Self {
+ timestamp: Timestamp {
+ value: Decimal::new_from_i128_unchecked(-170_141_183_460_466_970_400),
+ timezone_offset: Some(TimezoneOffset::MIN),
+ },
+ };
+
#[inline]
fn new(
year: i64,
@@ -876,19 +887,6 @@ impl GYearMonth {
pub fn is_identical_with(self, other: Self) -> bool {
self.timestamp.is_identical_with(other.timestamp)
}
-
- pub const MIN: Self = Self {
- timestamp: Timestamp {
- value: Decimal::new_from_i128_unchecked(-170_141_183_460_466_970_400),
- timezone_offset: Some(TimezoneOffset::MIN),
- },
- };
- pub const MAX: Self = Self {
- timestamp: Timestamp {
- value: Decimal::new_from_i128_unchecked(170_141_183_460_469_216_800),
- timezone_offset: Some(TimezoneOffset::MAX),
- },
- };
}
/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes).
@@ -947,6 +945,19 @@ pub struct GYear {
}
impl GYear {
+ pub const MAX: Self = Self {
+ timestamp: Timestamp {
+ value: Decimal::new_from_i128_unchecked(170_141_183_460_461_440_800),
+ timezone_offset: Some(TimezoneOffset::MAX),
+ },
+ };
+ pub const MIN: Self = Self {
+ timestamp: Timestamp {
+ value: Decimal::new_from_i128_unchecked(-170_141_183_460_461_700_000),
+ timezone_offset: Some(TimezoneOffset::MIN),
+ },
+ };
+
#[inline]
fn new(
year: i64,
@@ -1011,19 +1022,6 @@ impl GYear {
pub fn is_identical_with(self, other: Self) -> bool {
self.timestamp.is_identical_with(other.timestamp)
}
-
- pub const MIN: Self = Self {
- timestamp: Timestamp {
- value: Decimal::new_from_i128_unchecked(-170_141_183_460_461_700_000),
- timezone_offset: Some(TimezoneOffset::MIN),
- },
- };
- pub const MAX: Self = Self {
- timestamp: Timestamp {
- value: Decimal::new_from_i128_unchecked(170_141_183_460_461_440_800),
- timezone_offset: Some(TimezoneOffset::MAX),
- },
- };
}
/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes).
@@ -1461,6 +1459,10 @@ pub struct TimezoneOffset {
}
impl TimezoneOffset {
+ pub const MAX: Self = Self { offset: 14 * 60 };
+ pub const MIN: Self = Self { offset: -14 * 60 };
+ pub const UTC: Self = Self { offset: 0 };
+
/// From offset in minute with respect to UTC
#[inline]
pub fn new(offset_in_minutes: i16) -> Result {
@@ -1489,10 +1491,6 @@ impl TimezoneOffset {
pub fn to_be_bytes(self) -> [u8; 2] {
self.offset.to_be_bytes()
}
-
- pub const MIN: Self = Self { offset: -14 * 60 };
- pub const UTC: Self = Self { offset: 0 };
- pub const MAX: Self = Self { offset: 14 * 60 };
}
impl TryFrom for TimezoneOffset {
@@ -1576,7 +1574,7 @@ impl PartialEq for Timestamp {
fn eq(&self, other: &Self) -> bool {
match (self.timezone_offset, other.timezone_offset) {
(Some(_), Some(_)) | (None, None) => self.value.eq(&other.value),
- _ => false, //TODO: implicit timezone
+ _ => false, // TODO: implicit timezone
}
}
}
@@ -1622,6 +1620,15 @@ impl Hash for Timestamp {
}
impl Timestamp {
+ pub const MAX: Self = Self {
+ value: Decimal::MAX,
+ timezone_offset: Some(TimezoneOffset::MAX),
+ };
+ pub const MIN: Self = Self {
+ value: Decimal::MIN,
+ timezone_offset: Some(TimezoneOffset::MIN),
+ };
+
#[inline]
fn new(props: &DateTimeSevenPropertyModel) -> Result {
Ok(Self {
@@ -1790,7 +1797,7 @@ impl Timestamp {
(Some(_), Some(_)) | (None, None) => {
Some(DayTimeDuration::new(self.value.checked_sub(rhs.value)?))
}
- _ => None, //TODO: implicit timezone
+ _ => None, // TODO: implicit timezone
}
}
@@ -1816,13 +1823,13 @@ impl Timestamp {
Self {
value: self
.value
- .checked_add(i64::from(from_timezone.offset) * 60)?, // We keep the literal value
+ .checked_add(i64::from(from_timezone.offset) * 60)?, /* We keep the literal value */
timezone_offset: None,
}
}
} else if let Some(to_timezone) = timezone_offset {
Self {
- value: self.value.checked_sub(i64::from(to_timezone.offset) * 60)?, // We keep the literal value
+ value: self.value.checked_sub(i64::from(to_timezone.offset) * 60)?, /* We keep the literal value */
timezone_offset: Some(to_timezone),
}
} else {
@@ -1851,16 +1858,6 @@ impl Timestamp {
pub fn is_identical_with(self, other: Self) -> bool {
self.value == other.value && self.timezone_offset == other.timezone_offset
}
-
- pub const MIN: Self = Self {
- value: Decimal::MIN,
- timezone_offset: Some(TimezoneOffset::MIN),
- };
-
- pub const MAX: Self = Self {
- value: Decimal::MAX,
- timezone_offset: Some(TimezoneOffset::MAX),
- };
}
#[cfg(feature = "custom-now")]
@@ -1960,7 +1957,7 @@ fn normalize_second(
mi: i64,
se: Decimal,
) -> Option<(i64, u8, u8, u8, u8, Decimal)> {
- let mi = mi.checked_add(i64::try_from(se.as_i128().checked_div(60)?).ok()?)?; //TODO: good idea?
+ let mi = mi.checked_add(i64::try_from(se.as_i128().checked_div(60)?).ok()?)?; // TODO: good idea?
let se = se.checked_rem(60)?;
let (yr, mo, da, hr, mi) = normalize_minute(yr, mo, da, hr, mi)?;
Some((yr, mo, da, hr, mi, se))
diff --git a/lib/oxsdatatypes/src/decimal.rs b/lib/oxsdatatypes/src/decimal.rs
index 6a59105e..0082ca8a 100644
--- a/lib/oxsdatatypes/src/decimal.rs
+++ b/lib/oxsdatatypes/src/decimal.rs
@@ -19,6 +19,11 @@ pub struct Decimal {
}
impl Decimal {
+ pub const MAX: Self = Self { value: i128::MAX };
+ pub const MIN: Self = Self { value: i128::MIN };
+ #[cfg(test)]
+ pub const STEP: Self = Self { value: 1 };
+
/// Constructs the decimal i / 10^n
#[inline]
pub const fn new(i: i128, n: u32) -> Result {
@@ -260,13 +265,6 @@ impl Decimal {
pub(super) const fn as_i128(self) -> i128 {
self.value / DECIMAL_PART_POW
}
-
- pub const MIN: Self = Self { value: i128::MIN };
-
- pub const MAX: Self = Self { value: i128::MAX };
-
- #[cfg(test)]
- pub const STEP: Self = Self { value: 1 };
}
impl From for Decimal {
@@ -499,7 +497,7 @@ impl FromStr for Decimal {
}
input = &input[1..];
if input.is_empty() && !with_before_dot {
- //We only have a dot
+ // We only have a dot
return Err(PARSE_UNEXPECTED_END);
}
while input.last() == Some(&b'0') {
@@ -520,11 +518,11 @@ impl FromStr for Decimal {
}
}
if exp == 0 {
- //Underflow
+ // Underflow
return Err(PARSE_UNDERFLOW);
}
} else if !with_before_dot {
- //It's empty
+ // It's empty
return Err(PARSE_UNEXPECTED_END);
}
diff --git a/lib/oxsdatatypes/src/double.rs b/lib/oxsdatatypes/src/double.rs
index 1a399019..3b58858f 100644
--- a/lib/oxsdatatypes/src/double.rs
+++ b/lib/oxsdatatypes/src/double.rs
@@ -17,6 +17,16 @@ pub struct Double {
}
impl Double {
+ pub const INFINITY: Self = Self {
+ value: f64::INFINITY,
+ };
+ pub const MAX: Self = Self { value: f64::MAX };
+ pub const MIN: Self = Self { value: f64::MIN };
+ pub const NAN: Self = Self { value: f64::NAN };
+ pub const NEG_INFINITY: Self = Self {
+ value: f64::NEG_INFINITY,
+ };
+
#[inline]
#[must_use]
pub fn from_be_bytes(bytes: [u8; 8]) -> Self {
@@ -77,20 +87,6 @@ impl Double {
pub fn is_identical_with(self, other: Self) -> bool {
self.value.to_bits() == other.value.to_bits()
}
-
- pub const MIN: Self = Self { value: f64::MIN };
-
- pub const MAX: Self = Self { value: f64::MAX };
-
- pub const INFINITY: Self = Self {
- value: f64::INFINITY,
- };
-
- pub const NEG_INFINITY: Self = Self {
- value: f64::NEG_INFINITY,
- };
-
- pub const NAN: Self = Self { value: f64::NAN };
}
impl From<Double> for f64 {
diff --git a/lib/oxsdatatypes/src/duration.rs b/lib/oxsdatatypes/src/duration.rs
index 93dac7d6..efb92b62 100644
--- a/lib/oxsdatatypes/src/duration.rs
+++ b/lib/oxsdatatypes/src/duration.rs
@@ -15,6 +15,15 @@ pub struct Duration {
}
impl Duration {
+ pub const MAX: Self = Self {
+ year_month: YearMonthDuration::MAX,
+ day_time: DayTimeDuration::MAX,
+ };
+ pub const MIN: Self = Self {
+ year_month: YearMonthDuration::MIN,
+ day_time: DayTimeDuration::MIN,
+ };
+
#[inline]
pub fn new(
months: impl Into<i64>,
@@ -160,16 +169,6 @@ impl Duration {
pub fn is_identical_with(self, other: Self) -> bool {
self == other
}
-
- pub const MIN: Self = Self {
- year_month: YearMonthDuration::MIN,
- day_time: DayTimeDuration::MIN,
- };
-
- pub const MAX: Self = Self {
- year_month: YearMonthDuration::MAX,
- day_time: DayTimeDuration::MAX,
- };
}
impl TryFrom for Duration {
@@ -301,6 +300,9 @@ pub struct YearMonthDuration {
}
impl YearMonthDuration {
+ pub const MAX: Self = Self { months: i64::MAX };
+ pub const MIN: Self = Self { months: i64::MIN };
+
#[inline]
pub fn new(months: impl Into<i64>) -> Self {
Self {
@@ -374,10 +376,6 @@ impl YearMonthDuration {
pub fn is_identical_with(self, other: Self) -> bool {
self == other
}
-
- pub const MIN: Self = Self { months: i64::MIN };
-
- pub const MAX: Self = Self { months: i64::MAX };
}
impl From for Duration {
@@ -469,6 +467,13 @@ pub struct DayTimeDuration {
}
impl DayTimeDuration {
+ pub const MAX: Self = Self {
+ seconds: Decimal::MAX,
+ };
+ pub const MIN: Self = Self {
+ seconds: Decimal::MIN,
+ };
+
#[inline]
pub fn new(seconds: impl Into<Decimal>) -> Self {
Self {
@@ -558,14 +563,6 @@ impl DayTimeDuration {
pub fn is_identical_with(self, other: Self) -> bool {
self == other
}
-
- pub const MIN: Self = Self {
- seconds: Decimal::MIN,
- };
-
- pub const MAX: Self = Self {
- seconds: Decimal::MAX,
- };
}
impl From for Duration {
diff --git a/lib/oxsdatatypes/src/float.rs b/lib/oxsdatatypes/src/float.rs
index bc0aab75..c4d08d6c 100644
--- a/lib/oxsdatatypes/src/float.rs
+++ b/lib/oxsdatatypes/src/float.rs
@@ -17,6 +17,16 @@ pub struct Float {
}
impl Float {
+ pub const INFINITY: Self = Self {
+ value: f32::INFINITY,
+ };
+ pub const MAX: Self = Self { value: f32::MAX };
+ pub const MIN: Self = Self { value: f32::MIN };
+ pub const NAN: Self = Self { value: f32::NAN };
+ pub const NEG_INFINITY: Self = Self {
+ value: f32::NEG_INFINITY,
+ };
+
#[inline]
#[must_use]
pub fn from_be_bytes(bytes: [u8; 4]) -> Self {
@@ -77,20 +87,6 @@ impl Float {
pub fn is_identical_with(self, other: Self) -> bool {
self.value.to_bits() == other.value.to_bits()
}
-
- pub const MIN: Self = Self { value: f32::MIN };
-
- pub const MAX: Self = Self { value: f32::MAX };
-
- pub const INFINITY: Self = Self {
- value: f32::INFINITY,
- };
-
- pub const NEG_INFINITY: Self = Self {
- value: f32::NEG_INFINITY,
- };
-
- pub const NAN: Self = Self { value: f32::NAN };
}
impl From<Float> for f32 {
diff --git a/lib/oxsdatatypes/src/integer.rs b/lib/oxsdatatypes/src/integer.rs
index f2b8506f..e76ae62e 100644
--- a/lib/oxsdatatypes/src/integer.rs
+++ b/lib/oxsdatatypes/src/integer.rs
@@ -14,6 +14,9 @@ pub struct Integer {
}
impl Integer {
+ pub const MAX: Self = Self { value: i64::MAX };
+ pub const MIN: Self = Self { value: i64::MIN };
+
#[inline]
#[must_use]
pub fn from_be_bytes(bytes: [u8; 8]) -> Self {
@@ -134,10 +137,6 @@ impl Integer {
pub fn is_identical_with(self, other: Self) -> bool {
self == other
}
-
- pub const MIN: Self = Self { value: i64::MIN };
-
- pub const MAX: Self = Self { value: i64::MAX };
}
impl From for Integer {
diff --git a/lib/oxttl/src/lexer.rs b/lib/oxttl/src/lexer.rs
index d4eb024f..3fb62845 100644
--- a/lib/oxttl/src/lexer.rs
+++ b/lib/oxttl/src/lexer.rs
@@ -49,8 +49,8 @@ pub struct N3Lexer {
// TODO: simplify by not giving is_end and fail with an "unexpected eof" is none is returned when is_end=true?
impl TokenRecognizer for N3Lexer {
- type Token<'a> = N3Token<'a>;
type Options = N3LexerOptions;
+ type Token<'a> = N3Token<'a>;
fn recognize_next_token<'a>(
&mut self,
@@ -790,7 +790,7 @@ impl N3Lexer {
format!("Unexpected escape character '\\{}'", char::from(c)),
)
.into()),
- )), //TODO: read until end of string
+ )), // TODO: read until end of string
}
}
diff --git a/lib/oxttl/src/line_formats.rs b/lib/oxttl/src/line_formats.rs
index e7d39e09..5932f7a2 100644
--- a/lib/oxttl/src/line_formats.rs
+++ b/lib/oxttl/src/line_formats.rs
@@ -39,9 +39,9 @@ enum NQuadsState {
}
impl RuleRecognizer for NQuadsRecognizer {
- type TokenRecognizer = N3Lexer;
- type Output = Quad;
type Context = NQuadsRecognizerContext;
+ type Output = Quad;
+ type TokenRecognizer = N3Lexer;
fn error_recovery_state(mut self) -> Self {
self.stack.clear();
@@ -251,7 +251,7 @@ impl RuleRecognizer for NQuadsRecognizer {
self.emit_quad(results, GraphName::DefaultGraph);
errors.push("Triples should be followed by a dot".into())
}
- _ => errors.push("Unexpected end".into()), //TODO
+ _ => errors.push("Unexpected end".into()), // TODO
}
}
diff --git a/lib/oxttl/src/n3.rs b/lib/oxttl/src/n3.rs
index a1c23f25..263db936 100644
--- a/lib/oxttl/src/n3.rs
+++ b/lib/oxttl/src/n3.rs
@@ -181,7 +181,8 @@ impl From for N3Quad {
///
/// Count the number of people:
/// ```
-/// use oxrdf::{NamedNode, vocab::rdf};
+/// use oxrdf::vocab::rdf;
+/// use oxrdf::NamedNode;
/// use oxttl::n3::{N3Parser, N3Term};
///
/// let file = br#"@base .
@@ -260,7 +261,9 @@ impl N3Parser {
/// <bar> a schema:Person ;
/// schema:name "Bar" ."#;
///
- /// let rdf_type = N3Term::NamedNode(NamedNode::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?);
+ /// let rdf_type = N3Term::NamedNode(NamedNode::new(
+ /// "http://www.w3.org/1999/02/22-rdf-syntax-ns#type",
+ /// )?);
/// let schema_person = N3Term::NamedNode(NamedNode::new("http://schema.org/Person")?);
/// let mut count = 0;
/// for triple in N3Parser::new().parse_read(file.as_ref()) {
@@ -282,7 +285,8 @@ impl N3Parser {
///
/// Count the number of people:
/// ```
- /// use oxrdf::{NamedNode, vocab::rdf};
+ /// use oxrdf::vocab::rdf;
+ /// use oxrdf::NamedNode;
/// use oxttl::n3::{N3Parser, N3Term};
///
/// # #[tokio::main(flavor = "current_thread")]
@@ -322,14 +326,16 @@ impl N3Parser {
///
/// Count the number of people:
/// ```
- /// use oxrdf::{NamedNode, vocab::rdf};
+ /// use oxrdf::vocab::rdf;
+ /// use oxrdf::NamedNode;
/// use oxttl::n3::{N3Parser, N3Term};
///
- /// let file: [&[u8]; 5] = [b"@base <http://example.com/>",
+ /// let file: [&[u8]; 5] = [
+ /// b"@base <http://example.com/>",
/// b". @prefix schema: <http://schema.org/> .",
/// b"<foo> a schema:Person",
/// b" ; schema:name \"Foo\" . <bar>",
- /// b" a schema:Person ; schema:name \"Bar\" ."
+ /// b" a schema:Person ; schema:name \"Bar\" .",
/// ];
///
/// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned());
@@ -340,7 +346,7 @@ impl N3Parser {
/// while !parser.is_end() {
/// // We feed more data to the parser
/// if let Some(chunk) = file_chunks.next() {
- /// parser.extend_from_slice(chunk);
+ /// parser.extend_from_slice(chunk);
/// } else {
/// parser.end(); // It's finished
/// }
@@ -366,7 +372,8 @@ impl N3Parser {
///
/// Count the number of people:
/// ```
-/// use oxrdf::{NamedNode, vocab::rdf};
+/// use oxrdf::vocab::rdf;
+/// use oxrdf::NamedNode;
/// use oxttl::n3::{N3Parser, N3Term};
///
/// let file = br#"@base .
@@ -459,7 +466,8 @@ impl Iterator for FromReadN3Reader {
///
/// Count the number of people:
/// ```
-/// use oxrdf::{NamedNode, vocab::rdf};
+/// use oxrdf::vocab::rdf;
+/// use oxrdf::NamedNode;
/// use oxttl::n3::{N3Parser, N3Term};
///
/// # #[tokio::main(flavor = "current_thread")]
@@ -561,14 +569,16 @@ impl FromTokioAsyncReadN3Reader {
///
/// Count the number of people:
/// ```
-/// use oxrdf::{NamedNode, vocab::rdf};
+/// use oxrdf::vocab::rdf;
+/// use oxrdf::NamedNode;
/// use oxttl::n3::{N3Parser, N3Term};
///
-/// let file: [&[u8]; 5] = [b"@base <http://example.com/>",
+/// let file: [&[u8]; 5] = [
+/// b"@base <http://example.com/>",
/// b". @prefix schema: <http://schema.org/> .",
/// b"<foo> a schema:Person",
/// b" ; schema:name \"Foo\" . <bar>",
-/// b" a schema:Person ; schema:name \"Bar\" ."
+/// b" a schema:Person ; schema:name \"Bar\" .",
/// ];
///
/// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned());
@@ -579,7 +589,7 @@ impl FromTokioAsyncReadN3Reader {
/// while !parser.is_end() {
/// // We feed more data to the parser
/// if let Some(chunk) = file_chunks.next() {
-/// parser.extend_from_slice(chunk);
+/// parser.extend_from_slice(chunk);
/// } else {
/// parser.end(); // It's finished
/// }
@@ -697,9 +707,9 @@ struct N3RecognizerContext {
}
impl RuleRecognizer for N3Recognizer {
- type TokenRecognizer = N3Lexer;
- type Output = N3Quad;
type Context = N3RecognizerContext;
+ type Output = N3Quad;
+ type TokenRecognizer = N3Lexer;
fn error_recovery_state(mut self) -> Self {
self.stack.clear();
@@ -1191,7 +1201,7 @@ impl RuleRecognizer for N3Recognizer {
) {
match &*self.stack {
[] | [N3State::N3Doc] => (),
- _ => errors.push("Unexpected end".into()), //TODO
+ _ => errors.push("Unexpected end".into()), // TODO
}
}
diff --git a/lib/oxttl/src/terse.rs b/lib/oxttl/src/terse.rs
index 86fad434..818524f6 100644
--- a/lib/oxttl/src/terse.rs
+++ b/lib/oxttl/src/terse.rs
@@ -4,12 +4,10 @@ use crate::lexer::{resolve_local_name, N3Lexer, N3LexerMode, N3LexerOptions, N3T
use crate::toolkit::{Lexer, Parser, RuleRecognizer, RuleRecognizerError};
use crate::{MAX_BUFFER_SIZE, MIN_BUFFER_SIZE};
use oxiri::Iri;
+use oxrdf::vocab::{rdf, xsd};
#[cfg(feature = "rdf-star")]
use oxrdf::Triple;
-use oxrdf::{
- vocab::{rdf, xsd},
- BlankNode, GraphName, Literal, NamedNode, NamedOrBlankNode, Quad, Subject, Term,
-};
+use oxrdf::{BlankNode, GraphName, Literal, NamedNode, NamedOrBlankNode, Quad, Subject, Term};
use std::collections::HashMap;
pub struct TriGRecognizer {
@@ -30,9 +28,9 @@ pub struct TriGRecognizerContext {
}
impl RuleRecognizer for TriGRecognizer {
- type TokenRecognizer = N3Lexer;
- type Output = Quad;
type Context = TriGRecognizerContext;
+ type Output = Quad;
+ type TokenRecognizer = N3Lexer;
fn error_recovery_state(mut self) -> Self {
self.stack.clear();
@@ -784,7 +782,7 @@ impl RuleRecognizer for TriGRecognizer {
}
}
} else if token == N3Token::Punctuation(".") || token == N3Token::Punctuation("}") {
- //TODO: be smarter depending if we are in '{' or not
+ // TODO: be smarter depending if we are in '{' or not
self.stack.push(TriGState::TriGDoc);
self
} else {
@@ -819,7 +817,7 @@ impl RuleRecognizer for TriGRecognizer {
self.emit_quad(results);
errors.push("Triples should be followed by a dot".into())
}
- _ => errors.push("Unexpected end".into()), //TODO
+ _ => errors.push("Unexpected end".into()), // TODO
}
}
diff --git a/lib/oxttl/src/toolkit/lexer.rs b/lib/oxttl/src/toolkit/lexer.rs
index 0f7373c2..2406dfb1 100644
--- a/lib/oxttl/src/toolkit/lexer.rs
+++ b/lib/oxttl/src/toolkit/lexer.rs
@@ -366,7 +366,7 @@ impl Lexer {
_ => return Some(()),
}
i += 1;
- //TODO: SIMD
+ // TODO: SIMD
}
} else {
for c in &self.data[self.position.buffer_offset..] {
@@ -376,7 +376,7 @@ impl Lexer {
} else {
return Some(());
}
- //TODO: SIMD
+ // TODO: SIMD
}
}
Some(())
diff --git a/lib/oxttl/src/trig.rs b/lib/oxttl/src/trig.rs
index 0dad7fd7..5a7cdb4a 100644
--- a/lib/oxttl/src/trig.rs
+++ b/lib/oxttl/src/trig.rs
@@ -6,7 +6,8 @@ use crate::terse::TriGRecognizer;
use crate::toolkit::FromTokioAsyncReadIterator;
use crate::toolkit::{FromReadIterator, ParseError, Parser, SyntaxError};
use oxiri::{Iri, IriParseError};
-use oxrdf::{vocab::xsd, GraphName, NamedNode, Quad, QuadRef, Subject, TermRef};
+use oxrdf::vocab::xsd;
+use oxrdf::{GraphName, NamedNode, Quad, QuadRef, Subject, TermRef};
use std::collections::HashMap;
use std::fmt;
use std::io::{self, Read, Write};
@@ -19,7 +20,8 @@ use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};
///
/// Count the number of people:
/// ```
-/// use oxrdf::{NamedNodeRef, vocab::rdf};
+/// use oxrdf::vocab::rdf;
+/// use oxrdf::NamedNodeRef;
/// use oxttl::TriGParser;
///
/// let file = br#"@base .
@@ -97,7 +99,8 @@ impl TriGParser {
///
/// Count the number of people:
/// ```
- /// use oxrdf::{NamedNodeRef, vocab::rdf};
+ /// use oxrdf::vocab::rdf;
+ /// use oxrdf::NamedNodeRef;
/// use oxttl::TriGParser;
///
/// let file = br#"@base .
@@ -128,7 +131,8 @@ impl TriGParser {
///
/// Count the number of people:
/// ```
- /// use oxrdf::{NamedNodeRef, vocab::rdf};
+ /// use oxrdf::vocab::rdf;
+ /// use oxrdf::NamedNodeRef;
/// use oxttl::TriGParser;
///
/// # #[tokio::main(flavor = "current_thread")]
@@ -167,14 +171,16 @@ impl TriGParser {
///
/// Count the number of people:
/// ```
- /// use oxrdf::{NamedNodeRef, vocab::rdf};
+ /// use oxrdf::vocab::rdf;
+ /// use oxrdf::NamedNodeRef;
/// use oxttl::TriGParser;
///
- /// let file: [&[u8]; 5] = [b"@base <http://example.com/>",
+ /// let file: [&[u8]; 5] = [
+ /// b"@base <http://example.com/>",
/// b". @prefix schema: <http://schema.org/> .",
/// b"<foo> a schema:Person",
/// b" ; schema:name \"Foo\" . <bar>",
- /// b" a schema:Person ; schema:name \"Bar\" ."
+ /// b" a schema:Person ; schema:name \"Bar\" .",
/// ];
///
/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
@@ -184,7 +190,7 @@ impl TriGParser {
/// while !parser.is_end() {
/// // We feed more data to the parser
/// if let Some(chunk) = file_chunks.next() {
- /// parser.extend_from_slice(chunk);
+ /// parser.extend_from_slice(chunk);
/// } else {
/// parser.end(); // It's finished
/// }
@@ -217,7 +223,8 @@ impl TriGParser {
///
/// Count the number of people:
/// ```
-/// use oxrdf::{NamedNodeRef, vocab::rdf};
+/// use oxrdf::vocab::rdf;
+/// use oxrdf::NamedNodeRef;
/// use oxttl::TriGParser;
///
/// let file = br#"@base .
@@ -309,7 +316,8 @@ impl Iterator for FromReadTriGReader {
///
/// Count the number of people:
/// ```
-/// use oxrdf::{NamedNodeRef, vocab::rdf};
+/// use oxrdf::vocab::rdf;
+/// use oxrdf::NamedNodeRef;
/// use oxttl::TriGParser;
///
/// # #[tokio::main(flavor = "current_thread")]
@@ -410,14 +418,16 @@ impl FromTokioAsyncReadTriGReader {
///
/// Count the number of people:
/// ```
-/// use oxrdf::{NamedNodeRef, vocab::rdf};
+/// use oxrdf::vocab::rdf;
+/// use oxrdf::NamedNodeRef;
/// use oxttl::TriGParser;
///
-/// let file: [&[u8]; 5] = [b"@base <http://example.com/>",
+/// let file: [&[u8]; 5] = [
+/// b"@base <http://example.com/>",
/// b". @prefix schema: <http://schema.org/> .",
/// b"<foo> a schema:Person",
/// b" ; schema:name \"Foo\" . <bar>",
-/// b" a schema:Person ; schema:name \"Bar\" ."
+/// b" a schema:Person ; schema:name \"Bar\" .",
/// ];
///
/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
@@ -427,7 +437,7 @@ impl FromTokioAsyncReadTriGReader {
/// while !parser.is_end() {
/// // We feed more data to the parser
/// if let Some(chunk) = file_chunks.next() {
-/// parser.extend_from_slice(chunk);
+/// parser.extend_from_slice(chunk);
/// } else {
/// parser.end(); // It's finished
/// }
diff --git a/lib/oxttl/src/turtle.rs b/lib/oxttl/src/turtle.rs
index 542afd27..5a2b67a2 100644
--- a/lib/oxttl/src/turtle.rs
+++ b/lib/oxttl/src/turtle.rs
@@ -21,7 +21,8 @@ use tokio::io::{AsyncRead, AsyncWrite};
///
/// Count the number of people:
/// ```
-/// use oxrdf::{NamedNodeRef, vocab::rdf};
+/// use oxrdf::vocab::rdf;
+/// use oxrdf::NamedNodeRef;
/// use oxttl::TurtleParser;
///
/// let file = br#"@base .
@@ -99,7 +100,8 @@ impl TurtleParser {
///
/// Count the number of people:
/// ```
- /// use oxrdf::{NamedNodeRef, vocab::rdf};
+ /// use oxrdf::vocab::rdf;
+ /// use oxrdf::NamedNodeRef;
/// use oxttl::TurtleParser;
///
/// let file = br#"@base .
@@ -130,7 +132,8 @@ impl TurtleParser {
///
/// Count the number of people:
/// ```
- /// use oxrdf::{NamedNodeRef, vocab::rdf};
+ /// use oxrdf::vocab::rdf;
+ /// use oxrdf::NamedNodeRef;
/// use oxttl::TurtleParser;
///
/// # #[tokio::main(flavor = "current_thread")]
@@ -169,14 +172,16 @@ impl TurtleParser {
///
/// Count the number of people:
/// ```
- /// use oxrdf::{NamedNodeRef, vocab::rdf};
+ /// use oxrdf::vocab::rdf;
+ /// use oxrdf::NamedNodeRef;
/// use oxttl::TurtleParser;
///
- /// let file: [&[u8]; 5] = [b"@base <http://example.com/>",
+ /// let file: [&[u8]; 5] = [
+ /// b"@base <http://example.com/>",
/// b". @prefix schema: <http://schema.org/> .",
/// b"<foo> a schema:Person",
/// b" ; schema:name \"Foo\" . <bar>",
- /// b" a schema:Person ; schema:name \"Bar\" ."
+ /// b" a schema:Person ; schema:name \"Bar\" .",
/// ];
///
/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
@@ -186,7 +191,7 @@ impl TurtleParser {
/// while !parser.is_end() {
/// // We feed more data to the parser
/// if let Some(chunk) = file_chunks.next() {
- /// parser.extend_from_slice(chunk);
+ /// parser.extend_from_slice(chunk);
/// } else {
/// parser.end(); // It's finished
/// }
@@ -219,7 +224,8 @@ impl TurtleParser {
///
/// Count the number of people:
/// ```
-/// use oxrdf::{NamedNodeRef, vocab::rdf};
+/// use oxrdf::vocab::rdf;
+/// use oxrdf::NamedNodeRef;
/// use oxttl::TurtleParser;
///
/// let file = br#"@base .
@@ -311,7 +317,8 @@ impl Iterator for FromReadTurtleReader {
///
/// Count the number of people:
/// ```
-/// use oxrdf::{NamedNodeRef, vocab::rdf};
+/// use oxrdf::vocab::rdf;
+/// use oxrdf::NamedNodeRef;
/// use oxttl::TurtleParser;
///
/// # #[tokio::main(flavor = "current_thread")]
@@ -412,14 +419,16 @@ impl FromTokioAsyncReadTurtleReader {
///
/// Count the number of people:
/// ```
-/// use oxrdf::{NamedNodeRef, vocab::rdf};
+/// use oxrdf::vocab::rdf;
+/// use oxrdf::NamedNodeRef;
/// use oxttl::TurtleParser;
///
-/// let file: [&[u8]; 5] = [b"@base <http://example.com/>",
+/// let file: [&[u8]; 5] = [
+/// b"@base <http://example.com/>",
/// b". @prefix schema: <http://schema.org/> .",
/// b"<foo> a schema:Person",
/// b" ; schema:name \"Foo\" . <bar>",
-/// b" a schema:Person ; schema:name \"Bar\" ."
+/// b" a schema:Person ; schema:name \"Bar\" .",
/// ];
///
/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
@@ -429,7 +438,7 @@ impl FromTokioAsyncReadTurtleReader {
/// while !parser.is_end() {
/// // We feed more data to the parser
/// if let Some(chunk) = file_chunks.next() {
-/// parser.extend_from_slice(chunk);
+/// parser.extend_from_slice(chunk);
/// } else {
/// parser.end(); // It's finished
/// }
diff --git a/lib/sparesults/src/csv.rs b/lib/sparesults/src/csv.rs
index 985092b4..7cf6059e 100644
--- a/lib/sparesults/src/csv.rs
+++ b/lib/sparesults/src/csv.rs
@@ -2,8 +2,8 @@
use crate::error::{ParseError, SyntaxError, SyntaxErrorKind, TextPosition};
use memchr::memchr;
-use oxrdf::Variable;
-use oxrdf::{vocab::xsd, *};
+use oxrdf::vocab::xsd;
+use oxrdf::*;
use std::io::{self, Read, Write};
use std::str::{self, FromStr};
#[cfg(feature = "async-tokio")]
diff --git a/lib/sparesults/src/format.rs b/lib/sparesults/src/format.rs
index e7eba74a..982ff11f 100644
--- a/lib/sparesults/src/format.rs
+++ b/lib/sparesults/src/format.rs
@@ -20,7 +20,10 @@ impl QueryResultsFormat {
/// ```
/// use sparesults::QueryResultsFormat;
///
- /// assert_eq!(QueryResultsFormat::Json.iri(), "http://www.w3.org/ns/formats/SPARQL_Results_JSON")
+ /// assert_eq!(
+ /// QueryResultsFormat::Json.iri(),
+ /// "http://www.w3.org/ns/formats/SPARQL_Results_JSON"
+ /// )
/// ```
#[inline]
pub fn iri(self) -> &'static str {
@@ -31,12 +34,16 @@ impl QueryResultsFormat {
Self::Tsv => "http://www.w3.org/ns/formats/SPARQL_Results_TSV",
}
}
+
/// The format [IANA media type](https://tools.ietf.org/html/rfc2046).
///
/// ```
/// use sparesults::QueryResultsFormat;
///
- /// assert_eq!(QueryResultsFormat::Json.media_type(), "application/sparql-results+json")
+ /// assert_eq!(
+ /// QueryResultsFormat::Json.media_type(),
+ /// "application/sparql-results+json"
+ /// )
/// ```
#[inline]
pub fn media_type(self) -> &'static str {
@@ -91,7 +98,10 @@ impl QueryResultsFormat {
/// ```
/// use sparesults::QueryResultsFormat;
///
- /// assert_eq!(QueryResultsFormat::from_media_type("application/sparql-results+json; charset=utf-8"), Some(QueryResultsFormat::Json))
+ /// assert_eq!(
+ /// QueryResultsFormat::from_media_type("application/sparql-results+json; charset=utf-8"),
+ /// Some(QueryResultsFormat::Json)
+ /// )
/// ```
#[inline]
pub fn from_media_type(media_type: &str) -> Option {
@@ -134,7 +144,10 @@ impl QueryResultsFormat {
/// ```
/// use sparesults::QueryResultsFormat;
///
- /// assert_eq!(QueryResultsFormat::from_extension("json"), Some(QueryResultsFormat::Json))
+ /// assert_eq!(
+ /// QueryResultsFormat::from_extension("json"),
+ /// Some(QueryResultsFormat::Json)
+ /// )
/// ```
#[inline]
pub fn from_extension(extension: &str) -> Option {
diff --git a/lib/sparesults/src/json.rs b/lib/sparesults/src/json.rs
index 85b03fcd..2e63fc81 100644
--- a/lib/sparesults/src/json.rs
+++ b/lib/sparesults/src/json.rs
@@ -5,7 +5,6 @@ use crate::error::{ParseError, SyntaxError};
use json_event_parser::ToTokioAsyncWriteJsonWriter;
use json_event_parser::{FromReadJsonReader, JsonEvent, ToWriteJsonWriter};
use oxrdf::vocab::rdf;
-use oxrdf::Variable;
use oxrdf::*;
use std::collections::BTreeMap;
use std::io::{self, Read, Write};
@@ -522,7 +521,7 @@ fn read_value(
JsonEvent::EndObject => {
if let Some(s) = state {
if s == State::Value {
- state = None; //End of triple
+ state = None; // End of triple
} else {
return Err(
SyntaxError::msg("Term description values should be string").into()
diff --git a/lib/sparesults/src/parser.rs b/lib/sparesults/src/parser.rs
index a00d014c..3332335b 100644
--- a/lib/sparesults/src/parser.rs
+++ b/lib/sparesults/src/parser.rs
@@ -130,8 +130,8 @@ impl From<QueryResultsFormat> for QueryResultsParser {
///
/// Example in TSV (the API is the same for JSON and XML):
/// ```
-/// use sparesults::{QueryResultsFormat, QueryResultsParser, FromReadQueryResultsReader};
/// use oxrdf::{Literal, Variable};
+/// use sparesults::{FromReadQueryResultsReader, QueryResultsFormat, QueryResultsParser};
///
/// let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Tsv);
///
@@ -141,10 +141,24 @@ impl From<QueryResultsFormat> for QueryResultsParser {
/// }
///
/// // solutions
-/// if let FromReadQueryResultsReader::Solutions(solutions) = json_parser.parse_read(b"?foo\t?bar\n\"test\"\t".as_slice())? {
-/// assert_eq!(solutions.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]);
+/// if let FromReadQueryResultsReader::Solutions(solutions) =
+/// json_parser.parse_read(b"?foo\t?bar\n\"test\"\t".as_slice())?
+/// {
+/// assert_eq!(
+/// solutions.variables(),
+/// &[
+/// Variable::new_unchecked("foo"),
+/// Variable::new_unchecked("bar")
+/// ]
+/// );
/// for solution in solutions {
-/// assert_eq!(solution?.iter().collect::<Vec<_>>(), vec![(&Variable::new_unchecked("foo"), &Literal::from("test").into())]);
+/// assert_eq!(
+/// solution?.iter().collect::<Vec<_>>(),
+/// vec![(
+/// &Variable::new_unchecked("foo"),
+/// &Literal::from("test").into()
+/// )]
+/// );
/// }
/// }
/// # Result::<(),sparesults::ParseError>::Ok(())
@@ -188,12 +202,20 @@ impl FromReadSolutionsReader {
///
/// Example in TSV (the API is the same for JSON and XML):
/// ```
- /// use sparesults::{QueryResultsFormat, QueryResultsParser, FromReadQueryResultsReader};
/// use oxrdf::Variable;
+ /// use sparesults::{FromReadQueryResultsReader, QueryResultsFormat, QueryResultsParser};
///
/// let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Tsv);
- /// if let FromReadQueryResultsReader::Solutions(solutions) = json_parser.parse_read(b"?foo\t?bar\n\"ex1\"\t\"ex2\"".as_slice())? {
- /// assert_eq!(solutions.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]);
+ /// if let FromReadQueryResultsReader::Solutions(solutions) =
+ /// json_parser.parse_read(b"?foo\t?bar\n\"ex1\"\t\"ex2\"".as_slice())?
+ /// {
+ /// assert_eq!(
+ /// solutions.variables(),
+ /// &[
+ /// Variable::new_unchecked("foo"),
+ /// Variable::new_unchecked("bar")
+ /// ]
+ /// );
/// }
/// # Result::<(),sparesults::ParseError>::Ok(())
/// ```
diff --git a/lib/sparesults/src/serializer.rs b/lib/sparesults/src/serializer.rs
index 13c21628..1d4a02b1 100644
--- a/lib/sparesults/src/serializer.rs
+++ b/lib/sparesults/src/serializer.rs
@@ -241,14 +241,23 @@ impl From<QueryResultsFormat> for QueryResultsSerializer {
///
/// Example in TSV (the API is the same for JSON, XML and CSV):
/// ```
-/// use sparesults::{QueryResultsFormat, QueryResultsSerializer};
/// use oxrdf::{LiteralRef, Variable, VariableRef};
+/// use sparesults::{QueryResultsFormat, QueryResultsSerializer};
/// use std::iter::once;
///
/// let tsv_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Tsv);
/// let mut buffer = Vec::new();
-/// let mut writer = tsv_serializer.serialize_solutions_to_write(&mut buffer, vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")])?;
-/// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test"))))?;
+/// let mut writer = tsv_serializer.serialize_solutions_to_write(
+/// &mut buffer,
+/// vec![
+/// Variable::new_unchecked("foo"),
+/// Variable::new_unchecked("bar"),
+/// ],
+/// )?;
+/// writer.write(once((
+/// VariableRef::new_unchecked("foo"),
+/// LiteralRef::from("test"),
+/// )))?;
/// writer.finish()?;
/// assert_eq!(buffer, b"?foo\t?bar\n\"test\"\t\n");
/// # std::io::Result::Ok(())
@@ -321,16 +330,29 @@ impl<W: Write> ToWriteSolutionsWriter<W> {
///
/// Example in TSV (the API is the same for JSON, CSV and XML):
/// ```
-/// use sparesults::{QueryResultsFormat, QueryResultsSerializer};
/// use oxrdf::{LiteralRef, Variable, VariableRef};
+/// use sparesults::{QueryResultsFormat, QueryResultsSerializer};
/// use std::iter::once;
///
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> std::io::Result<()> {
/// let tsv_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Tsv);
/// let mut buffer = Vec::new();
-/// let mut writer = tsv_serializer.serialize_solutions_to_tokio_async_write(&mut buffer, vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]).await?;
-/// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test")))).await?;
+/// let mut writer = tsv_serializer
+/// .serialize_solutions_to_tokio_async_write(
+/// &mut buffer,
+/// vec![
+/// Variable::new_unchecked("foo"),
+/// Variable::new_unchecked("bar"),
+/// ],
+/// )
+/// .await?;
+/// writer
+/// .write(once((
+/// VariableRef::new_unchecked("foo"),
+/// LiteralRef::from("test"),
+/// )))
+/// .await?;
/// writer.finish().await?;
/// assert_eq!(buffer, b"?foo\t?bar\n\"test\"\t\n");
/// # Ok(())
diff --git a/lib/sparesults/src/solution.rs b/lib/sparesults/src/solution.rs
index 0d81adc2..826a9eea 100644
--- a/lib/sparesults/src/solution.rs
+++ b/lib/sparesults/src/solution.rs
@@ -44,10 +44,16 @@ impl QuerySolution {
/// It is also the number of columns in the solutions table.
///
/// ```
+ /// use oxrdf::{Literal, Variable};
/// use sparesults::QuerySolution;
- /// use oxrdf::{Variable, Literal};
///
- /// let solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![Some(Literal::from(1).into()), None]));
+ /// let solution = QuerySolution::from((
+ /// vec![
+ /// Variable::new_unchecked("foo"),
+ /// Variable::new_unchecked("bar"),
+ /// ],
+ /// vec![Some(Literal::from(1).into()), None],
+ /// ));
/// assert_eq!(solution.len(), 2);
/// ```
#[inline]
@@ -58,13 +64,25 @@ impl QuerySolution {
/// Is there any variable bound in the table?
///
/// ```
+ /// use oxrdf::{Literal, Variable};
/// use sparesults::QuerySolution;
- /// use oxrdf::{Variable, Literal};
///
- /// let solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![Some(Literal::from(1).into()), None]));
+ /// let solution = QuerySolution::from((
+ /// vec![
+ /// Variable::new_unchecked("foo"),
+ /// Variable::new_unchecked("bar"),
+ /// ],
+ /// vec![Some(Literal::from(1).into()), None],
+ /// ));
/// assert!(!solution.is_empty());
///
- /// let empty_solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![None, None]));
+ /// let empty_solution = QuerySolution::from((
+ /// vec![
+ /// Variable::new_unchecked("foo"),
+ /// Variable::new_unchecked("bar"),
+ /// ],
+ /// vec![None, None],
+ /// ));
/// assert!(empty_solution.is_empty());
/// ```
#[inline]
@@ -75,11 +93,20 @@ impl QuerySolution {
/// Returns an iterator over bound variables.
///
/// ```
+ /// use oxrdf::{Literal, Variable};
/// use sparesults::QuerySolution;
- /// use oxrdf::{Variable, Literal};
///
- /// let solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![Some(Literal::from(1).into()), None]));
- /// assert_eq!(solution.iter().collect::<Vec<_>>(), vec![(&Variable::new_unchecked("foo"), &Literal::from(1).into())]);
+ /// let solution = QuerySolution::from((
+ /// vec![
+ /// Variable::new_unchecked("foo"),
+ /// Variable::new_unchecked("bar"),
+ /// ],
+ /// vec![Some(Literal::from(1).into()), None],
+ /// ));
+ /// assert_eq!(
+ /// solution.iter().collect::<Vec<_>>(),
+ /// vec![(&Variable::new_unchecked("foo"), &Literal::from(1).into())]
+ /// );
/// ```
#[inline]
pub fn iter(&self) -> impl Iterator<Item = (&Variable, &Term)> {
@@ -89,10 +116,16 @@ impl QuerySolution {
/// Returns the ordered slice of variable values.
///
/// ```
+ /// use oxrdf::{Literal, Variable};
/// use sparesults::QuerySolution;
- /// use oxrdf::{Variable, Literal};
///
- /// let solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![Some(Literal::from(1).into()), None]));
+ /// let solution = QuerySolution::from((
+ /// vec![
+ /// Variable::new_unchecked("foo"),
+ /// Variable::new_unchecked("bar"),
+ /// ],
+ /// vec![Some(Literal::from(1).into()), None],
+ /// ));
/// assert_eq!(solution.values(), &[Some(Literal::from(1).into()), None]);
/// ```
#[inline]
@@ -103,11 +136,23 @@ impl QuerySolution {
/// Returns the ordered slice of the solution variables, bound or not.
///
/// ```
+ /// use oxrdf::{Literal, Variable};
/// use sparesults::QuerySolution;
- /// use oxrdf::{Variable, Literal};
///
- /// let solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![Some(Literal::from(1).into()), None]));
- /// assert_eq!(solution.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]);
+ /// let solution = QuerySolution::from((
+ /// vec![
+ /// Variable::new_unchecked("foo"),
+ /// Variable::new_unchecked("bar"),
+ /// ],
+ /// vec![Some(Literal::from(1).into()), None],
+ /// ));
+ /// assert_eq!(
+ /// solution.variables(),
+ /// &[
+ /// Variable::new_unchecked("foo"),
+ /// Variable::new_unchecked("bar")
+ /// ]
+ /// );
/// ```
#[inline]
pub fn variables(&self) -> &[Variable] {
@@ -126,8 +171,8 @@ impl<V: Into<Rc<[Variable]>>, S: Into<Vec<Option<Term>>>> From<(V, S)> for Quer
}
impl<'a> IntoIterator for &'a QuerySolution {
- type Item = (&'a Variable, &'a Term);
type IntoIter = Iter<'a>;
+ type Item = (&'a Variable, &'a Term);
#[inline]
fn into_iter(self) -> Self::IntoIter {
@@ -214,11 +259,20 @@ impl fmt::Debug for QuerySolution {
/// An iterator over [`QuerySolution`] bound variables.
///
/// ```
+/// use oxrdf::{Literal, Variable};
/// use sparesults::QuerySolution;
-/// use oxrdf::{Variable, Literal};
///
-/// let solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![Some(Literal::from(1).into()), None]));
-/// assert_eq!(solution.iter().collect::<Vec<_>>(), vec![(&Variable::new_unchecked("foo"), &Literal::from(1).into())]);
+/// let solution = QuerySolution::from((
+/// vec![
+/// Variable::new_unchecked("foo"),
+/// Variable::new_unchecked("bar"),
+/// ],
+/// vec![Some(Literal::from(1).into()), None],
+/// ));
+/// assert_eq!(
+/// solution.iter().collect::<Vec<_>>(),
+/// vec![(&Variable::new_unchecked("foo"), &Literal::from(1).into())]
+/// );
/// ```
pub struct Iter<'a> {
inner: Zip<std::slice::Iter<'a, Variable>, std::slice::Iter<'a, Option<Term>>>,
diff --git a/lib/sparesults/src/xml.rs b/lib/sparesults/src/xml.rs
index 3c2d91a8..c0450fac 100644
--- a/lib/sparesults/src/xml.rs
+++ b/lib/sparesults/src/xml.rs
@@ -2,7 +2,6 @@
use crate::error::{ParseError, SyntaxError};
use oxrdf::vocab::rdf;
-use oxrdf::Variable;
use oxrdf::*;
use quick_xml::events::{BytesDecl, BytesEnd, BytesStart, BytesText, Event};
use quick_xml::{Reader, Writer};
@@ -245,7 +244,7 @@ impl<R: Read> XmlQueryResultsReader<R> {
let mut variables = Vec::default();
let mut state = State::Start;
- //Read header
+ // Read header
loop {
buffer.clear();
let event = reader.read_event_into(&mut buffer)?;
@@ -553,7 +552,7 @@ impl<R: Read> XmlSolutionsReader<R> {
}
State::BNode => {
if term.is_none() {
- //We default to a random bnode
+ // We default to a random bnode
term = Some(BlankNode::default().into())
}
state = self
@@ -563,7 +562,7 @@ impl<R: Read> XmlSolutionsReader<R> {
}
State::Literal => {
if term.is_none() {
- //We default to the empty literal
+ // We default to the empty literal
term = Some(build_literal("", lang.take(), datatype.take())?.into())
}
state = self
diff --git a/lib/spargebra/src/parser.rs b/lib/spargebra/src/parser.rs
index 65a251e5..03a71932 100644
--- a/lib/spargebra/src/parser.rs
+++ b/lib/spargebra/src/parser.rs
@@ -365,7 +365,7 @@ enum PartialGraphPattern {
}
fn new_join(l: GraphPattern, r: GraphPattern) -> GraphPattern {
- //Avoid to output empty BGPs
+ // Avoid to output empty BGPs
if let GraphPattern::Bgp { patterns: pl } = &l {
if pl.is_empty() {
return r;
@@ -449,7 +449,7 @@ fn build_select(
let mut p = r#where;
let mut with_aggregate = false;
- //GROUP BY
+ // GROUP BY
let aggregates = state.aggregates.pop().unwrap_or_default();
if group.is_none() && !aggregates.is_empty() {
group = Some((vec![], vec![]));
@@ -471,7 +471,7 @@ fn build_select(
with_aggregate = true;
}
- //HAVING
+ // HAVING
if let Some(expr) = having {
p = GraphPattern::Filter {
expr,
@@ -479,12 +479,12 @@ fn build_select(
};
}
- //VALUES
+ // VALUES
if let Some(data) = values {
p = new_join(p, data);
}
- //SELECT
+ // SELECT
let mut pv = Vec::new();
let with_project = match select.variables {
SelectionVariables::Explicit(sel_items) => {
@@ -533,7 +533,7 @@ fn build_select(
if with_aggregate {
return Err("SELECT * is not authorized with GROUP BY");
}
- //TODO: is it really useful to do a projection?
+ // TODO: is it really useful to do a projection?
p.on_in_scope_variable(|v| {
if !pv.contains(v) {
pv.push(v.clone());
@@ -547,7 +547,7 @@ fn build_select(
let mut m = p;
- //ORDER BY
+ // ORDER BY
if let Some(expression) = order_by {
m = GraphPattern::OrderBy {
inner: Box::new(m),
@@ -555,7 +555,7 @@ fn build_select(
};
}
- //PROJECT
+ // PROJECT
if with_project {
m = GraphPattern::Project {
inner: Box::new(m),
@@ -568,7 +568,7 @@ fn build_select(
SelectionOption::Default => (),
}
- //OFFSET LIMIT
+ // OFFSET LIMIT
if let Some((start, length)) = offset_limit {
m = GraphPattern::Slice {
inner: Box::new(m),
diff --git a/lib/spargebra/src/query.rs b/lib/spargebra/src/query.rs
index 8716de73..5739b7b8 100644
--- a/lib/spargebra/src/query.rs
+++ b/lib/spargebra/src/query.rs
@@ -13,7 +13,10 @@ use std::str::FromStr;
/// let query_str = "SELECT ?s ?p ?o WHERE { ?s ?p ?o . }";
/// let query = Query::parse(query_str, None)?;
/// assert_eq!(query.to_string(), query_str);
-/// assert_eq!(query.to_sse(), "(project (?s ?p ?o) (bgp (triple ?s ?p ?o)))");
+/// assert_eq!(
+/// query.to_sse(),
+/// "(project (?s ?p ?o) (bgp (triple ?s ?p ?o)))"
+/// );
/// # Ok::<_, spargebra::ParseError>(())
/// ```
#[derive(Eq, PartialEq, Debug, Clone, Hash)]
diff --git a/lib/spargebra/src/term.rs b/lib/spargebra/src/term.rs
index 362b3959..ba5fb8e3 100644
--- a/lib/spargebra/src/term.rs
+++ b/lib/spargebra/src/term.rs
@@ -141,7 +141,7 @@ impl TryFrom<Term> for GroundTerm {
/// The default string formatter is returning a N-Quads representation.
///
/// ```
-/// use spargebra::term::{NamedNode, GroundTriple};
+/// use spargebra::term::{GroundTriple, NamedNode};
///
/// assert_eq!(
/// "<http://example.com/s> <http://example.com/p> <http://example.com/o>",
@@ -149,7 +149,8 @@ impl TryFrom<Term> for GroundTerm {
/// subject: NamedNode::new("http://example.com/s")?.into(),
/// predicate: NamedNode::new("http://example.com/p")?,
/// object: NamedNode::new("http://example.com/o")?.into(),
-/// }.to_string()
+/// }
+/// .to_string()
/// );
/// # Result::<_,oxrdf::IriParseError>::Ok(())
/// ```
diff --git a/lib/sparopt/src/algebra.rs b/lib/sparopt/src/algebra.rs
index 51ecf6fa..b9bb30f7 100644
--- a/lib/sparopt/src/algebra.rs
+++ b/lib/sparopt/src/algebra.rs
@@ -197,10 +197,10 @@ impl Expression {
xsd::BOOLEAN => match literal.value() {
"true" | "1" => Some(true),
"false" | "0" => Some(false),
- _ => None, //TODO
+ _ => None, // TODO
},
xsd::STRING => Some(!literal.value().is_empty()),
- _ => None, //TODO
+ _ => None, // TODO
}
} else {
None
diff --git a/lib/sparopt/src/optimizer.rs b/lib/sparopt/src/optimizer.rs
index 5dc9d404..facc5b0c 100644
--- a/lib/sparopt/src/optimizer.rs
+++ b/lib/sparopt/src/optimizer.rs
@@ -102,7 +102,7 @@ impl Optimizer {
let expression = Self::normalize_expression(expression, &inner_types);
let expression_type = infer_expression_type(&expression, &inner_types);
if expression_type == VariableType::UNDEF {
- //TODO: valid?
+ // TODO: valid?
inner
} else {
GraphPattern::extend(inner, variable, expression)
@@ -397,7 +397,7 @@ impl Optimizer {
expression,
variable,
} => {
- //TODO: handle the case where the filter overrides an expression variable (should not happen in SPARQL but allowed in the algebra)
+ // TODO: handle the case where the filter overrides an expression variable (should not happen in SPARQL but allowed in the algebra)
let mut inner_filters = Vec::new();
let mut final_filters = Vec::new();
for filter in filters {
@@ -735,7 +735,7 @@ fn is_fit_for_for_loop_join(
global_input_types: &VariableTypes,
entry_types: &VariableTypes,
) -> bool {
- //TODO: think more about it
+ // TODO: think more about it
match pattern {
GraphPattern::Values { .. }
| GraphPattern::QuadPattern { .. }
diff --git a/lib/sparopt/src/type_inference.rs b/lib/sparopt/src/type_inference.rs
index 161ba58a..d53b63e4 100644
--- a/lib/sparopt/src/type_inference.rs
+++ b/lib/sparopt/src/type_inference.rs
@@ -49,7 +49,7 @@ pub fn infer_graph_pattern_types(
infer_graph_pattern_types(right, infer_graph_pattern_types(left, types))
}
GraphPattern::LeftJoin { left, right, .. } => {
- let mut right_types = infer_graph_pattern_types(right, types.clone()); //TODO: expression
+ let mut right_types = infer_graph_pattern_types(right, types.clone()); // TODO: expression
for t in right_types.inner.values_mut() {
t.undef = true; // Right might be unset
}
@@ -352,24 +352,14 @@ pub struct VariableType {
}
impl VariableType {
- pub const UNDEF: Self = Self {
+ const ANY: Self = Self {
undef: true,
- named_node: false,
- blank_node: false,
- literal: false,
- #[cfg(feature = "rdf-star")]
- triple: false,
- };
-
- const NAMED_NODE: Self = Self {
- undef: false,
named_node: true,
- blank_node: false,
- literal: false,
+ blank_node: true,
+ literal: true,
#[cfg(feature = "rdf-star")]
- triple: false,
+ triple: true,
};
-
const BLANK_NODE: Self = Self {
undef: false,
named_node: false,
@@ -378,7 +368,6 @@ impl VariableType {
#[cfg(feature = "rdf-star")]
triple: false,
};
-
const LITERAL: Self = Self {
undef: false,
named_node: false,
@@ -387,16 +376,14 @@ impl VariableType {
#[cfg(feature = "rdf-star")]
triple: false,
};
-
- #[cfg(feature = "rdf-star")]
- const TRIPLE: Self = Self {
+ const NAMED_NODE: Self = Self {
undef: false,
- named_node: false,
+ named_node: true,
blank_node: false,
literal: false,
- triple: true,
+ #[cfg(feature = "rdf-star")]
+ triple: false,
};
-
const SUBJECT: Self = Self {
undef: false,
named_node: true,
@@ -405,7 +392,6 @@ impl VariableType {
#[cfg(feature = "rdf-star")]
triple: true,
};
-
const TERM: Self = Self {
undef: false,
named_node: true,
@@ -414,14 +400,21 @@ impl VariableType {
#[cfg(feature = "rdf-star")]
triple: true,
};
-
- const ANY: Self = Self {
+ #[cfg(feature = "rdf-star")]
+ const TRIPLE: Self = Self {
+ undef: false,
+ named_node: false,
+ blank_node: false,
+ literal: false,
+ triple: true,
+ };
+ pub const UNDEF: Self = Self {
undef: true,
- named_node: true,
- blank_node: true,
- literal: true,
+ named_node: false,
+ blank_node: false,
+ literal: false,
#[cfg(feature = "rdf-star")]
- triple: true,
+ triple: false,
};
}
diff --git a/lib/sparql-smith/src/lib.rs b/lib/sparql-smith/src/lib.rs
index 01ca45e5..3bc99c0b 100644
--- a/lib/sparql-smith/src/lib.rs
+++ b/lib/sparql-smith/src/lib.rs
@@ -44,7 +44,7 @@ struct QueryContent {
#[derive(Arbitrary)]
enum QueryVariant {
Select(SelectQuery),
- //TODO: Other variants!
+ // TODO: Other variants!
}
impl<'a> Arbitrary<'a> for Query {
@@ -246,7 +246,7 @@ impl fmt::Display for GroupCondition {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::BuiltInCall(c) => write!(f, "{c}"),
- //Self::FunctionCall(c) => write!(f, "{}", c),
+ // Self::FunctionCall(c) => write!(f, "{}", c),
Self::Projection(e, v) => {
if let Some(v) = v {
write!(f, "({e} AS {v})")
@@ -705,7 +705,7 @@ impl fmt::Display for Constraint {
match self {
Self::BrackettedExpression(e) => write!(f, "{e}"),
Self::BuiltInCall(c) => write!(f, "{c}"),
- //Self::FunctionCall(c) => write!(f, "{}", c),
+ // Self::FunctionCall(c) => write!(f, "{}", c),
}
}
}
@@ -1530,7 +1530,7 @@ enum BuiltInCall {
IsLiteral(Box<Expression>),
IsNumeric(Box<Expression>),
Exists(ExistsFunc),
- NotExists(NotExistsFunc), //TODO: Other functions
+ NotExists(NotExistsFunc), // TODO: Other functions
}
impl fmt::Display for BuiltInCall {
@@ -1585,15 +1585,15 @@ impl fmt::Display for NotExistsFunc {
struct IriOrFunction {
// [128] iriOrFunction ::= iri ArgList?
iri: Iri,
- //TODO args: Option<ArgList>,
+ // TODO args: Option<ArgList>,
}
impl fmt::Display for IriOrFunction {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.iri)?;
- /*if let Some(args) = &self.args {
- write!(f, "{}", args)?;
- }*/
+ // if let Some(args) = &self.args {
+ // write!(f, "{}", args)?;
+ // }
Ok(())
}
}
diff --git a/lib/src/io/format.rs b/lib/src/io/format.rs
index b07c1709..08b61d8a 100644
--- a/lib/src/io/format.rs
+++ b/lib/src/io/format.rs
@@ -23,7 +23,10 @@ impl GraphFormat {
/// ```
/// use oxigraph::io::GraphFormat;
///
- /// assert_eq!(GraphFormat::NTriples.iri(), "http://www.w3.org/ns/formats/N-Triples")
+ /// assert_eq!(
+ /// GraphFormat::NTriples.iri(),
+ /// "http://www.w3.org/ns/formats/N-Triples"
+ /// )
/// ```
#[inline]
pub fn iri(self) -> &'static str {
@@ -65,6 +68,7 @@ impl GraphFormat {
Self::RdfXml => "rdf",
}
}
+
/// Looks for a known format from a media type.
///
/// It supports some media type aliases.
@@ -74,7 +78,10 @@ impl GraphFormat {
/// ```
/// use oxigraph::io::GraphFormat;
///
- /// assert_eq!(GraphFormat::from_media_type("text/turtle; charset=utf-8"), Some(GraphFormat::Turtle))
+ /// assert_eq!(
+ /// GraphFormat::from_media_type("text/turtle; charset=utf-8"),
+ /// Some(GraphFormat::Turtle)
+ /// )
/// ```
#[inline]
pub fn from_media_type(media_type: &str) -> Option<Self> {
@@ -94,7 +101,10 @@ impl GraphFormat {
/// ```
/// use oxigraph::io::GraphFormat;
///
- /// assert_eq!(GraphFormat::from_extension("nt"), Some(GraphFormat::NTriples))
+ /// assert_eq!(
+ /// GraphFormat::from_extension("nt"),
+ /// Some(GraphFormat::NTriples)
+ /// )
/// ```
#[inline]
pub fn from_extension(extension: &str) -> Option<Self> {
@@ -151,7 +161,10 @@ impl DatasetFormat {
/// ```
/// use oxigraph::io::DatasetFormat;
///
- /// assert_eq!(DatasetFormat::NQuads.iri(), "http://www.w3.org/ns/formats/N-Quads")
+ /// assert_eq!(
+ /// DatasetFormat::NQuads.iri(),
+ /// "http://www.w3.org/ns/formats/N-Quads"
+ /// )
/// ```
#[inline]
pub fn iri(self) -> &'static str {
@@ -190,6 +203,7 @@ impl DatasetFormat {
Self::TriG => "trig",
}
}
+
/// Looks for a known format from a media type.
///
/// It supports some media type aliases.
@@ -198,7 +212,10 @@ impl DatasetFormat {
/// ```
/// use oxigraph::io::DatasetFormat;
///
- /// assert_eq!(DatasetFormat::from_media_type("application/n-quads; charset=utf-8"), Some(DatasetFormat::NQuads))
+ /// assert_eq!(
+ /// DatasetFormat::from_media_type("application/n-quads; charset=utf-8"),
+ /// Some(DatasetFormat::NQuads)
+ /// )
/// ```
#[inline]
pub fn from_media_type(media_type: &str) -> Option<Self> {
@@ -217,7 +234,10 @@ impl DatasetFormat {
/// ```
/// use oxigraph::io::DatasetFormat;
///
- /// assert_eq!(DatasetFormat::from_extension("nq"), Some(DatasetFormat::NQuads))
+ /// assert_eq!(
+ /// DatasetFormat::from_extension("nq"),
+ /// Some(DatasetFormat::NQuads)
+ /// )
/// ```
#[inline]
pub fn from_extension(extension: &str) -> Option<Self> {
diff --git a/lib/src/io/read.rs b/lib/src/io/read.rs
index 3400b8e2..841b166a 100644
--- a/lib/src/io/read.rs
+++ b/lib/src/io/read.rs
@@ -21,7 +21,9 @@ use std::io::Read;
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> .";
///
/// let parser = GraphParser::from_format(GraphFormat::NTriples);
-/// let triples = parser.read_triples(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
+/// let triples = parser
+/// .read_triples(file.as_bytes())
+/// .collect::<Result<Vec<_>, _>>()?;
///
/// assert_eq!(triples.len(), 1);
/// assert_eq!(triples[0].subject.to_string(), "<http://example.com/s>");
@@ -50,8 +52,11 @@ impl GraphParser {
///
/// let file = "</s> </p> </o> .";
///
- /// let parser = GraphParser::from_format(GraphFormat::Turtle).with_base_iri("http://example.com")?;
- /// let triples = parser.read_triples(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
+ /// let parser =
+ /// GraphParser::from_format(GraphFormat::Turtle).with_base_iri("http://example.com")?;
+ /// let triples = parser
+ /// .read_triples(file.as_bytes())
+ /// .collect::<Result<Vec<_>, _>>()?;
///
/// assert_eq!(triples.len(), 1);
/// assert_eq!(triples[0].subject.to_string(), "<http://example.com/s>");
@@ -81,7 +86,9 @@ impl GraphParser {
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> .";
///
/// let parser = GraphParser::from_format(GraphFormat::NTriples);
-/// let triples = parser.read_triples(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
+/// let triples = parser
+/// .read_triples(file.as_bytes())
+/// .collect::<Result<Vec<_>, _>>()?;
///
/// assert_eq!(triples.len(), 1);
/// assert_eq!(triples[0].subject.to_string(), "<http://example.com/s>");
@@ -139,8 +146,11 @@ impl DatasetParser {
///
/// let file = "</g> { </s> </p> </o> }";
///
- /// let parser = DatasetParser::from_format(DatasetFormat::TriG).with_base_iri("http://example.com")?;
- /// let triples = parser.read_quads(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
+ /// let parser =
+ /// DatasetParser::from_format(DatasetFormat::TriG).with_base_iri("http://example.com")?;
+ /// let triples = parser
+ /// .read_quads(file.as_bytes())
+ /// .collect::<Result<Vec<_>, _>>()?;
///
/// assert_eq!(triples.len(), 1);
/// assert_eq!(triples[0].subject.to_string(), "<http://example.com/s>");
diff --git a/lib/src/io/write.rs b/lib/src/io/write.rs
index 7a9007c0..7f27cd9f 100644
--- a/lib/src/io/write.rs
+++ b/lib/src/io/write.rs
@@ -21,13 +21,16 @@ use std::io::{self, Write};
/// let mut buffer = Vec::new();
/// let mut writer = GraphSerializer::from_format(GraphFormat::NTriples).triple_writer(&mut buffer);
/// writer.write(&Triple {
-/// subject: NamedNode::new("http://example.com/s")?.into(),
-/// predicate: NamedNode::new("http://example.com/p")?,
-/// object: NamedNode::new("http://example.com/o")?.into()
+/// subject: NamedNode::new("http://example.com/s")?.into(),
+/// predicate: NamedNode::new("http://example.com/p")?,
+/// object: NamedNode::new("http://example.com/o")?.into(),
/// })?;
/// writer.finish()?;
///
-/// assert_eq!(buffer.as_slice(), "<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n".as_bytes());
+/// assert_eq!(
+/// buffer.as_slice(),
+/// "<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n".as_bytes()
+/// );
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ```
#[deprecated(note = "use RdfSerializer instead", since = "0.4.0")]
@@ -66,13 +69,16 @@ impl GraphSerializer {
/// let mut buffer = Vec::new();
/// let mut writer = GraphSerializer::from_format(GraphFormat::NTriples).triple_writer(&mut buffer);
/// writer.write(&Triple {
-/// subject: NamedNode::new("http://example.com/s")?.into(),
-/// predicate: NamedNode::new("http://example.com/p")?,
-/// object: NamedNode::new("http://example.com/o")?.into()
+/// subject: NamedNode::new("http://example.com/s")?.into(),
+/// predicate: NamedNode::new("http://example.com/p")?,
+/// object: NamedNode::new("http://example.com/o")?.into(),
/// })?;
/// writer.finish()?;
///
-/// assert_eq!(buffer.as_slice(), "<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n".as_bytes());
+/// assert_eq!(
+/// buffer.as_slice(),
+/// "<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n".as_bytes()
+/// );
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ```
#[must_use]
diff --git a/lib/src/sparql/algebra.rs b/lib/src/sparql/algebra.rs
index b046de80..819a9bd9 100644
--- a/lib/src/sparql/algebra.rs
+++ b/lib/src/sparql/algebra.rs
@@ -23,7 +23,10 @@ use std::str::FromStr;
/// // We edit the query dataset specification
/// let default = vec![NamedNode::new("http://example.com")?.into()];
/// query.dataset_mut().set_default_graph(default.clone());
-/// assert_eq!(query.dataset().default_graph_graphs(), Some(default.as_slice()));
+/// assert_eq!(
+/// query.dataset().default_graph_graphs(),
+/// Some(default.as_slice())
+/// );
/// # Ok::<_, Box<dyn std::error::Error>>(())
/// ```
#[derive(Eq, PartialEq, Debug, Clone, Hash)]
@@ -58,7 +61,7 @@ impl Query {
impl fmt::Display for Query {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- self.inner.fmt(f) //TODO: override
+ self.inner.fmt(f) // TODO: override
}
}
@@ -217,8 +220,15 @@ impl QueryDataset {
/// ```
/// use oxigraph::sparql::Query;
///
- /// assert!(Query::parse("SELECT ?s ?p ?o WHERE { ?s ?p ?o . }", None)?.dataset().is_default_dataset());
- /// assert!(!Query::parse("SELECT ?s ?p ?o FROM <http://example.com> WHERE { ?s ?p ?o . }", None)?.dataset().is_default_dataset());
+ /// assert!(Query::parse("SELECT ?s ?p ?o WHERE { ?s ?p ?o . }", None)?
+ /// .dataset()
+ /// .is_default_dataset());
+ /// assert!(!Query::parse(
+ /// "SELECT ?s ?p ?o FROM <http://example.com> WHERE { ?s ?p ?o . }",
+ /// None
+ /// )?
+ /// .dataset()
+ /// .is_default_dataset());
///
/// # Ok::<_, Box<dyn std::error::Error>>(())
/// ```
@@ -250,7 +260,10 @@ impl QueryDataset {
/// let mut query = Query::parse("SELECT ?s ?p ?o WHERE { ?s ?p ?o . }", None)?;
/// let default = vec![NamedNode::new("http://example.com")?.into()];
/// query.dataset_mut().set_default_graph(default.clone());
- /// assert_eq!(query.dataset().default_graph_graphs(), Some(default.as_slice()));
+ /// assert_eq!(
+ /// query.dataset().default_graph_graphs(),
+ /// Some(default.as_slice())
+ /// );
///
/// # Ok::<_, Box<dyn std::error::Error>>(())
/// ```
@@ -271,8 +284,13 @@ impl QueryDataset {
///
/// let mut query = Query::parse("SELECT ?s ?p ?o WHERE { ?s ?p ?o . }", None)?;
/// let named = vec![NamedNode::new("http://example.com")?.into()];
- /// query.dataset_mut().set_available_named_graphs(named.clone());
- /// assert_eq!(query.dataset().available_named_graphs(), Some(named.as_slice()));
+ /// query
+ /// .dataset_mut()
+ /// .set_available_named_graphs(named.clone());
+ /// assert_eq!(
+ /// query.dataset().available_named_graphs(),
+ /// Some(named.as_slice())
+ /// );
///
/// # Ok::<_, Box<dyn std::error::Error>>(())
/// ```
diff --git a/lib/src/sparql/error.rs b/lib/src/sparql/error.rs
index 4728efb7..43234d67 100644
--- a/lib/src/sparql/error.rs
+++ b/lib/src/sparql/error.rs
@@ -5,8 +5,7 @@ use crate::sparql::ParseError;
use crate::storage::StorageError;
use std::convert::Infallible;
use std::error::Error;
-use std::fmt;
-use std::io;
+use std::{fmt, io};
/// A SPARQL evaluation error.
#[derive(Debug)]
diff --git a/lib/src/sparql/eval.rs b/lib/src/sparql/eval.rs
index 25c4b3cf..19c6884d 100644
--- a/lib/src/sparql/eval.rs
+++ b/lib/src/sparql/eval.rs
@@ -33,8 +33,7 @@ use std::cmp::Ordering;
use std::collections::hash_map::DefaultHasher;
use std::collections::{HashMap, HashSet};
use std::hash::{Hash, Hasher};
-use std::iter::Iterator;
-use std::iter::{empty, once};
+use std::iter::{empty, once, Iterator};
use std::rc::Rc;
use std::sync::Arc;
use std::{fmt, io, str};
@@ -112,8 +111,8 @@ impl EncodedTuple {
}
impl IntoIterator for EncodedTuple {
- type Item = Option<EncodedTerm>;
type IntoIter = std::vec::IntoIter<Option<EncodedTerm>>;