Uses nightly rustfmt on imports and comments

pull/749/head
Authored by Tpt 11 months ago, committed by Thomas Tanon
parent f354bc7546
commit d838d55f02
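
The rustfmt.toml diff itself (10 changed lines, listed below) is not reproduced in this excerpt. As a rough, hedged sketch only, nightly options along the following lines would produce the import splitting, comment normalization, doc-example reflowing, and impl-item reordering visible in the hunks that follow; the option names are real nightly rustfmt settings, but the exact values used by this commit are assumptions:

# Hypothetical sketch of rustfmt.toml (assumed values, not the actual file from this commit);
# applied with the nightly toolchain, e.g. `cargo +nightly fmt`.
format_code_in_doc_comments = true # reformat code examples inside /// doc comments
imports_granularity = "Module"     # split `use a::{b, c::d}` into one `use` per module
normalize_comments = true          # normalize comment style, e.g. `//TODO:` to `// TODO:`
reorder_impl_items = true          # put associated consts/types before methods in impl blocks
wrap_comments = true               # wrap trailing comments that overflow the line width

The hunks below are consistent with this kind of configuration: single-line doc-test asserts are reflowed across lines, `use` trees are split per module and re-sorted, `//TODO` comments gain a space, and associated constants such as MIN/MAX move ahead of the methods in their impl blocks.
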
61 changed files (number of changed lines per file):

  1. cli/src/main.rs (5)
  2. lib/oxrdf/src/blank_node.rs (20)
  3. lib/oxrdf/src/dataset.rs (17)
  4. lib/oxrdf/src/graph.rs (7)
  5. lib/oxrdf/src/literal.rs (7)
  6. lib/oxrdf/src/parser.rs (78)
  7. lib/oxrdf/src/triple.rs (10)
  8. lib/oxrdf/src/variable.rs (12)
  9. lib/oxrdfio/src/format.rs (10)
  10. lib/oxrdfio/src/parser.rs (36)
  11. lib/oxrdfio/src/serializer.rs (7)
  12. lib/oxrdfxml/src/parser.rs (29)
  13. lib/oxsdatatypes/src/date_time.rs (161)
  14. lib/oxsdatatypes/src/decimal.rs (18)
  15. lib/oxsdatatypes/src/double.rs (24)
  16. lib/oxsdatatypes/src/duration.rs (41)
  17. lib/oxsdatatypes/src/float.rs (24)
  18. lib/oxsdatatypes/src/integer.rs (7)
  19. lib/oxttl/src/lexer.rs (4)
  20. lib/oxttl/src/line_formats.rs (6)
  21. lib/oxttl/src/n3.rs (42)
  22. lib/oxttl/src/terse.rs (14)
  23. lib/oxttl/src/toolkit/lexer.rs (4)
  24. lib/oxttl/src/trig.rs (38)
  25. lib/oxttl/src/turtle.rs (35)
  26. lib/sparesults/src/csv.rs (4)
  27. lib/sparesults/src/format.rs (21)
  28. lib/sparesults/src/json.rs (3)
  29. lib/sparesults/src/parser.rs (36)
  30. lib/sparesults/src/serializer.rs (34)
  31. lib/sparesults/src/solution.rs (88)
  32. lib/sparesults/src/xml.rs (7)
  33. lib/spargebra/src/parser.rs (18)
  34. lib/spargebra/src/query.rs (5)
  35. lib/spargebra/src/term.rs (5)
  36. lib/sparopt/src/algebra.rs (4)
  37. lib/sparopt/src/optimizer.rs (6)
  38. lib/sparopt/src/type_inference.rs (51)
  39. lib/sparql-smith/src/lib.rs (16)
  40. lib/src/io/format.rs (32)
  41. lib/src/io/read.rs (22)
  42. lib/src/io/write.rs (22)
  43. lib/src/sparql/algebra.rs (32)
  44. lib/src/sparql/error.rs (3)
  45. lib/src/sparql/eval.rs (21)
  46. lib/src/sparql/mod.rs (15)
  47. lib/src/sparql/model.rs (24)
  48. lib/src/sparql/service.rs (20)
  49. lib/src/sparql/update.rs (2)
  50. lib/src/storage/backend/rocksdb.rs (14)
  51. lib/src/storage/error.rs (3)
  52. lib/src/storage/mod.rs (4)
  53. lib/src/storage/numeric_encoder.rs (3)
  54. lib/src/storage/small_string.rs (3)
  55. lib/src/store.rs (241)
  56. python/src/io.rs (5)
  57. python/src/model.rs (4)
  58. python/src/sparql.rs (19)
  59. rustfmt.toml (10)
  60. testsuite/src/parser_evaluator.rs (4)
  61. testsuite/tests/sparql.rs (6)

@@ -469,7 +469,7 @@ pub fn main() -> anyhow::Result<()> {
file.display(),
error
)
//TODO: hard fail
// TODO: hard fail
}
})
}
@@ -1845,7 +1845,8 @@ mod tests {
use super::*;
use anyhow::Result;
use assert_cmd::Command;
use assert_fs::{prelude::*, NamedTempFile, TempDir};
use assert_fs::prelude::*;
use assert_fs::{NamedTempFile, TempDir};
use flate2::write::GzEncoder;
use flate2::Compression;
use oxhttp::model::Method;

@@ -1,8 +1,7 @@
use rand::random;
use std::error::Error;
use std::fmt;
use std::io::Write;
use std::str;
use std::{fmt, str};
/// An owned RDF [blank node](https://www.w3.org/TR/rdf11-concepts/#dfn-blank-node).
///
@@ -15,10 +14,7 @@ use std::str;
/// ```
/// use oxrdf::BlankNode;
///
/// assert_eq!(
/// "_:a122",
/// BlankNode::new("a122")?.to_string()
/// );
/// assert_eq!("_:a122", BlankNode::new("a122")?.to_string());
/// # Result::<_,oxrdf::BlankNodeIdParseError>::Ok(())
/// ```
#[derive(Eq, PartialEq, Debug, Clone, Hash)]
@@ -36,7 +32,7 @@ impl BlankNode {
/// The blank node identifier must be valid according to N-Triples, Turtle, and SPARQL grammars.
///
/// In most cases, it is much more convenient to create a blank node using [`BlankNode::default()`]
///that creates a random ID that could be easily inlined by Oxigraph stores.
/// that creates a random ID that could be easily inlined by Oxigraph stores.
pub fn new(id: impl Into<String>) -> Result<Self, BlankNodeIdParseError> {
let id = id.into();
validate_blank_node_identifier(&id)?;
@@ -133,10 +129,7 @@ impl Default for BlankNode {
/// ```
/// use oxrdf::BlankNodeRef;
///
/// assert_eq!(
/// "_:a122",
/// BlankNodeRef::new("a122")?.to_string()
/// );
/// assert_eq!("_:a122", BlankNodeRef::new("a122")?.to_string());
/// # Result::<_,oxrdf::BlankNodeIdParseError>::Ok(())
/// ```
#[derive(Eq, PartialEq, Debug, Clone, Copy, Hash)]
@@ -192,7 +185,10 @@ impl<'a> BlankNodeRef<'a> {
/// ```
/// use oxrdf::BlankNode;
///
/// assert_eq!(BlankNode::new_from_unique_id(128).as_ref().unique_id(), Some(128));
/// assert_eq!(
/// BlankNode::new_from_unique_id(128).as_ref().unique_id(),
/// Some(128)
/// );
/// assert_eq!(BlankNode::new("foo")?.as_ref().unique_id(), None);
/// # Result::<_,oxrdf::BlankNodeIdParseError>::Ok(())
/// ```

@@ -20,19 +20,20 @@
//! assert_eq!(vec![TripleRef::new(ex, ex, ex)], results);
//!
//! // Print
//! assert_eq!(dataset.to_string(), "<http://example.com> <http://example.com> <http://example.com> <http://example.com> .\n");
//! assert_eq!(
//! dataset.to_string(),
//! "<http://example.com> <http://example.com> <http://example.com> <http://example.com> .\n"
//! );
//! # Result::<_,Box<dyn std::error::Error>>::Ok(())
//! ```
//!
//! See also [`Graph`] if you only care about plain triples.
use crate::interning::*;
use crate::SubjectRef;
use crate::*;
use std::cmp::min;
use std::collections::hash_map::DefaultHasher;
use std::collections::BTreeSet;
use std::collections::{HashMap, HashSet};
use std::collections::{BTreeSet, HashMap, HashSet};
use std::fmt;
use std::hash::{Hash, Hasher};
@@ -924,8 +925,8 @@ impl PartialEq for Dataset {
impl Eq for Dataset {}
impl<'a> IntoIterator for &'a Dataset {
type Item = QuadRef<'a>;
type IntoIter = Iter<'a>;
type Item = QuadRef<'a>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
@@ -1282,8 +1283,8 @@ impl<'a> GraphView<'a> {
}
impl<'a> IntoIterator for GraphView<'a> {
type Item = TripleRef<'a>;
type IntoIter = GraphViewIter<'a>;
type Item = TripleRef<'a>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
@@ -1291,8 +1292,8 @@ impl<'a> IntoIterator for GraphView<'a> {
}
impl<'a, 'b> IntoIterator for &'b GraphView<'a> {
type Item = TripleRef<'a>;
type IntoIter = GraphViewIter<'a>;
type Item = TripleRef<'a>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
@@ -1493,8 +1494,8 @@ impl<'a, 'b, T: Into<TripleRef<'b>>> Extend<T> for GraphViewMut<'a> {
}
impl<'a> IntoIterator for &'a GraphViewMut<'a> {
type Item = TripleRef<'a>;
type IntoIter = GraphViewIter<'a>;
type Item = TripleRef<'a>;
fn into_iter(self) -> Self::IntoIter {
self.iter()

@@ -16,7 +16,10 @@
//! assert_eq!(vec![triple], results);
//!
//! // Print
//! assert_eq!(graph.to_string(), "<http://example.com> <http://example.com> <http://example.com> .\n");
//! assert_eq!(
//! graph.to_string(),
//! "<http://example.com> <http://example.com> <http://example.com> .\n"
//! );
//! # Result::<_,Box<dyn std::error::Error>>::Ok(())
//! ```
//!
@@ -226,8 +229,8 @@ impl PartialEq for Graph {
impl Eq for Graph {}
impl<'a> IntoIterator for &'a Graph {
type Item = TripleRef<'a>;
type IntoIter = Iter<'a>;
type Item = TripleRef<'a>;
fn into_iter(self) -> Self::IntoIter {
self.iter()

@@ -1,6 +1,5 @@
use crate::named_node::NamedNode;
use crate::vocab::rdf;
use crate::vocab::xsd;
use crate::vocab::{rdf, xsd};
use crate::NamedNodeRef;
use oxilangtag::{LanguageTag, LanguageTagParseError};
#[cfg(feature = "oxsdatatypes")]
@@ -15,8 +14,8 @@ use std::option::Option;
/// The default string formatter is returning an N-Triples, Turtle, and SPARQL compatible representation:
/// ```
/// # use oxilangtag::LanguageTagParseError;
/// use oxrdf::Literal;
/// use oxrdf::vocab::xsd;
/// use oxrdf::Literal;
///
/// assert_eq!(
/// "\"foo\\nbar\"",
@@ -427,8 +426,8 @@ impl From<DayTimeDuration> for Literal {
///
/// The default string formatter is returning an N-Triples, Turtle, and SPARQL compatible representation:
/// ```
/// use oxrdf::LiteralRef;
/// use oxrdf::vocab::xsd;
/// use oxrdf::LiteralRef;
///
/// assert_eq!(
/// "\"foo\\nbar\"",

@@ -5,10 +5,9 @@ use crate::{
};
#[cfg(feature = "rdf-star")]
use crate::{Subject, Triple};
use std::char;
use std::error::Error;
use std::fmt;
use std::str::{Chars, FromStr};
use std::{char, fmt};
/// This limit is set in order to avoid stack overflow error when parsing nested triples due to too many recursive calls.
/// The actual limit value is a wet finger compromise between not failing to parse valid files and avoiding to trigger stack overflow errors.
@@ -23,7 +22,10 @@ impl FromStr for NamedNode {
/// use oxrdf::NamedNode;
/// use std::str::FromStr;
///
/// assert_eq!(NamedNode::from_str("<http://example.com>").unwrap(), NamedNode::new("http://example.com").unwrap())
/// assert_eq!(
/// NamedNode::from_str("<http://example.com>").unwrap(),
/// NamedNode::new("http://example.com").unwrap()
/// )
/// ```
fn from_str(s: &str) -> Result<Self, Self::Err> {
let (term, left) = read_named_node(s)?;
@@ -45,7 +47,10 @@ impl FromStr for BlankNode {
/// use oxrdf::BlankNode;
/// use std::str::FromStr;
///
/// assert_eq!(BlankNode::from_str("_:ex").unwrap(), BlankNode::new("ex").unwrap())
/// assert_eq!(
/// BlankNode::from_str("_:ex").unwrap(),
/// BlankNode::new("ex").unwrap()
/// )
/// ```
fn from_str(s: &str) -> Result<Self, Self::Err> {
let (term, left) = read_blank_node(s)?;
@@ -64,16 +69,41 @@ impl FromStr for Literal {
/// Parses a literal from its NTriples or Turtle serialization
///
/// ```
/// use oxrdf::{Literal, NamedNode, vocab::xsd};
/// use oxrdf::vocab::xsd;
/// use oxrdf::{Literal, NamedNode};
/// use std::str::FromStr;
///
/// assert_eq!(Literal::from_str("\"ex\\n\"").unwrap(), Literal::new_simple_literal("ex\n"));
/// assert_eq!(Literal::from_str("\"ex\"@en").unwrap(), Literal::new_language_tagged_literal("ex", "en").unwrap());
/// assert_eq!(Literal::from_str("\"2020\"^^<http://www.w3.org/2001/XMLSchema#gYear>").unwrap(), Literal::new_typed_literal("2020", NamedNode::new("http://www.w3.org/2001/XMLSchema#gYear").unwrap()));
/// assert_eq!(Literal::from_str("true").unwrap(), Literal::new_typed_literal("true", xsd::BOOLEAN));
/// assert_eq!(Literal::from_str("+122").unwrap(), Literal::new_typed_literal("+122", xsd::INTEGER));
/// assert_eq!(Literal::from_str("-122.23").unwrap(), Literal::new_typed_literal("-122.23", xsd::DECIMAL));
/// assert_eq!(Literal::from_str("-122e+1").unwrap(), Literal::new_typed_literal("-122e+1", xsd::DOUBLE));
/// assert_eq!(
/// Literal::from_str("\"ex\\n\"").unwrap(),
/// Literal::new_simple_literal("ex\n")
/// );
/// assert_eq!(
/// Literal::from_str("\"ex\"@en").unwrap(),
/// Literal::new_language_tagged_literal("ex", "en").unwrap()
/// );
/// assert_eq!(
/// Literal::from_str("\"2020\"^^<http://www.w3.org/2001/XMLSchema#gYear>").unwrap(),
/// Literal::new_typed_literal(
/// "2020",
/// NamedNode::new("http://www.w3.org/2001/XMLSchema#gYear").unwrap()
/// )
/// );
/// assert_eq!(
/// Literal::from_str("true").unwrap(),
/// Literal::new_typed_literal("true", xsd::BOOLEAN)
/// );
/// assert_eq!(
/// Literal::from_str("+122").unwrap(),
/// Literal::new_typed_literal("+122", xsd::INTEGER)
/// );
/// assert_eq!(
/// Literal::from_str("-122.23").unwrap(),
/// Literal::new_typed_literal("-122.23", xsd::DECIMAL)
/// );
/// assert_eq!(
/// Literal::from_str("-122e+1").unwrap(),
/// Literal::new_typed_literal("-122e+1", xsd::DOUBLE)
/// );
/// ```
fn from_str(s: &str) -> Result<Self, Self::Err> {
let (term, left) = read_literal(s)?;
@@ -93,12 +123,19 @@ impl FromStr for Term {
/// use oxrdf::*;
/// use std::str::FromStr;
///
/// assert_eq!(Term::from_str("\"ex\"").unwrap(), Literal::new_simple_literal("ex").into());
/// assert_eq!(Term::from_str("<< _:s <http://example.com/p> \"o\" >>").unwrap(), Triple::new(
/// BlankNode::new("s").unwrap(),
/// NamedNode::new("http://example.com/p").unwrap(),
/// Literal::new_simple_literal("o")
/// ).into());
/// assert_eq!(
/// Term::from_str("\"ex\"").unwrap(),
/// Literal::new_simple_literal("ex").into()
/// );
/// assert_eq!(
/// Term::from_str("<< _:s <http://example.com/p> \"o\" >>").unwrap(),
/// Triple::new(
/// BlankNode::new("s").unwrap(),
/// NamedNode::new("http://example.com/p").unwrap(),
/// Literal::new_simple_literal("o")
/// )
/// .into()
/// );
/// ```
fn from_str(s: &str) -> Result<Self, Self::Err> {
let (term, left) = read_term(s, 0)?;
@@ -118,7 +155,10 @@ impl FromStr for Variable {
/// use oxrdf::Variable;
/// use std::str::FromStr;
///
/// assert_eq!(Variable::from_str("$foo").unwrap(), Variable::new("foo").unwrap())
/// assert_eq!(
/// Variable::from_str("$foo").unwrap(),
/// Variable::new("foo").unwrap()
/// )
/// ```
fn from_str(s: &str) -> Result<Self, Self::Err> {
if !s.starts_with('?') && !s.starts_with('$') {

@@ -698,7 +698,7 @@ impl<'a> From<TermRef<'a>> for Term {
///
/// The default string formatter is returning an N-Triples, Turtle, and SPARQL compatible representation:
/// ```
/// use oxrdf::{Triple, NamedNode};
/// use oxrdf::{NamedNode, Triple};
///
/// assert_eq!(
/// "<http://example.com/s> <http://example.com/p> <http://example.com/o>",
@@ -706,7 +706,8 @@ impl<'a> From<TermRef<'a>> for Term {
/// subject: NamedNode::new("http://example.com/s")?.into(),
/// predicate: NamedNode::new("http://example.com/p")?,
/// object: NamedNode::new("http://example.com/o")?.into(),
/// }.to_string()
/// }
/// .to_string()
/// );
/// # Result::<_,oxrdf::IriParseError>::Ok(())
/// ```
@@ -769,7 +770,7 @@ impl fmt::Display for Triple {
///
/// The default string formatter is returning an N-Triples, Turtle, and SPARQL compatible representation:
/// ```
/// use oxrdf::{TripleRef, NamedNodeRef};
/// use oxrdf::{NamedNodeRef, TripleRef};
///
/// assert_eq!(
/// "<http://example.com/s> <http://example.com/p> <http://example.com/o>",
@@ -777,7 +778,8 @@ impl fmt::Display for Triple {
/// subject: NamedNodeRef::new("http://example.com/s")?.into(),
/// predicate: NamedNodeRef::new("http://example.com/p")?,
/// object: NamedNodeRef::new("http://example.com/o")?.into(),
/// }.to_string()
/// }
/// .to_string()
/// );
/// # Result::<_,oxrdf::IriParseError>::Ok(())
/// ```

@@ -8,10 +8,7 @@ use std::fmt;
/// ```
/// use oxrdf::{Variable, VariableNameParseError};
///
/// assert_eq!(
/// "?foo",
/// Variable::new("foo")?.to_string()
/// );
/// assert_eq!("?foo", Variable::new("foo")?.to_string());
/// # Result::<_,VariableNameParseError>::Ok(())
/// ```
#[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Hash)]
@@ -67,12 +64,9 @@ impl fmt::Display for Variable {
///
/// The default string formatter is returning a SPARQL compatible representation:
/// ```
/// use oxrdf::{VariableRef, VariableNameParseError};
/// use oxrdf::{VariableNameParseError, VariableRef};
///
/// assert_eq!(
/// "?foo",
/// VariableRef::new("foo")?.to_string()
/// );
/// assert_eq!("?foo", VariableRef::new("foo")?.to_string());
/// # Result::<_,VariableNameParseError>::Ok(())
/// ```
#[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Copy, Hash)]

@@ -26,7 +26,10 @@ impl RdfFormat {
/// ```
/// use oxrdfio::RdfFormat;
///
/// assert_eq!(RdfFormat::NTriples.iri(), "http://www.w3.org/ns/formats/N-Triples")
/// assert_eq!(
/// RdfFormat::NTriples.iri(),
/// "http://www.w3.org/ns/formats/N-Triples"
/// )
/// ```
#[inline]
pub const fn iri(self) -> &'static str {
@@ -136,7 +139,10 @@ impl RdfFormat {
/// ```
/// use oxrdfio::RdfFormat;
///
/// assert_eq!(RdfFormat::from_media_type("text/turtle; charset=utf-8"), Some(RdfFormat::Turtle))
/// assert_eq!(
/// RdfFormat::from_media_type("text/turtle; charset=utf-8"),
/// Some(RdfFormat::Turtle)
/// )
/// ```
#[inline]
pub fn from_media_type(media_type: &str) -> Option<Self> {

@@ -48,7 +48,9 @@ use tokio::io::AsyncRead;
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> .";
///
/// let parser = RdfParser::from_format(RdfFormat::NTriples);
/// let quads = parser.parse_read(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
/// let quads = parser
/// .parse_read(file.as_bytes())
/// .collect::<Result<Vec<_>, _>>()?;
///
/// assert_eq!(quads.len(), 1);
/// assert_eq!(quads[0].subject.to_string(), "<http://example.com/s>");
@@ -129,9 +131,12 @@ impl RdfParser {
/// The format the parser uses.
///
/// ```
/// use oxrdfio::{RdfParser, RdfFormat};
/// use oxrdfio::{RdfFormat, RdfParser};
///
/// assert_eq!(RdfParser::from_format(RdfFormat::Turtle).format(), RdfFormat::Turtle);
/// assert_eq!(
/// RdfParser::from_format(RdfFormat::Turtle).format(),
/// RdfFormat::Turtle
/// );
/// ```
pub fn format(&self) -> RdfFormat {
match &self.inner {
@@ -152,7 +157,9 @@ impl RdfParser {
/// let file = "</s> </p> </o> .";
///
/// let parser = RdfParser::from_format(RdfFormat::Turtle).with_base_iri("http://example.com")?;
/// let quads = parser.parse_read(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
/// let quads = parser
/// .parse_read(file.as_bytes())
/// .collect::<Result<Vec<_>, _>>()?;
///
/// assert_eq!(quads.len(), 1);
/// assert_eq!(quads[0].subject.to_string(), "<http://example.com/s>");
@@ -179,8 +186,11 @@ impl RdfParser {
///
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> .";
///
/// let parser = RdfParser::from_format(RdfFormat::Turtle).with_default_graph(NamedNode::new("http://example.com/g")?);
/// let quads = parser.parse_read(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
/// let parser = RdfParser::from_format(RdfFormat::Turtle)
/// .with_default_graph(NamedNode::new("http://example.com/g")?);
/// let quads = parser
/// .parse_read(file.as_bytes())
/// .collect::<Result<Vec<_>, _>>()?;
///
/// assert_eq!(quads.len(), 1);
/// assert_eq!(quads[0].graph_name.to_string(), "<http://example.com/g>");
@@ -221,10 +231,12 @@ impl RdfParser {
///
/// let result1 = RdfParser::from_format(RdfFormat::NQuads)
/// .rename_blank_nodes()
/// .parse_read(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
/// .parse_read(file.as_bytes())
/// .collect::<Result<Vec<_>, _>>()?;
/// let result2 = RdfParser::from_format(RdfFormat::NQuads)
/// .rename_blank_nodes()
/// .parse_read(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
/// .parse_read(file.as_bytes())
/// .collect::<Result<Vec<_>, _>>()?;
/// assert_ne!(result1, result2);
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ```
@@ -262,7 +274,9 @@ impl RdfParser {
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> .";
///
/// let parser = RdfParser::from_format(RdfFormat::NTriples);
/// let quads = parser.parse_read(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
/// let quads = parser
/// .parse_read(file.as_bytes())
/// .collect::<Result<Vec<_>, _>>()?;
///
/// assert_eq!(quads.len(), 1);
/// assert_eq!(quads[0].subject.to_string(), "<http://example.com/s>");
@@ -358,7 +372,9 @@ impl From<RdfFormat> for RdfParser {
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> .";
///
/// let parser = RdfParser::from_format(RdfFormat::NTriples);
/// let quads = parser.parse_read(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
/// let quads = parser
/// .parse_read(file.as_bytes())
/// .collect::<Result<Vec<_>, _>>()?;
///
/// assert_eq!(quads.len(), 1);
/// assert_eq!(quads[0].subject.to_string(), "<http://example.com/s>");

@@ -63,9 +63,12 @@ impl RdfSerializer {
/// The format the serializer serializes to.
///
/// ```
/// use oxrdfio::{RdfSerializer, RdfFormat};
/// use oxrdfio::{RdfFormat, RdfSerializer};
///
/// assert_eq!(RdfSerializer::from_format(RdfFormat::Turtle).format(), RdfFormat::Turtle);
/// assert_eq!(
/// RdfSerializer::from_format(RdfFormat::Turtle).format(),
/// RdfFormat::Turtle
/// );
/// ```
pub fn format(&self) -> RdfFormat {
self.format

@@ -26,7 +26,8 @@ use tokio::io::{AsyncRead, BufReader as AsyncBufReader};
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNodeRef;
/// use oxrdfxml::RdfXmlParser;
///
/// let file = br#"<?xml version="1.0"?>
@@ -84,7 +85,8 @@ impl RdfXmlParser {
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNodeRef;
/// use oxrdfxml::RdfXmlParser;
///
/// let file = br#"<?xml version="1.0"?>
@@ -119,7 +121,8 @@ impl RdfXmlParser {
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNodeRef;
/// use oxrdfxml::RdfXmlParser;
///
/// # #[tokio::main(flavor = "current_thread")]
@@ -179,7 +182,8 @@ impl RdfXmlParser {
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNodeRef;
/// use oxrdfxml::RdfXmlParser;
///
/// let file = br#"<?xml version="1.0"?>
@@ -246,8 +250,9 @@ impl<R: Read> FromReadRdfXmlReader<R> {
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxrdfxml::RdfXmlParser;
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNodeRef;
/// use oxrdfxml::RdfXmlParser;
///
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> Result<(), oxrdfxml::ParseError> {
@@ -368,7 +373,7 @@ enum RdfXmlState {
li_counter: u64,
},
PropertyElt {
//Resource, Literal or Empty property element
// Resource, Literal or Empty property element
iri: NamedNode,
base_iri: Option<Iri<String>>,
language: Option<String>,
@@ -392,7 +397,7 @@ enum RdfXmlState {
subject: Subject,
writer: Writer<Vec<u8>>,
id_attr: Option<NamedNode>,
emit: bool, //false for parseTypeOtherPropertyElt support
emit: bool, // false for parseTypeOtherPropertyElt support
},
}
@@ -523,7 +528,7 @@ impl<R> RdfXmlReader<R> {
PropertyElt { subject: Subject },
}
//Literal case
// Literal case
if let Some(RdfXmlState::ParseTypeLiteralPropertyElt { writer, .. }) = self.state.last_mut()
{
let mut clean_event = BytesStart::new(
@@ -542,7 +547,7 @@ impl<R> RdfXmlReader<R> {
let tag_name = self.resolve_tag_name(event.name())?;
//We read attributes
// We read attributes
let (mut language, mut base_iri) = if let Some(current_state) = self.state.last() {
(
current_state.language().cloned(),
@@ -652,7 +657,7 @@ impl<R> RdfXmlReader<R> {
}
}
//Parsing with the base URI
// Parsing with the base URI
let id_attr = match id_attr {
Some(iri) => {
let iri = self.resolve_iri(&base_iri, iri)?;
@@ -855,7 +860,7 @@ impl<R> RdfXmlReader<R> {
event: &BytesEnd<'_>,
results: &mut Vec<Triple>,
) -> Result<(), ParseError> {
//Literal case
// Literal case
if self.in_literal_depth > 0 {
if let Some(RdfXmlState::ParseTypeLiteralPropertyElt { writer, .. }) =
self.state.last_mut()

@@ -17,6 +17,13 @@ pub struct DateTime {
}
impl DateTime {
pub const MAX: Self = Self {
timestamp: Timestamp::MAX,
};
pub const MIN: Self = Self {
timestamp: Timestamp::MIN,
};
#[inline]
pub(super) fn new(
year: i64,
@@ -241,14 +248,6 @@ impl DateTime {
pub fn is_identical_with(self, other: Self) -> bool {
self.timestamp.is_identical_with(other.timestamp)
}
pub const MIN: Self = Self {
timestamp: Timestamp::MIN,
};
pub const MAX: Self = Self {
timestamp: Timestamp::MAX,
};
}
/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes).
@@ -317,6 +316,21 @@ pub struct Time {
}
impl Time {
#[cfg(test)]
const MAX: Self = Self {
timestamp: Timestamp {
value: Decimal::new_from_i128_unchecked(62_230_255_200),
timezone_offset: Some(TimezoneOffset::MIN),
},
};
#[cfg(test)]
const MIN: Self = Self {
timestamp: Timestamp {
value: Decimal::new_from_i128_unchecked(62_230_154_400),
timezone_offset: Some(TimezoneOffset::MAX),
},
};
#[inline]
fn new(
mut hour: u8,
@@ -493,22 +507,6 @@ impl Time {
pub fn is_identical_with(self, other: Self) -> bool {
self.timestamp.is_identical_with(other.timestamp)
}
#[cfg(test)]
const MIN: Self = Self {
timestamp: Timestamp {
value: Decimal::new_from_i128_unchecked(62_230_154_400),
timezone_offset: Some(TimezoneOffset::MAX),
},
};
#[cfg(test)]
const MAX: Self = Self {
timestamp: Timestamp {
value: Decimal::new_from_i128_unchecked(62_230_255_200),
timezone_offset: Some(TimezoneOffset::MIN),
},
};
}
/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes).
@@ -566,6 +564,19 @@ pub struct Date {
}
impl Date {
pub const MAX: Self = Self {
timestamp: Timestamp {
value: Decimal::new_from_i128_unchecked(170_141_183_460_469_216_800),
timezone_offset: Some(TimezoneOffset::MAX),
},
};
pub const MIN: Self = Self {
timestamp: Timestamp {
value: Decimal::new_from_i128_unchecked(-170_141_183_460_469_216_800),
timezone_offset: Some(TimezoneOffset::MIN),
},
};
#[inline]
fn new(
year: i64,
@@ -742,19 +753,6 @@ impl Date {
pub fn is_identical_with(self, other: Self) -> bool {
self.timestamp.is_identical_with(other.timestamp)
}
pub const MIN: Self = Self {
timestamp: Timestamp {
value: Decimal::new_from_i128_unchecked(-170_141_183_460_469_216_800),
timezone_offset: Some(TimezoneOffset::MIN),
},
};
pub const MAX: Self = Self {
timestamp: Timestamp {
value: Decimal::new_from_i128_unchecked(170_141_183_460_469_216_800),
timezone_offset: Some(TimezoneOffset::MAX),
},
};
}
/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes).
@@ -805,6 +803,19 @@ pub struct GYearMonth {
}
impl GYearMonth {
pub const MAX: Self = Self {
timestamp: Timestamp {
value: Decimal::new_from_i128_unchecked(170_141_183_460_469_216_800),
timezone_offset: Some(TimezoneOffset::MAX),
},
};
pub const MIN: Self = Self {
timestamp: Timestamp {
value: Decimal::new_from_i128_unchecked(-170_141_183_460_466_970_400),
timezone_offset: Some(TimezoneOffset::MIN),
},
};
#[inline]
fn new(
year: i64,
@@ -876,19 +887,6 @@ impl GYearMonth {
pub fn is_identical_with(self, other: Self) -> bool {
self.timestamp.is_identical_with(other.timestamp)
}
pub const MIN: Self = Self {
timestamp: Timestamp {
value: Decimal::new_from_i128_unchecked(-170_141_183_460_466_970_400),
timezone_offset: Some(TimezoneOffset::MIN),
},
};
pub const MAX: Self = Self {
timestamp: Timestamp {
value: Decimal::new_from_i128_unchecked(170_141_183_460_469_216_800),
timezone_offset: Some(TimezoneOffset::MAX),
},
};
}
/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes).
@@ -947,6 +945,19 @@ pub struct GYear {
}
impl GYear {
pub const MAX: Self = Self {
timestamp: Timestamp {
value: Decimal::new_from_i128_unchecked(170_141_183_460_461_440_800),
timezone_offset: Some(TimezoneOffset::MAX),
},
};
pub const MIN: Self = Self {
timestamp: Timestamp {
value: Decimal::new_from_i128_unchecked(-170_141_183_460_461_700_000),
timezone_offset: Some(TimezoneOffset::MIN),
},
};
#[inline]
fn new(
year: i64,
@@ -1011,19 +1022,6 @@ impl GYear {
pub fn is_identical_with(self, other: Self) -> bool {
self.timestamp.is_identical_with(other.timestamp)
}
pub const MIN: Self = Self {
timestamp: Timestamp {
value: Decimal::new_from_i128_unchecked(-170_141_183_460_461_700_000),
timezone_offset: Some(TimezoneOffset::MIN),
},
};
pub const MAX: Self = Self {
timestamp: Timestamp {
value: Decimal::new_from_i128_unchecked(170_141_183_460_461_440_800),
timezone_offset: Some(TimezoneOffset::MAX),
},
};
}
/// Conversion according to [XPath cast rules](https://www.w3.org/TR/xpath-functions-31/#casting-to-datetimes).
@@ -1461,6 +1459,10 @@ pub struct TimezoneOffset {
}
impl TimezoneOffset {
pub const MAX: Self = Self { offset: 14 * 60 };
pub const MIN: Self = Self { offset: -14 * 60 };
pub const UTC: Self = Self { offset: 0 };
/// From offset in minute with respect to UTC
#[inline]
pub fn new(offset_in_minutes: i16) -> Result<Self, InvalidTimezoneError> {
@@ -1489,10 +1491,6 @@ impl TimezoneOffset {
pub fn to_be_bytes(self) -> [u8; 2] {
self.offset.to_be_bytes()
}
pub const MIN: Self = Self { offset: -14 * 60 };
pub const UTC: Self = Self { offset: 0 };
pub const MAX: Self = Self { offset: 14 * 60 };
}
impl TryFrom<DayTimeDuration> for TimezoneOffset {
@@ -1576,7 +1574,7 @@ impl PartialEq for Timestamp {
fn eq(&self, other: &Self) -> bool {
match (self.timezone_offset, other.timezone_offset) {
(Some(_), Some(_)) | (None, None) => self.value.eq(&other.value),
_ => false, //TODO: implicit timezone
_ => false, // TODO: implicit timezone
}
}
}
@@ -1622,6 +1620,15 @@ impl Hash for Timestamp {
}
impl Timestamp {
pub const MAX: Self = Self {
value: Decimal::MAX,
timezone_offset: Some(TimezoneOffset::MAX),
};
pub const MIN: Self = Self {
value: Decimal::MIN,
timezone_offset: Some(TimezoneOffset::MIN),
};
#[inline]
fn new(props: &DateTimeSevenPropertyModel) -> Result<Self, DateTimeOverflowError> {
Ok(Self {
@@ -1790,7 +1797,7 @@ impl Timestamp {
(Some(_), Some(_)) | (None, None) => {
Some(DayTimeDuration::new(self.value.checked_sub(rhs.value)?))
}
_ => None, //TODO: implicit timezone
_ => None, // TODO: implicit timezone
}
}
@@ -1816,13 +1823,13 @@ impl Timestamp {
Self {
value: self
.value
.checked_add(i64::from(from_timezone.offset) * 60)?, // We keep the literal value
.checked_add(i64::from(from_timezone.offset) * 60)?, /* We keep the literal value */
timezone_offset: None,
}
}
} else if let Some(to_timezone) = timezone_offset {
Self {
value: self.value.checked_sub(i64::from(to_timezone.offset) * 60)?, // We keep the literal value
value: self.value.checked_sub(i64::from(to_timezone.offset) * 60)?, /* We keep the literal value */
timezone_offset: Some(to_timezone),
}
} else {
@@ -1851,16 +1858,6 @@ impl Timestamp {
pub fn is_identical_with(self, other: Self) -> bool {
self.value == other.value && self.timezone_offset == other.timezone_offset
}
pub const MIN: Self = Self {
value: Decimal::MIN,
timezone_offset: Some(TimezoneOffset::MIN),
};
pub const MAX: Self = Self {
value: Decimal::MAX,
timezone_offset: Some(TimezoneOffset::MAX),
};
}
#[cfg(feature = "custom-now")]
@ -1960,7 +1957,7 @@ fn normalize_second(
mi: i64,
se: Decimal,
) -> Option<(i64, u8, u8, u8, u8, Decimal)> {
let mi = mi.checked_add(i64::try_from(se.as_i128().checked_div(60)?).ok()?)?; //TODO: good idea?
let mi = mi.checked_add(i64::try_from(se.as_i128().checked_div(60)?).ok()?)?; // TODO: good idea?
let se = se.checked_rem(60)?;
let (yr, mo, da, hr, mi) = normalize_minute(yr, mo, da, hr, mi)?;
Some((yr, mo, da, hr, mi, se))

@@ -19,6 +19,11 @@ pub struct Decimal {
}
impl Decimal {
pub const MAX: Self = Self { value: i128::MAX };
pub const MIN: Self = Self { value: i128::MIN };
#[cfg(test)]
pub const STEP: Self = Self { value: 1 };
/// Constructs the decimal i / 10^n
#[inline]
pub const fn new(i: i128, n: u32) -> Result<Self, TooLargeForDecimalError> {
@@ -260,13 +265,6 @@ impl Decimal {
pub(super) const fn as_i128(self) -> i128 {
self.value / DECIMAL_PART_POW
}
pub const MIN: Self = Self { value: i128::MIN };
pub const MAX: Self = Self { value: i128::MAX };
#[cfg(test)]
pub const STEP: Self = Self { value: 1 };
}
impl From<bool> for Decimal {
@@ -499,7 +497,7 @@ impl FromStr for Decimal {
}
input = &input[1..];
if input.is_empty() && !with_before_dot {
//We only have a dot
// We only have a dot
return Err(PARSE_UNEXPECTED_END);
}
while input.last() == Some(&b'0') {
@@ -520,11 +518,11 @@ impl FromStr for Decimal {
}
}
if exp == 0 {
//Underflow
// Underflow
return Err(PARSE_UNDERFLOW);
}
} else if !with_before_dot {
//It's empty
// It's empty
return Err(PARSE_UNEXPECTED_END);
}

@@ -17,6 +17,16 @@ pub struct Double {
}
impl Double {
pub const INFINITY: Self = Self {
value: f64::INFINITY,
};
pub const MAX: Self = Self { value: f64::MAX };
pub const MIN: Self = Self { value: f64::MIN };
pub const NAN: Self = Self { value: f64::NAN };
pub const NEG_INFINITY: Self = Self {
value: f64::NEG_INFINITY,
};
#[inline]
#[must_use]
pub fn from_be_bytes(bytes: [u8; 8]) -> Self {
@@ -77,20 +87,6 @@ impl Double {
pub fn is_identical_with(self, other: Self) -> bool {
self.value.to_bits() == other.value.to_bits()
}
pub const MIN: Self = Self { value: f64::MIN };
pub const MAX: Self = Self { value: f64::MAX };
pub const INFINITY: Self = Self {
value: f64::INFINITY,
};
pub const NEG_INFINITY: Self = Self {
value: f64::NEG_INFINITY,
};
pub const NAN: Self = Self { value: f64::NAN };
}
impl From<Double> for f64 {

@@ -15,6 +15,15 @@ pub struct Duration {
}
impl Duration {
pub const MAX: Self = Self {
year_month: YearMonthDuration::MAX,
day_time: DayTimeDuration::MAX,
};
pub const MIN: Self = Self {
year_month: YearMonthDuration::MIN,
day_time: DayTimeDuration::MIN,
};
#[inline]
pub fn new(
months: impl Into<i64>,
@@ -160,16 +169,6 @@ impl Duration {
pub fn is_identical_with(self, other: Self) -> bool {
self == other
}
pub const MIN: Self = Self {
year_month: YearMonthDuration::MIN,
day_time: DayTimeDuration::MIN,
};
pub const MAX: Self = Self {
year_month: YearMonthDuration::MAX,
day_time: DayTimeDuration::MAX,
};
}
impl TryFrom<StdDuration> for Duration {
@@ -301,6 +300,9 @@ pub struct YearMonthDuration {
}
impl YearMonthDuration {
pub const MAX: Self = Self { months: i64::MAX };
pub const MIN: Self = Self { months: i64::MIN };
#[inline]
pub fn new(months: impl Into<i64>) -> Self {
Self {
@@ -374,10 +376,6 @@ impl YearMonthDuration {
pub fn is_identical_with(self, other: Self) -> bool {
self == other
}
pub const MIN: Self = Self { months: i64::MIN };
pub const MAX: Self = Self { months: i64::MAX };
}
impl From<YearMonthDuration> for Duration {
@@ -469,6 +467,13 @@ pub struct DayTimeDuration {
}
impl DayTimeDuration {
pub const MAX: Self = Self {
seconds: Decimal::MAX,
};
pub const MIN: Self = Self {
seconds: Decimal::MIN,
};
#[inline]
pub fn new(seconds: impl Into<Decimal>) -> Self {
Self {
@@ -558,14 +563,6 @@ impl DayTimeDuration {
pub fn is_identical_with(self, other: Self) -> bool {
self == other
}
pub const MIN: Self = Self {
seconds: Decimal::MIN,
};
pub const MAX: Self = Self {
seconds: Decimal::MAX,
};
}
impl From<DayTimeDuration> for Duration {

@@ -17,6 +17,16 @@ pub struct Float {
}
impl Float {
pub const INFINITY: Self = Self {
value: f32::INFINITY,
};
pub const MAX: Self = Self { value: f32::MAX };
pub const MIN: Self = Self { value: f32::MIN };
pub const NAN: Self = Self { value: f32::NAN };
pub const NEG_INFINITY: Self = Self {
value: f32::NEG_INFINITY,
};
#[inline]
#[must_use]
pub fn from_be_bytes(bytes: [u8; 4]) -> Self {
@@ -77,20 +87,6 @@ impl Float {
pub fn is_identical_with(self, other: Self) -> bool {
self.value.to_bits() == other.value.to_bits()
}
pub const MIN: Self = Self { value: f32::MIN };
pub const MAX: Self = Self { value: f32::MAX };
pub const INFINITY: Self = Self {
value: f32::INFINITY,
};
pub const NEG_INFINITY: Self = Self {
value: f32::NEG_INFINITY,
};
pub const NAN: Self = Self { value: f32::NAN };
}
impl From<Float> for f32 {

@@ -14,6 +14,9 @@ pub struct Integer {
}
impl Integer {
pub const MAX: Self = Self { value: i64::MAX };
pub const MIN: Self = Self { value: i64::MIN };
#[inline]
#[must_use]
pub fn from_be_bytes(bytes: [u8; 8]) -> Self {
@@ -134,10 +137,6 @@ impl Integer {
pub fn is_identical_with(self, other: Self) -> bool {
self == other
}
pub const MIN: Self = Self { value: i64::MIN };
pub const MAX: Self = Self { value: i64::MAX };
}
impl From<bool> for Integer {

@@ -49,8 +49,8 @@ pub struct N3Lexer {
// TODO: simplify by not giving is_end and fail with an "unexpected eof" is none is returned when is_end=true?
impl TokenRecognizer for N3Lexer {
type Token<'a> = N3Token<'a>;
type Options = N3LexerOptions;
type Token<'a> = N3Token<'a>;
fn recognize_next_token<'a>(
&mut self,
@@ -790,7 +790,7 @@ impl N3Lexer {
format!("Unexpected escape character '\\{}'", char::from(c)),
)
.into()),
)), //TODO: read until end of string
)), // TODO: read until end of string
}
}

@@ -39,9 +39,9 @@ enum NQuadsState {
}
impl RuleRecognizer for NQuadsRecognizer {
type TokenRecognizer = N3Lexer;
type Output = Quad;
type Context = NQuadsRecognizerContext;
type Output = Quad;
type TokenRecognizer = N3Lexer;
fn error_recovery_state(mut self) -> Self {
self.stack.clear();
@@ -251,7 +251,7 @@ impl RuleRecognizer for NQuadsRecognizer {
self.emit_quad(results, GraphName::DefaultGraph);
errors.push("Triples should be followed by a dot".into())
}
_ => errors.push("Unexpected end".into()), //TODO
_ => errors.push("Unexpected end".into()), // TODO
}
}

@@ -181,7 +181,8 @@ impl From<Quad> for N3Quad {
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNode, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNode;
/// use oxttl::n3::{N3Parser, N3Term};
///
/// let file = br#"@base <http://example.com/> .
@@ -260,7 +261,9 @@ impl N3Parser {
/// <bar> a schema:Person ;
/// schema:name "Bar" ."#;
///
/// let rdf_type = N3Term::NamedNode(NamedNode::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?);
/// let rdf_type = N3Term::NamedNode(NamedNode::new(
/// "http://www.w3.org/1999/02/22-rdf-syntax-ns#type",
/// )?);
/// let schema_person = N3Term::NamedNode(NamedNode::new("http://schema.org/Person")?);
/// let mut count = 0;
/// for triple in N3Parser::new().parse_read(file.as_ref()) {
@@ -282,7 +285,8 @@ impl N3Parser {
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNode, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNode;
/// use oxttl::n3::{N3Parser, N3Term};
///
/// # #[tokio::main(flavor = "current_thread")]
@@ -322,14 +326,16 @@ impl N3Parser {
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNode, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNode;
/// use oxttl::n3::{N3Parser, N3Term};
///
/// let file: [&[u8]; 5] = [b"@base <http://example.com/>",
/// let file: [&[u8]; 5] = [
/// b"@base <http://example.com/>",
/// b". @prefix schema: <http://schema.org/> .",
/// b"<foo> a schema:Person",
/// b" ; schema:name \"Foo\" . <bar>",
/// b" a schema:Person ; schema:name \"Bar\" ."
/// b" a schema:Person ; schema:name \"Bar\" .",
/// ];
///
/// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned());
@@ -340,7 +346,7 @@ impl N3Parser {
/// while !parser.is_end() {
/// // We feed more data to the parser
/// if let Some(chunk) = file_chunks.next() {
/// parser.extend_from_slice(chunk);
/// parser.extend_from_slice(chunk);
/// } else {
/// parser.end(); // It's finished
/// }
@@ -366,7 +372,8 @@ impl N3Parser {
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNode, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNode;
/// use oxttl::n3::{N3Parser, N3Term};
///
/// let file = br#"@base <http://example.com/> .
@@ -459,7 +466,8 @@ impl<R: Read> Iterator for FromReadN3Reader<R> {
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNode, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNode;
/// use oxttl::n3::{N3Parser, N3Term};
///
/// # #[tokio::main(flavor = "current_thread")]
@@ -561,14 +569,16 @@ impl<R: AsyncRead + Unpin> FromTokioAsyncReadN3Reader<R> {
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNode, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNode;
/// use oxttl::n3::{N3Parser, N3Term};
///
/// let file: [&[u8]; 5] = [b"@base <http://example.com/>",
/// let file: [&[u8]; 5] = [
/// b"@base <http://example.com/>",
/// b". @prefix schema: <http://schema.org/> .",
/// b"<foo> a schema:Person",
/// b" ; schema:name \"Foo\" . <bar>",
/// b" a schema:Person ; schema:name \"Bar\" ."
/// b" a schema:Person ; schema:name \"Bar\" .",
/// ];
///
/// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned());
@@ -579,7 +589,7 @@ impl<R: AsyncRead + Unpin> FromTokioAsyncReadN3Reader<R> {
/// while !parser.is_end() {
/// // We feed more data to the parser
/// if let Some(chunk) = file_chunks.next() {
/// parser.extend_from_slice(chunk);
/// parser.extend_from_slice(chunk);
/// } else {
/// parser.end(); // It's finished
/// }
@@ -697,9 +707,9 @@ struct N3RecognizerContext {
}
impl RuleRecognizer for N3Recognizer {
type TokenRecognizer = N3Lexer;
type Output = N3Quad;
type Context = N3RecognizerContext;
type Output = N3Quad;
type TokenRecognizer = N3Lexer;
fn error_recovery_state(mut self) -> Self {
self.stack.clear();
@@ -1191,7 +1201,7 @@ impl RuleRecognizer for N3Recognizer {
) {
match &*self.stack {
[] | [N3State::N3Doc] => (),
_ => errors.push("Unexpected end".into()), //TODO
_ => errors.push("Unexpected end".into()), // TODO
}
}

@@ -4,12 +4,10 @@ use crate::lexer::{resolve_local_name, N3Lexer, N3LexerMode, N3LexerOptions, N3T
use crate::toolkit::{Lexer, Parser, RuleRecognizer, RuleRecognizerError};
use crate::{MAX_BUFFER_SIZE, MIN_BUFFER_SIZE};
use oxiri::Iri;
use oxrdf::vocab::{rdf, xsd};
#[cfg(feature = "rdf-star")]
use oxrdf::Triple;
use oxrdf::{
vocab::{rdf, xsd},
BlankNode, GraphName, Literal, NamedNode, NamedOrBlankNode, Quad, Subject, Term,
};
use oxrdf::{BlankNode, GraphName, Literal, NamedNode, NamedOrBlankNode, Quad, Subject, Term};
use std::collections::HashMap;
pub struct TriGRecognizer {
@@ -30,9 +28,9 @@ pub struct TriGRecognizerContext {
}
impl RuleRecognizer for TriGRecognizer {
type TokenRecognizer = N3Lexer;
type Output = Quad;
type Context = TriGRecognizerContext;
type Output = Quad;
type TokenRecognizer = N3Lexer;
fn error_recovery_state(mut self) -> Self {
self.stack.clear();
@@ -784,7 +782,7 @@ impl RuleRecognizer for TriGRecognizer {
}
}
} else if token == N3Token::Punctuation(".") || token == N3Token::Punctuation("}") {
//TODO: be smarter depending if we are in '{' or not
// TODO: be smarter depending if we are in '{' or not
self.stack.push(TriGState::TriGDoc);
self
} else {
@@ -819,7 +817,7 @@ impl RuleRecognizer for TriGRecognizer {
self.emit_quad(results);
errors.push("Triples should be followed by a dot".into())
}
_ => errors.push("Unexpected end".into()), //TODO
_ => errors.push("Unexpected end".into()), // TODO
}
}

@@ -366,7 +366,7 @@ impl<R: TokenRecognizer> Lexer<R> {
_ => return Some(()),
}
i += 1;
//TODO: SIMD
// TODO: SIMD
}
} else {
for c in &self.data[self.position.buffer_offset..] {
@@ -376,7 +376,7 @@ impl<R: TokenRecognizer> Lexer<R> {
} else {
return Some(());
}
//TODO: SIMD
// TODO: SIMD
}
}
Some(())

@@ -6,7 +6,8 @@ use crate::terse::TriGRecognizer;
use crate::toolkit::FromTokioAsyncReadIterator;
use crate::toolkit::{FromReadIterator, ParseError, Parser, SyntaxError};
use oxiri::{Iri, IriParseError};
use oxrdf::{vocab::xsd, GraphName, NamedNode, Quad, QuadRef, Subject, TermRef};
use oxrdf::vocab::xsd;
use oxrdf::{GraphName, NamedNode, Quad, QuadRef, Subject, TermRef};
use std::collections::HashMap;
use std::fmt;
use std::io::{self, Read, Write};
@@ -19,7 +20,8 @@ use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNodeRef;
/// use oxttl::TriGParser;
///
/// let file = br#"@base <http://example.com/> .
@@ -97,7 +99,8 @@ impl TriGParser {
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNodeRef;
/// use oxttl::TriGParser;
///
/// let file = br#"@base <http://example.com/> .
@@ -128,7 +131,8 @@ impl TriGParser {
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNodeRef;
/// use oxttl::TriGParser;
///
/// # #[tokio::main(flavor = "current_thread")]
@@ -167,14 +171,16 @@ impl TriGParser {
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNodeRef;
/// use oxttl::TriGParser;
///
/// let file: [&[u8]; 5] = [b"@base <http://example.com/>",
/// let file: [&[u8]; 5] = [
/// b"@base <http://example.com/>",
/// b". @prefix schema: <http://schema.org/> .",
/// b"<foo> a schema:Person",
/// b" ; schema:name \"Foo\" . <bar>",
/// b" a schema:Person ; schema:name \"Bar\" ."
/// b" a schema:Person ; schema:name \"Bar\" .",
/// ];
///
/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
@@ -184,7 +190,7 @@ impl TriGParser {
/// while !parser.is_end() {
/// // We feed more data to the parser
/// if let Some(chunk) = file_chunks.next() {
/// parser.extend_from_slice(chunk);
/// parser.extend_from_slice(chunk);
/// } else {
/// parser.end(); // It's finished
/// }
@@ -217,7 +223,8 @@ impl TriGParser {
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNodeRef;
/// use oxttl::TriGParser;
///
/// let file = br#"@base <http://example.com/> .
@@ -309,7 +316,8 @@ impl<R: Read> Iterator for FromReadTriGReader<R> {
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNodeRef;
/// use oxttl::TriGParser;
///
/// # #[tokio::main(flavor = "current_thread")]
@@ -410,14 +418,16 @@ impl<R: AsyncRead + Unpin> FromTokioAsyncReadTriGReader<R> {
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNodeRef;
/// use oxttl::TriGParser;
///
/// let file: [&[u8]; 5] = [b"@base <http://example.com/>",
/// let file: [&[u8]; 5] = [
/// b"@base <http://example.com/>",
/// b". @prefix schema: <http://schema.org/> .",
/// b"<foo> a schema:Person",
/// b" ; schema:name \"Foo\" . <bar>",
/// b" a schema:Person ; schema:name \"Bar\" ."
/// b" a schema:Person ; schema:name \"Bar\" .",
/// ];
///
/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
@@ -427,7 +437,7 @@ impl<R: AsyncRead + Unpin> FromTokioAsyncReadTriGReader<R> {
/// while !parser.is_end() {
/// // We feed more data to the parser
/// if let Some(chunk) = file_chunks.next() {
/// parser.extend_from_slice(chunk);
/// parser.extend_from_slice(chunk);
/// } else {
/// parser.end(); // It's finished
/// }

@@ -21,7 +21,8 @@ use tokio::io::{AsyncRead, AsyncWrite};
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNodeRef;
/// use oxttl::TurtleParser;
///
/// let file = br#"@base <http://example.com/> .
@@ -99,7 +100,8 @@ impl TurtleParser {
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNodeRef;
/// use oxttl::TurtleParser;
///
/// let file = br#"@base <http://example.com/> .
@@ -130,7 +132,8 @@ impl TurtleParser {
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNodeRef;
/// use oxttl::TurtleParser;
///
/// # #[tokio::main(flavor = "current_thread")]
@@ -169,14 +172,16 @@ impl TurtleParser {
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNodeRef;
/// use oxttl::TurtleParser;
///
/// let file: [&[u8]; 5] = [b"@base <http://example.com/>",
/// let file: [&[u8]; 5] = [
/// b"@base <http://example.com/>",
/// b". @prefix schema: <http://schema.org/> .",
/// b"<foo> a schema:Person",
/// b" ; schema:name \"Foo\" . <bar>",
/// b" a schema:Person ; schema:name \"Bar\" ."
/// b" a schema:Person ; schema:name \"Bar\" .",
/// ];
///
/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
@@ -186,7 +191,7 @@ impl TurtleParser {
/// while !parser.is_end() {
/// // We feed more data to the parser
/// if let Some(chunk) = file_chunks.next() {
/// parser.extend_from_slice(chunk);
/// parser.extend_from_slice(chunk);
/// } else {
/// parser.end(); // It's finished
/// }
@@ -219,7 +224,8 @@ impl TurtleParser {
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNodeRef;
/// use oxttl::TurtleParser;
///
/// let file = br#"@base <http://example.com/> .
@@ -311,7 +317,8 @@ impl<R: Read> Iterator for FromReadTurtleReader<R> {
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNodeRef;
/// use oxttl::TurtleParser;
///
/// # #[tokio::main(flavor = "current_thread")]
@@ -412,14 +419,16 @@ impl<R: AsyncRead + Unpin> FromTokioAsyncReadTurtleReader<R> {
///
/// Count the number of people:
/// ```
/// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxrdf::vocab::rdf;
/// use oxrdf::NamedNodeRef;
/// use oxttl::TurtleParser;
///
/// let file: [&[u8]; 5] = [b"@base <http://example.com/>",
/// let file: [&[u8]; 5] = [
/// b"@base <http://example.com/>",
/// b". @prefix schema: <http://schema.org/> .",
/// b"<foo> a schema:Person",
/// b" ; schema:name \"Foo\" . <bar>",
/// b" a schema:Person ; schema:name \"Bar\" ."
/// b" a schema:Person ; schema:name \"Bar\" .",
/// ];
///
/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
@@ -429,7 +438,7 @@ impl<R: AsyncRead + Unpin> FromTokioAsyncReadTurtleReader<R> {
/// while !parser.is_end() {
/// // We feed more data to the parser
/// if let Some(chunk) = file_chunks.next() {
/// parser.extend_from_slice(chunk);
/// parser.extend_from_slice(chunk);
/// } else {
/// parser.end(); // It's finished
/// }

@@ -2,8 +2,8 @@
use crate::error::{ParseError, SyntaxError, SyntaxErrorKind, TextPosition};
use memchr::memchr;
use oxrdf::Variable;
use oxrdf::{vocab::xsd, *};
use oxrdf::vocab::xsd;
use oxrdf::*;
use std::io::{self, Read, Write};
use std::str::{self, FromStr};
#[cfg(feature = "async-tokio")]

@@ -20,7 +20,10 @@ impl QueryResultsFormat {
/// ```
/// use sparesults::QueryResultsFormat;
///
/// assert_eq!(QueryResultsFormat::Json.iri(), "http://www.w3.org/ns/formats/SPARQL_Results_JSON")
/// assert_eq!(
/// QueryResultsFormat::Json.iri(),
/// "http://www.w3.org/ns/formats/SPARQL_Results_JSON"
/// )
/// ```
#[inline]
pub fn iri(self) -> &'static str {
@@ -31,12 +34,16 @@ impl QueryResultsFormat {
Self::Tsv => "http://www.w3.org/ns/formats/SPARQL_Results_TSV",
}
}
/// The format [IANA media type](https://tools.ietf.org/html/rfc2046).
///
/// ```
/// use sparesults::QueryResultsFormat;
///
/// assert_eq!(QueryResultsFormat::Json.media_type(), "application/sparql-results+json")
/// assert_eq!(
/// QueryResultsFormat::Json.media_type(),
/// "application/sparql-results+json"
/// )
/// ```
#[inline]
pub fn media_type(self) -> &'static str {
@@ -91,7 +98,10 @@ impl QueryResultsFormat {
/// ```
/// use sparesults::QueryResultsFormat;
///
/// assert_eq!(QueryResultsFormat::from_media_type("application/sparql-results+json; charset=utf-8"), Some(QueryResultsFormat::Json))
/// assert_eq!(
/// QueryResultsFormat::from_media_type("application/sparql-results+json; charset=utf-8"),
/// Some(QueryResultsFormat::Json)
/// )
/// ```
#[inline]
pub fn from_media_type(media_type: &str) -> Option<Self> {
@@ -134,7 +144,10 @@ impl QueryResultsFormat {
/// ```
/// use sparesults::QueryResultsFormat;
///
/// assert_eq!(QueryResultsFormat::from_extension("json"), Some(QueryResultsFormat::Json))
/// assert_eq!(
/// QueryResultsFormat::from_extension("json"),
/// Some(QueryResultsFormat::Json)
/// )
/// ```
#[inline]
pub fn from_extension(extension: &str) -> Option<Self> {

@@ -5,7 +5,6 @@ use crate::error::{ParseError, SyntaxError};
use json_event_parser::ToTokioAsyncWriteJsonWriter;
use json_event_parser::{FromReadJsonReader, JsonEvent, ToWriteJsonWriter};
use oxrdf::vocab::rdf;
use oxrdf::Variable;
use oxrdf::*;
use std::collections::BTreeMap;
use std::io::{self, Read, Write};
@@ -522,7 +521,7 @@ fn read_value<R: Read>(
JsonEvent::EndObject => {
if let Some(s) = state {
if s == State::Value {
state = None; //End of triple
state = None; // End of triple
} else {
return Err(
SyntaxError::msg("Term description values should be string").into()

@@ -130,8 +130,8 @@ impl From<QueryResultsFormat> for QueryResultsParser {
///
/// Example in TSV (the API is the same for JSON and XML):
/// ```
/// use sparesults::{QueryResultsFormat, QueryResultsParser, FromReadQueryResultsReader};
/// use oxrdf::{Literal, Variable};
/// use sparesults::{FromReadQueryResultsReader, QueryResultsFormat, QueryResultsParser};
///
/// let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Tsv);
///
@@ -141,10 +141,24 @@ impl From<QueryResultsFormat> for QueryResultsParser {
/// }
///
/// // solutions
/// if let FromReadQueryResultsReader::Solutions(solutions) = json_parser.parse_read(b"?foo\t?bar\n\"test\"\t".as_slice())? {
/// assert_eq!(solutions.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]);
/// if let FromReadQueryResultsReader::Solutions(solutions) =
/// json_parser.parse_read(b"?foo\t?bar\n\"test\"\t".as_slice())?
/// {
/// assert_eq!(
/// solutions.variables(),
/// &[
/// Variable::new_unchecked("foo"),
/// Variable::new_unchecked("bar")
/// ]
/// );
/// for solution in solutions {
/// assert_eq!(solution?.iter().collect::<Vec<_>>(), vec![(&Variable::new_unchecked("foo"), &Literal::from("test").into())]);
/// assert_eq!(
/// solution?.iter().collect::<Vec<_>>(),
/// vec![(
/// &Variable::new_unchecked("foo"),
/// &Literal::from("test").into()
/// )]
/// );
/// }
/// }
/// # Result::<(),sparesults::ParseError>::Ok(())
@@ -188,12 +202,20 @@ impl<R: Read> FromReadSolutionsReader<R> {
///
/// Example in TSV (the API is the same for JSON and XML):
/// ```
/// use sparesults::{QueryResultsFormat, QueryResultsParser, FromReadQueryResultsReader};
/// use oxrdf::Variable;
/// use sparesults::{FromReadQueryResultsReader, QueryResultsFormat, QueryResultsParser};
///
/// let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Tsv);
/// if let FromReadQueryResultsReader::Solutions(solutions) = json_parser.parse_read(b"?foo\t?bar\n\"ex1\"\t\"ex2\"".as_slice())? {
/// assert_eq!(solutions.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]);
/// if let FromReadQueryResultsReader::Solutions(solutions) =
/// json_parser.parse_read(b"?foo\t?bar\n\"ex1\"\t\"ex2\"".as_slice())?
/// {
/// assert_eq!(
/// solutions.variables(),
/// &[
/// Variable::new_unchecked("foo"),
/// Variable::new_unchecked("bar")
/// ]
/// );
/// }
/// # Result::<(),sparesults::ParseError>::Ok(())
/// ```

@@ -241,14 +241,23 @@ impl From<QueryResultsFormat> for QueryResultsSerializer {
///
/// Example in TSV (the API is the same for JSON, XML and CSV):
/// ```
/// use sparesults::{QueryResultsFormat, QueryResultsSerializer};
/// use oxrdf::{LiteralRef, Variable, VariableRef};
/// use sparesults::{QueryResultsFormat, QueryResultsSerializer};
/// use std::iter::once;
///
/// let tsv_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Tsv);
/// let mut buffer = Vec::new();
/// let mut writer = tsv_serializer.serialize_solutions_to_write(&mut buffer, vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")])?;
/// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test"))))?;
/// let mut writer = tsv_serializer.serialize_solutions_to_write(
/// &mut buffer,
/// vec![
/// Variable::new_unchecked("foo"),
/// Variable::new_unchecked("bar"),
/// ],
/// )?;
/// writer.write(once((
/// VariableRef::new_unchecked("foo"),
/// LiteralRef::from("test"),
/// )))?;
/// writer.finish()?;
/// assert_eq!(buffer, b"?foo\t?bar\n\"test\"\t\n");
/// # std::io::Result::Ok(())
@@ -321,16 +330,29 @@ impl<W: Write> ToWriteSolutionsWriter<W> {
///
/// Example in TSV (the API is the same for JSON, CSV and XML):
/// ```
/// use sparesults::{QueryResultsFormat, QueryResultsSerializer};
/// use oxrdf::{LiteralRef, Variable, VariableRef};
/// use sparesults::{QueryResultsFormat, QueryResultsSerializer};
/// use std::iter::once;
///
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> std::io::Result<()> {
/// let tsv_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Tsv);
/// let mut buffer = Vec::new();
/// let mut writer = tsv_serializer.serialize_solutions_to_tokio_async_write(&mut buffer, vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]).await?;
/// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test")))).await?;
/// let mut writer = tsv_serializer
/// .serialize_solutions_to_tokio_async_write(
/// &mut buffer,
/// vec![
/// Variable::new_unchecked("foo"),
/// Variable::new_unchecked("bar"),
/// ],
/// )
/// .await?;
/// writer
/// .write(once((
/// VariableRef::new_unchecked("foo"),
/// LiteralRef::from("test"),
/// )))
/// .await?;
/// writer.finish().await?;
/// assert_eq!(buffer, b"?foo\t?bar\n\"test\"\t\n");
/// # Ok(())

@@ -44,10 +44,16 @@ impl QuerySolution {
/// It is also the number of columns in the solutions table.
///
/// ```
/// use oxrdf::{Literal, Variable};
/// use sparesults::QuerySolution;
/// use oxrdf::{Variable, Literal};
///
/// let solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![Some(Literal::from(1).into()), None]));
/// let solution = QuerySolution::from((
/// vec![
/// Variable::new_unchecked("foo"),
/// Variable::new_unchecked("bar"),
/// ],
/// vec![Some(Literal::from(1).into()), None],
/// ));
/// assert_eq!(solution.len(), 2);
/// ```
#[inline]
@ -58,13 +64,25 @@ impl QuerySolution {
/// Is there no variable bound in this solution?
///
/// ```
/// use oxrdf::{Literal, Variable};
/// use sparesults::QuerySolution;
/// use oxrdf::{Variable, Literal};
///
/// let solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![Some(Literal::from(1).into()), None]));
/// let solution = QuerySolution::from((
/// vec![
/// Variable::new_unchecked("foo"),
/// Variable::new_unchecked("bar"),
/// ],
/// vec![Some(Literal::from(1).into()), None],
/// ));
/// assert!(!solution.is_empty());
///
/// let empty_solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![None, None]));
/// let empty_solution = QuerySolution::from((
/// vec![
/// Variable::new_unchecked("foo"),
/// Variable::new_unchecked("bar"),
/// ],
/// vec![None, None],
/// ));
/// assert!(empty_solution.is_empty());
/// ```
#[inline]
@ -75,11 +93,20 @@ impl QuerySolution {
/// Returns an iterator over bound variables.
///
/// ```
/// use oxrdf::{Literal, Variable};
/// use sparesults::QuerySolution;
/// use oxrdf::{Variable, Literal};
///
/// let solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![Some(Literal::from(1).into()), None]));
/// assert_eq!(solution.iter().collect::<Vec<_>>(), vec![(&Variable::new_unchecked("foo"), &Literal::from(1).into())]);
/// let solution = QuerySolution::from((
/// vec![
/// Variable::new_unchecked("foo"),
/// Variable::new_unchecked("bar"),
/// ],
/// vec![Some(Literal::from(1).into()), None],
/// ));
/// assert_eq!(
/// solution.iter().collect::<Vec<_>>(),
/// vec![(&Variable::new_unchecked("foo"), &Literal::from(1).into())]
/// );
/// ```
#[inline]
pub fn iter(&self) -> impl Iterator<Item = (&Variable, &Term)> {
@ -89,10 +116,16 @@ impl QuerySolution {
/// Returns the ordered slice of variable values.
///
/// ```
/// use oxrdf::{Literal, Variable};
/// use sparesults::QuerySolution;
/// use oxrdf::{Variable, Literal};
///
/// let solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![Some(Literal::from(1).into()), None]));
/// let solution = QuerySolution::from((
/// vec![
/// Variable::new_unchecked("foo"),
/// Variable::new_unchecked("bar"),
/// ],
/// vec![Some(Literal::from(1).into()), None],
/// ));
/// assert_eq!(solution.values(), &[Some(Literal::from(1).into()), None]);
/// ```
#[inline]
@ -103,11 +136,23 @@ impl QuerySolution {
/// Returns the ordered slice of the solution variables, bound or not.
///
/// ```
/// use oxrdf::{Literal, Variable};
/// use sparesults::QuerySolution;
/// use oxrdf::{Variable, Literal};
///
/// let solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![Some(Literal::from(1).into()), None]));
/// assert_eq!(solution.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]);
/// let solution = QuerySolution::from((
/// vec![
/// Variable::new_unchecked("foo"),
/// Variable::new_unchecked("bar"),
/// ],
/// vec![Some(Literal::from(1).into()), None],
/// ));
/// assert_eq!(
/// solution.variables(),
/// &[
/// Variable::new_unchecked("foo"),
/// Variable::new_unchecked("bar")
/// ]
/// );
/// ```
#[inline]
pub fn variables(&self) -> &[Variable] {
@ -126,8 +171,8 @@ impl<V: Into<Arc<[Variable]>>, S: Into<Vec<Option<Term>>>> From<(V, S)> for Quer
}
impl<'a> IntoIterator for &'a QuerySolution {
type Item = (&'a Variable, &'a Term);
type IntoIter = Iter<'a>;
type Item = (&'a Variable, &'a Term);
#[inline]
fn into_iter(self) -> Self::IntoIter {
@ -214,11 +259,20 @@ impl fmt::Debug for QuerySolution {
/// An iterator over [`QuerySolution`] bound variables.
///
/// ```
/// use oxrdf::{Literal, Variable};
/// use sparesults::QuerySolution;
/// use oxrdf::{Variable, Literal};
///
/// let solution = QuerySolution::from((vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")], vec![Some(Literal::from(1).into()), None]));
/// assert_eq!(solution.iter().collect::<Vec<_>>(), vec![(&Variable::new_unchecked("foo"), &Literal::from(1).into())]);
/// let solution = QuerySolution::from((
/// vec![
/// Variable::new_unchecked("foo"),
/// Variable::new_unchecked("bar"),
/// ],
/// vec![Some(Literal::from(1).into()), None],
/// ));
/// assert_eq!(
/// solution.iter().collect::<Vec<_>>(),
/// vec![(&Variable::new_unchecked("foo"), &Literal::from(1).into())]
/// );
/// ```
pub struct Iter<'a> {
inner: Zip<std::slice::Iter<'a, Variable>, std::slice::Iter<'a, Option<Term>>>,

@ -2,7 +2,6 @@
use crate::error::{ParseError, SyntaxError};
use oxrdf::vocab::rdf;
use oxrdf::Variable;
use oxrdf::*;
use quick_xml::events::{BytesDecl, BytesEnd, BytesStart, BytesText, Event};
use quick_xml::{Reader, Writer};
@ -245,7 +244,7 @@ impl<R: Read> XmlQueryResultsReader<R> {
let mut variables = Vec::default();
let mut state = State::Start;
//Read header
// Read header
loop {
buffer.clear();
let event = reader.read_event_into(&mut buffer)?;
@ -553,7 +552,7 @@ impl<R: Read> XmlSolutionsReader<R> {
}
State::BNode => {
if term.is_none() {
//We default to a random bnode
// We default to a random bnode
term = Some(BlankNode::default().into())
}
state = self
@ -563,7 +562,7 @@ impl<R: Read> XmlSolutionsReader<R> {
}
State::Literal => {
if term.is_none() {
//We default to the empty literal
// We default to the empty literal
term = Some(build_literal("", lang.take(), datatype.take())?.into())
}
state = self

@ -365,7 +365,7 @@ enum PartialGraphPattern {
}
fn new_join(l: GraphPattern, r: GraphPattern) -> GraphPattern {
//Avoid to output empty BGPs
// Avoid outputting empty BGPs
if let GraphPattern::Bgp { patterns: pl } = &l {
if pl.is_empty() {
return r;
@ -449,7 +449,7 @@ fn build_select(
let mut p = r#where;
let mut with_aggregate = false;
//GROUP BY
// GROUP BY
let aggregates = state.aggregates.pop().unwrap_or_default();
if group.is_none() && !aggregates.is_empty() {
group = Some((vec![], vec![]));
@ -471,7 +471,7 @@ fn build_select(
with_aggregate = true;
}
//HAVING
// HAVING
if let Some(expr) = having {
p = GraphPattern::Filter {
expr,
@ -479,12 +479,12 @@ fn build_select(
};
}
//VALUES
// VALUES
if let Some(data) = values {
p = new_join(p, data);
}
//SELECT
// SELECT
let mut pv = Vec::new();
let with_project = match select.variables {
SelectionVariables::Explicit(sel_items) => {
@ -533,7 +533,7 @@ fn build_select(
if with_aggregate {
return Err("SELECT * is not authorized with GROUP BY");
}
//TODO: is it really useful to do a projection?
// TODO: is it really useful to do a projection?
p.on_in_scope_variable(|v| {
if !pv.contains(v) {
pv.push(v.clone());
@ -547,7 +547,7 @@ fn build_select(
let mut m = p;
//ORDER BY
// ORDER BY
if let Some(expression) = order_by {
m = GraphPattern::OrderBy {
inner: Box::new(m),
@ -555,7 +555,7 @@ fn build_select(
};
}
//PROJECT
// PROJECT
if with_project {
m = GraphPattern::Project {
inner: Box::new(m),
@ -568,7 +568,7 @@ fn build_select(
SelectionOption::Default => (),
}
//OFFSET LIMIT
// OFFSET LIMIT
if let Some((start, length)) = offset_limit {
m = GraphPattern::Slice {
inner: Box::new(m),

@ -13,7 +13,10 @@ use std::str::FromStr;
/// let query_str = "SELECT ?s ?p ?o WHERE { ?s ?p ?o . }";
/// let query = Query::parse(query_str, None)?;
/// assert_eq!(query.to_string(), query_str);
/// assert_eq!(query.to_sse(), "(project (?s ?p ?o) (bgp (triple ?s ?p ?o)))");
/// assert_eq!(
/// query.to_sse(),
/// "(project (?s ?p ?o) (bgp (triple ?s ?p ?o)))"
/// );
/// # Ok::<_, spargebra::ParseError>(())
/// ```
#[derive(Eq, PartialEq, Debug, Clone, Hash)]

@ -141,7 +141,7 @@ impl TryFrom<Term> for GroundTerm {
/// The default string formatter returns an N-Quads representation.
///
/// ```
/// use spargebra::term::{NamedNode, GroundTriple};
/// use spargebra::term::{GroundTriple, NamedNode};
///
/// assert_eq!(
/// "<http://example.com/s> <http://example.com/p> <http://example.com/o>",
@ -149,7 +149,8 @@ impl TryFrom<Term> for GroundTerm {
/// subject: NamedNode::new("http://example.com/s")?.into(),
/// predicate: NamedNode::new("http://example.com/p")?,
/// object: NamedNode::new("http://example.com/o")?.into(),
/// }.to_string()
/// }
/// .to_string()
/// );
/// # Result::<_,oxrdf::IriParseError>::Ok(())
/// ```

@ -197,10 +197,10 @@ impl Expression {
xsd::BOOLEAN => match literal.value() {
"true" | "1" => Some(true),
"false" | "0" => Some(false),
_ => None, //TODO
_ => None, // TODO
},
xsd::STRING => Some(!literal.value().is_empty()),
_ => None, //TODO
_ => None, // TODO
}
} else {
None

@ -102,7 +102,7 @@ impl Optimizer {
let expression = Self::normalize_expression(expression, &inner_types);
let expression_type = infer_expression_type(&expression, &inner_types);
if expression_type == VariableType::UNDEF {
//TODO: valid?
// TODO: valid?
inner
} else {
GraphPattern::extend(inner, variable, expression)
@ -397,7 +397,7 @@ impl Optimizer {
expression,
variable,
} => {
//TODO: handle the case where the filter overrides an expression variable (should not happen in SPARQL but allowed in the algebra)
// TODO: handle the case where the filter overrides an expression variable (should not happen in SPARQL but allowed in the algebra)
let mut inner_filters = Vec::new();
let mut final_filters = Vec::new();
for filter in filters {
@ -735,7 +735,7 @@ fn is_fit_for_for_loop_join(
global_input_types: &VariableTypes,
entry_types: &VariableTypes,
) -> bool {
//TODO: think more about it
// TODO: think more about it
match pattern {
GraphPattern::Values { .. }
| GraphPattern::QuadPattern { .. }

@ -49,7 +49,7 @@ pub fn infer_graph_pattern_types(
infer_graph_pattern_types(right, infer_graph_pattern_types(left, types))
}
GraphPattern::LeftJoin { left, right, .. } => {
let mut right_types = infer_graph_pattern_types(right, types.clone()); //TODO: expression
let mut right_types = infer_graph_pattern_types(right, types.clone()); // TODO: expression
for t in right_types.inner.values_mut() {
t.undef = true; // Right might be unset
}
@ -352,24 +352,14 @@ pub struct VariableType {
}
impl VariableType {
pub const UNDEF: Self = Self {
const ANY: Self = Self {
undef: true,
named_node: false,
blank_node: false,
literal: false,
#[cfg(feature = "rdf-star")]
triple: false,
};
const NAMED_NODE: Self = Self {
undef: false,
named_node: true,
blank_node: false,
literal: false,
blank_node: true,
literal: true,
#[cfg(feature = "rdf-star")]
triple: false,
triple: true,
};
const BLANK_NODE: Self = Self {
undef: false,
named_node: false,
@ -378,7 +368,6 @@ impl VariableType {
#[cfg(feature = "rdf-star")]
triple: false,
};
const LITERAL: Self = Self {
undef: false,
named_node: false,
@ -387,16 +376,14 @@ impl VariableType {
#[cfg(feature = "rdf-star")]
triple: false,
};
#[cfg(feature = "rdf-star")]
const TRIPLE: Self = Self {
const NAMED_NODE: Self = Self {
undef: false,
named_node: false,
named_node: true,
blank_node: false,
literal: false,
triple: true,
#[cfg(feature = "rdf-star")]
triple: false,
};
const SUBJECT: Self = Self {
undef: false,
named_node: true,
@ -405,7 +392,6 @@ impl VariableType {
#[cfg(feature = "rdf-star")]
triple: true,
};
const TERM: Self = Self {
undef: false,
named_node: true,
@ -414,14 +400,21 @@ impl VariableType {
#[cfg(feature = "rdf-star")]
triple: true,
};
const ANY: Self = Self {
#[cfg(feature = "rdf-star")]
const TRIPLE: Self = Self {
undef: false,
named_node: false,
blank_node: false,
literal: false,
triple: true,
};
pub const UNDEF: Self = Self {
undef: true,
named_node: true,
blank_node: true,
literal: true,
named_node: false,
blank_node: false,
literal: false,
#[cfg(feature = "rdf-star")]
triple: true,
triple: false,
};
}

@ -44,7 +44,7 @@ struct QueryContent {
#[derive(Arbitrary)]
enum QueryVariant {
Select(SelectQuery),
//TODO: Other variants!
// TODO: Other variants!
}
impl<'a> Arbitrary<'a> for Query {
@ -246,7 +246,7 @@ impl fmt::Display for GroupCondition {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::BuiltInCall(c) => write!(f, "{c}"),
//Self::FunctionCall(c) => write!(f, "{}", c),
// Self::FunctionCall(c) => write!(f, "{}", c),
Self::Projection(e, v) => {
if let Some(v) = v {
write!(f, "({e} AS {v})")
@ -705,7 +705,7 @@ impl fmt::Display for Constraint {
match self {
Self::BrackettedExpression(e) => write!(f, "{e}"),
Self::BuiltInCall(c) => write!(f, "{c}"),
//Self::FunctionCall(c) => write!(f, "{}", c),
// Self::FunctionCall(c) => write!(f, "{}", c),
}
}
}
@ -1530,7 +1530,7 @@ enum BuiltInCall {
IsLiteral(Box<Expression>),
IsNumeric(Box<Expression>),
Exists(ExistsFunc),
NotExists(NotExistsFunc), //TODO: Other functions
NotExists(NotExistsFunc), // TODO: Other functions
}
impl fmt::Display for BuiltInCall {
@ -1585,15 +1585,15 @@ impl fmt::Display for NotExistsFunc {
struct IriOrFunction {
// [128] iriOrFunction ::= iri ArgList?
iri: Iri,
//TODO args: Option<ArgList>,
// TODO args: Option<ArgList>,
}
impl fmt::Display for IriOrFunction {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.iri)?;
/*if let Some(args) = &self.args {
write!(f, "{}", args)?;
}*/
// if let Some(args) = &self.args {
// write!(f, "{}", args)?;
// }
Ok(())
}
}

@ -23,7 +23,10 @@ impl GraphFormat {
/// ```
/// use oxigraph::io::GraphFormat;
///
/// assert_eq!(GraphFormat::NTriples.iri(), "http://www.w3.org/ns/formats/N-Triples")
/// assert_eq!(
/// GraphFormat::NTriples.iri(),
/// "http://www.w3.org/ns/formats/N-Triples"
/// )
/// ```
#[inline]
pub fn iri(self) -> &'static str {
@ -65,6 +68,7 @@ impl GraphFormat {
Self::RdfXml => "rdf",
}
}
/// Looks for a known format from a media type.
///
/// It supports some media type aliases.
@ -74,7 +78,10 @@ impl GraphFormat {
/// ```
/// use oxigraph::io::GraphFormat;
///
/// assert_eq!(GraphFormat::from_media_type("text/turtle; charset=utf-8"), Some(GraphFormat::Turtle))
/// assert_eq!(
/// GraphFormat::from_media_type("text/turtle; charset=utf-8"),
/// Some(GraphFormat::Turtle)
/// )
/// ```
#[inline]
pub fn from_media_type(media_type: &str) -> Option<Self> {
@ -94,7 +101,10 @@ impl GraphFormat {
/// ```
/// use oxigraph::io::GraphFormat;
///
/// assert_eq!(GraphFormat::from_extension("nt"), Some(GraphFormat::NTriples))
/// assert_eq!(
/// GraphFormat::from_extension("nt"),
/// Some(GraphFormat::NTriples)
/// )
/// ```
#[inline]
pub fn from_extension(extension: &str) -> Option<Self> {
@ -151,7 +161,10 @@ impl DatasetFormat {
/// ```
/// use oxigraph::io::DatasetFormat;
///
/// assert_eq!(DatasetFormat::NQuads.iri(), "http://www.w3.org/ns/formats/N-Quads")
/// assert_eq!(
/// DatasetFormat::NQuads.iri(),
/// "http://www.w3.org/ns/formats/N-Quads"
/// )
/// ```
#[inline]
pub fn iri(self) -> &'static str {
@ -190,6 +203,7 @@ impl DatasetFormat {
Self::TriG => "trig",
}
}
/// Looks for a known format from a media type.
///
/// It supports some media type aliases.
@ -198,7 +212,10 @@ impl DatasetFormat {
/// ```
/// use oxigraph::io::DatasetFormat;
///
/// assert_eq!(DatasetFormat::from_media_type("application/n-quads; charset=utf-8"), Some(DatasetFormat::NQuads))
/// assert_eq!(
/// DatasetFormat::from_media_type("application/n-quads; charset=utf-8"),
/// Some(DatasetFormat::NQuads)
/// )
/// ```
#[inline]
pub fn from_media_type(media_type: &str) -> Option<Self> {
@ -217,7 +234,10 @@ impl DatasetFormat {
/// ```
/// use oxigraph::io::DatasetFormat;
///
/// assert_eq!(DatasetFormat::from_extension("nq"), Some(DatasetFormat::NQuads))
/// assert_eq!(
/// DatasetFormat::from_extension("nq"),
/// Some(DatasetFormat::NQuads)
/// )
/// ```
#[inline]
pub fn from_extension(extension: &str) -> Option<Self> {

@ -21,7 +21,9 @@ use std::io::Read;
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> .";
///
/// let parser = GraphParser::from_format(GraphFormat::NTriples);
/// let triples = parser.read_triples(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
/// let triples = parser
/// .read_triples(file.as_bytes())
/// .collect::<Result<Vec<_>, _>>()?;
///
/// assert_eq!(triples.len(), 1);
/// assert_eq!(triples[0].subject.to_string(), "<http://example.com/s>");
@ -50,8 +52,11 @@ impl GraphParser {
///
/// let file = "</s> </p> </o> .";
///
/// let parser = GraphParser::from_format(GraphFormat::Turtle).with_base_iri("http://example.com")?;
/// let triples = parser.read_triples(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
/// let parser =
/// GraphParser::from_format(GraphFormat::Turtle).with_base_iri("http://example.com")?;
/// let triples = parser
/// .read_triples(file.as_bytes())
/// .collect::<Result<Vec<_>, _>>()?;
///
/// assert_eq!(triples.len(), 1);
/// assert_eq!(triples[0].subject.to_string(), "<http://example.com/s>");
@ -81,7 +86,9 @@ impl GraphParser {
/// let file = "<http://example.com/s> <http://example.com/p> <http://example.com/o> .";
///
/// let parser = GraphParser::from_format(GraphFormat::NTriples);
/// let triples = parser.read_triples(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
/// let triples = parser
/// .read_triples(file.as_bytes())
/// .collect::<Result<Vec<_>, _>>()?;
///
/// assert_eq!(triples.len(), 1);
/// assert_eq!(triples[0].subject.to_string(), "<http://example.com/s>");
@ -139,8 +146,11 @@ impl DatasetParser {
///
/// let file = "<g> { </s> </p> </o> }";
///
/// let parser = DatasetParser::from_format(DatasetFormat::TriG).with_base_iri("http://example.com")?;
/// let triples = parser.read_quads(file.as_bytes()).collect::<Result<Vec<_>,_>>()?;
/// let parser =
/// DatasetParser::from_format(DatasetFormat::TriG).with_base_iri("http://example.com")?;
/// let triples = parser
/// .read_quads(file.as_bytes())
/// .collect::<Result<Vec<_>, _>>()?;
///
/// assert_eq!(triples.len(), 1);
/// assert_eq!(triples[0].subject.to_string(), "<http://example.com/s>");

@ -21,13 +21,16 @@ use std::io::{self, Write};
/// let mut buffer = Vec::new();
/// let mut writer = GraphSerializer::from_format(GraphFormat::NTriples).triple_writer(&mut buffer);
/// writer.write(&Triple {
/// subject: NamedNode::new("http://example.com/s")?.into(),
/// predicate: NamedNode::new("http://example.com/p")?,
/// object: NamedNode::new("http://example.com/o")?.into()
/// subject: NamedNode::new("http://example.com/s")?.into(),
/// predicate: NamedNode::new("http://example.com/p")?,
/// object: NamedNode::new("http://example.com/o")?.into(),
/// })?;
/// writer.finish()?;
///
/// assert_eq!(buffer.as_slice(), "<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n".as_bytes());
/// assert_eq!(
/// buffer.as_slice(),
/// "<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n".as_bytes()
/// );
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ```
#[deprecated(note = "use RdfSerializer instead", since = "0.4.0")]
@ -66,13 +69,16 @@ impl GraphSerializer {
/// let mut buffer = Vec::new();
/// let mut writer = GraphSerializer::from_format(GraphFormat::NTriples).triple_writer(&mut buffer);
/// writer.write(&Triple {
/// subject: NamedNode::new("http://example.com/s")?.into(),
/// predicate: NamedNode::new("http://example.com/p")?,
/// object: NamedNode::new("http://example.com/o")?.into()
/// subject: NamedNode::new("http://example.com/s")?.into(),
/// predicate: NamedNode::new("http://example.com/p")?,
/// object: NamedNode::new("http://example.com/o")?.into(),
/// })?;
/// writer.finish()?;
///
/// assert_eq!(buffer.as_slice(), "<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n".as_bytes());
/// assert_eq!(
/// buffer.as_slice(),
/// "<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n".as_bytes()
/// );
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ```
#[must_use]

@ -23,7 +23,10 @@ use std::str::FromStr;
/// // We edit the query dataset specification
/// let default = vec![NamedNode::new("http://example.com")?.into()];
/// query.dataset_mut().set_default_graph(default.clone());
/// assert_eq!(query.dataset().default_graph_graphs(), Some(default.as_slice()));
/// assert_eq!(
/// query.dataset().default_graph_graphs(),
/// Some(default.as_slice())
/// );
/// # Ok::<_, Box<dyn std::error::Error>>(())
/// ```
#[derive(Eq, PartialEq, Debug, Clone, Hash)]
@ -58,7 +61,7 @@ impl Query {
impl fmt::Display for Query {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.inner.fmt(f) //TODO: override
self.inner.fmt(f) // TODO: override
}
}
@ -217,8 +220,15 @@ impl QueryDataset {
/// ```
/// use oxigraph::sparql::Query;
///
/// assert!(Query::parse("SELECT ?s ?p ?o WHERE { ?s ?p ?o . }", None)?.dataset().is_default_dataset());
/// assert!(!Query::parse("SELECT ?s ?p ?o FROM <http://example.com> WHERE { ?s ?p ?o . }", None)?.dataset().is_default_dataset());
/// assert!(Query::parse("SELECT ?s ?p ?o WHERE { ?s ?p ?o . }", None)?
/// .dataset()
/// .is_default_dataset());
/// assert!(!Query::parse(
/// "SELECT ?s ?p ?o FROM <http://example.com> WHERE { ?s ?p ?o . }",
/// None
/// )?
/// .dataset()
/// .is_default_dataset());
///
/// # Ok::<_, Box<dyn std::error::Error>>(())
/// ```
@ -250,7 +260,10 @@ impl QueryDataset {
/// let mut query = Query::parse("SELECT ?s ?p ?o WHERE { ?s ?p ?o . }", None)?;
/// let default = vec![NamedNode::new("http://example.com")?.into()];
/// query.dataset_mut().set_default_graph(default.clone());
/// assert_eq!(query.dataset().default_graph_graphs(), Some(default.as_slice()));
/// assert_eq!(
/// query.dataset().default_graph_graphs(),
/// Some(default.as_slice())
/// );
///
/// # Ok::<_, Box<dyn std::error::Error>>(())
/// ```
@ -271,8 +284,13 @@ impl QueryDataset {
///
/// let mut query = Query::parse("SELECT ?s ?p ?o WHERE { ?s ?p ?o . }", None)?;
/// let named = vec![NamedNode::new("http://example.com")?.into()];
/// query.dataset_mut().set_available_named_graphs(named.clone());
/// assert_eq!(query.dataset().available_named_graphs(), Some(named.as_slice()));
/// query
/// .dataset_mut()
/// .set_available_named_graphs(named.clone());
/// assert_eq!(
/// query.dataset().available_named_graphs(),
/// Some(named.as_slice())
/// );
///
/// # Ok::<_, Box<dyn std::error::Error>>(())
/// ```

@ -5,8 +5,7 @@ use crate::sparql::ParseError;
use crate::storage::StorageError;
use std::convert::Infallible;
use std::error::Error;
use std::fmt;
use std::io;
use std::{fmt, io};
/// A SPARQL evaluation error.
#[derive(Debug)]

@ -33,8 +33,7 @@ use std::cmp::Ordering;
use std::collections::hash_map::DefaultHasher;
use std::collections::{HashMap, HashSet};
use std::hash::{Hash, Hasher};
use std::iter::Iterator;
use std::iter::{empty, once};
use std::iter::{empty, once, Iterator};
use std::rc::Rc;
use std::sync::Arc;
use std::{fmt, io, str};
@ -112,8 +111,8 @@ impl EncodedTuple {
}
impl IntoIterator for EncodedTuple {
type Item = Option<EncodedTerm>;
type IntoIter = std::vec::IntoIter<Option<EncodedTerm>>;
type Item = Option<EncodedTerm>;
fn into_iter(self) -> Self::IntoIter {
self.inner.into_iter()
@ -1010,7 +1009,7 @@ impl SimpleEvaluator {
}
})
.for_each(|tuple| {
//TODO avoid copy for key?
// TODO avoid copy for key?
let key = key_variables
.iter()
.map(|v| tuple.get(*v).cloned())
@ -3127,7 +3126,7 @@ pub(super) fn compile_pattern(pattern: &str, flags: Option<&str>) -> Option<Rege
'x' => {
regex_builder.ignore_whitespace(true);
}
_ => (), //TODO: implement q
_ => (), // TODO: implement q
}
}
}
@ -3632,7 +3631,7 @@ fn compare_str_str_id(dataset: &DatasetView, a: &str, b: &StrHash) -> Option<Ord
}
fn datatype(dataset: &DatasetView, value: &EncodedTerm) -> Option<EncodedTerm> {
//TODO: optimize?
// TODO: optimize?
match value {
EncodedTerm::NamedNode { .. }
| EncodedTerm::SmallBlankNode { .. }
@ -4387,6 +4386,7 @@ impl PathEvaluator {
}
}
}
fn eval_to_in_unknown_graph(
&self,
path: &PropertyPath,
@ -4968,7 +4968,7 @@ impl Iterator for ConstructIterator {
));
}
}
self.bnodes.clear(); //We do not reuse old bnodes
self.bnodes.clear(); // We do not reuse old bnodes
}
}
}
@ -5300,7 +5300,7 @@ impl Accumulator for SumAccumulator {
if let Some(sum) = &self.sum {
if let Some(operands) = element.and_then(|e| NumericBinaryOperands::new(sum.clone(), e))
{
//TODO: unify with addition?
// TODO: unify with addition?
self.sum = match operands {
NumericBinaryOperands::Float(v1, v2) => Some((v1 + v2).into()),
NumericBinaryOperands::Double(v1, v2) => Some((v1 + v2).into()),
@ -5343,8 +5343,8 @@ impl Accumulator for AvgAccumulator {
if self.count == 0 {
Some(0.into())
} else {
//TODO: deduplicate?
//TODO: duration?
// TODO: deduplicate?
// TODO: duration?
let count = Integer::from(self.count);
match sum {
EncodedTerm::FloatLiteral(sum) => Some((sum / Float::from(count)).into()),
@ -5584,6 +5584,7 @@ impl EncodedTupleSet {
len: 0,
}
}
fn insert(&mut self, tuple: EncodedTuple) {
self.map
.entry(self.tuple_key(&tuple))

@ -144,13 +144,13 @@ pub(crate) fn evaluate_query(
///
/// Usage example disabling the federated query support:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::sparql::QueryOptions;
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
/// store.query_opt(
/// "SELECT * WHERE { SERVICE <https://query.wikidata.org/sparql> {} }",
/// QueryOptions::default().without_service_handler()
/// QueryOptions::default().without_service_handler(),
/// )?;
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ```
@ -209,9 +209,9 @@ impl QueryOptions {
///
/// Example with a function serializing terms to N-Triples:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::*;
/// use oxigraph::sparql::{QueryOptions, QueryResults};
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
///
@ -219,10 +219,13 @@ impl QueryOptions {
/// "SELECT (<http://www.w3.org/ns/formats/N-Triples>(1) AS ?nt) WHERE {}",
/// QueryOptions::default().with_custom_function(
/// NamedNode::new("http://www.w3.org/ns/formats/N-Triples")?,
/// |args| args.get(0).map(|t| Literal::from(t.to_string()).into())
/// )
/// |args| args.get(0).map(|t| Literal::from(t.to_string()).into()),
/// ),
/// )? {
/// assert_eq!(solutions.next().unwrap()?.get("nt"), Some(&Literal::from("\"1\"^^<http://www.w3.org/2001/XMLSchema#integer>").into()));
/// assert_eq!(
/// solutions.next().unwrap()?.get("nt"),
/// Some(&Literal::from("\"1\"^^<http://www.w3.org/2001/XMLSchema#integer>").into())
/// );
/// }
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ```

@ -104,17 +104,24 @@ impl QueryResults {
/// This method fails if it is called on the `Solution` or `Boolean` results.
///
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::io::RdfFormat;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let graph = "<http://example.com> <http://example.com> <http://example.com> .\n";
///
/// let store = Store::new()?;
/// store.load_graph(graph.as_bytes(), RdfFormat::NTriples, GraphName::DefaultGraph, None)?;
/// store.load_graph(
/// graph.as_bytes(),
/// RdfFormat::NTriples,
/// GraphName::DefaultGraph,
/// None,
/// )?;
///
/// let mut results = Vec::new();
/// store.query("CONSTRUCT WHERE { ?s ?p ?o }")?.write_graph(&mut results, RdfFormat::NTriples)?;
/// store
/// .query("CONSTRUCT WHERE { ?s ?p ?o }")?
/// .write_graph(&mut results, RdfFormat::NTriples)?;
/// assert_eq!(results, graph.as_bytes());
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ```
@ -159,8 +166,8 @@ impl<R: Read + 'static> From<FromReadQueryResultsReader<R>> for QueryResults {
/// An iterator over [`QuerySolution`]s.
///
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::sparql::QueryResults;
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
/// if let QueryResults::Solutions(solutions) = store.query("SELECT ?s WHERE { ?s ?p ?o }")? {
@ -193,12 +200,15 @@ impl QuerySolutionIter {
/// The variables used in the solutions.
///
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::sparql::{QueryResults, Variable};
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
/// if let QueryResults::Solutions(solutions) = store.query("SELECT ?s ?o WHERE { ?s ?p ?o }")? {
/// assert_eq!(solutions.variables(), &[Variable::new("s")?, Variable::new("o")?]);
/// assert_eq!(
/// solutions.variables(),
/// &[Variable::new("s")?, Variable::new("o")?]
/// );
/// }
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ```
@ -234,8 +244,8 @@ impl Iterator for QuerySolutionIter {
/// An iterator over the triples that compose a graph solution.
///
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::sparql::QueryResults;
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
/// if let QueryResults::Graph(triples) = store.query("CONSTRUCT WHERE { ?s ?p ?o }")? {

@ -13,18 +13,22 @@ use std::time::Duration;
/// before evaluating a SPARQL query that uses SERVICE calls.
///
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::*;
/// use oxigraph::sparql::{QueryOptions, QueryResults, ServiceHandler, Query, EvaluationError};
/// use oxigraph::sparql::{EvaluationError, Query, QueryOptions, QueryResults, ServiceHandler};
/// use oxigraph::store::Store;
///
/// struct TestServiceHandler {
/// store: Store
/// store: Store,
/// }
///
/// impl ServiceHandler for TestServiceHandler {
/// type Error = EvaluationError;
///
/// fn handle(&self, service_name: NamedNode, query: Query) -> Result<QueryResults, Self::Error> {
/// fn handle(
/// &self,
/// service_name: NamedNode,
/// query: Query,
/// ) -> Result<QueryResults, Self::Error> {
/// if service_name == "http://example.com/service" {
/// self.store.query(query)
/// } else {
@ -35,14 +39,16 @@ use std::time::Duration;
///
/// let store = Store::new()?;
/// let service = TestServiceHandler {
/// store: Store::new()?
/// store: Store::new()?,
/// };
/// let ex = NamedNodeRef::new("http://example.com")?;
/// service.store.insert(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?;
/// service
/// .store
/// .insert(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?;
///
/// if let QueryResults::Solutions(mut solutions) = store.query_opt(
/// "SELECT ?s WHERE { SERVICE <http://example.com/service> { ?s ?p ?o } }",
/// QueryOptions::default().with_service_handler(service)
/// QueryOptions::default().with_service_handler(service),
/// )? {
/// assert_eq!(solutions.next().unwrap()?.get("s"), Some(&ex.into()));
/// }

@ -139,7 +139,7 @@ impl<'a, 'b: 'a> SimpleUpdateEvaluator<'a, 'b> {
let mut bnodes = HashMap::new();
let (eval, _) = evaluator.graph_pattern_evaluator(&pattern, &mut variables);
let tuples =
eval(EncodedTuple::with_capacity(variables.len())).collect::<Result<Vec<_>, _>>()?; //TODO: would be much better to stream
eval(EncodedTuple::with_capacity(variables.len())).collect::<Result<Vec<_>, _>>()?; // TODO: would be much better to stream
for tuple in tuples {
for quad in delete {
if let Some(quad) =

@ -19,16 +19,14 @@ use std::collections::HashMap;
use std::env::temp_dir;
use std::error::Error;
use std::ffi::{CStr, CString};
use std::fmt;
use std::fs::remove_dir_all;
use std::io;
use std::marker::PhantomData;
use std::ops::Deref;
use std::path::{Path, PathBuf};
use std::rc::{Rc, Weak};
use std::sync::{Arc, OnceLock};
use std::thread::{available_parallelism, yield_now};
use std::{ptr, slice};
use std::{fmt, io, ptr, slice};
macro_rules! ffi_result {
( $($function:ident)::*( $arg1:expr $(, $arg:expr)* $(,)? ) ) => {{
@ -711,7 +709,7 @@ impl Db {
column_family: &ColumnFamily,
key: &[u8],
) -> Result<bool, StorageError> {
Ok(self.get(column_family, key)?.is_some()) //TODO: optimize
Ok(self.get(column_family, key)?.is_some()) // TODO: optimize
}
pub fn insert(
@ -970,7 +968,7 @@ impl Reader {
column_family: &ColumnFamily,
key: &[u8],
) -> Result<bool, StorageError> {
Ok(self.get(column_family, key)?.is_some()) //TODO: optimize
Ok(self.get(column_family, key)?.is_some()) // TODO: optimize
}
#[allow(clippy::iter_not_returning_iterator)]
@ -983,7 +981,7 @@ impl Reader {
column_family: &ColumnFamily,
prefix: &[u8],
) -> Result<Iter, StorageError> {
//We generate the upper bound
// We generate the upper bound
let upper_bound = {
let mut bound = prefix.to_vec();
let mut found = false;
@ -1101,7 +1099,7 @@ impl Transaction<'_> {
column_family: &ColumnFamily,
key: &[u8],
) -> Result<bool, StorageError> {
Ok(self.get_for_update(column_family, key)?.is_some()) //TODO: optimize
Ok(self.get_for_update(column_family, key)?.is_some()) // TODO: optimize
}
pub fn insert(
@ -1228,7 +1226,7 @@ pub struct Iter {
is_currently_valid: bool,
_upper_bound: Option<Vec<u8>>,
_reader: Reader, // needed to ensure that DB still lives while iter is used
options: *mut rocksdb_readoptions_t, // needed to ensure that options still lives while iter is used
options: *mut rocksdb_readoptions_t, /* needed to ensure that options still lives while iter is used */
}
impl Drop for Iter {

@ -1,8 +1,7 @@
use crate::io::{ParseError, RdfFormat};
use oxiri::IriParseError;
use std::error::Error;
use std::fmt;
use std::io;
use std::{fmt, io};
/// An error related to storage operations (reads, writes...).
#[derive(Debug)]

@ -579,7 +579,7 @@ impl StorageReader {
pub fn named_graphs(&self) -> DecodingGraphIterator {
DecodingGraphIterator {
iter: self.reader.iter(&self.storage.graphs_cf).unwrap(), //TODO: propagate error?
iter: self.reader.iter(&self.storage.graphs_cf).unwrap(), // TODO: propagate error?
}
}
@ -786,7 +786,7 @@ impl StorageReader {
#[cfg(target_family = "wasm")]
#[allow(clippy::unused_self, clippy::unnecessary_wraps)]
pub fn validate(&self) -> Result<(), StorageError> {
Ok(()) //TODO
Ok(()) // TODO
}
}

@ -6,8 +6,7 @@ use crate::storage::small_string::SmallString;
use oxsdatatypes::*;
use siphasher::sip128::{Hasher128, SipHasher24};
use std::fmt::Debug;
use std::hash::Hash;
use std::hash::Hasher;
use std::hash::{Hash, Hasher};
use std::str;
use std::sync::Arc;

@ -1,11 +1,10 @@
use std::borrow::Borrow;
use std::cmp::Ordering;
use std::error::Error;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::ops::Deref;
use std::str;
use std::str::{FromStr, Utf8Error};
use std::{fmt, str};
/// A small inline string
#[derive(Clone, Copy, Default)]

@ -4,9 +4,9 @@
//!
//! Usage example:
//! ```
//! use oxigraph::store::Store;
//! use oxigraph::sparql::QueryResults;
//! use oxigraph::model::*;
//! use oxigraph::sparql::QueryResults;
//! use oxigraph::store::Store;
//!
//! let store = Store::new()?;
//!
@ -16,7 +16,7 @@
//! store.insert(&quad)?;
//!
//! // quad filter
//! let results: Result<Vec<Quad>,_> = store.quads_for_pattern(None, None, None, None).collect();
//! let results: Result<Vec<Quad>, _> = store.quads_for_pattern(None, None, None, None).collect();
//! assert_eq!(vec![quad], results?);
//!
//! // SPARQL query
@ -56,9 +56,9 @@ use std::{fmt, str};
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::sparql::QueryResults;
/// use oxigraph::model::*;
/// use oxigraph::sparql::QueryResults;
/// use oxigraph::store::Store;
/// # use std::fs::remove_dir_all;
///
/// # {
@ -70,7 +70,7 @@ use std::{fmt, str};
/// store.insert(&quad)?;
///
/// // quad filter
/// let results: Result<Vec<Quad>,_> = store.quads_for_pattern(None, None, None, None).collect();
/// let results: Result<Vec<Quad>, _> = store.quads_for_pattern(None, None, None, None).collect();
/// assert_eq!(vec![quad], results?);
///
/// // SPARQL query
@ -160,9 +160,9 @@ impl Store {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::*;
/// use oxigraph::sparql::QueryResults;
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
///
@ -171,8 +171,11 @@ impl Store {
/// store.insert(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?;
///
/// // SPARQL query
/// if let QueryResults::Solutions(mut solutions) = store.query("SELECT ?s WHERE { ?s ?p ?o }")? {
/// assert_eq!(solutions.next().unwrap()?.get("s"), Some(&ex.into_owned().into()));
/// if let QueryResults::Solutions(mut solutions) = store.query("SELECT ?s WHERE { ?s ?p ?o }")? {
/// assert_eq!(
/// solutions.next().unwrap()?.get("s"),
/// Some(&ex.into_owned().into())
/// );
/// }
/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
/// ```
@ -187,19 +190,22 @@ impl Store {
///
/// Usage example with a custom function serializing terms to N-Triples:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::*;
/// use oxigraph::sparql::{QueryOptions, QueryResults};
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
/// if let QueryResults::Solutions(mut solutions) = store.query_opt(
/// "SELECT (<http://www.w3.org/ns/formats/N-Triples>(1) AS ?nt) WHERE {}",
/// QueryOptions::default().with_custom_function(
/// NamedNode::new("http://www.w3.org/ns/formats/N-Triples")?,
/// |args| args.get(0).map(|t| Literal::from(t.to_string()).into())
/// )
/// |args| args.get(0).map(|t| Literal::from(t.to_string()).into()),
/// ),
/// )? {
/// assert_eq!(solutions.next().unwrap()?.get("nt"), Some(&Literal::from("\"1\"^^<http://www.w3.org/2001/XMLSchema#integer>").into()));
/// assert_eq!(
/// solutions.next().unwrap()?.get("nt"),
/// Some(&Literal::from("\"1\"^^<http://www.w3.org/2001/XMLSchema#integer>").into())
/// );
/// }
/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
/// ```
@ -219,14 +225,17 @@ impl Store {
///
/// Usage example serialising the explanation with statistics in JSON:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::sparql::{QueryOptions, QueryResults};
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
/// if let (Ok(QueryResults::Solutions(solutions)), explanation) = store.explain_query_opt("SELECT ?s WHERE { VALUES ?s { 1 2 3 } }", QueryOptions::default(), true)? {
/// if let (Ok(QueryResults::Solutions(solutions)), explanation) = store.explain_query_opt(
/// "SELECT ?s WHERE { VALUES ?s { 1 2 3 } }",
/// QueryOptions::default(),
/// true,
/// )? {
/// // We make sure to have read all the solutions
/// for _ in solutions {
/// }
/// for _ in solutions {}
/// let mut buf = Vec::new();
/// explanation.write_in_json(&mut buf)?;
/// }
@ -245,8 +254,8 @@ impl Store {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
///
@ -256,7 +265,9 @@ impl Store {
/// store.insert(&quad)?;
///
/// // quad filter by object
/// let results = store.quads_for_pattern(None, None, Some((&ex).into()), None).collect::<Result<Vec<_>,_>>()?;
/// let results = store
/// .quads_for_pattern(None, None, Some((&ex).into()), None)
/// .collect::<Result<Vec<_>, _>>()?;
/// assert_eq!(vec![quad], results);
/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
/// ```
@ -283,8 +294,8 @@ impl Store {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
///
@ -294,7 +305,7 @@ impl Store {
/// store.insert(&quad)?;
///
/// // iterate over all the quads in the store
/// let results = store.iter().collect::<Result<Vec<_>,_>>()?;
/// let results = store.iter().collect::<Result<Vec<_>, _>>()?;
/// assert_eq!(vec![quad], results);
/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
/// ```
@ -306,8 +317,8 @@ impl Store {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let ex = NamedNodeRef::new("http://example.com")?;
/// let quad = QuadRef::new(ex, ex, ex, ex);
@ -330,13 +341,13 @@ impl Store {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let ex = NamedNodeRef::new("http://example.com")?;
/// let store = Store::new()?;
/// store.insert(QuadRef::new(ex, ex, ex, ex))?;
/// store.insert(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?;
/// store.insert(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?;
/// assert_eq!(2, store.len()?);
/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
/// ```
@ -348,8 +359,8 @@ impl Store {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
/// assert!(store.is_empty()?);
@ -371,8 +382,8 @@ impl Store {
///
/// Usage example:
/// ```
/// use oxigraph::store::{StorageError, Store};
/// use oxigraph::model::*;
/// use oxigraph::store::{StorageError, Store};
///
/// let store = Store::new()?;
/// let a = NamedNodeRef::new("http://example.com/a")?;
@ -399,13 +410,14 @@ impl Store {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
///
/// // insertion
/// store.update("INSERT DATA { <http://example.com> <http://example.com> <http://example.com> }")?;
/// store
/// .update("INSERT DATA { <http://example.com> <http://example.com> <http://example.com> }")?;
///
/// // we inspect the store contents
/// let ex = NamedNodeRef::new("http://example.com")?;
@ -504,15 +516,20 @@ impl Store {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::io::RdfFormat;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
///
/// // insertion
/// let file = b"<http://example.com> <http://example.com> <http://example.com> .";
/// store.load_graph(file.as_ref(), RdfFormat::NTriples, GraphName::DefaultGraph, None)?;
/// store.load_graph(
/// file.as_ref(),
/// RdfFormat::NTriples,
/// GraphName::DefaultGraph,
/// None,
/// )?;
///
/// // we inspect the store contents
/// let ex = NamedNodeRef::new("http://example.com")?;
@ -547,14 +564,15 @@ impl Store {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::io::RdfFormat;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
///
/// // insertion
/// let file = b"<http://example.com> <http://example.com> <http://example.com> <http://example.com> .";
/// let file =
/// b"<http://example.com> <http://example.com> <http://example.com> <http://example.com> .";
/// store.load_dataset(file.as_ref(), RdfFormat::NQuads, None)?;
///
/// // we inspect the store contents
@ -587,8 +605,8 @@ impl Store {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let ex = NamedNodeRef::new("http://example.com")?;
/// let quad = QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph);
@ -624,8 +642,8 @@ impl Store {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let ex = NamedNodeRef::new("http://example.com")?;
/// let quad = QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph);
@ -646,10 +664,12 @@ impl Store {
/// Dumps the store into a file.
///
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::io::RdfFormat;
/// use oxigraph::store::Store;
///
/// let file = "<http://example.com> <http://example.com> <http://example.com> <http://example.com> .\n".as_bytes();
/// let file =
/// "<http://example.com> <http://example.com> <http://example.com> <http://example.com> .\n"
/// .as_bytes();
///
/// let store = Store::new()?;
/// store.load_from_read(RdfFormat::NQuads, file)?;
@ -678,9 +698,9 @@ impl Store {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::io::RdfFormat;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let file = "<http://example.com> <http://example.com> <http://example.com> .\n".as_bytes();
///
@ -709,9 +729,9 @@ impl Store {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::io::RdfFormat;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let file = "<http://example.com> <http://example.com> <http://example.com> .\n".as_bytes();
///
@ -736,10 +756,12 @@ impl Store {
/// Dumps the store into a file.
///
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::io::RdfFormat;
/// use oxigraph::store::Store;
///
/// let file = "<http://example.com> <http://example.com> <http://example.com> <http://example.com> .\n".as_bytes();
/// let file =
/// "<http://example.com> <http://example.com> <http://example.com> <http://example.com> .\n"
/// .as_bytes();
///
/// let store = Store::new()?;
/// store.load_from_read(RdfFormat::NQuads, file)?;
@ -761,14 +783,17 @@ impl Store {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let ex = NamedNode::new("http://example.com")?;
/// let store = Store::new()?;
/// store.insert(QuadRef::new(&ex, &ex, &ex, &ex))?;
/// store.insert(QuadRef::new(&ex, &ex, &ex, GraphNameRef::DefaultGraph))?;
/// assert_eq!(vec![NamedOrBlankNode::from(ex)], store.named_graphs().collect::<Result<Vec<_>,_>>()?);
/// assert_eq!(
/// vec![NamedOrBlankNode::from(ex)],
/// store.named_graphs().collect::<Result<Vec<_>, _>>()?
/// );
/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
/// ```
pub fn named_graphs(&self) -> GraphNameIter {
@ -783,8 +808,8 @@ impl Store {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::{NamedNode, QuadRef};
/// use oxigraph::store::Store;
///
/// let ex = NamedNode::new("http://example.com")?;
/// let store = Store::new()?;
@ -806,14 +831,17 @@ impl Store {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::NamedNodeRef;
/// use oxigraph::store::Store;
///
/// let ex = NamedNodeRef::new("http://example.com")?;
/// let store = Store::new()?;
/// store.insert_named_graph(ex)?;
///
/// assert_eq!(store.named_graphs().collect::<Result<Vec<_>,_>>()?, vec![ex.into_owned().into()]);
/// assert_eq!(
/// store.named_graphs().collect::<Result<Vec<_>, _>>()?,
/// vec![ex.into_owned().into()]
/// );
/// # Result::<_, Box<dyn std::error::Error>>::Ok(())
/// ```
pub fn insert_named_graph<'a>(
@ -828,8 +856,8 @@ impl Store {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::{NamedNodeRef, QuadRef};
/// use oxigraph::store::Store;
///
/// let ex = NamedNodeRef::new("http://example.com")?;
/// let quad = QuadRef::new(ex, ex, ex, ex);
@ -856,8 +884,8 @@ impl Store {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::{NamedNodeRef, QuadRef};
/// use oxigraph::store::Store;
///
/// let ex = NamedNodeRef::new("http://example.com")?;
/// let quad = QuadRef::new(ex, ex, ex, ex);
@ -882,13 +910,13 @@ impl Store {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let ex = NamedNodeRef::new("http://example.com")?;
/// let store = Store::new()?;
/// store.insert(QuadRef::new(ex, ex, ex, ex))?;
/// store.insert(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?;
/// store.insert(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?;
/// assert_eq!(2, store.len()?);
///
/// store.clear()?;
@ -944,15 +972,18 @@ impl Store {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::io::RdfFormat;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
///
/// // quads file insertion
/// let file = b"<http://example.com> <http://example.com> <http://example.com> <http://example.com> .";
/// store.bulk_loader().load_from_read(RdfFormat::NQuads, file.as_ref())?;
/// let file =
/// b"<http://example.com> <http://example.com> <http://example.com> <http://example.com> .";
/// store
/// .bulk_loader()
/// .load_from_read(RdfFormat::NQuads, file.as_ref())?;
///
/// // we inspect the store contents
/// let ex = NamedNodeRef::new("http://example.com")?;
@ -995,16 +1026,23 @@ impl<'a> Transaction<'a> {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::*;
/// use oxigraph::sparql::{EvaluationError, QueryResults};
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
/// store.transaction(|mut transaction| {
/// if let QueryResults::Solutions(solutions) = transaction.query("SELECT ?s WHERE { ?s ?p ?o }")? {
/// if let QueryResults::Solutions(solutions) =
/// transaction.query("SELECT ?s WHERE { ?s ?p ?o }")?
/// {
/// for solution in solutions {
/// if let Some(Term::NamedNode(s)) = solution?.get("s") {
/// transaction.insert(QuadRef::new(s, vocab::rdf::TYPE, NamedNodeRef::new_unchecked("http://example.com"), GraphNameRef::DefaultGraph))?;
/// if let Some(Term::NamedNode(s)) = solution?.get("s") {
/// transaction.insert(QuadRef::new(
/// s,
/// vocab::rdf::TYPE,
/// NamedNodeRef::new_unchecked("http://example.com"),
/// GraphNameRef::DefaultGraph,
/// ))?;
/// }
/// }
/// }
@ -1023,9 +1061,9 @@ impl<'a> Transaction<'a> {
///
/// Usage example with a custom function serializing terms to N-Triples:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::*;
/// use oxigraph::sparql::{EvaluationError, QueryOptions, QueryResults};
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
/// store.transaction(|mut transaction| {
@ -1033,13 +1071,20 @@ impl<'a> Transaction<'a> {
/// "SELECT ?s (<http://www.w3.org/ns/formats/N-Triples>(?s) AS ?nt) WHERE { ?s ?p ?o }",
/// QueryOptions::default().with_custom_function(
/// NamedNode::new_unchecked("http://www.w3.org/ns/formats/N-Triples"),
/// |args| args.get(0).map(|t| Literal::from(t.to_string()).into())
/// )
/// |args| args.get(0).map(|t| Literal::from(t.to_string()).into()),
/// ),
/// )? {
/// for solution in solutions {
/// let solution = solution?;
/// if let (Some(Term::NamedNode(s)), Some(nt)) = (solution.get("s"), solution.get("nt")) {
/// transaction.insert(QuadRef::new(s, NamedNodeRef::new_unchecked("http://example.com/n-triples-representation"), nt, GraphNameRef::DefaultGraph))?;
/// if let (Some(Term::NamedNode(s)), Some(nt)) =
/// (solution.get("s"), solution.get("nt"))
/// {
/// transaction.insert(QuadRef::new(
/// s,
/// NamedNodeRef::new_unchecked("http://example.com/n-triples-representation"),
/// nt,
/// GraphNameRef::DefaultGraph,
/// ))?;
/// }
/// }
/// }
@ -1060,8 +1105,8 @@ impl<'a> Transaction<'a> {
///
/// Usage example:
/// ```
/// use oxigraph::store::{StorageError, Store};
/// use oxigraph::model::*;
/// use oxigraph::store::{StorageError, Store};
///
/// let store = Store::new()?;
/// let a = NamedNodeRef::new("http://example.com/a")?;
@ -1123,14 +1168,16 @@ impl<'a> Transaction<'a> {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::*;
/// use oxigraph::sparql::EvaluationError;
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
/// store.transaction(|mut transaction| {
/// // insertion
/// transaction.update("INSERT DATA { <http://example.com> <http://example.com> <http://example.com> }")?;
/// transaction.update(
/// "INSERT DATA { <http://example.com> <http://example.com> <http://example.com> }",
/// )?;
///
/// // we inspect the store contents
/// let ex = NamedNodeRef::new_unchecked("http://example.com");
@ -1210,16 +1257,21 @@ impl<'a> Transaction<'a> {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::io::RdfFormat;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
///
/// // insertion
/// let file = b"<http://example.com> <http://example.com> <http://example.com> .";
/// store.transaction(|mut transaction| {
/// transaction.load_graph(file.as_ref(), RdfFormat::NTriples, GraphName::DefaultGraph, None)
/// transaction.load_graph(
/// file.as_ref(),
/// RdfFormat::NTriples,
/// GraphName::DefaultGraph,
/// None,
/// )
/// })?;
///
/// // we inspect the store contents
@ -1253,14 +1305,15 @@ impl<'a> Transaction<'a> {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::io::RdfFormat;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
///
/// // insertion
/// let file = b"<http://example.com> <http://example.com> <http://example.com> <http://example.com> .";
/// let file =
/// b"<http://example.com> <http://example.com> <http://example.com> <http://example.com> .";
/// store.transaction(|mut transaction| {
/// transaction.load_dataset(file.as_ref(), RdfFormat::NQuads, None)
/// })?;
@ -1295,16 +1348,14 @@ impl<'a> Transaction<'a> {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let ex = NamedNodeRef::new_unchecked("http://example.com");
/// let quad = QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph);
///
/// let store = Store::new()?;
/// store.transaction(|mut transaction| {
/// transaction.insert(quad)
/// })?;
/// store.transaction(|mut transaction| transaction.insert(quad))?;
/// assert!(store.contains(quad)?);
/// # Result::<_,oxigraph::store::StorageError>::Ok(())
/// ```
@ -1329,8 +1380,8 @@ impl<'a> Transaction<'a> {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let ex = NamedNodeRef::new_unchecked("http://example.com");
/// let quad = QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph);
@ -1371,15 +1422,16 @@ impl<'a> Transaction<'a> {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::NamedNodeRef;
/// use oxigraph::store::Store;
///
/// let ex = NamedNodeRef::new_unchecked("http://example.com");
/// let store = Store::new()?;
/// store.transaction(|mut transaction| {
/// transaction.insert_named_graph(ex)
/// })?;
/// assert_eq!(store.named_graphs().collect::<Result<Vec<_>,_>>()?, vec![ex.into_owned().into()]);
/// store.transaction(|mut transaction| transaction.insert_named_graph(ex))?;
/// assert_eq!(
/// store.named_graphs().collect::<Result<Vec<_>, _>>()?,
/// vec![ex.into_owned().into()]
/// );
/// # Result::<_,oxigraph::store::StorageError>::Ok(())
/// ```
pub fn insert_named_graph<'b>(
@ -1393,8 +1445,8 @@ impl<'a> Transaction<'a> {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::{NamedNodeRef, QuadRef};
/// use oxigraph::store::Store;
///
/// let ex = NamedNodeRef::new_unchecked("http://example.com");
/// let quad = QuadRef::new(ex, ex, ex, ex);
@ -1420,8 +1472,8 @@ impl<'a> Transaction<'a> {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::{NamedNodeRef, QuadRef};
/// use oxigraph::store::Store;
///
/// let ex = NamedNodeRef::new_unchecked("http://example.com");
/// let quad = QuadRef::new(ex, ex, ex, ex);
@ -1445,8 +1497,8 @@ impl<'a> Transaction<'a> {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let ex = NamedNodeRef::new_unchecked("http://example.com");
/// let store = Store::new()?;
@ -1518,15 +1570,18 @@ impl Iterator for GraphNameIter {
///
/// Usage example with loading a dataset:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::io::RdfFormat;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
///
/// // quads file insertion
/// let file = b"<http://example.com> <http://example.com> <http://example.com> <http://example.com> .";
/// store.bulk_loader().load_from_read(RdfFormat::NQuads, file.as_ref())?;
/// let file =
/// b"<http://example.com> <http://example.com> <http://example.com> <http://example.com> .";
/// store
/// .bulk_loader()
/// .load_from_read(RdfFormat::NQuads, file.as_ref())?;
///
/// // we inspect the store contents
/// let ex = NamedNodeRef::new("http://example.com")?;
@ -1684,15 +1739,18 @@ impl BulkLoader {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::io::RdfFormat;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
///
/// // insertion
/// let file = b"<http://example.com> <http://example.com> <http://example.com> <http://example.com> .";
/// store.bulk_loader().load_dataset(file.as_ref(), RdfFormat::NQuads, None)?;
/// let file =
/// b"<http://example.com> <http://example.com> <http://example.com> <http://example.com> .";
/// store
/// .bulk_loader()
/// .load_dataset(file.as_ref(), RdfFormat::NQuads, None)?;
///
/// // we inspect the store contents
/// let ex = NamedNodeRef::new("http://example.com")?;
@ -1745,15 +1803,20 @@ impl BulkLoader {
///
/// Usage example:
/// ```
/// use oxigraph::store::Store;
/// use oxigraph::io::RdfFormat;
/// use oxigraph::model::*;
/// use oxigraph::store::Store;
///
/// let store = Store::new()?;
///
/// // insertion
/// let file = b"<http://example.com> <http://example.com> <http://example.com> .";
/// store.bulk_loader().load_graph(file.as_ref(), RdfFormat::NTriples, GraphName::DefaultGraph, None)?;
/// store.bulk_loader().load_graph(
/// file.as_ref(),
/// RdfFormat::NTriples,
/// GraphName::DefaultGraph,
/// None,
/// )?;
///
/// // we inspect the store contents
/// let ex = NamedNodeRef::new("http://example.com")?;

@ -199,31 +199,26 @@ impl PyRdfFormat {
const N3: Self = Self {
inner: RdfFormat::N3,
};
/// `N-Quads <https://www.w3.org/TR/n-quads/>`_
#[classattr]
const N_QUADS: Self = Self {
inner: RdfFormat::NQuads,
};
/// `N-Triples <https://www.w3.org/TR/n-triples/>`_
#[classattr]
const N_TRIPLES: Self = Self {
inner: RdfFormat::NTriples,
};
/// `RDF/XML <https://www.w3.org/TR/rdf-syntax-grammar/>`_
#[classattr]
const RDF_XML: Self = Self {
inner: RdfFormat::RdfXml,
};
/// `TriG <https://www.w3.org/TR/trig/>`_
#[classattr]
const TRIG: Self = Self {
inner: RdfFormat::TriG,
};
/// `Turtle <https://www.w3.org/TR/turtle/>`_
#[classattr]
const TURTLE: Self = Self {

@ -6,8 +6,7 @@ use pyo3::prelude::*;
use pyo3::types::{PyDict, PyTuple};
use pyo3::PyTypeInfo;
use std::collections::hash_map::DefaultHasher;
use std::hash::Hash;
use std::hash::Hasher;
use std::hash::{Hash, Hasher};
use std::vec::IntoIter;
/// An RDF `node identified by an IRI <https://www.w3.org/TR/rdf11-concepts/#dfn-iri>`_.
@ -345,7 +344,6 @@ impl PyLiteral {
/// >>> Literal('example', language='en').language
/// 'en'
/// >>> Literal('example').language
///
#[getter]
fn language(&self) -> Option<&str> {
self.inner.language()

@ -515,29 +515,26 @@ pub struct PyQueryResultsFormat {
#[pymethods]
impl PyQueryResultsFormat {
/// `SPARQL Query Results XML Format <https://www.w3.org/TR/rdf-sparql-XMLres/>`_
/// `SPARQL Query Results CSV Format <https://www.w3.org/TR/sparql11-results-csv-tsv/>`_
#[classattr]
const XML: Self = Self {
inner: QueryResultsFormat::Xml,
const CSV: Self = Self {
inner: QueryResultsFormat::Csv,
};
/// `SPARQL Query Results JSON Format <https://www.w3.org/TR/sparql11-results-json/>`_
#[classattr]
const JSON: Self = Self {
inner: QueryResultsFormat::Json,
};
/// `SPARQL Query Results CSV Format <https://www.w3.org/TR/sparql11-results-csv-tsv/>`_
#[classattr]
const CSV: Self = Self {
inner: QueryResultsFormat::Csv,
};
/// `SPARQL Query Results TSV Format <https://www.w3.org/TR/sparql11-results-csv-tsv/>`_
#[classattr]
const TSV: Self = Self {
inner: QueryResultsFormat::Tsv,
};
/// `SPARQL Query Results XML Format <https://www.w3.org/TR/rdf-sparql-XMLres/>`_
#[classattr]
const XML: Self = Self {
inner: QueryResultsFormat::Xml,
};
/// :return: the format canonical IRI according to the `Unique URIs for file formats registry <https://www.w3.org/ns/formats/>`_.
/// :rtype: str

@ -1,3 +1,11 @@
force_explicit_abi = true
format_code_in_doc_comments = true
format_macro_matchers = true
imports_granularity = "Module"
newline_style = "Unix"
normalize_comments = true
normalize_doc_attributes = true
reorder_impl_items = true
group_imports = "One"
use_field_init_shorthand = true
use_try_shorthand = true
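For context: several of the options listed above (imports_granularity, group_imports, format_code_in_doc_comments, normalize_comments, normalize_doc_attributes, reorder_impl_items, format_macro_matchers) are still nightly-only, which is why this formatting pass is run with nightly rustfmt (`cargo +nightly fmt`). Most of the import churn in this diff comes from two of them: imports_granularity = "Module" merges `use` statements that share a module path, and group_imports = "One" folds the std / external-crate / local groups into a single sorted block. A minimal sketch of the effect on hypothetical imports (not code from this repository; `rand` is assumed to be available as a dependency, as it already is for oxrdf):

    // Hypothetical example; assumes `rand` is declared in Cargo.toml.
    //
    // Hand-written imports before formatting might look like this:
    //
    //     use std::hash::Hash;
    //     use std::hash::Hasher;
    //
    //     use rand::random;
    //
    // With imports_granularity = "Module" and group_imports = "One",
    // `cargo +nightly fmt` rewrites them as one sorted group, merging
    // paths at module granularity:
    use rand::random;
    use std::collections::hash_map::DefaultHasher;
    use std::hash::{Hash, Hasher};

    /// Hashes a random value; only here so the merged imports have a use site.
    fn hash_random() -> u64 {
        let mut hasher = DefaultHasher::new();
        random::<u32>().hash(&mut hasher);
        hasher.finish()
    }

    fn main() {
        println!("{}", hash_random());
    }

This is the same transformation visible in the `std::hash` import hunk above, where `use std::hash::Hash;` and `use std::hash::Hasher;` collapse into `use std::hash::{Hash, Hasher};`.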

@ -77,11 +77,11 @@ pub fn register_parser_tests(evaluator: &mut TestEvaluator) {
);
evaluator.register(
"https://w3c.github.io/rdf-canon/tests/vocab#RDFC10NegativeEvalTest",
|_| Ok(()), //TODO: not a proper implementation
|_| Ok(()), // TODO: not a proper implementation
);
evaluator.register(
"https://w3c.github.io/rdf-canon/tests/vocab#RDFC10MapTest",
|_| Ok(()), //TODO: not a proper implementation
|_| Ok(()), // TODO: not a proper implementation
);
evaluator.register(
"https://github.com/oxigraph/oxigraph/tests#TestNTripleRecovery",

@ -8,7 +8,7 @@ fn sparql10_w3c_query_syntax_testsuite() -> Result<()> {
check_testsuite(
"https://w3c.github.io/rdf-tests/sparql/sparql10/manifest-syntax.ttl",
&[
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/syntax-sparql3/manifest#syn-bad-26", // tokenizer
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/syntax-sparql3/manifest#syn-bad-26", /* tokenizer */
],
)
}
@ -53,9 +53,9 @@ fn sparql11_query_w3c_evaluation_testsuite() -> Result<()> {
check_testsuite(
"https://w3c.github.io/rdf-tests/sparql/sparql11/manifest-sparql11-query.ttl",
&[
//BNODE() scope is currently wrong
// BNODE() scope is currently wrong
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/functions/manifest#bnode01",
//SERVICE name from a BGP
// SERVICE name from a BGP
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/service/manifest#service5",
],
)
