Merge branch 'oxigraph:main' into main

pull/747/head
etiennept authored 1 year ago, committed by GitHub
commit 467cacc0bf
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
Changed files (number of changed lines in parentheses):

1. Cargo.lock (14)
2. README.md (3)
3. cli/Cargo.toml (4)
4. cli/src/main.rs (40)
5. js/Cargo.toml (2)
6. js/src/model.rs (17)
7. js/src/store.rs (2)
8. lib/Cargo.toml (6)
9. lib/oxrdf/src/dataset.rs (12)
10. lib/oxrdf/src/graph.rs (4)
11. lib/oxrdf/src/interning.rs (4)
12. lib/oxrdf/src/literal.rs (6)
13. lib/oxrdf/src/parser.rs (22)
14. lib/oxrdf/src/vocab.rs (9)
15. lib/oxrdfio/Cargo.toml (4)
16. lib/oxrdfio/src/error.rs (6)
17. lib/oxrdfio/src/parser.rs (2)
18. lib/oxrdfxml/README.md (32)
19. lib/oxrdfxml/src/parser.rs (86)
20. lib/oxsdatatypes/README.md (2)
21. lib/oxsdatatypes/src/boolean.rs (2)
22. lib/oxsdatatypes/src/date_time.rs (40)
23. lib/oxsdatatypes/src/decimal.rs (12)
24. lib/oxsdatatypes/src/double.rs (2)
25. lib/oxsdatatypes/src/duration.rs (42)
26. lib/oxsdatatypes/src/float.rs (2)
27. lib/oxsdatatypes/src/integer.rs (6)
28. lib/oxttl/Cargo.toml (2)
29. lib/oxttl/src/lexer.rs (25)
30. lib/oxttl/src/line_formats.rs (2)
31. lib/oxttl/src/n3.rs (58)
32. lib/oxttl/src/nquads.rs (32)
33. lib/oxttl/src/ntriples.rs (34)
34. lib/oxttl/src/terse.rs (2)
35. lib/oxttl/src/toolkit/error.rs (2)
36. lib/oxttl/src/trig.rs (56)
37. lib/oxttl/src/turtle.rs (56)
38. lib/sparesults/src/parser.rs (12)
39. lib/sparesults/src/serializer.rs (16)
40. lib/sparesults/src/solution.rs (14)
41. lib/spargebra/src/query.rs (6)
42. lib/spargebra/src/term.rs (26)
43. lib/spargebra/src/update.rs (6)
44. lib/sparopt/src/algebra.rs (32)
45. lib/src/io/format.rs (4)
46. lib/src/io/read.rs (4)
47. lib/src/sparql/algebra.rs (12)
48. lib/src/sparql/eval.rs (28)
49. lib/src/sparql/model.rs (6)
50. lib/src/sparql/results.rs (4)
51. lib/src/sparql/service.rs (16)
52. lib/src/storage/backend/rocksdb.rs (4)
53. lib/src/storage/error.rs (4)
54. lib/src/storage/mod.rs (6)
55. lib/src/storage/small_string.rs (10)
56. lib/src/store.rs (4)
57. lints/test_debian_compatibility.py (36)
58. oxrocksdb-sys/Cargo.toml (2)
59. python/Cargo.toml (2)
60. testsuite/oxigraph-tests/parser/escaped_trailing_dot.nq (1)
61. testsuite/oxigraph-tests/parser/escaped_trailing_dot.nt (1)
62. testsuite/oxigraph-tests/parser/escaped_trailing_dot.trig (2)
63. testsuite/oxigraph-tests/parser/escaped_trailing_dot.ttl (2)
64. testsuite/oxigraph-tests/parser/manifest.ttl (14)
65. testsuite/src/manifest.rs (4)
66. testsuite/src/sparql_evaluator.rs (2)

Cargo.lock (generated, 14 changes)

@@ -1033,7 +1033,7 @@ dependencies = [
 [[package]]
 name = "oxigraph"
-version = "0.4.0-alpha.2"
+version = "0.4.0-alpha.3-dev"
 dependencies = [
 "codspeed-criterion-compat",
 "digest",
@@ -1063,7 +1063,7 @@ dependencies = [
 [[package]]
 name = "oxigraph-cli"
-version = "0.4.0-alpha.2"
+version = "0.4.0-alpha.3-dev"
 dependencies = [
 "anyhow",
 "assert_cmd",
@@ -1082,7 +1082,7 @@ dependencies = [
 [[package]]
 name = "oxigraph-js"
-version = "0.4.0-alpha.2"
+version = "0.4.0-alpha.3-dev"
 dependencies = [
 "console_error_panic_hook",
 "js-sys",
@@ -1129,7 +1129,7 @@ dependencies = [
 [[package]]
 name = "oxrdfio"
-version = "0.1.0-alpha.1"
+version = "0.1.0-alpha.2-dev"
 dependencies = [
 "oxrdf",
 "oxrdfxml",
@@ -1150,7 +1150,7 @@ dependencies = [
 [[package]]
 name = "oxrocksdb-sys"
-version = "0.4.0-alpha.2"
+version = "0.4.0-alpha.3-dev"
 dependencies = [
 "bindgen",
 "cc",
@@ -1167,7 +1167,7 @@ dependencies = [
 [[package]]
 name = "oxttl"
-version = "0.1.0-alpha.1"
+version = "0.1.0-alpha.2-dev"
 dependencies = [
 "memchr",
 "oxilangtag",
@@ -1403,7 +1403,7 @@ dependencies = [
 [[package]]
 name = "pyoxigraph"
-version = "0.4.0-alpha.2"
+version = "0.4.0-alpha.3-dev"
 dependencies = [
 "oxigraph",
 "pyo3",

README.md

@@ -4,7 +4,8 @@
 [![Released API docs](https://docs.rs/oxigraph/badge.svg)](https://docs.rs/oxigraph)
 [![PyPI](https://img.shields.io/pypi/v/pyoxigraph)](https://pypi.org/project/pyoxigraph/)
 [![npm](https://img.shields.io/npm/v/oxigraph)](https://www.npmjs.com/package/oxigraph)
-[![actions status](https://github.com/oxigraph/oxigraph/workflows/build/badge.svg)](https://github.com/oxigraph/oxigraph/actions)
+[![tests status](https://github.com/oxigraph/oxigraph/actions/workflows/tests.yml/badge.svg)](https://github.com/oxigraph/oxigraph/actions)
+[![artifacts status](https://github.com/oxigraph/oxigraph/actions/workflows/artifacts.yml/badge.svg)](https://github.com/oxigraph/oxigraph/actions)
 [![dependency status](https://deps.rs/repo/github/oxigraph/oxigraph/status.svg)](https://deps.rs/repo/github/oxigraph/oxigraph)
 [![Gitter](https://badges.gitter.im/oxigraph/community.svg)](https://gitter.im/oxigraph/community)
 [![Twitter URL](https://img.shields.io/twitter/url?style=social&url=https%3A%2F%2Ftwitter.com%2Foxigraph)](https://twitter.com/oxigraph)

cli/Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "oxigraph-cli"
-version = "0.4.0-alpha.2"
+version = "0.4.0-alpha.3-dev"
 authors = ["Tpt <thomas@pellissier-tanon.fr>"]
 license = "MIT OR Apache-2.0"
 readme = "README.md"
@@ -29,7 +29,7 @@ rustls-webpki = ["oxigraph/http-client-rustls-webpki"]
 anyhow = "1.0.72"
 oxhttp = { version = "0.2.0-alpha.3", features = ["flate2"] }
 clap = { version = "4.0", features = ["derive"] }
-oxigraph = { version = "0.4.0-alpha.2", path = "../lib" }
+oxigraph = { version = "0.4.0-alpha.3-dev", path = "../lib" }
 rand = "0.8"
 url = "2.4"
 oxiri = "0.2.3-alpha.1"

cli/src/main.rs

@@ -2354,7 +2354,7 @@ mod tests {
 .build();
 ServerTest::new()?.test_body(
 request,
-"{\"head\":{\"vars\":[\"s\",\"p\",\"o\"]},\"results\":{\"bindings\":[]}}",
+r#"{"head":{"vars":["s","p","o"]},"results":{"bindings":[]}}"#,
 )
 }
@@ -2369,7 +2369,7 @@ mod tests {
 .build();
 ServerTest::new()?.test_body(
 request,
-"{\"head\":{\"vars\":[\"s\",\"p\",\"o\"]},\"results\":{\"bindings\":[]}}",
+r#"{"head":{"vars":["s","p","o"]},"results":{"bindings":[]}}"#,
 )
 }
@@ -2387,7 +2387,7 @@ mod tests {
 .build();
 ServerTest::new()?.test_body(
 request,
-"{\"head\":{\"vars\":[\"s\",\"p\",\"o\"]},\"results\":{\"bindings\":[]}}",
+r#"{"head":{"vars":["s","p","o"]},"results":{"bindings":[]}}"#,
 )
 }
@@ -2414,7 +2414,7 @@ mod tests {
 .build();
 ServerTest::new()?.test_body(
 request,
-"{\"head\":{\"vars\":[\"s\",\"p\",\"o\"]},\"results\":{\"bindings\":[]}}",
+r#"{"head":{"vars":["s","p","o"]},"results":{"bindings":[]}}"#,
 )
 }
@@ -2429,7 +2429,7 @@ mod tests {
 .build();
 ServerTest::new()?.test_body(
 request,
-"{\"head\":{\"vars\":[\"s\",\"p\",\"o\"]},\"results\":{\"bindings\":[]}}",
+r#"{"head":{"vars":["s","p","o"]},"results":{"bindings":[]}}"#,
 )
 }
 #[test]
@@ -2679,16 +2679,16 @@ mod tests {
 let request = Request::builder(Method::PUT, "http://localhost/store/person/1.ttl".parse()?)
 .with_header(HeaderName::CONTENT_TYPE, "text/turtle; charset=utf-8")?
 .with_body(
-"
+r#"
 @prefix foaf: <http://xmlns.com/foaf/0.1/> .
 @prefix v: <http://www.w3.org/2006/vcard/ns#> .
 <http://$HOST$/$GRAPHSTORE$/person/1> a foaf:Person;
 foaf:businessCard [
 a v:VCard;
-v:fn \"John Doe\"
+v:fn "John Doe"
 ].
-",
+"#,
 );
 server.test_status(request, Status::CREATED)?;
@@ -2717,16 +2717,16 @@ mod tests {
 let request = Request::builder(Method::PUT, "http://localhost/store/person/1.ttl".parse()?)
 .with_header(HeaderName::CONTENT_TYPE, "text/turtle; charset=utf-8")?
 .with_body(
-"
+r#"
 @prefix foaf: <http://xmlns.com/foaf/0.1/> .
 @prefix v: <http://www.w3.org/2006/vcard/ns#> .
 <http://$HOST$/$GRAPHSTORE$/person/1> a foaf:Person;
 foaf:businessCard [
 a v:VCard;
-v:fn \"Jane Doe\"
+v:fn "Jane Doe"
 ].
-",
+"#,
 );
 server.test_status(request, Status::NO_CONTENT)?;
@@ -2740,16 +2740,16 @@ mod tests {
 let request = Request::builder(Method::PUT, "http://localhost/store?default".parse()?)
 .with_header(HeaderName::CONTENT_TYPE, "text/turtle; charset=utf-8")?
 .with_body(
-"
+r#"
 @prefix foaf: <http://xmlns.com/foaf/0.1/> .
 @prefix v: <http://www.w3.org/2006/vcard/ns#> .
 [] a foaf:Person;
 foaf:businessCard [
 a v:VCard;
-v:given-name \"Alice\"
+v:given-name "Alice"
 ] .
-",
+"#,
 );
 server.test_status(request, Status::NO_CONTENT)?; // The default graph always exists in Oxigraph
@@ -2781,16 +2781,16 @@ mod tests {
 let request = Request::builder(Method::PUT, "http://localhost/store/person/2.ttl".parse()?)
 .with_header(HeaderName::CONTENT_TYPE, "text/turtle; charset=utf-8")?
 .with_body(
-"
+r#"
 @prefix foaf: <http://xmlns.com/foaf/0.1/> .
 @prefix v: <http://www.w3.org/2006/vcard/ns#> .
 [] a foaf:Person;
 foaf:businessCard [
 a v:VCard;
-v:given-name \"Alice\"
+v:given-name "Alice"
 ] .
-",
+"#,
 );
 server.test_status(request, Status::NO_CONTENT)?;
@@ -2839,16 +2839,16 @@ mod tests {
 let request = Request::builder(Method::POST, "http://localhost/store".parse()?)
 .with_header(HeaderName::CONTENT_TYPE, "text/turtle; charset=utf-8")?
 .with_body(
-"
+r#"
 @prefix foaf: <http://xmlns.com/foaf/0.1/> .
 @prefix v: <http://www.w3.org/2006/vcard/ns#> .
 [] a foaf:Person;
 foaf:businessCard [
 a v:VCard;
-v:given-name \"Alice\"
+v:given-name "Alice"
 ] .
-",
+"#,
 );
 let response = server.exec(request);
 assert_eq!(response.status(), Status::CREATED);

js/Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "oxigraph-js"
-version = "0.4.0-alpha.2"
+version = "0.4.0-alpha.3-dev"
 authors = ["Tpt <thomas@pellissier-tanon.fr>"]
 license = "MIT OR Apache-2.0"
 readme = "README.md"

js/src/model.rs

@@ -1,9 +1,4 @@
-#![allow(
-dead_code,
-clippy::inherent_to_string,
-clippy::unused_self,
-clippy::use_self
-)]
+#![allow(dead_code, clippy::inherent_to_string, clippy::unused_self)]
 use crate::format_err;
 use crate::utils::to_err;
@@ -564,7 +559,7 @@ impl From<Quad> for JsTerm {
 impl TryFrom<JsTerm> for NamedNode {
 type Error = JsValue;
-fn try_from(value: JsTerm) -> Result<Self, JsValue> {
+fn try_from(value: JsTerm) -> Result<Self, Self::Error> {
 match value {
 JsTerm::NamedNode(node) => Ok(node.into()),
 JsTerm::BlankNode(node) => Err(format_err!(
@@ -588,7 +583,7 @@ impl TryFrom<JsTerm> for NamedNode {
 impl TryFrom<JsTerm> for NamedOrBlankNode {
 type Error = JsValue;
-fn try_from(value: JsTerm) -> Result<Self, JsValue> {
+fn try_from(value: JsTerm) -> Result<Self, Self::Error> {
 match value {
 JsTerm::NamedNode(node) => Ok(node.into()),
 JsTerm::BlankNode(node) => Ok(node.into()),
@@ -614,7 +609,7 @@ impl TryFrom<JsTerm> for NamedOrBlankNode {
 impl TryFrom<JsTerm> for Subject {
 type Error = JsValue;
-fn try_from(value: JsTerm) -> Result<Self, JsValue> {
+fn try_from(value: JsTerm) -> Result<Self, Self::Error> {
 match value {
 JsTerm::NamedNode(node) => Ok(node.into()),
 JsTerm::BlankNode(node) => Ok(node.into()),
@@ -637,7 +632,7 @@ impl TryFrom<JsTerm> for Subject {
 impl TryFrom<JsTerm> for Term {
 type Error = JsValue;
-fn try_from(value: JsTerm) -> Result<Self, JsValue> {
+fn try_from(value: JsTerm) -> Result<Self, Self::Error> {
 match value {
 JsTerm::NamedNode(node) => Ok(node.into()),
 JsTerm::BlankNode(node) => Ok(node.into()),
@@ -657,7 +652,7 @@ impl TryFrom<JsTerm> for Term {
 impl TryFrom<JsTerm> for GraphName {
 type Error = JsValue;
-fn try_from(value: JsTerm) -> Result<Self, JsValue> {
+fn try_from(value: JsTerm) -> Result<Self, Self::Error> {
 match value {
 JsTerm::NamedNode(node) => Ok(node.into()),
 JsTerm::BlankNode(node) => Ok(node.into()),

js/src/store.rs

@@ -1,5 +1,3 @@
-#![allow(clippy::use_self)]
 use crate::format_err;
 use crate::model::*;
 use crate::utils::to_err;

lib/Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "oxigraph"
-version = "0.4.0-alpha.2"
+version = "0.4.0-alpha.3-dev"
 authors = ["Tpt <thomas@pellissier-tanon.fr>"]
 license = "MIT OR Apache-2.0"
 readme = "README.md"
@@ -33,7 +33,7 @@ md-5 = "0.10"
 oxilangtag = "0.1"
 oxiri = "0.2.3-alpha.1"
 oxrdf = { version = "0.2.0-alpha.1", path = "oxrdf", features = ["rdf-star", "oxsdatatypes"] }
-oxrdfio = { version = "0.1.0-alpha.1", path = "oxrdfio", features = ["rdf-star"] }
+oxrdfio = { version = "0.1.0-alpha.2-dev", path = "oxrdfio", features = ["rdf-star"] }
 oxsdatatypes = { version = "0.2.0-alpha.1", path = "oxsdatatypes" }
 rand = "0.8"
 regex = "1.7"
@@ -46,7 +46,7 @@ sparopt = { version = "0.1.0-alpha.1", path = "sparopt", features = ["rdf-star",
 [target.'cfg(not(target_family = "wasm"))'.dependencies]
 libc = "0.2.147"
-oxrocksdb-sys = { version = "0.4.0-alpha.2", path = "../oxrocksdb-sys" }
+oxrocksdb-sys = { version = "0.4.0-alpha.3-dev", path = "../oxrocksdb-sys" }
 oxhttp = { version = "0.2.0-alpha.3", optional = true }
 [target.'cfg(all(target_family = "wasm", target_os = "unknown"))'.dependencies]

lib/oxrdf/src/dataset.rs

@@ -927,7 +927,7 @@ impl<'a> IntoIterator for &'a Dataset {
 type Item = QuadRef<'a>;
 type IntoIter = Iter<'a>;
-fn into_iter(self) -> Iter<'a> {
+fn into_iter(self) -> Self::IntoIter {
 self.iter()
 }
 }
@@ -1285,7 +1285,7 @@ impl<'a> IntoIterator for GraphView<'a> {
 type Item = TripleRef<'a>;
 type IntoIter = GraphViewIter<'a>;
-fn into_iter(self) -> GraphViewIter<'a> {
+fn into_iter(self) -> Self::IntoIter {
 self.iter()
 }
 }
@@ -1294,7 +1294,7 @@ impl<'a, 'b> IntoIterator for &'b GraphView<'a> {
 type Item = TripleRef<'a>;
 type IntoIter = GraphViewIter<'a>;
-fn into_iter(self) -> GraphViewIter<'a> {
+fn into_iter(self) -> Self::IntoIter {
 self.iter()
 }
 }
@@ -1496,7 +1496,7 @@ impl<'a> IntoIterator for &'a GraphViewMut<'a> {
 type Item = TripleRef<'a>;
 type IntoIter = GraphViewIter<'a>;
-fn into_iter(self) -> GraphViewIter<'a> {
+fn into_iter(self) -> Self::IntoIter {
 self.iter()
 }
 }
@@ -1527,7 +1527,7 @@ pub struct Iter<'a> {
 impl<'a> Iterator for Iter<'a> {
 type Item = QuadRef<'a>;
-fn next(&mut self) -> Option<QuadRef<'a>> {
+fn next(&mut self) -> Option<Self::Item> {
 self.inner
 .next()
 .map(|(s, p, o, g)| self.dataset.decode_spog((s, p, o, g)))
@@ -1551,7 +1551,7 @@ pub struct GraphViewIter<'a> {
 impl<'a> Iterator for GraphViewIter<'a> {
 type Item = TripleRef<'a>;
-fn next(&mut self) -> Option<TripleRef<'a>> {
+fn next(&mut self) -> Option<Self::Item> {
 self.inner
 .next()
 .map(|(_, s, p, o)| self.dataset.decode_spo((s, p, o)))

lib/oxrdf/src/graph.rs

@@ -229,7 +229,7 @@ impl<'a> IntoIterator for &'a Graph {
 type Item = TripleRef<'a>;
 type IntoIter = Iter<'a>;
-fn into_iter(self) -> Iter<'a> {
+fn into_iter(self) -> Self::IntoIter {
 self.iter()
 }
 }
@@ -276,7 +276,7 @@ pub struct Iter<'a> {
 impl<'a> Iterator for Iter<'a> {
 type Item = TripleRef<'a>;
-fn next(&mut self) -> Option<TripleRef<'a>> {
+fn next(&mut self) -> Option<Self::Item> {
 self.inner.next()
 }
 }

lib/oxrdf/src/interning.rs

@@ -510,8 +510,8 @@ struct IdentityHasherBuilder;
 impl BuildHasher for IdentityHasherBuilder {
 type Hasher = IdentityHasher;
-fn build_hasher(&self) -> IdentityHasher {
-IdentityHasher::default()
+fn build_hasher(&self) -> Self::Hasher {
+Self::Hasher::default()
 }
 }

lib/oxrdf/src/literal.rs

@@ -24,12 +24,12 @@ use std::option::Option;
 /// );
 ///
 /// assert_eq!(
-/// "\"1999-01-01\"^^<http://www.w3.org/2001/XMLSchema#date>",
+/// r#""1999-01-01"^^<http://www.w3.org/2001/XMLSchema#date>"#,
 /// Literal::new_typed_literal("1999-01-01", xsd::DATE).to_string()
 /// );
 ///
 /// assert_eq!(
-/// "\"foo\"@en",
+/// r#""foo"@en"#,
 /// Literal::new_language_tagged_literal("foo", "en")?.to_string()
 /// );
 /// # Result::<(), LanguageTagParseError>::Ok(())
@@ -436,7 +436,7 @@ impl From<DayTimeDuration> for Literal {
 /// );
 ///
 /// assert_eq!(
-/// "\"1999-01-01\"^^<http://www.w3.org/2001/XMLSchema#date>",
+/// r#""1999-01-01"^^<http://www.w3.org/2001/XMLSchema#date>"#,
 /// LiteralRef::new_typed_literal("1999-01-01", xsd::DATE).to_string()
 /// );
 /// ```

lib/oxrdf/src/parser.rs

@@ -25,10 +25,10 @@ impl FromStr for NamedNode {
 ///
 /// assert_eq!(NamedNode::from_str("<http://example.com>").unwrap(), NamedNode::new("http://example.com").unwrap())
 /// ```
-fn from_str(s: &str) -> Result<Self, TermParseError> {
+fn from_str(s: &str) -> Result<Self, Self::Err> {
 let (term, left) = read_named_node(s)?;
 if !left.is_empty() {
-return Err(TermParseError::msg(
+return Err(Self::Err::msg(
 "Named node serialization should end with a >",
 ));
 }
@@ -47,10 +47,10 @@ impl FromStr for BlankNode {
 ///
 /// assert_eq!(BlankNode::from_str("_:ex").unwrap(), BlankNode::new("ex").unwrap())
 /// ```
-fn from_str(s: &str) -> Result<Self, TermParseError> {
+fn from_str(s: &str) -> Result<Self, Self::Err> {
 let (term, left) = read_blank_node(s)?;
 if !left.is_empty() {
-return Err(TermParseError::msg(
+return Err(Self::Err::msg(
 "Blank node serialization should not contain whitespaces",
 ));
 }
@@ -75,10 +75,10 @@ impl FromStr for Literal {
 /// assert_eq!(Literal::from_str("-122.23").unwrap(), Literal::new_typed_literal("-122.23", xsd::DECIMAL));
 /// assert_eq!(Literal::from_str("-122e+1").unwrap(), Literal::new_typed_literal("-122e+1", xsd::DOUBLE));
 /// ```
-fn from_str(s: &str) -> Result<Self, TermParseError> {
+fn from_str(s: &str) -> Result<Self, Self::Err> {
 let (term, left) = read_literal(s)?;
 if !left.is_empty() {
-return Err(TermParseError::msg("Invalid literal serialization"));
+return Err(Self::Err::msg("Invalid literal serialization"));
 }
 Ok(term)
 }
@@ -100,10 +100,10 @@ impl FromStr for Term {
 /// Literal::new_simple_literal("o")
 /// ).into());
 /// ```
-fn from_str(s: &str) -> Result<Self, TermParseError> {
+fn from_str(s: &str) -> Result<Self, Self::Err> {
 let (term, left) = read_term(s, 0)?;
 if !left.is_empty() {
-return Err(TermParseError::msg("Invalid term serialization"));
+return Err(Self::Err::msg("Invalid term serialization"));
 }
 Ok(term)
 }
@@ -120,13 +120,13 @@ impl FromStr for Variable {
 ///
 /// assert_eq!(Variable::from_str("$foo").unwrap(), Variable::new("foo").unwrap())
 /// ```
-fn from_str(s: &str) -> Result<Self, TermParseError> {
+fn from_str(s: &str) -> Result<Self, Self::Err> {
 if !s.starts_with('?') && !s.starts_with('$') {
-return Err(TermParseError::msg(
+return Err(Self::Err::msg(
 "Variable serialization should start with ? or $",
 ));
 }
-Self::new(&s[1..]).map_err(|error| TermParseError {
+Self::new(&s[1..]).map_err(|error| Self::Err {
 kind: TermParseErrorKind::Variable {
 value: s.to_owned(),
 error,

lib/oxrdf/src/vocab.rs

@@ -231,3 +231,12 @@ pub mod xsd {
 pub const YEAR_MONTH_DURATION: NamedNodeRef<'_> =
 NamedNodeRef::new_unchecked("http://www.w3.org/2001/XMLSchema#yearMonthDuration");
 }
+pub mod geosparql {
+//! [GeoSpatial](https://opengeospatial.github.io/ogc-geosparql/) vocabulary.
+use crate::named_node::NamedNodeRef;
+/// Geospatial datatype like `"Point({longitude} {latitude})"^^geo:wktLiteral`
+pub const WKT_LITERAL: NamedNodeRef<'_> =
+NamedNodeRef::new_unchecked("http://www.opengis.net/ont/geosparql#wktLiteral");
+}
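
The hunk above only adds the `WKT_LITERAL` datatype constant. As an illustration that is not part of this diff, such a literal could be built with oxrdf's existing `Literal::new_typed_literal` constructor, following the `Point({longitude} {latitude})` shape from the doc comment (the coordinates below are made up):

```rust
use oxrdf::{vocab::geosparql, Literal};

// Sketch only: tag a WKT point string with the new geosparql::WKT_LITERAL datatype.
let point = Literal::new_typed_literal("Point(2.2945 48.8584)", geosparql::WKT_LITERAL);
assert_eq!(point.datatype(), geosparql::WKT_LITERAL);
assert_eq!(
    point.to_string(),
    r#""Point(2.2945 48.8584)"^^<http://www.opengis.net/ont/geosparql#wktLiteral>"#
);
```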

lib/oxrdfio/Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "oxrdfio"
-version = "0.1.0-alpha.1"
+version = "0.1.0-alpha.2-dev"
 authors = ["Tpt <thomas@pellissier-tanon.fr>"]
 license = "MIT OR Apache-2.0"
 readme = "README.md"
@@ -21,7 +21,7 @@ rdf-star = ["oxrdf/rdf-star", "oxttl/rdf-star"]
 [dependencies]
 oxrdf = { version = "0.2.0-alpha.1", path = "../oxrdf" }
 oxrdfxml = { version = "0.1.0-alpha.1", path = "../oxrdfxml" }
-oxttl = { version = "0.1.0-alpha.1", path = "../oxttl" }
+oxttl = { version = "0.1.0-alpha.2-dev", path = "../oxttl" }
 tokio = { version = "1.29", optional = true, features = ["io-util"] }
 [dev-dependencies]

lib/oxrdfio/src/error.rs

@@ -42,7 +42,7 @@ impl Error for ParseError {
 impl From<oxttl::SyntaxError> for SyntaxError {
 #[inline]
 fn from(error: oxttl::SyntaxError) -> Self {
-SyntaxError {
+Self {
 inner: SyntaxErrorKind::Turtle(error),
 }
 }
@@ -61,7 +61,7 @@ impl From<oxttl::ParseError> for ParseError {
 impl From<oxrdfxml::SyntaxError> for SyntaxError {
 #[inline]
 fn from(error: oxrdfxml::SyntaxError) -> Self {
-SyntaxError {
+Self {
 inner: SyntaxErrorKind::RdfXml(error),
 }
 }
@@ -166,7 +166,7 @@ impl From<SyntaxError> for io::Error {
 match error.inner {
 SyntaxErrorKind::Turtle(error) => error.into(),
 SyntaxErrorKind::RdfXml(error) => error.into(),
-SyntaxErrorKind::Msg { msg } => io::Error::new(io::ErrorKind::InvalidData, msg),
+SyntaxErrorKind::Msg { msg } => Self::new(io::ErrorKind::InvalidData, msg),
 }
 }
 }

lib/oxrdfio/src/parser.rs

@@ -382,7 +382,7 @@ enum FromReadQuadReaderKind<R: Read> {
 impl<R: Read> Iterator for FromReadQuadReader<R> {
 type Item = Result<Quad, ParseError>;
-fn next(&mut self) -> Option<Result<Quad, ParseError>> {
+fn next(&mut self) -> Option<Self::Item> {
 Some(match &mut self.parser {
 FromReadQuadReaderKind::N3(parser) => match parser.next()? {
 Ok(quad) => self.mapper.map_n3_quad(quad),

lib/oxrdfxml/README.md

@@ -12,31 +12,33 @@ OxRdfXml is a parser and serializer for [RDF/XML](https://www.w3.org/TR/rdf-synt
 The entry points of this library are the two [`RdfXmlParser`] and [`RdfXmlSerializer`] structs.
 Usage example counting the number of people in a RDF/XML file:
 ```rust
 use oxrdf::{NamedNodeRef, vocab::rdf};
 use oxrdfxml::RdfXmlParser;
-let file = b"<?xml version=\"1.0\"?>
-<rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:schema=\"http://schema.org/\">
-<rdf:Description rdf:about=\"http://example.com/foo\">
-<rdf:type rdf:resource=\"http://schema.org/Person\" />
+fn main() {
+let file = br#"<?xml version="1.0"?>
+<rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:schema="http://schema.org/">
+<rdf:Description rdf:about="http://example.com/foo">
+<rdf:type rdf:resource="http://schema.org/Person" />
 <schema:name>Foo</schema:name>
 </rdf:Description>
-<schema:Person rdf:about=\"http://example.com/bar\" schema:name=\"Bar\" />
-</rdf:RDF>";
+<schema:Person rdf:about="http://example.com/bar" schema:name="Bar" />
+</rdf:RDF>"#;
 let schema_person = NamedNodeRef::new("http://schema.org/Person").unwrap();
 let mut count = 0;
 for triple in RdfXmlParser::new().parse_read(file.as_ref()) {
 let triple = triple.unwrap();
 if triple.predicate == rdf::TYPE && triple.object == schema_person.into() {
 count += 1;
+}
 }
+assert_eq!(2, count);
 }
-assert_eq!(2, count);
 ```
 ## License
 This project is licensed under either of

lib/oxrdfxml/src/parser.rs

@@ -29,14 +29,14 @@ use tokio::io::{AsyncRead, BufReader as AsyncBufReader};
 /// use oxrdf::{NamedNodeRef, vocab::rdf};
 /// use oxrdfxml::RdfXmlParser;
 ///
-/// let file = b"<?xml version=\"1.0\"?>
-/// <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:schema=\"http://schema.org/\">
-/// <rdf:Description rdf:about=\"http://example.com/foo\">
-/// <rdf:type rdf:resource=\"http://schema.org/Person\" />
+/// let file = br#"<?xml version="1.0"?>
+/// <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:schema="http://schema.org/">
+/// <rdf:Description rdf:about="http://example.com/foo">
+/// <rdf:type rdf:resource="http://schema.org/Person" />
 /// <schema:name>Foo</schema:name>
 /// </rdf:Description>
-/// <schema:Person rdf:about=\"http://example.com/bar\" schema:name=\"Bar\" />
-/// </rdf:RDF>";
+/// <schema:Person rdf:about="http://example.com/bar" schema:name="Bar" />
+/// </rdf:RDF>"#;
 ///
 /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
 /// let mut count = 0;
@@ -87,14 +87,14 @@ impl RdfXmlParser {
 /// use oxrdf::{NamedNodeRef, vocab::rdf};
 /// use oxrdfxml::RdfXmlParser;
 ///
-/// let file = b"<?xml version=\"1.0\"?>
-/// <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:schema=\"http://schema.org/\">
-/// <rdf:Description rdf:about=\"http://example.com/foo\">
-/// <rdf:type rdf:resource=\"http://schema.org/Person\" />
+/// let file = br#"<?xml version="1.0"?>
+/// <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:schema="http://schema.org/">
+/// <rdf:Description rdf:about="http://example.com/foo">
+/// <rdf:type rdf:resource="http://schema.org/Person" />
 /// <schema:name>Foo</schema:name>
 /// </rdf:Description>
-/// <schema:Person rdf:about=\"http://example.com/bar\" schema:name=\"Bar\" />
-/// </rdf:RDF>";
+/// <schema:Person rdf:about="http://example.com/bar" schema:name="Bar" />
+/// </rdf:RDF>"#;
 ///
 /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
 /// let mut count = 0;
@@ -124,14 +124,14 @@ impl RdfXmlParser {
 ///
 /// # #[tokio::main(flavor = "current_thread")]
 /// # async fn main() -> Result<(), oxrdfxml::ParseError> {
-/// let file = b"<?xml version=\"1.0\"?>
-/// <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:schema=\"http://schema.org/\">
-/// <rdf:Description rdf:about=\"http://example.com/foo\">
-/// <rdf:type rdf:resource=\"http://schema.org/Person\" />
+/// let file = br#"<?xml version="1.0"?>
+/// <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:schema="http://schema.org/">
+/// <rdf:Description rdf:about="http://example.com/foo">
+/// <rdf:type rdf:resource="http://schema.org/Person" />
 /// <schema:name>Foo</schema:name>
 /// </rdf:Description>
-/// <schema:Person rdf:about=\"http://example.com/bar\" schema:name=\"Bar\" />
-/// </rdf:RDF>";
+/// <schema:Person rdf:about="http://example.com/bar" schema:name="Bar" />
+/// </rdf:RDF>"#;
 ///
 /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person");
 /// let mut count = 0;
@@ -182,14 +182,14 @@ impl RdfXmlParser {
 /// use oxrdf::{NamedNodeRef, vocab::rdf};
 /// use oxrdfxml::RdfXmlParser;
 ///
-/// let file = b"<?xml version=\"1.0\"?>
-/// <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:schema=\"http://schema.org/\">
-/// <rdf:Description rdf:about=\"http://example.com/foo\">
-/// <rdf:type rdf:resource=\"http://schema.org/Person\" />
+/// let file = br#"<?xml version="1.0"?>
+/// <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:schema="http://schema.org/">
+/// <rdf:Description rdf:about="http://example.com/foo">
+/// <rdf:type rdf:resource="http://schema.org/Person" />
 /// <schema:name>Foo</schema:name>
 /// </rdf:Description>
-/// <schema:Person rdf:about=\"http://example.com/bar\" schema:name=\"Bar\" />
-/// </rdf:RDF>";
+/// <schema:Person rdf:about="http://example.com/bar" schema:name="Bar" />
+/// </rdf:RDF>"#;
 ///
 /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
 /// let mut count = 0;
@@ -212,7 +212,7 @@ pub struct FromReadRdfXmlReader<R: Read> {
 impl<R: Read> Iterator for FromReadRdfXmlReader<R> {
 type Item = Result<Triple, ParseError>;
-fn next(&mut self) -> Option<Result<Triple, ParseError>> {
+fn next(&mut self) -> Option<Self::Item> {
 loop {
 if let Some(triple) = self.results.pop() {
 return Some(Ok(triple));
@@ -251,14 +251,14 @@ impl<R: Read> FromReadRdfXmlReader<R> {
 ///
 /// # #[tokio::main(flavor = "current_thread")]
 /// # async fn main() -> Result<(), oxrdfxml::ParseError> {
-/// let file = b"<?xml version=\"1.0\"?>
-/// <rdf:RDF xmlns:rdf=\"http://www.w3.org/1999/02/22-rdf-syntax-ns#\" xmlns:schema=\"http://schema.org/\">
-/// <rdf:Description rdf:about=\"http://example.com/foo\">
-/// <rdf:type rdf:resource=\"http://schema.org/Person\" />
+/// let file = br#"<?xml version="1.0"?>
+/// <rdf:RDF xmlns:rdf="http://www.w3.org/1999/02/22-rdf-syntax-ns#" xmlns:schema="http://schema.org/">
+/// <rdf:Description rdf:about="http://example.com/foo">
+/// <rdf:type rdf:resource="http://schema.org/Person" />
 /// <schema:name>Foo</schema:name>
 /// </rdf:Description>
-/// <schema:Person rdf:about=\"http://example.com/bar\" schema:name=\"Bar\" />
-/// </rdf:RDF>";
+/// <schema:Person rdf:about="http://example.com/bar" schema:name="Bar" />
+/// </rdf:RDF>"#;
 ///
 /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person");
 /// let mut count = 0;
@@ -399,23 +399,23 @@ enum RdfXmlState {
 impl RdfXmlState {
 fn base_iri(&self) -> Option<&Iri<String>> {
 match self {
-RdfXmlState::Doc { base_iri, .. }
-| RdfXmlState::Rdf { base_iri, .. }
-| RdfXmlState::NodeElt { base_iri, .. }
-| RdfXmlState::PropertyElt { base_iri, .. }
-| RdfXmlState::ParseTypeCollectionPropertyElt { base_iri, .. }
-| RdfXmlState::ParseTypeLiteralPropertyElt { base_iri, .. } => base_iri.as_ref(),
+Self::Doc { base_iri, .. }
+| Self::Rdf { base_iri, .. }
+| Self::NodeElt { base_iri, .. }
+| Self::PropertyElt { base_iri, .. }
+| Self::ParseTypeCollectionPropertyElt { base_iri, .. }
+| Self::ParseTypeLiteralPropertyElt { base_iri, .. } => base_iri.as_ref(),
 }
 }
 fn language(&self) -> Option<&String> {
 match self {
-RdfXmlState::Doc { .. } => None,
-RdfXmlState::Rdf { language, .. }
-| RdfXmlState::NodeElt { language, .. }
-| RdfXmlState::PropertyElt { language, .. }
-| RdfXmlState::ParseTypeCollectionPropertyElt { language, .. }
-| RdfXmlState::ParseTypeLiteralPropertyElt { language, .. } => language.as_ref(),
+Self::Doc { .. } => None,
+Self::Rdf { language, .. }
+| Self::NodeElt { language, .. }
+| Self::PropertyElt { language, .. }
+| Self::ParseTypeCollectionPropertyElt { language, .. }
+| Self::ParseTypeLiteralPropertyElt { language, .. } => language.as_ref(),
 }
 }
 }

lib/oxsdatatypes/README.md

@@ -38,7 +38,7 @@ The `DateTime::now()` function needs special OS support.
 Currently:
 - If the `custom-now` feature is enabled, a function computing `now` must be set:
 ```rust
 use oxsdatatypes::Duration;
 #[no_mangle]
 fn custom_ox_now() -> Duration {
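
The README snippet is cut off in this view. A minimal sketch of such a hook, assuming its contract is to return the time elapsed since the Unix epoch and reusing the `TryFrom<std::time::Duration>` impl visible in the duration.rs hunks below; the `SystemTime` call is only a placeholder clock source, since `custom-now` targets platforms without a default clock:

```rust
use std::time::{SystemTime, UNIX_EPOCH};
use oxsdatatypes::Duration;

#[no_mangle]
fn custom_ox_now() -> Duration {
    // Placeholder: convert the time since the Unix epoch into an XSD duration.
    Duration::try_from(
        SystemTime::now()
            .duration_since(UNIX_EPOCH)
            .expect("system clock is before the Unix epoch"),
    )
    .expect("duration overflow")
}
```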

lib/oxsdatatypes/src/boolean.rs

@@ -66,7 +66,7 @@ impl FromStr for Boolean {
 type Err = ParseBoolError;
 #[inline]
-fn from_str(input: &str) -> Result<Self, ParseBoolError> {
+fn from_str(input: &str) -> Result<Self, Self::Err> {
 Ok(match input {
 "true" | "1" => true,
 "false" | "0" => false,

lib/oxsdatatypes/src/date_time.rs

@@ -256,7 +256,7 @@ impl TryFrom<Date> for DateTime {
 type Error = DateTimeOverflowError;
 #[inline]
-fn try_from(date: Date) -> Result<Self, DateTimeOverflowError> {
+fn try_from(date: Date) -> Result<Self, Self::Error> {
 Self::new(
 date.year(),
 date.month(),
@@ -272,7 +272,7 @@ impl TryFrom<Date> for DateTime {
 impl FromStr for DateTime {
 type Err = ParseDateTimeError;
-fn from_str(input: &str) -> Result<Self, ParseDateTimeError> {
+fn from_str(input: &str) -> Result<Self, Self::Err> {
 ensure_complete(input, date_time_lexical_rep)
 }
 }
@@ -528,7 +528,7 @@ impl From<DateTime> for Time {
 impl FromStr for Time {
 type Err = ParseDateTimeError;
-fn from_str(input: &str) -> Result<Self, ParseDateTimeError> {
+fn from_str(input: &str) -> Result<Self, Self::Err> {
 ensure_complete(input, time_lexical_rep)
 }
 }
@@ -762,7 +762,7 @@ impl TryFrom<DateTime> for Date {
 type Error = DateTimeOverflowError;
 #[inline]
-fn try_from(date_time: DateTime) -> Result<Self, DateTimeOverflowError> {
+fn try_from(date_time: DateTime) -> Result<Self, Self::Error> {
 Self::new(
 date_time.year(),
 date_time.month(),
@@ -775,7 +775,7 @@ impl TryFrom<DateTime> for Date {
 impl FromStr for Date {
 type Err = ParseDateTimeError;
-fn from_str(input: &str) -> Result<Self, ParseDateTimeError> {
+fn from_str(input: &str) -> Result<Self, Self::Err> {
 ensure_complete(input, date_lexical_rep)
 }
 }
@@ -896,7 +896,7 @@ impl TryFrom<DateTime> for GYearMonth {
 type Error = DateTimeOverflowError;
 #[inline]
-fn try_from(date_time: DateTime) -> Result<Self, DateTimeOverflowError> {
+fn try_from(date_time: DateTime) -> Result<Self, Self::Error> {
 Self::new(
 date_time.year(),
 date_time.month(),
@@ -917,7 +917,7 @@ impl From<Date> for GYearMonth {
 impl FromStr for GYearMonth {
 type Err = ParseDateTimeError;
-fn from_str(input: &str) -> Result<Self, ParseDateTimeError> {
+fn from_str(input: &str) -> Result<Self, Self::Err> {
 ensure_complete(input, g_year_month_lexical_rep)
 }
 }
@@ -1031,7 +1031,7 @@ impl TryFrom<DateTime> for GYear {
 type Error = DateTimeOverflowError;
 #[inline]
-fn try_from(date_time: DateTime) -> Result<Self, DateTimeOverflowError> {
+fn try_from(date_time: DateTime) -> Result<Self, Self::Error> {
 Self::new(date_time.year(), date_time.timezone_offset())
 }
 }
@@ -1041,7 +1041,7 @@ impl TryFrom<Date> for GYear {
 type Error = DateTimeOverflowError;
 #[inline]
-fn try_from(date: Date) -> Result<Self, DateTimeOverflowError> {
+fn try_from(date: Date) -> Result<Self, Self::Error> {
 Self::new(date.year(), date.timezone_offset())
 }
 }
@@ -1050,7 +1050,7 @@ impl TryFrom<GYearMonth> for GYear {
 type Error = DateTimeOverflowError;
 #[inline]
-fn try_from(year_month: GYearMonth) -> Result<Self, DateTimeOverflowError> {
+fn try_from(year_month: GYearMonth) -> Result<Self, Self::Error> {
 Self::new(year_month.year(), year_month.timezone_offset())
 }
 }
@@ -1058,7 +1058,7 @@ impl TryFrom<GYearMonth> for GYear {
 impl FromStr for GYear {
 type Err = ParseDateTimeError;
-fn from_str(input: &str) -> Result<Self, ParseDateTimeError> {
+fn from_str(input: &str) -> Result<Self, Self::Err> {
 ensure_complete(input, g_year_lexical_rep)
 }
 }
@@ -1186,7 +1186,7 @@ impl From<Date> for GMonthDay {
 impl FromStr for GMonthDay {
 type Err = ParseDateTimeError;
-fn from_str(input: &str) -> Result<Self, ParseDateTimeError> {
+fn from_str(input: &str) -> Result<Self, Self::Err> {
 ensure_complete(input, g_month_day_lexical_rep)
 }
 }
@@ -1315,7 +1315,7 @@ impl From<GMonthDay> for GMonth {
 impl FromStr for GMonth {
 type Err = ParseDateTimeError;
-fn from_str(input: &str) -> Result<Self, ParseDateTimeError> {
+fn from_str(input: &str) -> Result<Self, Self::Err> {
 ensure_complete(input, g_month_lexical_rep)
 }
 }
@@ -1436,7 +1436,7 @@ impl From<GMonthDay> for GDay {
 impl FromStr for GDay {
 type Err = ParseDateTimeError;
-fn from_str(input: &str) -> Result<Self, ParseDateTimeError> {
+fn from_str(input: &str) -> Result<Self, Self::Err> {
 ensure_complete(input, g_day_lexical_rep)
 }
 }
@@ -1499,18 +1499,18 @@ impl TryFrom<DayTimeDuration> for TimezoneOffset {
 type Error = InvalidTimezoneError;
 #[inline]
-fn try_from(value: DayTimeDuration) -> Result<Self, InvalidTimezoneError> {
+fn try_from(value: DayTimeDuration) -> Result<Self, Self::Error> {
 let offset_in_minutes = value.minutes() + value.hours() * 60;
 let result = Self::new(
 offset_in_minutes
 .try_into()
-.map_err(|_| InvalidTimezoneError { offset_in_minutes })?,
+.map_err(|_| Self::Error { offset_in_minutes })?,
 )?;
 if DayTimeDuration::from(result) == value {
 Ok(result)
 } else {
 // The value is not an integral number of minutes or overflow problems
-Err(InvalidTimezoneError { offset_in_minutes })
+Err(Self::Error { offset_in_minutes })
 }
 }
 }
@@ -1519,9 +1519,9 @@ impl TryFrom<Duration> for TimezoneOffset {
 type Error = InvalidTimezoneError;
 #[inline]
-fn try_from(value: Duration) -> Result<Self, InvalidTimezoneError> {
+fn try_from(value: Duration) -> Result<Self, Self::Error> {
 DayTimeDuration::try_from(value)
-.map_err(|_| InvalidTimezoneError {
+.map_err(|_| Self::Error {
 offset_in_minutes: 0,
 })?
 .try_into()
@@ -2426,7 +2426,7 @@ impl Error for DateTimeOverflowError {}
 impl From<DateTimeOverflowError> for ParseDateTimeError {
 fn from(error: DateTimeOverflowError) -> Self {
-ParseDateTimeError {
+Self {
 kind: ParseDateTimeErrorKind::Overflow(error),
 }
 }

lib/oxsdatatypes/src/decimal.rs

@@ -361,7 +361,7 @@ impl TryFrom<i128> for Decimal {
 type Error = TooLargeForDecimalError;
 #[inline]
-fn try_from(value: i128) -> Result<Self, TooLargeForDecimalError> {
+fn try_from(value: i128) -> Result<Self, Self::Error> {
 Ok(Self {
 value: value
 .checked_mul(DECIMAL_PART_POW)
@@ -374,7 +374,7 @@ impl TryFrom<u128> for Decimal {
 type Error = TooLargeForDecimalError;
 #[inline]
-fn try_from(value: u128) -> Result<Self, TooLargeForDecimalError> {
+fn try_from(value: u128) -> Result<Self, Self::Error> {
 Ok(Self {
 value: i128::try_from(value)
 .map_err(|_| TooLargeForDecimalError)?
@@ -395,7 +395,7 @@ impl TryFrom<Float> for Decimal {
 type Error = TooLargeForDecimalError;
 #[inline]
-fn try_from(value: Float) -> Result<Self, TooLargeForDecimalError> {
+fn try_from(value: Float) -> Result<Self, Self::Error> {
 Double::from(value).try_into()
 }
 }
@@ -405,7 +405,7 @@ impl TryFrom<Double> for Decimal {
 #[inline]
 #[allow(clippy::cast_precision_loss, clippy::cast_possible_truncation)]
-fn try_from(value: Double) -> Result<Self, TooLargeForDecimalError> {
+fn try_from(value: Double) -> Result<Self, Self::Error> {
 let shifted = f64::from(value) * (DECIMAL_PART_POW as f64);
 if (i128::MIN as f64) <= shifted && shifted <= (i128::MAX as f64) {
 Ok(Self {
@@ -448,7 +448,7 @@ impl TryFrom<Decimal> for Integer {
 type Error = TooLargeForIntegerError;
 #[inline]
-fn try_from(value: Decimal) -> Result<Self, TooLargeForIntegerError> {
+fn try_from(value: Decimal) -> Result<Self, Self::Error> {
 Ok(i64::try_from(
 value
 .value
@@ -464,7 +464,7 @@ impl FromStr for Decimal {
 type Err = ParseDecimalError;
 /// Parses decimals lexical mapping
-fn from_str(input: &str) -> Result<Self, ParseDecimalError> {
+fn from_str(input: &str) -> Result<Self, Self::Err> {
 // (\+|-)?([0-9]+(\.[0-9]*)?|\.[0-9]+)
 let input = input.as_bytes();
 if input.is_empty() {

lib/oxsdatatypes/src/double.rs

@@ -189,7 +189,7 @@ impl FromStr for Double {
 type Err = ParseFloatError;
 #[inline]
-fn from_str(input: &str) -> Result<Self, ParseFloatError> {
+fn from_str(input: &str) -> Result<Self, Self::Err> {
 Ok(f64::from_str(input)?.into())
 }
 }

lib/oxsdatatypes/src/duration.rs

@@ -176,7 +176,7 @@ impl TryFrom<StdDuration> for Duration {
 type Error = DurationOverflowError;
 #[inline]
-fn try_from(value: StdDuration) -> Result<Self, DurationOverflowError> {
+fn try_from(value: StdDuration) -> Result<Self, Self::Error> {
 Ok(DayTimeDuration::try_from(value)?.into())
 }
 }
@@ -184,10 +184,10 @@ impl TryFrom<StdDuration> for Duration {
 impl FromStr for Duration {
 type Err = ParseDurationError;
-fn from_str(input: &str) -> Result<Self, ParseDurationError> {
+fn from_str(input: &str) -> Result<Self, Self::Err> {
 let parts = ensure_complete(input, duration_parts)?;
 if parts.year_month.is_none() && parts.day_time.is_none() {
-return Err(ParseDurationError::msg("Empty duration"));
+return Err(Self::Err::msg("Empty duration"));
 }
 Ok(Self::new(
 parts.year_month.unwrap_or(0),
@@ -394,7 +394,7 @@ impl TryFrom<Duration> for YearMonthDuration {
 type Error = DurationOverflowError;
 #[inline]
-fn try_from(value: Duration) -> Result<Self, DurationOverflowError> {
+fn try_from(value: Duration) -> Result<Self, Self::Error> {
 if value.day_time == DayTimeDuration::default() {
 Ok(value.year_month)
 } else {
@@ -406,16 +406,18 @@ impl TryFrom<Duration> for YearMonthDuration {
 impl FromStr for YearMonthDuration {
 type Err = ParseDurationError;
-fn from_str(input: &str) -> Result<Self, ParseDurationError> {
+fn from_str(input: &str) -> Result<Self, Self::Err> {
 let parts = ensure_complete(input, duration_parts)?;
 if parts.day_time.is_some() {
-return Err(ParseDurationError::msg(
+return Err(Self::Err::msg(
 "There must not be any day or time component in a yearMonthDuration",
 ));
 }
-Ok(Self::new(parts.year_month.ok_or(
-ParseDurationError::msg("No year and month values found"),
-)?))
+Ok(Self::new(
+parts
+.year_month
+.ok_or(Self::Err::msg("No year and month values found"))?,
+))
 }
 }
@@ -580,7 +582,7 @@ impl TryFrom<Duration> for DayTimeDuration {
 type Error = DurationOverflowError;
 #[inline]
-fn try_from(value: Duration) -> Result<Self, DurationOverflowError> {
+fn try_from(value: Duration) -> Result<Self, Self::Error> {
 if value.year_month == YearMonthDuration::default() {
 Ok(value.day_time)
 } else {
@@ -593,7 +595,7 @@ impl TryFrom<StdDuration> for DayTimeDuration {
 type Error = DurationOverflowError;
 #[inline]
-fn try_from(value: StdDuration) -> Result<Self, DurationOverflowError> {
+fn try_from(value: StdDuration) -> Result<Self, Self::Error> {
 Ok(Self {
 seconds: Decimal::new(
 i128::try_from(value.as_nanos()).map_err(|_| DurationOverflowError)?,
@@ -608,7 +610,7 @@ impl TryFrom<DayTimeDuration> for StdDuration {
 type Error = DurationOverflowError;
 #[inline]
-fn try_from(value: DayTimeDuration) -> Result<Self, DurationOverflowError> {
+fn try_from(value: DayTimeDuration) -> Result<Self, Self::Error> {
 if value.seconds.is_negative() {
 return Err(DurationOverflowError);
 }
@@ -621,7 +623,7 @@ impl TryFrom<DayTimeDuration> for StdDuration {
 .ok_or(DurationOverflowError)?
 .checked_floor()
 .ok_or(DurationOverflowError)?;
-Ok(StdDuration::new(
+Ok(Self::new(
 secs.as_i128()
 .try_into()
 .map_err(|_| DurationOverflowError)?,
@@ -636,16 +638,18 @@ impl TryFrom<DayTimeDuration> for StdDuration {
 impl FromStr for DayTimeDuration {
 type Err = ParseDurationError;
-fn from_str(input: &str) -> Result<Self, ParseDurationError> {
+fn from_str(input: &str) -> Result<Self, Self::Err> {
 let parts = ensure_complete(input, duration_parts)?;
 if parts.year_month.is_some() {
-return Err(ParseDurationError::msg(
+return Err(Self::Err::msg(
 "There must not be any year or month component in a dayTimeDuration",
 ));
 }
-Ok(Self::new(parts.day_time.ok_or(ParseDurationError::msg(
-"No day or time values found",
-))?))
+Ok(Self::new(
+parts
+.day_time
+.ok_or(Self::Err::msg("No day or time values found"))?,
+))
 }
 }
@@ -973,7 +977,7 @@ impl fmt::Display for DurationOverflowError {
 impl Error for DurationOverflowError {}
-/// The year-month and the day-time components of a [`Duration\] have an opposite sign.
+/// The year-month and the day-time components of a [`Duration`] have an opposite sign.
 #[derive(Debug, Clone, Copy)]
 pub struct OppositeSignInDurationComponentsError;

lib/oxsdatatypes/src/float.rs

@@ -179,7 +179,7 @@ impl FromStr for Float {
 type Err = ParseFloatError;
 #[inline]
-fn from_str(input: &str) -> Result<Self, ParseFloatError> {
+fn from_str(input: &str) -> Result<Self, Self::Err> {
 Ok(f32::from_str(input)?.into())
 }
 }

lib/oxsdatatypes/src/integer.rs

@@ -228,7 +228,7 @@ impl FromStr for Integer {
 type Err = ParseIntError;
 #[inline]
-fn from_str(input: &str) -> Result<Self, ParseIntError> {
+fn from_str(input: &str) -> Result<Self, Self::Err> {
 Ok(i64::from_str(input)?.into())
 }
 }
@@ -244,7 +244,7 @@ impl TryFrom<Float> for Integer {
 type Error = TooLargeForIntegerError;
 #[inline]
-fn try_from(value: Float) -> Result<Self, TooLargeForIntegerError> {
+fn try_from(value: Float) -> Result<Self, Self::Error> {
 Decimal::try_from(value)
 .map_err(|_| TooLargeForIntegerError)?
 .try_into()
@@ -255,7 +255,7 @@ impl TryFrom<Double> for Integer {
 type Error = TooLargeForIntegerError;
 #[inline]
-fn try_from(value: Double) -> Result<Self, TooLargeForIntegerError> {
+fn try_from(value: Double) -> Result<Self, Self::Error> {
 Decimal::try_from(value)
 .map_err(|_| TooLargeForIntegerError)?
 .try_into()

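A short sketch of the FromStr and TryFrom impls above, assuming the public oxsdatatypes API (the values are arbitrary):

use std::str::FromStr;
use oxsdatatypes::{Float, Integer};

fn main() {
    // FromStr delegates to the primitive parsers (i64 / f32).
    assert!(Integer::from_str("42").is_ok());
    assert!(Float::from_str("1.5e3").is_ok());

    // A Float far outside the i64 range cannot become an Integer
    // and fails with TooLargeForIntegerError.
    let huge = Float::from_str("1e30").unwrap();
    assert!(Integer::try_from(huge).is_err());
}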
@ -1,6 +1,6 @@
[package] [package]
name = "oxttl" name = "oxttl"
version = "0.1.0-alpha.1" version = "0.1.0-alpha.2-dev"
authors = ["Tpt <thomas@pellissier-tanon.fr>"] authors = ["Tpt <thomas@pellissier-tanon.fr>"]
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
readme = "README.md" readme = "README.md"

@ -56,7 +56,7 @@ impl TokenRecognizer for N3Lexer {
&mut self, &mut self,
data: &'a [u8], data: &'a [u8],
is_ending: bool, is_ending: bool,
options: &N3LexerOptions, options: &Self::Options,
) -> Option<(usize, Result<N3Token<'a>, TokenRecognizerError>)> { ) -> Option<(usize, Result<N3Token<'a>, TokenRecognizerError>)> {
match *data.first()? { match *data.first()? {
b'<' => match *data.get(1)? { b'<' => match *data.get(1)? {
@ -354,6 +354,7 @@ impl N3Lexer {
let mut buffer = None; // Buffer if there are some escaped characters let mut buffer = None; // Buffer if there are some escaped characters
let mut position_that_is_already_in_buffer = 0; let mut position_that_is_already_in_buffer = 0;
let mut might_be_invalid_iri = false; let mut might_be_invalid_iri = false;
let mut ends_with_unescaped_dot = 0;
loop { loop {
if let Some(r) = Self::recognize_unicode_char(&data[i..], i) { if let Some(r) = Self::recognize_unicode_char(&data[i..], i) {
match r { match r {
@ -369,6 +370,7 @@ impl N3Lexer {
).into()))); ).into())));
} }
i += 1; i += 1;
ends_with_unescaped_dot = 0;
} else if c == '\\' { } else if c == '\\' {
i += 1; i += 1;
let a = char::from(*data.get(i)?); let a = char::from(*data.get(i)?);
@ -416,6 +418,7 @@ impl N3Lexer {
buffer.push(a); buffer.push(a);
i += 1; i += 1;
position_that_is_already_in_buffer = i; position_that_is_already_in_buffer = i;
ends_with_unescaped_dot = 0;
} else if i == 0 { } else if i == 0 {
if !(Self::is_possible_pn_chars_u(c) || c == ':' || c.is_ascii_digit()) if !(Self::is_possible_pn_chars_u(c) || c == ':' || c.is_ascii_digit())
{ {
@ -427,13 +430,17 @@ impl N3Lexer {
|| c == ':'; || c == ':';
} }
i += consumed; i += consumed;
} else if Self::is_possible_pn_chars(c) || c == ':' || c == '.' { } else if Self::is_possible_pn_chars(c) || c == ':' {
if !self.unchecked { if !self.unchecked {
might_be_invalid_iri |= might_be_invalid_iri |=
Self::is_possible_pn_chars_base_but_not_valid_iri(c) Self::is_possible_pn_chars_base_but_not_valid_iri(c)
|| c == ':'; || c == ':';
} }
i += consumed; i += consumed;
ends_with_unescaped_dot = 0;
} else if c == '.' {
i += consumed;
ends_with_unescaped_dot += 1;
} else { } else {
let buffer = if let Some(mut buffer) = buffer { let buffer = if let Some(mut buffer) = buffer {
buffer.push_str( buffer.push_str(
@ -445,22 +452,20 @@ impl N3Lexer {
Err(e) => return Some((i, Err(e))), Err(e) => return Some((i, Err(e))),
}, },
); );
// We do not include the last dot // We do not include the last dots
while buffer.ends_with('.') { for _ in 0..ends_with_unescaped_dot {
buffer.pop(); buffer.pop();
i -= 1;
} }
i -= ends_with_unescaped_dot;
Cow::Owned(buffer) Cow::Owned(buffer)
} else { } else {
let mut data = match str_from_utf8(&data[..i], 0..i) { let mut data = match str_from_utf8(&data[..i], 0..i) {
Ok(data) => data, Ok(data) => data,
Err(e) => return Some((i, Err(e))), Err(e) => return Some((i, Err(e))),
}; };
// We do not include the last dot // We do not include the last dots
while let Some(d) = data.strip_suffix('.') { data = &data[..data.len() - ends_with_unescaped_dot];
data = d; i -= ends_with_unescaped_dot;
i -= 1;
}
Cow::Borrowed(data) Cow::Borrowed(data)
}; };
return Some((i, Ok((buffer, might_be_invalid_iri)))); return Some((i, Ok((buffer, might_be_invalid_iri))));

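The trailing-dot bookkeeping above is easiest to see on concrete input. A hedged sketch using the oxttl API from the doc examples below (the prefix and IRIs are made up): the escaped dot in `ex:o\.` stays part of the local name, while the final unescaped dot terminates the statement.

use oxttl::TurtleParser;

fn main() -> Result<(), oxttl::ParseError> {
    let file = br#"@prefix ex: <http://example.com/> .
ex:s ex:p ex:o\.."#;
    for triple in TurtleParser::new().parse_read(file.as_ref()) {
        // Expected object: <http://example.com/o.>
        println!("{}", triple?.object);
    }
    Ok(())
}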
@ -274,7 +274,7 @@ impl NQuadsRecognizer {
true, true,
Some(b"#"), Some(b"#"),
), ),
NQuadsRecognizer { Self {
stack: vec![NQuadsState::ExpectSubject], stack: vec![NQuadsState::ExpectSubject],
subjects: Vec::new(), subjects: Vec::new(),
predicates: Vec::new(), predicates: Vec::new(),

@ -184,12 +184,12 @@ impl From<Quad> for N3Quad {
/// use oxrdf::{NamedNode, vocab::rdf}; /// use oxrdf::{NamedNode, vocab::rdf};
/// use oxttl::n3::{N3Parser, N3Term}; /// use oxttl::n3::{N3Parser, N3Term};
/// ///
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" . /// schema:name "Foo" .
/// <bar> a schema:Person ; /// <bar> a schema:Person ;
/// schema:name \"Bar\" ."; /// schema:name "Bar" ."#;
/// ///
/// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned()); /// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned());
/// let schema_person = N3Term::NamedNode(NamedNode::new("http://schema.org/Person")?); /// let schema_person = N3Term::NamedNode(NamedNode::new("http://schema.org/Person")?);
@ -253,12 +253,12 @@ impl N3Parser {
/// use oxrdf::NamedNode; /// use oxrdf::NamedNode;
/// use oxttl::n3::{N3Parser, N3Term}; /// use oxttl::n3::{N3Parser, N3Term};
/// ///
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" . /// schema:name "Foo" .
/// <bar> a schema:Person ; /// <bar> a schema:Person ;
/// schema:name \"Bar\" ."; /// schema:name "Bar" ."#;
/// ///
/// let rdf_type = N3Term::NamedNode(NamedNode::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?); /// let rdf_type = N3Term::NamedNode(NamedNode::new("http://www.w3.org/1999/02/22-rdf-syntax-ns#type")?);
/// let schema_person = N3Term::NamedNode(NamedNode::new("http://schema.org/Person")?); /// let schema_person = N3Term::NamedNode(NamedNode::new("http://schema.org/Person")?);
@ -287,12 +287,12 @@ impl N3Parser {
/// ///
/// # #[tokio::main(flavor = "current_thread")] /// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> Result<(), oxttl::ParseError> { /// # async fn main() -> Result<(), oxttl::ParseError> {
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" . /// schema:name "Foo" .
/// <bar> a schema:Person ; /// <bar> a schema:Person ;
/// schema:name \"Bar\" ."; /// schema:name "Bar" ."#;
/// ///
/// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned()); /// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned());
/// let schema_person = N3Term::NamedNode(NamedNode::new_unchecked("http://schema.org/Person")); /// let schema_person = N3Term::NamedNode(NamedNode::new_unchecked("http://schema.org/Person"));
@ -369,12 +369,12 @@ impl N3Parser {
/// use oxrdf::{NamedNode, vocab::rdf}; /// use oxrdf::{NamedNode, vocab::rdf};
/// use oxttl::n3::{N3Parser, N3Term}; /// use oxttl::n3::{N3Parser, N3Term};
/// ///
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" . /// schema:name "Foo" .
/// <bar> a schema:Person ; /// <bar> a schema:Person ;
/// schema:name \"Bar\" ."; /// schema:name "Bar" ."#;
/// ///
/// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned()); /// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned());
/// let schema_person = N3Term::NamedNode(NamedNode::new("http://schema.org/Person")?); /// let schema_person = N3Term::NamedNode(NamedNode::new("http://schema.org/Person")?);
@ -403,10 +403,10 @@ impl<R: Read> FromReadN3Reader<R> {
/// ``` /// ```
/// use oxttl::N3Parser; /// use oxttl::N3Parser;
/// ///
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" ."; /// schema:name "Foo" ."#;
/// ///
/// let mut reader = N3Parser::new().parse_read(file.as_ref()); /// let mut reader = N3Parser::new().parse_read(file.as_ref());
/// assert!(reader.prefixes().is_empty()); // No prefix at the beginning /// assert!(reader.prefixes().is_empty()); // No prefix at the beginning
@ -424,10 +424,10 @@ impl<R: Read> FromReadN3Reader<R> {
/// ``` /// ```
/// use oxttl::N3Parser; /// use oxttl::N3Parser;
/// ///
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" ."; /// schema:name "Foo" ."#;
/// ///
/// let mut reader = N3Parser::new().parse_read(file.as_ref()); /// let mut reader = N3Parser::new().parse_read(file.as_ref());
/// assert!(reader.base_iri().is_none()); // No base at the beginning because none has been given to the parser. /// assert!(reader.base_iri().is_none()); // No base at the beginning because none has been given to the parser.
@ -450,7 +450,7 @@ impl<R: Read> FromReadN3Reader<R> {
impl<R: Read> Iterator for FromReadN3Reader<R> { impl<R: Read> Iterator for FromReadN3Reader<R> {
type Item = Result<N3Quad, ParseError>; type Item = Result<N3Quad, ParseError>;
fn next(&mut self) -> Option<Result<N3Quad, ParseError>> { fn next(&mut self) -> Option<Self::Item> {
self.inner.next() self.inner.next()
} }
} }
@ -464,12 +464,12 @@ impl<R: Read> Iterator for FromReadN3Reader<R> {
/// ///
/// # #[tokio::main(flavor = "current_thread")] /// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> Result<(), oxttl::ParseError> { /// # async fn main() -> Result<(), oxttl::ParseError> {
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" . /// schema:name "Foo" .
/// <bar> a schema:Person ; /// <bar> a schema:Person ;
/// schema:name \"Bar\" ."; /// schema:name "Bar" ."#;
/// ///
/// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned()); /// let rdf_type = N3Term::NamedNode(rdf::TYPE.into_owned());
/// let schema_person = N3Term::NamedNode(NamedNode::new_unchecked("http://schema.org/Person")); /// let schema_person = N3Term::NamedNode(NamedNode::new_unchecked("http://schema.org/Person"));
@ -509,10 +509,10 @@ impl<R: AsyncRead + Unpin> FromTokioAsyncReadN3Reader<R> {
/// ///
/// # #[tokio::main(flavor = "current_thread")] /// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> Result<(), oxttl::ParseError> { /// # async fn main() -> Result<(), oxttl::ParseError> {
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" ."; /// schema:name "Foo" ."#;
/// ///
/// let mut reader = N3Parser::new().parse_tokio_async_read(file.as_ref()); /// let mut reader = N3Parser::new().parse_tokio_async_read(file.as_ref());
/// assert!(reader.prefixes().is_empty()); // No prefix at the beginning /// assert!(reader.prefixes().is_empty()); // No prefix at the beginning
@ -533,10 +533,10 @@ impl<R: AsyncRead + Unpin> FromTokioAsyncReadN3Reader<R> {
/// ///
/// # #[tokio::main(flavor = "current_thread")] /// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> Result<(), oxttl::ParseError> { /// # async fn main() -> Result<(), oxttl::ParseError> {
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" ."; /// schema:name "Foo" ."#;
/// ///
/// let mut reader = N3Parser::new().parse_tokio_async_read(file.as_ref()); /// let mut reader = N3Parser::new().parse_tokio_async_read(file.as_ref());
/// assert!(reader.base_iri().is_none()); // No base IRI at the beginning /// assert!(reader.base_iri().is_none()); // No base IRI at the beginning
@ -633,10 +633,10 @@ impl LowLevelN3Reader {
/// ``` /// ```
/// use oxttl::N3Parser; /// use oxttl::N3Parser;
/// ///
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" ."; /// schema:name "Foo" ."#;
/// ///
/// let mut reader = N3Parser::new().parse(); /// let mut reader = N3Parser::new().parse();
/// reader.extend_from_slice(file); /// reader.extend_from_slice(file);
@ -655,10 +655,10 @@ impl LowLevelN3Reader {
/// ``` /// ```
/// use oxttl::N3Parser; /// use oxttl::N3Parser;
/// ///
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" ."; /// schema:name "Foo" ."#;
/// ///
/// let mut reader = N3Parser::new().parse(); /// let mut reader = N3Parser::new().parse();
/// reader.extend_from_slice(file); /// reader.extend_from_slice(file);
@ -1214,7 +1214,7 @@ impl N3Recognizer {
true, true,
Some(b"#"), Some(b"#"),
), ),
N3Recognizer { Self {
stack: vec![N3State::N3Doc], stack: vec![N3State::N3Doc],
terms: Vec::new(), terms: Vec::new(),
predicates: Vec::new(), predicates: Vec::new(),

@ -19,10 +19,10 @@ use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};
/// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxttl::NQuadsParser; /// use oxttl::NQuadsParser;
/// ///
/// let file = b"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> . /// let file = br#"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
/// <http://example.com/foo> <http://schema.org/name> \"Foo\" . /// <http://example.com/foo> <http://schema.org/name> "Foo" .
/// <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> . /// <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
/// <http://example.com/bar> <http://schema.org/name> \"Bar\" ."; /// <http://example.com/bar> <http://schema.org/name> "Bar" ."#;
/// ///
/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
/// let mut count = 0; /// let mut count = 0;
@ -76,10 +76,10 @@ impl NQuadsParser {
/// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxttl::NQuadsParser; /// use oxttl::NQuadsParser;
/// ///
/// let file = b"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> . /// let file = br#"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
/// <http://example.com/foo> <http://schema.org/name> \"Foo\" . /// <http://example.com/foo> <http://schema.org/name> "Foo" .
/// <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> . /// <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
/// <http://example.com/bar> <http://schema.org/name> \"Bar\" ."; /// <http://example.com/bar> <http://schema.org/name> "Bar" ."#;
/// ///
/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
/// let mut count = 0; /// let mut count = 0;
@ -107,10 +107,10 @@ impl NQuadsParser {
/// ///
/// # #[tokio::main(flavor = "current_thread")] /// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> Result<(), oxttl::ParseError> { /// # async fn main() -> Result<(), oxttl::ParseError> {
/// let file = b"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> . /// let file = br#"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
/// <http://example.com/foo> <http://schema.org/name> \"Foo\" . /// <http://example.com/foo> <http://schema.org/name> "Foo" .
/// <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> . /// <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
/// <http://example.com/bar> <http://schema.org/name> \"Bar\" ."; /// <http://example.com/bar> <http://schema.org/name> "Bar" ."#;
/// ///
/// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person");
/// let mut count = 0; /// let mut count = 0;
@ -191,10 +191,10 @@ impl NQuadsParser {
/// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxttl::NQuadsParser; /// use oxttl::NQuadsParser;
/// ///
/// let file = b"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> . /// let file = br#"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
/// <http://example.com/foo> <http://schema.org/name> \"Foo\" . /// <http://example.com/foo> <http://schema.org/name> "Foo" .
/// <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> . /// <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
/// <http://example.com/bar> <http://schema.org/name> \"Bar\" ."; /// <http://example.com/bar> <http://schema.org/name> "Bar" ."#;
/// ///
/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
/// let mut count = 0; /// let mut count = 0;
@ -215,7 +215,7 @@ pub struct FromReadNQuadsReader<R: Read> {
impl<R: Read> Iterator for FromReadNQuadsReader<R> { impl<R: Read> Iterator for FromReadNQuadsReader<R> {
type Item = Result<Quad, ParseError>; type Item = Result<Quad, ParseError>;
fn next(&mut self) -> Option<Result<Quad, ParseError>> { fn next(&mut self) -> Option<Self::Item> {
self.inner.next() self.inner.next()
} }
} }
@ -229,10 +229,10 @@ impl<R: Read> Iterator for FromReadNQuadsReader<R> {
/// ///
/// # #[tokio::main(flavor = "current_thread")] /// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> Result<(), oxttl::ParseError> { /// # async fn main() -> Result<(), oxttl::ParseError> {
/// let file = b"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> . /// let file = br#"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
/// <http://example.com/foo> <http://schema.org/name> \"Foo\" . /// <http://example.com/foo> <http://schema.org/name> "Foo" .
/// <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> . /// <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
/// <http://example.com/bar> <http://schema.org/name> \"Bar\" ."; /// <http://example.com/bar> <http://schema.org/name> "Bar" ."#;
/// ///
/// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person");
/// let mut count = 0; /// let mut count = 0;

@ -19,10 +19,10 @@ use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};
/// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxttl::NTriplesParser; /// use oxttl::NTriplesParser;
/// ///
/// let file = b"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> . /// let file = br#"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
/// <http://example.com/foo> <http://schema.org/name> \"Foo\" . /// <http://example.com/foo> <http://schema.org/name> "Foo" .
/// <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> . /// <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
/// <http://example.com/bar> <http://schema.org/name> \"Bar\" ."; /// <http://example.com/bar> <http://schema.org/name> "Bar" ."#;
/// ///
/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
/// let mut count = 0; /// let mut count = 0;
@ -76,10 +76,10 @@ impl NTriplesParser {
/// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxttl::NTriplesParser; /// use oxttl::NTriplesParser;
/// ///
/// let file = b"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> . /// let file = br#"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
/// <http://example.com/foo> <http://schema.org/name> \"Foo\" . /// <http://example.com/foo> <http://schema.org/name> "Foo" .
/// <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> . /// <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
/// <http://example.com/bar> <http://schema.org/name> \"Bar\" ."; /// <http://example.com/bar> <http://schema.org/name> "Bar" ."#;
/// ///
/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
/// let mut count = 0; /// let mut count = 0;
@ -107,10 +107,10 @@ impl NTriplesParser {
/// ///
/// # #[tokio::main(flavor = "current_thread")] /// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> Result<(), oxttl::ParseError> { /// # async fn main() -> Result<(), oxttl::ParseError> {
/// let file = b"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> . /// let file = br#"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
/// <http://example.com/foo> <http://schema.org/name> \"Foo\" . /// <http://example.com/foo> <http://schema.org/name> "Foo" .
/// <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> . /// <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
/// <http://example.com/bar> <http://schema.org/name> \"Bar\" ."; /// <http://example.com/bar> <http://schema.org/name> "Bar" ."#;
/// ///
/// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person");
/// let mut count = 0; /// let mut count = 0;
@ -191,10 +191,10 @@ impl NTriplesParser {
/// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxttl::NTriplesParser; /// use oxttl::NTriplesParser;
/// ///
/// let file = b"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> . /// let file = br#"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
/// <http://example.com/foo> <http://schema.org/name> \"Foo\" . /// <http://example.com/foo> <http://schema.org/name> "Foo" .
/// <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> . /// <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
/// <http://example.com/bar> <http://schema.org/name> \"Bar\" ."; /// <http://example.com/bar> <http://schema.org/name> "Bar" ."#;
/// ///
/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
/// let mut count = 0; /// let mut count = 0;
@ -215,7 +215,7 @@ pub struct FromReadNTriplesReader<R: Read> {
impl<R: Read> Iterator for FromReadNTriplesReader<R> { impl<R: Read> Iterator for FromReadNTriplesReader<R> {
type Item = Result<Triple, ParseError>; type Item = Result<Triple, ParseError>;
fn next(&mut self) -> Option<Result<Triple, ParseError>> { fn next(&mut self) -> Option<Self::Item> {
Some(self.inner.next()?.map(Into::into)) Some(self.inner.next()?.map(Into::into))
} }
} }
@ -229,10 +229,10 @@ impl<R: Read> Iterator for FromReadNTriplesReader<R> {
/// ///
/// # #[tokio::main(flavor = "current_thread")] /// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> Result<(), oxttl::ParseError> { /// # async fn main() -> Result<(), oxttl::ParseError> {
/// let file = b"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> . /// let file = br#"<http://example.com/foo> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
/// <http://example.com/foo> <http://schema.org/name> \"Foo\" . /// <http://example.com/foo> <http://schema.org/name> "Foo" .
/// <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> . /// <http://example.com/bar> <http://www.w3.org/1999/02/22-rdf-syntax-ns#type> <http://schema.org/Person> .
/// <http://example.com/bar> <http://schema.org/name> \"Bar\" ."; /// <http://example.com/bar> <http://schema.org/name> "Bar" ."#;
/// ///
/// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person");
/// let mut count = 0; /// let mut count = 0;
@ -565,7 +565,7 @@ mod tests {
fn unchecked_parsing() { fn unchecked_parsing() {
let triples = NTriplesParser::new() let triples = NTriplesParser::new()
.unchecked() .unchecked()
.parse_read("<foo> <bar> \"baz\"@toolonglangtag .".as_bytes()) .parse_read(r#"<foo> <bar> "baz"@toolonglangtag ."#.as_bytes())
.collect::<Result<Vec<_>, _>>() .collect::<Result<Vec<_>, _>>()
.unwrap(); .unwrap();
assert_eq!( assert_eq!(

@ -844,7 +844,7 @@ impl TriGRecognizer {
true, true,
Some(b"#"), Some(b"#"),
), ),
TriGRecognizer { Self {
stack: vec![TriGState::TriGDoc], stack: vec![TriGState::TriGDoc],
cur_subject: Vec::new(), cur_subject: Vec::new(),
cur_predicate: Vec::new(), cur_predicate: Vec::new(),

@ -72,7 +72,7 @@ impl Error for SyntaxError {}
impl From<SyntaxError> for io::Error { impl From<SyntaxError> for io::Error {
#[inline] #[inline]
fn from(error: SyntaxError) -> Self { fn from(error: SyntaxError) -> Self {
io::Error::new(io::ErrorKind::InvalidData, error) Self::new(io::ErrorKind::InvalidData, error)
} }
} }

@ -22,12 +22,12 @@ use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};
/// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxttl::TriGParser; /// use oxttl::TriGParser;
/// ///
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" . /// schema:name "Foo" .
/// <bar> a schema:Person ; /// <bar> a schema:Person ;
/// schema:name \"Bar\" ."; /// schema:name "Bar" ."#;
/// ///
/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
/// let mut count = 0; /// let mut count = 0;
@ -100,12 +100,12 @@ impl TriGParser {
/// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxttl::TriGParser; /// use oxttl::TriGParser;
/// ///
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" . /// schema:name "Foo" .
/// <bar> a schema:Person ; /// <bar> a schema:Person ;
/// schema:name \"Bar\" ."; /// schema:name "Bar" ."#;
/// ///
/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
/// let mut count = 0; /// let mut count = 0;
@ -133,12 +133,12 @@ impl TriGParser {
/// ///
/// # #[tokio::main(flavor = "current_thread")] /// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> Result<(), oxttl::ParseError> { /// # async fn main() -> Result<(), oxttl::ParseError> {
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" . /// schema:name "Foo" .
/// <bar> a schema:Person ; /// <bar> a schema:Person ;
/// schema:name \"Bar\" ."; /// schema:name "Bar" ."#;
/// ///
/// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person");
/// let mut count = 0; /// let mut count = 0;
@ -220,12 +220,12 @@ impl TriGParser {
/// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxttl::TriGParser; /// use oxttl::TriGParser;
/// ///
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" . /// schema:name "Foo" .
/// <bar> a schema:Person ; /// <bar> a schema:Person ;
/// schema:name \"Bar\" ."; /// schema:name "Bar" ."#;
/// ///
/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
/// let mut count = 0; /// let mut count = 0;
@ -253,10 +253,10 @@ impl<R: Read> FromReadTriGReader<R> {
/// ``` /// ```
/// use oxttl::TriGParser; /// use oxttl::TriGParser;
/// ///
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" ."; /// schema:name "Foo" ."#;
/// ///
/// let mut reader = TriGParser::new().parse_read(file.as_ref()); /// let mut reader = TriGParser::new().parse_read(file.as_ref());
/// assert!(reader.prefixes().is_empty()); // No prefix at the beginning /// assert!(reader.prefixes().is_empty()); // No prefix at the beginning
@ -274,10 +274,10 @@ impl<R: Read> FromReadTriGReader<R> {
/// ``` /// ```
/// use oxttl::TriGParser; /// use oxttl::TriGParser;
/// ///
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" ."; /// schema:name "Foo" ."#;
/// ///
/// let mut reader = TriGParser::new().parse_read(file.as_ref()); /// let mut reader = TriGParser::new().parse_read(file.as_ref());
/// assert!(reader.base_iri().is_none()); // No base at the beginning because none has been given to the parser. /// assert!(reader.base_iri().is_none()); // No base at the beginning because none has been given to the parser.
@ -300,7 +300,7 @@ impl<R: Read> FromReadTriGReader<R> {
impl<R: Read> Iterator for FromReadTriGReader<R> { impl<R: Read> Iterator for FromReadTriGReader<R> {
type Item = Result<Quad, ParseError>; type Item = Result<Quad, ParseError>;
fn next(&mut self) -> Option<Result<Quad, ParseError>> { fn next(&mut self) -> Option<Self::Item> {
self.inner.next() self.inner.next()
} }
} }
@ -314,12 +314,12 @@ impl<R: Read> Iterator for FromReadTriGReader<R> {
/// ///
/// # #[tokio::main(flavor = "current_thread")] /// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> Result<(), oxttl::ParseError> { /// # async fn main() -> Result<(), oxttl::ParseError> {
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" . /// schema:name "Foo" .
/// <bar> a schema:Person ; /// <bar> a schema:Person ;
/// schema:name \"Bar\" ."; /// schema:name "Bar" ."#;
/// ///
/// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person");
/// let mut count = 0; /// let mut count = 0;
@ -358,10 +358,10 @@ impl<R: AsyncRead + Unpin> FromTokioAsyncReadTriGReader<R> {
/// ///
/// # #[tokio::main(flavor = "current_thread")] /// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> Result<(), oxttl::ParseError> { /// # async fn main() -> Result<(), oxttl::ParseError> {
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" ."; /// schema:name "Foo" ."#;
/// ///
/// let mut reader = TriGParser::new().parse_tokio_async_read(file.as_ref()); /// let mut reader = TriGParser::new().parse_tokio_async_read(file.as_ref());
/// assert!(reader.prefixes().is_empty()); // No prefix at the beginning /// assert!(reader.prefixes().is_empty()); // No prefix at the beginning
@ -382,10 +382,10 @@ impl<R: AsyncRead + Unpin> FromTokioAsyncReadTriGReader<R> {
/// ///
/// # #[tokio::main(flavor = "current_thread")] /// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> Result<(), oxttl::ParseError> { /// # async fn main() -> Result<(), oxttl::ParseError> {
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" ."; /// schema:name "Foo" ."#;
/// ///
/// let mut reader = TriGParser::new().parse_tokio_async_read(file.as_ref()); /// let mut reader = TriGParser::new().parse_tokio_async_read(file.as_ref());
/// assert!(reader.base_iri().is_none()); // No base IRI at the beginning /// assert!(reader.base_iri().is_none()); // No base IRI at the beginning
@ -481,10 +481,10 @@ impl LowLevelTriGReader {
/// ``` /// ```
/// use oxttl::TriGParser; /// use oxttl::TriGParser;
/// ///
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" ."; /// schema:name "Foo" ."#;
/// ///
/// let mut reader = TriGParser::new().parse(); /// let mut reader = TriGParser::new().parse();
/// reader.extend_from_slice(file); /// reader.extend_from_slice(file);
@ -503,10 +503,10 @@ impl LowLevelTriGReader {
/// ``` /// ```
/// use oxttl::TriGParser; /// use oxttl::TriGParser;
/// ///
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" ."; /// schema:name "Foo" ."#;
/// ///
/// let mut reader = TriGParser::new().parse(); /// let mut reader = TriGParser::new().parse();
/// reader.extend_from_slice(file); /// reader.extend_from_slice(file);

@ -24,12 +24,12 @@ use tokio::io::{AsyncRead, AsyncWrite};
/// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxttl::TurtleParser; /// use oxttl::TurtleParser;
/// ///
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" . /// schema:name "Foo" .
/// <bar> a schema:Person ; /// <bar> a schema:Person ;
/// schema:name \"Bar\" ."; /// schema:name "Bar" ."#;
/// ///
/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
/// let mut count = 0; /// let mut count = 0;
@ -102,12 +102,12 @@ impl TurtleParser {
/// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxttl::TurtleParser; /// use oxttl::TurtleParser;
/// ///
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" . /// schema:name "Foo" .
/// <bar> a schema:Person ; /// <bar> a schema:Person ;
/// schema:name \"Bar\" ."; /// schema:name "Bar" ."#;
/// ///
/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
/// let mut count = 0; /// let mut count = 0;
@ -135,12 +135,12 @@ impl TurtleParser {
/// ///
/// # #[tokio::main(flavor = "current_thread")] /// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> Result<(), oxttl::ParseError> { /// # async fn main() -> Result<(), oxttl::ParseError> {
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" . /// schema:name "Foo" .
/// <bar> a schema:Person ; /// <bar> a schema:Person ;
/// schema:name \"Bar\" ."; /// schema:name "Bar" ."#;
/// ///
/// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person");
/// let mut count = 0; /// let mut count = 0;
@ -222,12 +222,12 @@ impl TurtleParser {
/// use oxrdf::{NamedNodeRef, vocab::rdf}; /// use oxrdf::{NamedNodeRef, vocab::rdf};
/// use oxttl::TurtleParser; /// use oxttl::TurtleParser;
/// ///
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" . /// schema:name "Foo" .
/// <bar> a schema:Person ; /// <bar> a schema:Person ;
/// schema:name \"Bar\" ."; /// schema:name "Bar" ."#;
/// ///
/// let schema_person = NamedNodeRef::new("http://schema.org/Person")?; /// let schema_person = NamedNodeRef::new("http://schema.org/Person")?;
/// let mut count = 0; /// let mut count = 0;
@ -255,10 +255,10 @@ impl<R: Read> FromReadTurtleReader<R> {
/// ``` /// ```
/// use oxttl::TurtleParser; /// use oxttl::TurtleParser;
/// ///
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" ."; /// schema:name "Foo" ."#;
/// ///
/// let mut reader = TurtleParser::new().parse_read(file.as_ref()); /// let mut reader = TurtleParser::new().parse_read(file.as_ref());
/// assert!(reader.prefixes().is_empty()); // No prefix at the beginning /// assert!(reader.prefixes().is_empty()); // No prefix at the beginning
@ -276,10 +276,10 @@ impl<R: Read> FromReadTurtleReader<R> {
/// ``` /// ```
/// use oxttl::TurtleParser; /// use oxttl::TurtleParser;
/// ///
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" ."; /// schema:name "Foo" ."#;
/// ///
/// let mut reader = TurtleParser::new().parse_read(file.as_ref()); /// let mut reader = TurtleParser::new().parse_read(file.as_ref());
/// assert!(reader.base_iri().is_none()); // No base at the beginning because none has been given to the parser. /// assert!(reader.base_iri().is_none()); // No base at the beginning because none has been given to the parser.
@ -302,7 +302,7 @@ impl<R: Read> FromReadTurtleReader<R> {
impl<R: Read> Iterator for FromReadTurtleReader<R> { impl<R: Read> Iterator for FromReadTurtleReader<R> {
type Item = Result<Triple, ParseError>; type Item = Result<Triple, ParseError>;
fn next(&mut self) -> Option<Result<Triple, ParseError>> { fn next(&mut self) -> Option<Self::Item> {
Some(self.inner.next()?.map(Into::into)) Some(self.inner.next()?.map(Into::into))
} }
} }
@ -316,12 +316,12 @@ impl<R: Read> Iterator for FromReadTurtleReader<R> {
/// ///
/// # #[tokio::main(flavor = "current_thread")] /// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> Result<(), oxttl::ParseError> { /// # async fn main() -> Result<(), oxttl::ParseError> {
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" . /// schema:name "Foo" .
/// <bar> a schema:Person ; /// <bar> a schema:Person ;
/// schema:name \"Bar\" ."; /// schema:name "Bar" ."#;
/// ///
/// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person"); /// let schema_person = NamedNodeRef::new_unchecked("http://schema.org/Person");
/// let mut count = 0; /// let mut count = 0;
@ -360,10 +360,10 @@ impl<R: AsyncRead + Unpin> FromTokioAsyncReadTurtleReader<R> {
/// ///
/// # #[tokio::main(flavor = "current_thread")] /// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> Result<(), oxttl::ParseError> { /// # async fn main() -> Result<(), oxttl::ParseError> {
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" ."; /// schema:name "Foo" ."#;
/// ///
/// let mut reader = TurtleParser::new().parse_tokio_async_read(file.as_ref()); /// let mut reader = TurtleParser::new().parse_tokio_async_read(file.as_ref());
/// assert!(reader.prefixes().is_empty()); // No prefix at the beginning /// assert!(reader.prefixes().is_empty()); // No prefix at the beginning
@ -384,10 +384,10 @@ impl<R: AsyncRead + Unpin> FromTokioAsyncReadTurtleReader<R> {
/// ///
/// # #[tokio::main(flavor = "current_thread")] /// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> Result<(), oxttl::ParseError> { /// # async fn main() -> Result<(), oxttl::ParseError> {
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" ."; /// schema:name "Foo" ."#;
/// ///
/// let mut reader = TurtleParser::new().parse_tokio_async_read(file.as_ref()); /// let mut reader = TurtleParser::new().parse_tokio_async_read(file.as_ref());
/// assert!(reader.base_iri().is_none()); // No base IRI at the beginning /// assert!(reader.base_iri().is_none()); // No base IRI at the beginning
@ -483,10 +483,10 @@ impl LowLevelTurtleReader {
/// ``` /// ```
/// use oxttl::TurtleParser; /// use oxttl::TurtleParser;
/// ///
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" ."; /// schema:name "Foo" ."#;
/// ///
/// let mut reader = TurtleParser::new().parse(); /// let mut reader = TurtleParser::new().parse();
/// reader.extend_from_slice(file); /// reader.extend_from_slice(file);
@ -505,10 +505,10 @@ impl LowLevelTurtleReader {
/// ``` /// ```
/// use oxttl::TurtleParser; /// use oxttl::TurtleParser;
/// ///
/// let file = b"@base <http://example.com/> . /// let file = br#"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> . /// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ; /// <foo> a schema:Person ;
/// schema:name \"Foo\" ."; /// schema:name "Foo" ."#;
/// ///
/// let mut reader = TurtleParser::new().parse(); /// let mut reader = TurtleParser::new().parse();
/// reader.extend_from_slice(file); /// reader.extend_from_slice(file);

@ -22,11 +22,11 @@ use std::sync::Arc;
/// ///
/// let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Json); /// let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Json);
/// // boolean /// // boolean
/// if let FromReadQueryResultsReader::Boolean(v) = json_parser.parse_read(b"{\"boolean\":true}".as_slice())? { /// if let FromReadQueryResultsReader::Boolean(v) = json_parser.parse_read(br#"{"boolean":true}"#.as_slice())? {
/// assert_eq!(v, true); /// assert_eq!(v, true);
/// } /// }
/// // solutions /// // solutions
/// if let FromReadQueryResultsReader::Solutions(solutions) = json_parser.parse_read(b"{\"head\":{\"vars\":[\"foo\",\"bar\"]},\"results\":{\"bindings\":[{\"foo\":{\"type\":\"literal\",\"value\":\"test\"}}]}}".as_slice())? { /// if let FromReadQueryResultsReader::Solutions(solutions) = json_parser.parse_read(br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}}]}}"#.as_slice())? {
/// assert_eq!(solutions.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]); /// assert_eq!(solutions.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]);
/// for solution in solutions { /// for solution in solutions {
/// assert_eq!(solution?.iter().collect::<Vec<_>>(), vec![(&Variable::new_unchecked("foo"), &Literal::from("test").into())]); /// assert_eq!(solution?.iter().collect::<Vec<_>>(), vec![(&Variable::new_unchecked("foo"), &Literal::from("test").into())]);
@ -57,12 +57,12 @@ impl QueryResultsParser {
/// let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Xml); /// let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Xml);
/// ///
/// // boolean /// // boolean
/// if let FromReadQueryResultsReader::Boolean(v) = json_parser.parse_read(b"<sparql xmlns=\"http://www.w3.org/2005/sparql-results#\"><head/><boolean>true</boolean></sparql>".as_slice())? { /// if let FromReadQueryResultsReader::Boolean(v) = json_parser.parse_read(br#"<sparql xmlns="http://www.w3.org/2005/sparql-results#"><head/><boolean>true</boolean></sparql>"#.as_slice())? {
/// assert_eq!(v, true); /// assert_eq!(v, true);
/// } /// }
/// ///
/// // solutions /// // solutions
/// if let FromReadQueryResultsReader::Solutions(solutions) = json_parser.parse_read(b"<sparql xmlns=\"http://www.w3.org/2005/sparql-results#\"><head><variable name=\"foo\"/><variable name=\"bar\"/></head><results><result><binding name=\"foo\"><literal>test</literal></binding></result></results></sparql>".as_slice())? { /// if let FromReadQueryResultsReader::Solutions(solutions) = json_parser.parse_read(br#"<sparql xmlns="http://www.w3.org/2005/sparql-results#"><head><variable name="foo"/><variable name="bar"/></head><results><result><binding name="foo"><literal>test</literal></binding></result></results></sparql>"#.as_slice())? {
/// assert_eq!(solutions.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]); /// assert_eq!(solutions.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]);
/// for solution in solutions { /// for solution in solutions {
/// assert_eq!(solution?.iter().collect::<Vec<_>>(), vec![(&Variable::new_unchecked("foo"), &Literal::from("test").into())]); /// assert_eq!(solution?.iter().collect::<Vec<_>>(), vec![(&Variable::new_unchecked("foo"), &Literal::from("test").into())]);
@ -164,7 +164,7 @@ pub enum FromReadQueryResultsReader<R: Read> {
/// use oxrdf::{Literal, Variable}; /// use oxrdf::{Literal, Variable};
/// ///
/// let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Json); /// let json_parser = QueryResultsParser::from_format(QueryResultsFormat::Json);
/// if let FromReadQueryResultsReader::Solutions(solutions) = json_parser.parse_read(b"{\"head\":{\"vars\":[\"foo\",\"bar\"]},\"results\":{\"bindings\":[{\"foo\":{\"type\":\"literal\",\"value\":\"test\"}}]}}".as_slice())? { /// if let FromReadQueryResultsReader::Solutions(solutions) = json_parser.parse_read(br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}}]}}"#.as_slice())? {
/// assert_eq!(solutions.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]); /// assert_eq!(solutions.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]);
/// for solution in solutions { /// for solution in solutions {
/// assert_eq!(solution?.iter().collect::<Vec<_>>(), vec![(&Variable::new_unchecked("foo"), &Literal::from("test").into())]); /// assert_eq!(solution?.iter().collect::<Vec<_>>(), vec![(&Variable::new_unchecked("foo"), &Literal::from("test").into())]);
@ -206,7 +206,7 @@ impl<R: Read> FromReadSolutionsReader<R> {
impl<R: Read> Iterator for FromReadSolutionsReader<R> { impl<R: Read> Iterator for FromReadSolutionsReader<R> {
type Item = Result<QuerySolution, ParseError>; type Item = Result<QuerySolution, ParseError>;
fn next(&mut self) -> Option<Result<QuerySolution, ParseError>> { fn next(&mut self) -> Option<Self::Item> {
Some( Some(
match &mut self.solutions { match &mut self.solutions {
SolutionsReaderKind::Xml(reader) => reader.read_next(), SolutionsReaderKind::Xml(reader) => reader.read_next(),

@ -35,14 +35,14 @@ use tokio::io::AsyncWrite;
/// // boolean /// // boolean
/// let mut buffer = Vec::new(); /// let mut buffer = Vec::new();
/// json_serializer.serialize_boolean_to_write(&mut buffer, true)?; /// json_serializer.serialize_boolean_to_write(&mut buffer, true)?;
/// assert_eq!(buffer, b"{\"head\":{},\"boolean\":true}"); /// assert_eq!(buffer, br#"{"head":{},"boolean":true}"#);
/// ///
/// // solutions /// // solutions
/// let mut buffer = Vec::new(); /// let mut buffer = Vec::new();
/// let mut writer = json_serializer.serialize_solutions_to_write(&mut buffer, vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")])?; /// let mut writer = json_serializer.serialize_solutions_to_write(&mut buffer, vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")])?;
/// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test"))))?; /// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test"))))?;
/// writer.finish()?; /// writer.finish()?;
/// assert_eq!(buffer, b"{\"head\":{\"vars\":[\"foo\",\"bar\"]},\"results\":{\"bindings\":[{\"foo\":{\"type\":\"literal\",\"value\":\"test\"}}]}}"); /// assert_eq!(buffer, br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}}]}}"#);
/// # std::io::Result::Ok(()) /// # std::io::Result::Ok(())
/// ``` /// ```
pub struct QueryResultsSerializer { pub struct QueryResultsSerializer {
@ -65,7 +65,7 @@ impl QueryResultsSerializer {
/// let xml_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Xml); /// let xml_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Xml);
/// let mut buffer = Vec::new(); /// let mut buffer = Vec::new();
/// xml_serializer.serialize_boolean_to_write(&mut buffer, true)?; /// xml_serializer.serialize_boolean_to_write(&mut buffer, true)?;
/// assert_eq!(buffer, b"<?xml version=\"1.0\"?><sparql xmlns=\"http://www.w3.org/2005/sparql-results#\"><head></head><boolean>true</boolean></sparql>"); /// assert_eq!(buffer, br#"<?xml version="1.0"?><sparql xmlns="http://www.w3.org/2005/sparql-results#"><head></head><boolean>true</boolean></sparql>"#);
/// # std::io::Result::Ok(()) /// # std::io::Result::Ok(())
/// ``` /// ```
pub fn serialize_boolean_to_write<W: Write>(&self, write: W, value: bool) -> io::Result<W> { pub fn serialize_boolean_to_write<W: Write>(&self, write: W, value: bool) -> io::Result<W> {
@ -89,7 +89,7 @@ impl QueryResultsSerializer {
/// let json_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Json); /// let json_serializer = QueryResultsSerializer::from_format(QueryResultsFormat::Json);
/// let mut buffer = Vec::new(); /// let mut buffer = Vec::new();
/// json_serializer.serialize_boolean_to_tokio_async_write(&mut buffer, false).await?; /// json_serializer.serialize_boolean_to_tokio_async_write(&mut buffer, false).await?;
/// assert_eq!(buffer, b"{\"head\":{},\"boolean\":false}"); /// assert_eq!(buffer, br#"{"head":{},"boolean":false}"r);
/// # Ok(()) /// # Ok(())
/// # } /// # }
/// ``` /// ```
@ -134,7 +134,7 @@ impl QueryResultsSerializer {
/// let mut writer = xml_serializer.serialize_solutions_to_write(&mut buffer, vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")])?; /// let mut writer = xml_serializer.serialize_solutions_to_write(&mut buffer, vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")])?;
/// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test"))))?; /// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test"))))?;
/// writer.finish()?; /// writer.finish()?;
/// assert_eq!(buffer, b"<?xml version=\"1.0\"?><sparql xmlns=\"http://www.w3.org/2005/sparql-results#\"><head><variable name=\"foo\"/><variable name=\"bar\"/></head><results><result><binding name=\"foo\"><literal>test</literal></binding></result></results></sparql>"); /// assert_eq!(buffer, br#"<?xml version="1.0"?><sparql xmlns="http://www.w3.org/2005/sparql-results#"><head><variable name="foo"/><variable name="bar"/></head><results><result><binding name="foo"><literal>test</literal></binding></result></results></sparql>"#);
/// # std::io::Result::Ok(()) /// # std::io::Result::Ok(())
/// ``` /// ```
pub fn serialize_solutions_to_write<W: Write>( pub fn serialize_solutions_to_write<W: Write>(
@ -183,7 +183,7 @@ impl QueryResultsSerializer {
/// let mut writer = json_serializer.serialize_solutions_to_tokio_async_write(&mut buffer, vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]).await?; /// let mut writer = json_serializer.serialize_solutions_to_tokio_async_write(&mut buffer, vec![Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]).await?;
/// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test")))).await?; /// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test")))).await?;
/// writer.finish().await?; /// writer.finish().await?;
/// assert_eq!(buffer, b"{\"head\":{\"vars\":[\"foo\",\"bar\"]},\"results\":{\"bindings\":[{\"foo\":{\"type\":\"literal\",\"value\":\"test\"}}]}}"); /// assert_eq!(buffer, br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}}]}}"#);
/// # Ok(()) /// # Ok(())
/// # } /// # }
/// ``` /// ```
@ -280,7 +280,7 @@ impl<W: Write> ToWriteSolutionsWriter<W> {
/// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test"))))?; /// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test"))))?;
/// writer.write(&QuerySolution::from((vec![Variable::new_unchecked("bar")], vec![Some(Literal::from("test").into())])))?; /// writer.write(&QuerySolution::from((vec![Variable::new_unchecked("bar")], vec![Some(Literal::from("test").into())])))?;
/// writer.finish()?; /// writer.finish()?;
/// assert_eq!(buffer, b"{\"head\":{\"vars\":[\"foo\",\"bar\"]},\"results\":{\"bindings\":[{\"foo\":{\"type\":\"literal\",\"value\":\"test\"}},{\"bar\":{\"type\":\"literal\",\"value\":\"test\"}}]}}"); /// assert_eq!(buffer, br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}},{"bar":{"type":"literal","value":"test"}}]}}"#);
/// # std::io::Result::Ok(()) /// # std::io::Result::Ok(())
/// ``` /// ```
pub fn write<'a>( pub fn write<'a>(
@ -368,7 +368,7 @@ impl<W: AsyncWrite + Unpin> ToTokioAsyncWriteSolutionsWriter<W> {
/// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test")))).await?; /// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test")))).await?;
/// writer.write(&QuerySolution::from((vec![Variable::new_unchecked("bar")], vec![Some(Literal::from("test").into())]))).await?; /// writer.write(&QuerySolution::from((vec![Variable::new_unchecked("bar")], vec![Some(Literal::from("test").into())]))).await?;
/// writer.finish().await?; /// writer.finish().await?;
/// assert_eq!(buffer, b"{\"head\":{\"vars\":[\"foo\",\"bar\"]},\"results\":{\"bindings\":[{\"foo\":{\"type\":\"literal\",\"value\":\"test\"}},{\"bar\":{\"type\":\"literal\",\"value\":\"test\"}}]}}"); /// assert_eq!(buffer, br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}},{"bar":{"type":"literal","value":"test"}}]}}"#);
/// # Ok(()) /// # Ok(())
/// # } /// # }
/// ``` /// ```
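The serializer doc-tests above swap escaped byte-string literals for raw byte-string literals. A minimal standalone sketch of the difference (plain Rust, no sparesults types involved):

```rust
fn main() {
    // Escaped form: every interior quote needs a backslash.
    let escaped: &[u8] = b"{\"head\":{},\"boolean\":false}";
    // Raw byte-string form: br#"..."# keeps the quotes readable,
    // which is why the expected JSON/XML payloads above are easier to check by eye.
    let raw: &[u8] = br#"{"head":{},"boolean":false}"#;
    assert_eq!(escaped, raw);
}
```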

@ -130,7 +130,7 @@ impl<'a> IntoIterator for &'a QuerySolution {
type IntoIter = Iter<'a>; type IntoIter = Iter<'a>;
#[inline] #[inline]
fn into_iter(self) -> Iter<'a> { fn into_iter(self) -> Self::IntoIter {
Iter { Iter {
inner: self.variables.iter().zip(&self.values), inner: self.variables.iter().zip(&self.values),
} }
@ -142,7 +142,7 @@ impl Index<usize> for QuerySolution {
#[allow(clippy::panic)] #[allow(clippy::panic)]
#[inline] #[inline]
fn index(&self, index: usize) -> &Term { fn index(&self, index: usize) -> &Self::Output {
self.get(index) self.get(index)
.unwrap_or_else(|| panic!("The column {index} is not set in this solution")) .unwrap_or_else(|| panic!("The column {index} is not set in this solution"))
} }
@ -153,7 +153,7 @@ impl Index<&str> for QuerySolution {
#[allow(clippy::panic)] #[allow(clippy::panic)]
#[inline] #[inline]
fn index(&self, index: &str) -> &Term { fn index(&self, index: &str) -> &Self::Output {
self.get(index) self.get(index)
.unwrap_or_else(|| panic!("The variable ?{index} is not set in this solution")) .unwrap_or_else(|| panic!("The variable ?{index} is not set in this solution"))
} }
@ -164,7 +164,7 @@ impl Index<VariableRef<'_>> for QuerySolution {
#[allow(clippy::panic)] #[allow(clippy::panic)]
#[inline] #[inline]
fn index(&self, index: VariableRef<'_>) -> &Term { fn index(&self, index: VariableRef<'_>) -> &Self::Output {
self.get(index) self.get(index)
.unwrap_or_else(|| panic!("The variable {index} is not set in this solution")) .unwrap_or_else(|| panic!("The variable {index} is not set in this solution"))
} }
@ -173,7 +173,7 @@ impl Index<Variable> for QuerySolution {
type Output = Term; type Output = Term;
#[inline] #[inline]
fn index(&self, index: Variable) -> &Term { fn index(&self, index: Variable) -> &Self::Output {
self.index(index.as_ref()) self.index(index.as_ref())
} }
} }
@ -182,7 +182,7 @@ impl Index<&Variable> for QuerySolution {
type Output = Term; type Output = Term;
#[inline] #[inline]
fn index(&self, index: &Variable) -> &Term { fn index(&self, index: &Variable) -> &Self::Output {
self.index(index.as_ref()) self.index(index.as_ref())
} }
} }
@ -228,7 +228,7 @@ impl<'a> Iterator for Iter<'a> {
type Item = (&'a Variable, &'a Term); type Item = (&'a Variable, &'a Term);
#[inline] #[inline]
fn next(&mut self) -> Option<(&'a Variable, &'a Term)> { fn next(&mut self) -> Option<Self::Item> {
for (variable, value) in &mut self.inner { for (variable, value) in &mut self.inner {
if let Some(value) = value { if let Some(value) = value {
return Some((variable, value)); return Some((variable, value));
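The solution.rs hunks above only rewrite return types to `Self::Output`/`Self::IntoIter`; behaviour is unchanged. For orientation, a hedged usage sketch of `QuerySolution` indexing and iteration, assuming `oxrdf` and `sparesults` as dependencies and a `get` accessor that takes a variable name:

```rust
use oxrdf::{Literal, Term, Variable};
use sparesults::QuerySolution;

fn main() {
    // Bind ?s to "example" and leave ?o unbound.
    let solution = QuerySolution::from((
        vec![Variable::new_unchecked("s"), Variable::new_unchecked("o")],
        vec![Some(Literal::from("example").into()), None],
    ));
    // Index<&str> panics on unbound columns, so probe with get() when unsure.
    assert_eq!(solution["s"], Term::from(Literal::from("example")));
    assert!(solution.get("o").is_none());
    // Iterating a &QuerySolution skips unbound variables.
    for (variable, term) in &solution {
        println!("?{} = {}", variable.as_str(), term);
    }
}
```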

@ -275,7 +275,7 @@ impl fmt::Display for Query {
impl FromStr for Query { impl FromStr for Query {
type Err = ParseError; type Err = ParseError;
fn from_str(query: &str) -> Result<Self, ParseError> { fn from_str(query: &str) -> Result<Self, Self::Err> {
Self::parse(query, None) Self::parse(query, None)
} }
} }
@ -283,7 +283,7 @@ impl FromStr for Query {
impl<'a> TryFrom<&'a str> for Query { impl<'a> TryFrom<&'a str> for Query {
type Error = ParseError; type Error = ParseError;
fn try_from(query: &str) -> Result<Self, ParseError> { fn try_from(query: &str) -> Result<Self, Self::Error> {
Self::from_str(query) Self::from_str(query)
} }
} }
@ -291,7 +291,7 @@ impl<'a> TryFrom<&'a str> for Query {
impl<'a> TryFrom<&'a String> for Query { impl<'a> TryFrom<&'a String> for Query {
type Error = ParseError; type Error = ParseError;
fn try_from(query: &String) -> Result<Self, ParseError> { fn try_from(query: &String) -> Result<Self, Self::Error> {
Self::from_str(query) Self::from_str(query)
} }
} }
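Since `Query` implements both `FromStr` and `TryFrom<&str>` (the hunks above only spell the error type as `Self::Err`/`Self::Error`), the usual entry point is `str::parse`. A small sketch assuming `spargebra` as a dependency:

```rust
use spargebra::Query;

fn main() -> Result<(), spargebra::ParseError> {
    // FromStr makes str::parse available; Self::Err in the impl is ParseError.
    let query: Query = "SELECT * WHERE { ?s ?p ?o }".parse()?;
    // TryFrom<&str> delegates to the same parser.
    let same = Query::try_from("SELECT * WHERE { ?s ?p ?o }")?;
    assert_eq!(query.to_string(), same.to_string());
    Ok(())
}
```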

@ -48,7 +48,7 @@ impl TryFrom<Subject> for GroundSubject {
type Error = (); type Error = ();
#[inline] #[inline]
fn try_from(subject: Subject) -> Result<Self, ()> { fn try_from(subject: Subject) -> Result<Self, Self::Error> {
match subject { match subject {
Subject::NamedNode(t) => Ok(t.into()), Subject::NamedNode(t) => Ok(t.into()),
Subject::BlankNode(_) => Err(()), Subject::BlankNode(_) => Err(()),
@ -62,7 +62,7 @@ impl TryFrom<GroundTerm> for GroundSubject {
type Error = (); type Error = ();
#[inline] #[inline]
fn try_from(term: GroundTerm) -> Result<Self, ()> { fn try_from(term: GroundTerm) -> Result<Self, Self::Error> {
match term { match term {
GroundTerm::NamedNode(t) => Ok(t.into()), GroundTerm::NamedNode(t) => Ok(t.into()),
GroundTerm::Literal(_) => Err(()), GroundTerm::Literal(_) => Err(()),
@ -125,7 +125,7 @@ impl TryFrom<Term> for GroundTerm {
type Error = (); type Error = ();
#[inline] #[inline]
fn try_from(term: Term) -> Result<Self, ()> { fn try_from(term: Term) -> Result<Self, Self::Error> {
match term { match term {
Term::NamedNode(t) => Ok(t.into()), Term::NamedNode(t) => Ok(t.into()),
Term::BlankNode(_) => Err(()), Term::BlankNode(_) => Err(()),
@ -171,7 +171,7 @@ impl TryFrom<Triple> for GroundTriple {
type Error = (); type Error = ();
#[inline] #[inline]
fn try_from(triple: Triple) -> Result<Self, ()> { fn try_from(triple: Triple) -> Result<Self, Self::Error> {
Ok(Self { Ok(Self {
subject: triple.subject.try_into()?, subject: triple.subject.try_into()?,
predicate: triple.predicate, predicate: triple.predicate,
@ -221,7 +221,7 @@ impl TryFrom<GraphNamePattern> for GraphName {
type Error = (); type Error = ();
#[inline] #[inline]
fn try_from(pattern: GraphNamePattern) -> Result<Self, ()> { fn try_from(pattern: GraphNamePattern) -> Result<Self, Self::Error> {
match pattern { match pattern {
GraphNamePattern::NamedNode(t) => Ok(t.into()), GraphNamePattern::NamedNode(t) => Ok(t.into()),
GraphNamePattern::DefaultGraph => Ok(Self::DefaultGraph), GraphNamePattern::DefaultGraph => Ok(Self::DefaultGraph),
@ -295,7 +295,7 @@ impl TryFrom<QuadPattern> for Quad {
type Error = (); type Error = ();
#[inline] #[inline]
fn try_from(quad: QuadPattern) -> Result<Self, ()> { fn try_from(quad: QuadPattern) -> Result<Self, Self::Error> {
Ok(Self { Ok(Self {
subject: quad.subject.try_into()?, subject: quad.subject.try_into()?,
predicate: quad.predicate.try_into()?, predicate: quad.predicate.try_into()?,
@ -370,7 +370,7 @@ impl TryFrom<Quad> for GroundQuad {
type Error = (); type Error = ();
#[inline] #[inline]
fn try_from(quad: Quad) -> Result<Self, ()> { fn try_from(quad: Quad) -> Result<Self, Self::Error> {
Ok(Self { Ok(Self {
subject: quad.subject.try_into()?, subject: quad.subject.try_into()?,
predicate: quad.predicate, predicate: quad.predicate,
@ -425,7 +425,7 @@ impl TryFrom<NamedNodePattern> for NamedNode {
type Error = (); type Error = ();
#[inline] #[inline]
fn try_from(pattern: NamedNodePattern) -> Result<Self, ()> { fn try_from(pattern: NamedNodePattern) -> Result<Self, Self::Error> {
match pattern { match pattern {
NamedNodePattern::NamedNode(t) => Ok(t), NamedNodePattern::NamedNode(t) => Ok(t),
NamedNodePattern::Variable(_) => Err(()), NamedNodePattern::Variable(_) => Err(()),
@ -559,7 +559,7 @@ impl TryFrom<TermPattern> for Subject {
type Error = (); type Error = ();
#[inline] #[inline]
fn try_from(term: TermPattern) -> Result<Self, ()> { fn try_from(term: TermPattern) -> Result<Self, Self::Error> {
match term { match term {
TermPattern::NamedNode(t) => Ok(t.into()), TermPattern::NamedNode(t) => Ok(t.into()),
TermPattern::BlankNode(t) => Ok(t.into()), TermPattern::BlankNode(t) => Ok(t.into()),
@ -574,7 +574,7 @@ impl TryFrom<TermPattern> for Term {
type Error = (); type Error = ();
#[inline] #[inline]
fn try_from(pattern: TermPattern) -> Result<Self, ()> { fn try_from(pattern: TermPattern) -> Result<Self, Self::Error> {
match pattern { match pattern {
TermPattern::NamedNode(t) => Ok(t.into()), TermPattern::NamedNode(t) => Ok(t.into()),
TermPattern::BlankNode(t) => Ok(t.into()), TermPattern::BlankNode(t) => Ok(t.into()),
@ -686,7 +686,7 @@ impl TryFrom<TermPattern> for GroundTermPattern {
type Error = (); type Error = ();
#[inline] #[inline]
fn try_from(pattern: TermPattern) -> Result<Self, ()> { fn try_from(pattern: TermPattern) -> Result<Self, Self::Error> {
Ok(match pattern { Ok(match pattern {
TermPattern::NamedNode(named_node) => named_node.into(), TermPattern::NamedNode(named_node) => named_node.into(),
TermPattern::BlankNode(_) => return Err(()), TermPattern::BlankNode(_) => return Err(()),
@ -828,7 +828,7 @@ impl TryFrom<TriplePattern> for Triple {
type Error = (); type Error = ();
#[inline] #[inline]
fn try_from(triple: TriplePattern) -> Result<Self, ()> { fn try_from(triple: TriplePattern) -> Result<Self, Self::Error> {
Ok(Self { Ok(Self {
subject: triple.subject.try_into()?, subject: triple.subject.try_into()?,
predicate: triple.predicate.try_into()?, predicate: triple.predicate.try_into()?,
@ -1000,7 +1000,7 @@ impl TryFrom<QuadPattern> for GroundQuadPattern {
type Error = (); type Error = ();
#[inline] #[inline]
fn try_from(pattern: QuadPattern) -> Result<Self, ()> { fn try_from(pattern: QuadPattern) -> Result<Self, Self::Error> {
Ok(Self { Ok(Self {
subject: pattern.subject.try_into()?, subject: pattern.subject.try_into()?,
predicate: pattern.predicate, predicate: pattern.predicate,
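All of the `TryFrom` impls above follow the same shape: converting a pattern that may still contain variables or blank nodes into a ground form, with a unit error when the conversion is impossible. A toy version of the pattern (illustrative types, not the spargebra definitions):

```rust
// Illustrative stand-ins for the pattern/ground split used above.
#[derive(Debug)]
enum TermPattern {
    NamedNode(String),
    Variable(String),
}

#[derive(Debug, PartialEq)]
enum GroundTerm {
    NamedNode(String),
}

impl TryFrom<TermPattern> for GroundTerm {
    // A unit error is enough: the only failure mode is "still a variable".
    type Error = ();

    #[inline]
    fn try_from(pattern: TermPattern) -> Result<Self, Self::Error> {
        match pattern {
            TermPattern::NamedNode(iri) => Ok(Self::NamedNode(iri)),
            TermPattern::Variable(_) => Err(()),
        }
    }
}

fn main() {
    assert!(GroundTerm::try_from(TermPattern::NamedNode("http://example.com/s".into())).is_ok());
    assert!(GroundTerm::try_from(TermPattern::Variable("s".into())).is_err());
}
```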

@ -70,7 +70,7 @@ impl fmt::Display for Update {
impl FromStr for Update { impl FromStr for Update {
type Err = ParseError; type Err = ParseError;
fn from_str(update: &str) -> Result<Self, ParseError> { fn from_str(update: &str) -> Result<Self, Self::Err> {
Self::parse(update, None) Self::parse(update, None)
} }
} }
@ -78,7 +78,7 @@ impl FromStr for Update {
impl<'a> TryFrom<&'a str> for Update { impl<'a> TryFrom<&'a str> for Update {
type Error = ParseError; type Error = ParseError;
fn try_from(update: &str) -> Result<Self, ParseError> { fn try_from(update: &str) -> Result<Self, Self::Error> {
Self::from_str(update) Self::from_str(update)
} }
} }
@ -86,7 +86,7 @@ impl<'a> TryFrom<&'a str> for Update {
impl<'a> TryFrom<&'a String> for Update { impl<'a> TryFrom<&'a String> for Update {
type Error = ParseError; type Error = ParseError;
fn try_from(update: &String) -> Result<Self, ParseError> { fn try_from(update: &String) -> Result<Self, Self::Error> {
Self::from_str(update) Self::from_str(update)
} }
} }

@ -364,25 +364,25 @@ impl Expression {
fn returns_boolean(&self) -> bool { fn returns_boolean(&self) -> bool {
match self { match self {
Expression::Or(_) Self::Or(_)
| Expression::And(_) | Self::And(_)
| Expression::Equal(_, _) | Self::Equal(_, _)
| Expression::SameTerm(_, _) | Self::SameTerm(_, _)
| Expression::Greater(_, _) | Self::Greater(_, _)
| Expression::GreaterOrEqual(_, _) | Self::GreaterOrEqual(_, _)
| Expression::Less(_, _) | Self::Less(_, _)
| Expression::LessOrEqual(_, _) | Self::LessOrEqual(_, _)
| Expression::Not(_) | Self::Not(_)
| Expression::Exists(_) | Self::Exists(_)
| Expression::Bound(_) | Self::Bound(_)
| Expression::FunctionCall( | Self::FunctionCall(
Function::IsBlank | Function::IsIri | Function::IsLiteral | Function::IsNumeric, Function::IsBlank | Function::IsIri | Function::IsLiteral | Function::IsNumeric,
_, _,
) => true, ) => true,
#[cfg(feature = "rdf-star")] #[cfg(feature = "rdf-star")]
Expression::FunctionCall(Function::IsTriple, _) => true, Self::FunctionCall(Function::IsTriple, _) => true,
Expression::Literal(literal) => literal.datatype() == xsd::BOOLEAN, Self::Literal(literal) => literal.datatype() == xsd::BOOLEAN,
Expression::If(_, a, b) => a.returns_boolean() && b.returns_boolean(), Self::If(_, a, b) => a.returns_boolean() && b.returns_boolean(),
_ => false, _ => false,
} }
} }
@ -847,7 +847,7 @@ impl GraphPattern {
} }
} }
if all.is_empty() { if all.is_empty() {
GraphPattern::empty() Self::empty()
} else { } else {
Self::Union { Self::Union {
inner: order_vec(all), inner: order_vec(all),
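The sparopt hunk above is a pure rename from `Expression::…` to `Self::…` inside the `impl` block. The shorthand works for any enum; a compact illustration (toy type, not the sparopt `Expression`):

```rust
enum Expression {
    Bool(bool),
    Integer(i64),
    Not(Box<Expression>),
}

impl Expression {
    fn returns_boolean(&self) -> bool {
        // Inside an impl block, Self:: names the implementing type,
        // so the match arms stay short and survive a rename of the enum.
        match self {
            Self::Bool(_) | Self::Not(_) => true,
            Self::Integer(_) => false,
        }
    }
}

fn main() {
    assert!(Expression::Not(Box::new(Expression::Bool(false))).returns_boolean());
    assert!(!Expression::Integer(42).returns_boolean());
}
```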

@ -258,7 +258,7 @@ impl TryFrom<DatasetFormat> for GraphFormat {
/// Attempts to find a graph format that is a subset of this [`DatasetFormat`]. /// Attempts to find a graph format that is a subset of this [`DatasetFormat`].
#[inline] #[inline]
fn try_from(value: DatasetFormat) -> Result<Self, ()> { fn try_from(value: DatasetFormat) -> Result<Self, Self::Error> {
match value { match value {
DatasetFormat::NQuads => Ok(Self::NTriples), DatasetFormat::NQuads => Ok(Self::NTriples),
DatasetFormat::TriG => Ok(Self::Turtle), DatasetFormat::TriG => Ok(Self::Turtle),
@ -271,7 +271,7 @@ impl TryFrom<GraphFormat> for DatasetFormat {
/// Attempts to find a dataset format that is a superset of this [`GraphFormat`]. /// Attempts to find a dataset format that is a superset of this [`GraphFormat`].
#[inline] #[inline]
fn try_from(value: GraphFormat) -> Result<Self, ()> { fn try_from(value: GraphFormat) -> Result<Self, Self::Error> {
match value { match value {
GraphFormat::NTriples => Ok(Self::NQuads), GraphFormat::NTriples => Ok(Self::NQuads),
GraphFormat::Turtle => Ok(Self::TriG), GraphFormat::Turtle => Ok(Self::TriG),

@ -95,7 +95,7 @@ pub struct TripleReader<R: Read> {
impl<R: Read> Iterator for TripleReader<R> { impl<R: Read> Iterator for TripleReader<R> {
type Item = Result<Triple, ParseError>; type Item = Result<Triple, ParseError>;
fn next(&mut self) -> Option<Result<Triple, ParseError>> { fn next(&mut self) -> Option<Self::Item> {
Some(self.parser.next()?.map(Into::into).map_err(Into::into)) Some(self.parser.next()?.map(Into::into).map_err(Into::into))
} }
} }
@ -184,7 +184,7 @@ pub struct QuadReader<R: Read> {
impl<R: Read> Iterator for QuadReader<R> { impl<R: Read> Iterator for QuadReader<R> {
type Item = Result<Quad, ParseError>; type Item = Result<Quad, ParseError>;
fn next(&mut self) -> Option<Result<Quad, ParseError>> { fn next(&mut self) -> Option<Self::Item> {
Some(self.parser.next()?.map_err(Into::into)) Some(self.parser.next()?.map_err(Into::into))
} }
} }

@ -65,7 +65,7 @@ impl fmt::Display for Query {
impl FromStr for Query { impl FromStr for Query {
type Err = spargebra::ParseError; type Err = spargebra::ParseError;
fn from_str(query: &str) -> Result<Self, spargebra::ParseError> { fn from_str(query: &str) -> Result<Self, Self::Err> {
Self::parse(query, None) Self::parse(query, None)
} }
} }
@ -73,7 +73,7 @@ impl FromStr for Query {
impl TryFrom<&str> for Query { impl TryFrom<&str> for Query {
type Error = spargebra::ParseError; type Error = spargebra::ParseError;
fn try_from(query: &str) -> Result<Self, spargebra::ParseError> { fn try_from(query: &str) -> Result<Self, Self::Error> {
Self::from_str(query) Self::from_str(query)
} }
} }
@ -81,7 +81,7 @@ impl TryFrom<&str> for Query {
impl TryFrom<&String> for Query { impl TryFrom<&String> for Query {
type Error = spargebra::ParseError; type Error = spargebra::ParseError;
fn try_from(query: &String) -> Result<Self, spargebra::ParseError> { fn try_from(query: &String) -> Result<Self, Self::Error> {
Self::from_str(query) Self::from_str(query)
} }
} }
@ -158,7 +158,7 @@ impl fmt::Display for Update {
impl FromStr for Update { impl FromStr for Update {
type Err = spargebra::ParseError; type Err = spargebra::ParseError;
fn from_str(update: &str) -> Result<Self, spargebra::ParseError> { fn from_str(update: &str) -> Result<Self, Self::Err> {
Self::parse(update, None) Self::parse(update, None)
} }
} }
@ -166,7 +166,7 @@ impl FromStr for Update {
impl TryFrom<&str> for Update { impl TryFrom<&str> for Update {
type Error = spargebra::ParseError; type Error = spargebra::ParseError;
fn try_from(update: &str) -> Result<Self, spargebra::ParseError> { fn try_from(update: &str) -> Result<Self, Self::Error> {
Self::from_str(update) Self::from_str(update)
} }
} }
@ -174,7 +174,7 @@ impl TryFrom<&str> for Update {
impl TryFrom<&String> for Update { impl TryFrom<&String> for Update {
type Error = spargebra::ParseError; type Error = spargebra::ParseError;
fn try_from(update: &String) -> Result<Self, spargebra::ParseError> { fn try_from(update: &String) -> Result<Self, Self::Error> {
Self::from_str(update) Self::from_str(update)
} }
} }

@ -3892,9 +3892,9 @@ impl TupleSelector {
fn get_pattern_value(&self, tuple: &EncodedTuple) -> Option<EncodedTerm> { fn get_pattern_value(&self, tuple: &EncodedTuple) -> Option<EncodedTerm> {
match self { match self {
TupleSelector::Constant(c) => Some(c.clone()), Self::Constant(c) => Some(c.clone()),
TupleSelector::Variable(v) => tuple.get(*v).cloned(), Self::Variable(v) => tuple.get(*v).cloned(),
TupleSelector::TriplePattern(triple) => Some( Self::TriplePattern(triple) => Some(
EncodedTriple { EncodedTriple {
subject: triple.subject.get_pattern_value(tuple)?, subject: triple.subject.get_pattern_value(tuple)?,
predicate: triple.predicate.get_pattern_value(tuple)?, predicate: triple.predicate.get_pattern_value(tuple)?,
@ -4732,7 +4732,7 @@ struct CartesianProductJoinIterator {
impl Iterator for CartesianProductJoinIterator { impl Iterator for CartesianProductJoinIterator {
type Item = Result<EncodedTuple, EvaluationError>; type Item = Result<EncodedTuple, EvaluationError>;
fn next(&mut self) -> Option<Result<EncodedTuple, EvaluationError>> { fn next(&mut self) -> Option<Self::Item> {
loop { loop {
if let Some(result) = self.buffered_results.pop() { if let Some(result) = self.buffered_results.pop() {
return Some(result); return Some(result);
@ -4767,7 +4767,7 @@ struct HashJoinIterator {
impl Iterator for HashJoinIterator { impl Iterator for HashJoinIterator {
type Item = Result<EncodedTuple, EvaluationError>; type Item = Result<EncodedTuple, EvaluationError>;
fn next(&mut self) -> Option<Result<EncodedTuple, EvaluationError>> { fn next(&mut self) -> Option<Self::Item> {
loop { loop {
if let Some(result) = self.buffered_results.pop() { if let Some(result) = self.buffered_results.pop() {
return Some(result); return Some(result);
@ -4806,7 +4806,7 @@ struct HashLeftJoinIterator {
impl Iterator for HashLeftJoinIterator { impl Iterator for HashLeftJoinIterator {
type Item = Result<EncodedTuple, EvaluationError>; type Item = Result<EncodedTuple, EvaluationError>;
fn next(&mut self) -> Option<Result<EncodedTuple, EvaluationError>> { fn next(&mut self) -> Option<Self::Item> {
loop { loop {
if let Some(result) = self.buffered_results.pop() { if let Some(result) = self.buffered_results.pop() {
return Some(result); return Some(result);
@ -4854,7 +4854,7 @@ struct ForLoopLeftJoinIterator {
impl Iterator for ForLoopLeftJoinIterator { impl Iterator for ForLoopLeftJoinIterator {
type Item = Result<EncodedTuple, EvaluationError>; type Item = Result<EncodedTuple, EvaluationError>;
fn next(&mut self) -> Option<Result<EncodedTuple, EvaluationError>> { fn next(&mut self) -> Option<Self::Item> {
if let Some(tuple) = self.current_right.next() { if let Some(tuple) = self.current_right.next() {
return Some(tuple); return Some(tuple);
} }
@ -4881,7 +4881,7 @@ struct UnionIterator {
impl Iterator for UnionIterator { impl Iterator for UnionIterator {
type Item = Result<EncodedTuple, EvaluationError>; type Item = Result<EncodedTuple, EvaluationError>;
fn next(&mut self) -> Option<Result<EncodedTuple, EvaluationError>> { fn next(&mut self) -> Option<Self::Item> {
loop { loop {
if let Some(tuple) = self.current_iterator.next() { if let Some(tuple) = self.current_iterator.next() {
return Some(tuple); return Some(tuple);
@ -4903,7 +4903,7 @@ struct ConsecutiveDeduplication {
impl Iterator for ConsecutiveDeduplication { impl Iterator for ConsecutiveDeduplication {
type Item = Result<EncodedTuple, EvaluationError>; type Item = Result<EncodedTuple, EvaluationError>;
fn next(&mut self) -> Option<Result<EncodedTuple, EvaluationError>> { fn next(&mut self) -> Option<Self::Item> {
// Basic idea. We buffer the previous result and we only emit it when we know the next one or it's the end // Basic idea. We buffer the previous result and we only emit it when we know the next one or it's the end
loop { loop {
if let Some(next) = self.inner.next() { if let Some(next) = self.inner.next() {
@ -4944,7 +4944,7 @@ struct ConstructIterator {
impl Iterator for ConstructIterator { impl Iterator for ConstructIterator {
type Item = Result<Triple, EvaluationError>; type Item = Result<Triple, EvaluationError>;
fn next(&mut self) -> Option<Result<Triple, EvaluationError>> { fn next(&mut self) -> Option<Self::Item> {
loop { loop {
if let Some(result) = self.buffered_results.pop() { if let Some(result) = self.buffered_results.pop() {
return Some(result); return Some(result);
@ -5046,7 +5046,7 @@ struct DescribeIterator {
impl Iterator for DescribeIterator { impl Iterator for DescribeIterator {
type Item = Result<Triple, EvaluationError>; type Item = Result<Triple, EvaluationError>;
fn next(&mut self) -> Option<Result<Triple, EvaluationError>> { fn next(&mut self) -> Option<Self::Item> {
loop { loop {
if let Some(quad) = self.quads.next() { if let Some(quad) = self.quads.next() {
return Some(match quad { return Some(match quad {
@ -5097,7 +5097,7 @@ impl<T1, T2, I1: Iterator<Item = T1>, I2: Iterator<Item = T2>> Iterator
{ {
type Item = (Option<T1>, Option<T2>); type Item = (Option<T1>, Option<T2>);
fn next(&mut self) -> Option<(Option<T1>, Option<T2>)> { fn next(&mut self) -> Option<Self::Item> {
match (self.a.next(), self.b.next()) { match (self.a.next(), self.b.next()) {
(None, None) => None, (None, None) => None,
r => Some(r), r => Some(r),
@ -5220,7 +5220,7 @@ impl<
{ {
type Item = Result<O, EvaluationError>; type Item = Result<O, EvaluationError>;
fn next(&mut self) -> Option<Result<O, EvaluationError>> { fn next(&mut self) -> Option<Self::Item> {
loop { loop {
if let Some(current) = &mut self.current { if let Some(current) = &mut self.current {
if let Some(next) = current.next() { if let Some(next) = current.next() {
@ -5629,7 +5629,7 @@ struct StatsIterator {
impl Iterator for StatsIterator { impl Iterator for StatsIterator {
type Item = Result<EncodedTuple, EvaluationError>; type Item = Result<EncodedTuple, EvaluationError>;
fn next(&mut self) -> Option<Result<EncodedTuple, EvaluationError>> { fn next(&mut self) -> Option<Self::Item> {
let start = Timer::now(); let start = Timer::now();
let result = self.inner.next(); let result = self.inner.next();
self.stats.exec_duration.set( self.stats.exec_duration.set(
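Every evaluator iterator above keeps its body unchanged and only states the return type as `Self::Item`. As a reminder of the underlying pattern, here is a small consecutive-deduplication iterator in the spirit of `ConsecutiveDeduplication` (a sketch, not the oxigraph implementation):

```rust
struct Dedup<I: Iterator> {
    inner: I,
    previous: Option<I::Item>,
}

impl<I> Iterator for Dedup<I>
where
    I: Iterator,
    I::Item: PartialEq + Clone,
{
    type Item = I::Item;

    fn next(&mut self) -> Option<Self::Item> {
        // Pull items until one differs from the previously emitted value.
        loop {
            let next = self.inner.next()?;
            if self.previous.as_ref() != Some(&next) {
                self.previous = Some(next.clone());
                return Some(next);
            }
        }
    }
}

fn main() {
    let deduplicated: Vec<_> = Dedup {
        inner: [1, 1, 2, 2, 2, 3, 1].into_iter(),
        previous: None,
    }
    .collect();
    assert_eq!(deduplicated, vec![1, 2, 3, 1]);
}
```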

@ -43,7 +43,7 @@ impl QueryResults {
/// ///
/// let mut results = Vec::new(); /// let mut results = Vec::new();
/// store.query("SELECT ?s WHERE { ?s ?p ?o }")?.write(&mut results, QueryResultsFormat::Json)?; /// store.query("SELECT ?s WHERE { ?s ?p ?o }")?.write(&mut results, QueryResultsFormat::Json)?;
/// assert_eq!(results, "{\"head\":{\"vars\":[\"s\"]},\"results\":{\"bindings\":[{\"s\":{\"type\":\"uri\",\"value\":\"http://example.com\"}}]}}".as_bytes()); /// assert_eq!(results, r#"{"head":{"vars":["s"]},"results":{"bindings":[{"s":{"type":"uri","value":"http://example.com"}}]}}"#.as_bytes());
/// # Result::<_,Box<dyn std::error::Error>>::Ok(()) /// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ``` /// ```
pub fn write( pub fn write(
@ -221,7 +221,7 @@ impl Iterator for QuerySolutionIter {
type Item = Result<QuerySolution, EvaluationError>; type Item = Result<QuerySolution, EvaluationError>;
#[inline] #[inline]
fn next(&mut self) -> Option<Result<QuerySolution, EvaluationError>> { fn next(&mut self) -> Option<Self::Item> {
self.iter.next() self.iter.next()
} }
@ -253,7 +253,7 @@ impl Iterator for QueryTripleIter {
type Item = Result<Triple, EvaluationError>; type Item = Result<Triple, EvaluationError>;
#[inline] #[inline]
fn next(&mut self) -> Option<Result<Triple, EvaluationError>> { fn next(&mut self) -> Option<Self::Item> {
self.iter.next() self.iter.next()
} }

@ -30,13 +30,13 @@
//! //!
//! // Let's test with a boolean //! // Let's test with a boolean
//! assert_eq!( //! assert_eq!(
//! convert_json_to_tsv(b"{\"boolean\":true}".as_slice()).unwrap(), //! convert_json_to_tsv(br#"{"boolean":true}"#.as_slice()).unwrap(),
//! b"true" //! b"true"
//! ); //! );
//! //!
//! // And with a set of solutions //! // And with a set of solutions
//! assert_eq!( //! assert_eq!(
//! convert_json_to_tsv(b"{\"head\":{\"vars\":[\"foo\",\"bar\"]},\"results\":{\"bindings\":[{\"foo\":{\"type\":\"literal\",\"value\":\"test\"}}]}}".as_slice()).unwrap(), //! convert_json_to_tsv(br#"{"head":{"vars":["foo","bar"]},"results":{"bindings":[{"foo":{"type":"literal","value":"test"}}]}}"#.as_slice()).unwrap(),
//! b"?foo\t?bar\n\"test\"\t\n" //! b"?foo\t?bar\n\"test\"\t\n"
//! ); //! );
//! ``` //! ```

@ -24,7 +24,7 @@ use std::time::Duration;
/// impl ServiceHandler for TestServiceHandler { /// impl ServiceHandler for TestServiceHandler {
/// type Error = EvaluationError; /// type Error = EvaluationError;
/// ///
/// fn handle(&self,service_name: NamedNode, query: Query) -> Result<QueryResults,EvaluationError> { /// fn handle(&self, service_name: NamedNode, query: Query) -> Result<QueryResults, Self::Error> {
/// if service_name == "http://example.com/service" { /// if service_name == "http://example.com/service" {
/// self.store.query(query) /// self.store.query(query)
/// } else { /// } else {
@ -61,7 +61,7 @@ pub struct EmptyServiceHandler;
impl ServiceHandler for EmptyServiceHandler { impl ServiceHandler for EmptyServiceHandler {
type Error = EvaluationError; type Error = EvaluationError;
fn handle(&self, name: NamedNode, _: Query) -> Result<QueryResults, EvaluationError> { fn handle(&self, name: NamedNode, _: Query) -> Result<QueryResults, Self::Error> {
Err(EvaluationError::UnsupportedService(name)) Err(EvaluationError::UnsupportedService(name))
} }
} }
@ -79,11 +79,7 @@ impl<S: ServiceHandler> ErrorConversionServiceHandler<S> {
impl<S: ServiceHandler> ServiceHandler for ErrorConversionServiceHandler<S> { impl<S: ServiceHandler> ServiceHandler for ErrorConversionServiceHandler<S> {
type Error = EvaluationError; type Error = EvaluationError;
fn handle( fn handle(&self, service_name: NamedNode, query: Query) -> Result<QueryResults, Self::Error> {
&self,
service_name: NamedNode,
query: Query,
) -> Result<QueryResults, EvaluationError> {
self.handler self.handler
.handle(service_name, query) .handle(service_name, query)
.map_err(|e| EvaluationError::Service(Box::new(e))) .map_err(|e| EvaluationError::Service(Box::new(e)))
@ -105,11 +101,7 @@ impl SimpleServiceHandler {
impl ServiceHandler for SimpleServiceHandler { impl ServiceHandler for SimpleServiceHandler {
type Error = EvaluationError; type Error = EvaluationError;
fn handle( fn handle(&self, service_name: NamedNode, query: Query) -> Result<QueryResults, Self::Error> {
&self,
service_name: NamedNode,
query: Query,
) -> Result<QueryResults, EvaluationError> {
let (content_type, body) = self let (content_type, body) = self
.client .client
.post( .post(
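The service.rs hunks collapse the multi-line `handle` signatures and switch to `Self::Error`. A hedged sketch of a custom handler that rejects every `SERVICE` call, assuming the trait only requires an `Error: std::error::Error + Send + Sync + 'static` associated type as the doc-comment above suggests:

```rust
use std::fmt;

use oxigraph::model::NamedNode;
use oxigraph::sparql::{Query, QueryResults, ServiceHandler};

// Custom error type; any Error + Send + Sync + 'static type should do.
#[derive(Debug)]
struct ServiceDisabled(NamedNode);

impl fmt::Display for ServiceDisabled {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "SERVICE calls are disabled, got {}", self.0)
    }
}

impl std::error::Error for ServiceDisabled {}

struct DenyAllServices;

impl ServiceHandler for DenyAllServices {
    type Error = ServiceDisabled;

    fn handle(&self, name: NamedNode, _query: Query) -> Result<QueryResults, Self::Error> {
        Err(ServiceDisabled(name))
    }
}

fn main() {
    // Nothing to run here: the handler is meant to be attached to query options.
    let _handler = DenyAllServices;
}
```

Such a handler would typically be attached through `QueryOptions` (a `with_service_handler`-style method) before evaluating the query.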

@ -1157,7 +1157,7 @@ impl Drop for PinnableSlice {
impl Deref for PinnableSlice { impl Deref for PinnableSlice {
type Target = [u8]; type Target = [u8];
fn deref(&self) -> &[u8] { fn deref(&self) -> &Self::Target {
unsafe { unsafe {
let mut len = 0; let mut len = 0;
let val = rocksdb_pinnableslice_value(self.0, &mut len); let val = rocksdb_pinnableslice_value(self.0, &mut len);
@ -1200,7 +1200,7 @@ impl Drop for Buffer {
impl Deref for Buffer { impl Deref for Buffer {
type Target = [u8]; type Target = [u8];
fn deref(&self) -> &[u8] { fn deref(&self) -> &Self::Target {
unsafe { slice::from_raw_parts(self.base, self.len) } unsafe { slice::from_raw_parts(self.base, self.len) }
} }
} }
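The two RocksDB wrappers above keep handing out `&[u8]` through `Deref`; only the spelled-out target type becomes `Self::Target`. The same newtype pattern in miniature (toy type, not the RocksDB bindings):

```rust
use std::ops::Deref;

// A newtype that exposes read-only access to its buffer, like Buffer above.
struct Bytes(Vec<u8>);

impl Deref for Bytes {
    type Target = [u8];

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

fn main() {
    let bytes = Bytes(b"rocksdb".to_vec());
    // Slice methods become available through deref coercion.
    assert_eq!(bytes.len(), 7);
    assert!(bytes.starts_with(b"rocks"));
}
```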

@ -179,7 +179,7 @@ impl From<LoaderError> for io::Error {
LoaderError::Storage(error) => error.into(), LoaderError::Storage(error) => error.into(),
LoaderError::Parsing(error) => error.into(), LoaderError::Parsing(error) => error.into(),
LoaderError::InvalidBaseIri { .. } => { LoaderError::InvalidBaseIri { .. } => {
io::Error::new(io::ErrorKind::InvalidInput, error.to_string()) Self::new(io::ErrorKind::InvalidInput, error.to_string())
} }
} }
} }
@ -242,7 +242,7 @@ impl From<SerializerError> for io::Error {
SerializerError::Storage(error) => error.into(), SerializerError::Storage(error) => error.into(),
SerializerError::Io(error) => error, SerializerError::Io(error) => error,
SerializerError::DatasetFormatExpected(_) => { SerializerError::DatasetFormatExpected(_) => {
io::Error::new(io::ErrorKind::InvalidInput, error.to_string()) Self::new(io::ErrorKind::InvalidInput, error.to_string())
} }
} }
} }
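Both conversions above forward to `io::Error::new`, now written as `Self::new` because the `impl` target is `io::Error`. A self-contained sketch of the same shape (toy error type, not the oxigraph `LoaderError`):

```rust
use std::{fmt, io};

#[derive(Debug)]
enum LoadError {
    Io(io::Error),
    InvalidBaseIri(String),
}

impl fmt::Display for LoadError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Io(e) => write!(f, "{e}"),
            Self::InvalidBaseIri(iri) => write!(f, "invalid base IRI: {iri}"),
        }
    }
}

impl From<LoadError> for io::Error {
    fn from(error: LoadError) -> Self {
        match error {
            // Pass genuine I/O errors through untouched.
            LoadError::Io(e) => e,
            // Everything else becomes InvalidInput; Self::new is io::Error::new here.
            LoadError::InvalidBaseIri(_) => {
                Self::new(io::ErrorKind::InvalidInput, error.to_string())
            }
        }
    }
}

fn main() {
    let converted: io::Error = LoadError::InvalidBaseIri("not an IRI".into()).into();
    assert_eq!(converted.kind(), io::ErrorKind::InvalidInput);
}
```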

@ -814,7 +814,7 @@ impl ChainedDecodingQuadIterator {
impl Iterator for ChainedDecodingQuadIterator { impl Iterator for ChainedDecodingQuadIterator {
type Item = Result<EncodedQuad, StorageError>; type Item = Result<EncodedQuad, StorageError>;
fn next(&mut self) -> Option<Result<EncodedQuad, StorageError>> { fn next(&mut self) -> Option<Self::Item> {
if let Some(result) = self.first.next() { if let Some(result) = self.first.next() {
Some(result) Some(result)
} else if let Some(second) = self.second.as_mut() { } else if let Some(second) = self.second.as_mut() {
@ -833,7 +833,7 @@ pub struct DecodingQuadIterator {
impl Iterator for DecodingQuadIterator { impl Iterator for DecodingQuadIterator {
type Item = Result<EncodedQuad, StorageError>; type Item = Result<EncodedQuad, StorageError>;
fn next(&mut self) -> Option<Result<EncodedQuad, StorageError>> { fn next(&mut self) -> Option<Self::Item> {
if let Err(e) = self.iter.status() { if let Err(e) = self.iter.status() {
return Some(Err(e)); return Some(Err(e));
} }
@ -850,7 +850,7 @@ pub struct DecodingGraphIterator {
impl Iterator for DecodingGraphIterator { impl Iterator for DecodingGraphIterator {
type Item = Result<EncodedTerm, StorageError>; type Item = Result<EncodedTerm, StorageError>;
fn next(&mut self) -> Option<Result<EncodedTerm, StorageError>> { fn next(&mut self) -> Option<Self::Item> {
if let Err(e) = self.iter.status() { if let Err(e) = self.iter.status() {
return Some(Err(e)); return Some(Err(e));
} }

@ -65,7 +65,7 @@ impl Deref for SmallString {
type Target = str; type Target = str;
#[inline] #[inline]
fn deref(&self) -> &str { fn deref(&self) -> &Self::Target {
self.as_str() self.as_str()
} }
} }
@ -146,17 +146,17 @@ impl FromStr for SmallString {
type Err = BadSmallStringError; type Err = BadSmallStringError;
#[inline] #[inline]
fn from_str(value: &str) -> Result<Self, BadSmallStringError> { fn from_str(value: &str) -> Result<Self, Self::Err> {
if value.len() <= 15 { if value.len() <= 15 {
let mut inner = [0; 16]; let mut inner = [0; 16];
inner[..value.len()].copy_from_slice(value.as_bytes()); inner[..value.len()].copy_from_slice(value.as_bytes());
inner[15] = value inner[15] = value
.len() .len()
.try_into() .try_into()
.map_err(|_| BadSmallStringError::TooLong(value.len()))?; .map_err(|_| Self::Err::TooLong(value.len()))?;
Ok(Self { inner }) Ok(Self { inner })
} else { } else {
Err(BadSmallStringError::TooLong(value.len())) Err(Self::Err::TooLong(value.len()))
} }
} }
} }
@ -165,7 +165,7 @@ impl<'a> TryFrom<&'a str> for SmallString {
type Error = BadSmallStringError; type Error = BadSmallStringError;
#[inline] #[inline]
fn try_from(value: &'a str) -> Result<Self, BadSmallStringError> { fn try_from(value: &'a str) -> Result<Self, Self::Error> {
Self::from_str(value) Self::from_str(value)
} }
} }
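`SmallString` packs up to 15 bytes of UTF-8 into a 16-byte array and keeps the length in the final byte; the hunk above only rewrites the error references as `Self::Err`. A rough sketch of the inline-length trick (illustrative type, not the oxigraph `SmallString`):

```rust
#[derive(Clone, Copy)]
struct InlineString {
    // Indices 0..15 hold the string data, byte 15 holds the length.
    inner: [u8; 16],
}

impl InlineString {
    fn new(value: &str) -> Option<Self> {
        if value.len() > 15 {
            return None; // too long to store inline
        }
        let mut inner = [0; 16];
        inner[..value.len()].copy_from_slice(value.as_bytes());
        inner[15] = value.len() as u8;
        Some(Self { inner })
    }

    fn as_str(&self) -> &str {
        let len = usize::from(self.inner[15]);
        // The stored bytes always come from a &str, so they are valid UTF-8.
        std::str::from_utf8(&self.inner[..len]).unwrap()
    }
}

fn main() {
    let s = InlineString::new("rdf:type").unwrap();
    assert_eq!(s.as_str(), "rdf:type");
    assert!(InlineString::new("a string longer than fifteen bytes").is_none());
}
```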

@ -1471,7 +1471,7 @@ pub struct QuadIter {
impl Iterator for QuadIter { impl Iterator for QuadIter {
type Item = Result<Quad, StorageError>; type Item = Result<Quad, StorageError>;
fn next(&mut self) -> Option<Result<Quad, StorageError>> { fn next(&mut self) -> Option<Self::Item> {
Some(match self.iter.next()? { Some(match self.iter.next()? {
Ok(quad) => self.reader.decode_quad(&quad), Ok(quad) => self.reader.decode_quad(&quad),
Err(error) => Err(error), Err(error) => Err(error),
@ -1488,7 +1488,7 @@ pub struct GraphNameIter {
impl Iterator for GraphNameIter { impl Iterator for GraphNameIter {
type Item = Result<NamedOrBlankNode, StorageError>; type Item = Result<NamedOrBlankNode, StorageError>;
fn next(&mut self) -> Option<Result<NamedOrBlankNode, StorageError>> { fn next(&mut self) -> Option<Self::Item> {
Some( Some(
self.iter self.iter
.next()? .next()?

@ -1,11 +1,20 @@
import json import json
import subprocess import subprocess
from pathlib import Path from pathlib import Path
from time import sleep
from urllib.error import HTTPError
from urllib.request import urlopen from urllib.request import urlopen
TARGET_DEBIAN_VERSIONS = ["sid"] TARGET_DEBIAN_VERSIONS = ["sid"]
IGNORE_PACKAGES = {"oxigraph-js", "oxigraph-testsuite", "pyoxigraph", "sparql-smith"} IGNORE_PACKAGES = {"oxigraph-js", "oxigraph-testsuite", "pyoxigraph", "sparql-smith"}
ALLOWED_MISSING_PACKAGES = {"codspeed-criterion-compat", "escargot", "json-event-parser", "oxhttp", "oxiri", "quick-xml"} ALLOWED_MISSING_PACKAGES = {
"codspeed-criterion-compat",
"escargot",
"json-event-parser",
"oxhttp",
"oxiri",
"quick-xml",
}
base_path = Path(__file__).parent.parent base_path = Path(__file__).parent.parent
@ -25,6 +34,23 @@ def parse_version(version):
return tuple(int(e) for e in version.split("-")[0].split(".")) return tuple(int(e) for e in version.split("-")[0].split("."))
def fetch_debian_package_desc(debian_name):
url = f"https://sources.debian.org/api/src/{debian_name}/"
for i in range(0, 10):
try:
with urlopen(url) as response:
return json.loads(response.read().decode())
except HTTPError as e:
if e.code // 100 == 5:
wait = 2**i
print(f"Error {e} from {url}, retrying after {wait}s")
sleep(wait)
else:
print(f"Failed to fetch debian name {debian_name} from {url}: {e}")
raise e
raise Exception(f"Failed to fetch {url}")
for package_id in cargo_metadata["workspace_default_members"]: for package_id in cargo_metadata["workspace_default_members"]:
package = package_by_id[package_id] package = package_by_id[package_id]
if package["name"] in IGNORE_PACKAGES: if package["name"] in IGNORE_PACKAGES:
@ -37,11 +63,9 @@ for package_id in cargo_metadata["workspace_default_members"]:
continue continue
candidate_debian_name = f"rust-{dependency['name'].replace('_', '-')}" candidate_debian_name = f"rust-{dependency['name'].replace('_', '-')}"
if dependency["name"] not in debian_cache: if dependency["name"] not in debian_cache:
with urlopen( debian_cache[candidate_debian_name] = fetch_debian_package_desc(
f"https://sources.debian.org/api/src/{candidate_debian_name}/" candidate_debian_name
) as response: )
debian_package = json.loads(response.read().decode())
debian_cache[candidate_debian_name] = debian_package
debian_package = debian_cache[candidate_debian_name] debian_package = debian_cache[candidate_debian_name]
if "error" in debian_package: if "error" in debian_package:
errors.add(f"No Debian package found for {dependency['name']}") errors.add(f"No Debian package found for {dependency['name']}")
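The new `fetch_debian_package_desc` helper above retries transient 5xx responses with exponential backoff before giving up. A language-agnostic sketch of that retry loop, written in Rust like the other examples here (`fetch` and `is_retryable` are illustrative stand-ins for the HTTP call and the 5xx check):

```rust
use std::thread::sleep;
use std::time::Duration;

// Retry a fallible operation up to 10 times, backing off exponentially
// between retryable failures (the script above sleeps whole seconds).
fn retry_with_backoff<T, E>(
    mut fetch: impl FnMut() -> Result<T, E>,
    is_retryable: impl Fn(&E) -> bool,
) -> Result<T, E> {
    let mut attempt = 0u32;
    loop {
        match fetch() {
            Ok(value) => return Ok(value),
            Err(e) if is_retryable(&e) && attempt < 9 => {
                sleep(Duration::from_millis(1 << attempt));
                attempt += 1;
            }
            Err(e) => return Err(e),
        }
    }
}

fn main() {
    let mut remaining_failures = 3;
    let result = retry_with_backoff(
        || {
            if remaining_failures > 0 {
                remaining_failures -= 1;
                Err("HTTP 503")
            } else {
                Ok("package description")
            }
        },
        // Treat every error as retryable in this demo.
        |_error: &&str| true,
    );
    assert_eq!(result, Ok("package description"));
}
```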

@ -1,6 +1,6 @@
[package] [package]
name = "oxrocksdb-sys" name = "oxrocksdb-sys"
version = "0.4.0-alpha.2" version = "0.4.0-alpha.3-dev"
authors = ["Tpt <thomas@pellissier-tanon.fr>"] authors = ["Tpt <thomas@pellissier-tanon.fr>"]
license = "GPL-2.0 OR Apache-2.0" license = "GPL-2.0 OR Apache-2.0"
repository = "https://github.com/oxigraph/oxigraph/tree/main/oxrocksdb-sys" repository = "https://github.com/oxigraph/oxigraph/tree/main/oxrocksdb-sys"

@ -1,6 +1,6 @@
[package] [package]
name = "pyoxigraph" name = "pyoxigraph"
version = "0.4.0-alpha.2" version = "0.4.0-alpha.3-dev"
authors = ["Tpt <thomas@pellissier-tanon.fr>"] authors = ["Tpt <thomas@pellissier-tanon.fr>"]
license = "MIT OR Apache-2.0" license = "MIT OR Apache-2.0"
readme = "README.md" readme = "README.md"

@ -0,0 +1 @@
<http://example.com/s> <http://example.com/p> <http://example.com/o.> .

@ -0,0 +1 @@
<http://example.com/s> <http://example.com/p> <http://example.com/o.> .

@ -0,0 +1,2 @@
@prefix ex: <http://example.com/> .
ex:s ex:p ex:o\. .

@ -0,0 +1,2 @@
@prefix ex: <http://example.com/> .
ex:s ex:p ex:o\. .

@ -18,6 +18,8 @@
<#keyword_vs_prefix_ttl> <#keyword_vs_prefix_ttl>
<#keyword_vs_prefix_trig> <#keyword_vs_prefix_trig>
<#at_keywords_as_lang_tag> <#at_keywords_as_lang_tag>
<#escaped_trailing_dot_ttl>
<#escaped_trailing_dot_trig>
) . ) .
<#no_end_line_jump> <#no_end_line_jump>
@ -88,3 +90,15 @@
mf:name "usage of at keywords as language tags" ; mf:name "usage of at keywords as language tags" ;
mf:action <at_keywords_as_lang_tag.ttl> ; mf:action <at_keywords_as_lang_tag.ttl> ;
mf:result <at_keywords_as_lang_tag.nt> . mf:result <at_keywords_as_lang_tag.nt> .
<#escaped_trailing_dot_ttl>
rdf:type rdft:TestTurtleEval ;
mf:name "escaped dot at the end of a local name" ;
mf:action <escaped_trailing_dot.ttl> ;
mf:result <escaped_trailing_dot.nt> .
<#escaped_trailing_dot_trig>
rdf:type rdft:TestTrigEval ;
mf:name "escaped dot at the end of a local name" ;
mf:action <escaped_trailing_dot.trig> ;
mf:result <escaped_trailing_dot.nq> .
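The new fixtures check that the escaped local name `ex:o\.` expands to the IRI `http://example.com/o.`: the backslash only escapes the dot inside the Turtle/TriG local name and is not part of the resulting IRI. A tiny check of the expected expansion, assuming `oxrdf` as a dependency:

```rust
use oxrdf::NamedNode;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // The escaped local name `o\.` denotes an IRI with a literal trailing dot.
    let object = NamedNode::new("http://example.com/o.")?;
    assert_eq!(object.as_str(), "http://example.com/o.");
    assert_eq!(object.to_string(), "<http://example.com/o.>");
    Ok(())
}
```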

@ -58,7 +58,7 @@ pub struct TestManifest {
impl Iterator for TestManifest { impl Iterator for TestManifest {
type Item = Result<Test>; type Item = Result<Test>;
fn next(&mut self) -> Option<Result<Test>> { fn next(&mut self) -> Option<Self::Item> {
loop { loop {
if let Some(next) = self.next_test().transpose() { if let Some(next) = self.next_test().transpose() {
return Some(next); return Some(next);
@ -355,7 +355,7 @@ impl<'a> RdfListIterator<'a> {
impl<'a> Iterator for RdfListIterator<'a> { impl<'a> Iterator for RdfListIterator<'a> {
type Item = Term; type Item = Term;
fn next(&mut self) -> Option<Term> { fn next(&mut self) -> Option<Self::Item> {
match self.current_node { match self.current_node {
Some(current) => { Some(current) => {
let result = self let result = self

@ -727,7 +727,7 @@ impl Drop for StoreRef {
impl Deref for StoreRef { impl Deref for StoreRef {
type Target = Store; type Target = Store;
fn deref(&self) -> &Store { fn deref(&self) -> &Self::Target {
&self.store &self.store
} }
} }
