diff --git a/lib/oxrdf/src/blank_node.rs b/lib/oxrdf/src/blank_node.rs
index e813dd24..3c06a548 100644
--- a/lib/oxrdf/src/blank_node.rs
+++ b/lib/oxrdf/src/blank_node.rs
@@ -19,7 +19,6 @@ use std::str;
/// "_:a122",
/// BlankNode::new("a122")?.to_string()
/// );
-/// # Result::<_,oxrdf::BlankNodeIdParseError>::Ok(())
/// ```
#[derive(Eq, PartialEq, Debug, Clone, Hash)]
pub struct BlankNode(BlankNodeContent);
@@ -137,7 +136,6 @@ impl Default for BlankNode {
/// "_:a122",
/// BlankNodeRef::new("a122")?.to_string()
/// );
-/// # Result::<_,oxrdf::BlankNodeIdParseError>::Ok(())
/// ```
#[derive(Eq, PartialEq, Debug, Clone, Copy, Hash)]
pub struct BlankNodeRef<'a>(BlankNodeRefContent<'a>);
@@ -194,7 +192,6 @@ impl<'a> BlankNodeRef<'a> {
///
/// assert_eq!(BlankNode::new_from_unique_id(128).as_ref().unique_id(), Some(128));
/// assert_eq!(BlankNode::new("foo")?.as_ref().unique_id(), None);
- /// # Result::<_,oxrdf::BlankNodeIdParseError>::Ok(())
/// ```
#[inline]
pub const fn unique_id(&self) -> Option {
diff --git a/lib/oxrdf/src/dataset.rs b/lib/oxrdf/src/dataset.rs
index f240a33a..0933681a 100644
--- a/lib/oxrdf/src/dataset.rs
+++ b/lib/oxrdf/src/dataset.rs
@@ -61,7 +61,6 @@ use std::hash::{Hash, Hasher};
/// // direct access to a dataset graph
/// let results: Vec<_> = dataset.graph(ex).iter().collect();
/// assert_eq!(vec![TripleRef::new(ex, ex, ex)], results);
-/// # Result::<_,Box>::Ok(())
/// ```
#[derive(Debug, Default, Clone)]
pub struct Dataset {
@@ -121,7 +120,6 @@ impl Dataset {
///
/// let results: Vec<_> = dataset.graph(ex).iter().collect();
/// assert_eq!(vec![TripleRef::new(ex, ex, ex)], results);
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn graph<'a, 'b>(&'a self, graph_name: impl Into>) -> GraphView<'a> {
let graph_name = self
@@ -152,7 +150,6 @@ impl Dataset {
/// // We have also changes the dataset itself
/// let results: Vec<_> = dataset.iter().collect();
/// assert_eq!(vec![QuadRef::new(ex, ex, ex, ex)], results);
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn graph_mut<'a, 'b>(
&'a mut self,
@@ -529,7 +526,6 @@ impl Dataset {
/// graph1.canonicalize();
/// graph2.canonicalize();
/// assert_eq!(graph1, graph2);
- /// # Result::<_,Box>::Ok(())
/// ```
///
/// Warning 1: Blank node ids depends on the current shape of the graph. Adding a new quad might change the ids of a lot of blank nodes.
@@ -987,7 +983,6 @@ impl fmt::Display for Dataset {
///
/// let results: Vec<_> = dataset.graph(ex).iter().collect();
/// assert_eq!(vec![TripleRef::new(ex, ex, ex)], results);
-/// # Result::<_,Box>::Ok(())
/// ```
#[derive(Clone, Debug)]
pub struct GraphView<'a> {
@@ -1330,7 +1325,6 @@ impl<'a> fmt::Display for GraphView<'a> {
/// // We have also changes the dataset itself
/// let results: Vec<_> = dataset.iter().collect();
/// assert_eq!(vec![QuadRef::new(ex, ex, ex, ex)], results);
-/// # Result::<_,Box>::Ok(())
/// ```
#[derive(Debug)]
pub struct GraphViewMut<'a> {
diff --git a/lib/oxrdf/src/graph.rs b/lib/oxrdf/src/graph.rs
index 980e3ebf..63d79080 100644
--- a/lib/oxrdf/src/graph.rs
+++ b/lib/oxrdf/src/graph.rs
@@ -47,7 +47,6 @@ use std::fmt;
/// // simple filter
/// let results: Vec<_> = graph.triples_for_subject(ex).collect();
/// assert_eq!(vec![triple], results);
-/// # Result::<_,Box>::Ok(())
/// ```
#[derive(Debug, Default, Clone)]
pub struct Graph {
@@ -203,7 +202,6 @@ impl Graph {
/// graph1.canonicalize();
/// graph2.canonicalize();
/// assert_eq!(graph1, graph2);
- /// # Result::<_,Box>::Ok(())
/// ```
///
/// Warning 1: Blank node ids depends on the current shape of the graph. Adding a new triple might change the ids of a lot of blank nodes.
diff --git a/lib/oxrdf/src/literal.rs b/lib/oxrdf/src/literal.rs
index 976b863d..9fbae545 100644
--- a/lib/oxrdf/src/literal.rs
+++ b/lib/oxrdf/src/literal.rs
@@ -32,7 +32,6 @@ use std::option::Option;
/// "\"foo\"@en",
/// Literal::new_language_tagged_literal("foo", "en")?.to_string()
/// );
-/// # Result::<(), LanguageTagParseError>::Ok(())
/// ```
#[derive(Eq, PartialEq, Debug, Clone, Hash)]
pub struct Literal(LiteralContent);
diff --git a/lib/oxrdf/src/named_node.rs b/lib/oxrdf/src/named_node.rs
index 9b545bcc..8c56ed98 100644
--- a/lib/oxrdf/src/named_node.rs
+++ b/lib/oxrdf/src/named_node.rs
@@ -12,7 +12,6 @@ use std::fmt;
/// "",
/// NamedNode::new("http://example.com/foo")?.to_string()
/// );
-/// # Result::<_,oxrdf::IriParseError>::Ok(())
/// ```
#[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Hash)]
pub struct NamedNode {
@@ -101,7 +100,6 @@ impl PartialEq for &str {
/// "",
/// NamedNodeRef::new("http://example.com/foo")?.to_string()
/// );
-/// # Result::<_,oxrdf::IriParseError>::Ok(())
/// ```
#[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Copy, Hash)]
pub struct NamedNodeRef<'a> {
diff --git a/lib/oxrdf/src/triple.rs b/lib/oxrdf/src/triple.rs
index 850b1375..823347a5 100644
--- a/lib/oxrdf/src/triple.rs
+++ b/lib/oxrdf/src/triple.rs
@@ -708,7 +708,6 @@ impl<'a> From> for Term {
/// object: NamedNode::new("http://example.com/o")?.into(),
/// }.to_string()
/// );
-/// # Result::<_,oxrdf::IriParseError>::Ok(())
/// ```
#[derive(Eq, PartialEq, Debug, Clone, Hash)]
pub struct Triple {
@@ -779,7 +778,6 @@ impl fmt::Display for Triple {
/// object: NamedNodeRef::new("http://example.com/o")?.into(),
/// }.to_string()
/// );
-/// # Result::<_,oxrdf::IriParseError>::Ok(())
/// ```
#[derive(Eq, PartialEq, Debug, Clone, Copy, Hash)]
@@ -1060,7 +1058,6 @@ impl<'a> From> for GraphName {
/// graph_name: NamedNode::new("http://example.com/g")?.into(),
/// }.to_string()
/// );
-/// # Result::<_,oxrdf::IriParseError>::Ok(())
/// ```
#[derive(Eq, PartialEq, Debug, Clone, Hash)]
pub struct Quad {
@@ -1138,7 +1135,6 @@ impl From for Triple {
/// graph_name: NamedNodeRef::new("http://example.com/g")?.into(),
/// }.to_string()
/// );
-/// # Result::<_,oxrdf::IriParseError>::Ok(())
/// ```
#[derive(Eq, PartialEq, Debug, Clone, Copy, Hash)]
pub struct QuadRef<'a> {
diff --git a/lib/oxrdf/src/variable.rs b/lib/oxrdf/src/variable.rs
index 8bde4d6e..9409dd15 100644
--- a/lib/oxrdf/src/variable.rs
+++ b/lib/oxrdf/src/variable.rs
@@ -12,7 +12,6 @@ use std::fmt;
/// "?foo",
/// Variable::new("foo")?.to_string()
/// );
-/// # Result::<_,VariableNameParseError>::Ok(())
/// ```
#[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Hash)]
pub struct Variable {
@@ -73,7 +72,6 @@ impl fmt::Display for Variable {
/// "?foo",
/// VariableRef::new("foo")?.to_string()
/// );
-/// # Result::<_,VariableNameParseError>::Ok(())
/// ```
#[derive(Eq, PartialEq, Ord, PartialOrd, Debug, Clone, Copy, Hash)]
pub struct VariableRef<'a> {
diff --git a/lib/oxrdfio/src/parser.rs b/lib/oxrdfio/src/parser.rs
index eb60e05e..1f8ad2b3 100644
--- a/lib/oxrdfio/src/parser.rs
+++ b/lib/oxrdfio/src/parser.rs
@@ -52,7 +52,6 @@ use tokio::io::AsyncRead;
///
/// assert_eq!(quads.len(), 1);
/// assert_eq!(quads[0].subject.to_string(), "");
-/// # std::io::Result::Ok(())
/// ```
#[must_use]
pub struct RdfParser {
@@ -156,7 +155,6 @@ impl RdfParser {
///
/// assert_eq!(quads.len(), 1);
/// assert_eq!(quads[0].subject.to_string(), "");
- /// # Result::<_,Box>::Ok(())
/// ```
#[inline]
pub fn with_base_iri(mut self, base_iri: impl Into) -> Result {
@@ -184,7 +182,6 @@ impl RdfParser {
///
/// assert_eq!(quads.len(), 1);
/// assert_eq!(quads[0].graph_name.to_string(), "");
- /// # Result::<_,Box>::Ok(())
/// ```
#[inline]
pub fn with_default_graph(mut self, default_graph: impl Into) -> Self {
@@ -226,7 +223,6 @@ impl RdfParser {
/// .rename_blank_nodes()
/// .parse_read(file.as_bytes()).collect::,_>>()?;
/// assert_ne!(result1, result2);
- /// # Result::<_,Box>::Ok(())
/// ```
#[inline]
pub fn rename_blank_nodes(mut self) -> Self {
@@ -266,7 +262,6 @@ impl RdfParser {
///
/// assert_eq!(quads.len(), 1);
/// assert_eq!(quads[0].subject.to_string(), "");
- /// # std::io::Result::Ok(())
/// ```
pub fn parse_read(self, reader: R) -> FromReadQuadReader {
FromReadQuadReader {
@@ -362,7 +357,6 @@ impl From for RdfParser {
///
/// assert_eq!(quads.len(), 1);
/// assert_eq!(quads[0].subject.to_string(), "");
-/// # std::io::Result::Ok(())
/// ```
#[must_use]
pub struct FromReadQuadReader {
diff --git a/lib/oxrdfio/src/serializer.rs b/lib/oxrdfio/src/serializer.rs
index cd132cd5..df366544 100644
--- a/lib/oxrdfio/src/serializer.rs
+++ b/lib/oxrdfio/src/serializer.rs
@@ -46,7 +46,6 @@ use tokio::io::AsyncWrite;
/// writer.finish()?;
///
/// assert_eq!(buffer.as_slice(), " .\n".as_bytes());
-/// # Result::<_,Box>::Ok(())
/// ```
#[must_use]
pub struct RdfSerializer {
@@ -96,7 +95,6 @@ impl RdfSerializer {
/// writer.finish()?;
///
/// assert_eq!(buffer.as_slice(), " .\n".as_bytes());
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn serialize_to_write(self, write: W) -> ToWriteQuadWriter {
ToWriteQuadWriter {
@@ -210,7 +208,6 @@ impl From for RdfSerializer {
/// writer.finish()?;
///
/// assert_eq!(buffer.as_slice(), " .\n".as_bytes());
-/// # Result::<_,Box>::Ok(())
/// ```
#[must_use]
pub struct ToWriteQuadWriter {
diff --git a/lib/oxrdfxml/src/parser.rs b/lib/oxrdfxml/src/parser.rs
index ba0b27a1..3cc2f03a 100644
--- a/lib/oxrdfxml/src/parser.rs
+++ b/lib/oxrdfxml/src/parser.rs
@@ -47,7 +47,6 @@ use tokio::io::{AsyncRead, BufReader as AsyncBufReader};
/// }
/// }
/// assert_eq!(2, count);
-/// # Result::<_,Box>::Ok(())
/// ```
#[derive(Default)]
#[must_use]
@@ -105,7 +104,6 @@ impl RdfXmlParser {
/// }
/// }
/// assert_eq!(2, count);
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn parse_read(self, read: R) -> FromReadRdfXmlReader {
FromReadRdfXmlReader {
@@ -200,7 +198,6 @@ impl RdfXmlParser {
/// }
/// }
/// assert_eq!(2, count);
-/// # Result::<_,Box>::Ok(())
/// ```
#[must_use]
pub struct FromReadRdfXmlReader {
diff --git a/lib/oxrdfxml/src/serializer.rs b/lib/oxrdfxml/src/serializer.rs
index a19856f0..6868a971 100644
--- a/lib/oxrdfxml/src/serializer.rs
+++ b/lib/oxrdfxml/src/serializer.rs
@@ -25,7 +25,6 @@ use tokio::io::AsyncWrite;
/// b"\n\n\t\n\t\t\n\t\n",
/// writer.finish()?.as_slice()
/// );
-/// # Result::<_,Box>::Ok(())
/// ```
#[derive(Default)]
#[must_use]
@@ -56,7 +55,6 @@ impl RdfXmlSerializer {
/// b"\n\n\t\n\t\t\n\t\n",
/// writer.finish()?.as_slice()
/// );
- /// # Result::<_,Box>::Ok(())
/// ```
#[allow(clippy::unused_self)]
pub fn serialize_to_write(self, write: W) -> ToWriteRdfXmlWriter {
@@ -122,7 +120,6 @@ impl RdfXmlSerializer {
/// b"\n\n\t\n\t\t\n\t\n",
/// writer.finish()?.as_slice()
/// );
-/// # Result::<_,Box>::Ok(())
/// ```
#[must_use]
pub struct ToWriteRdfXmlWriter {
diff --git a/lib/oxttl/src/n3.rs b/lib/oxttl/src/n3.rs
index 0642d416..cc61196f 100644
--- a/lib/oxttl/src/n3.rs
+++ b/lib/oxttl/src/n3.rs
@@ -201,7 +201,6 @@ impl From for N3Quad {
/// }
/// }
/// assert_eq!(2, count);
-/// # Result::<_,Box>::Ok(())
/// ```
#[derive(Default)]
#[must_use]
@@ -270,7 +269,6 @@ impl N3Parser {
/// }
/// }
/// assert_eq!(2, count);
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn parse_read(self, read: R) -> FromReadN3Reader {
FromReadN3Reader {
@@ -353,7 +351,6 @@ impl N3Parser {
/// }
/// }
/// assert_eq!(2, count);
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn parse(self) -> LowLevelN3Reader {
LowLevelN3Reader {
@@ -386,7 +383,6 @@ impl N3Parser {
/// }
/// }
/// assert_eq!(2, count);
-/// # Result::<_,Box>::Ok(())
/// ```
#[must_use]
pub struct FromReadN3Reader {
@@ -413,7 +409,6 @@ impl FromReadN3Reader {
///
/// reader.next().unwrap()?; // We read the first triple
/// assert_eq!(reader.prefixes()["schema"], "http://schema.org/"); // There are now prefixes
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn prefixes(&self) -> &HashMap> {
&self.inner.parser.context.prefixes
@@ -434,7 +429,6 @@ impl FromReadN3Reader {
///
/// reader.next().unwrap()?; // We read the first triple
/// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI.
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn base_iri(&self) -> Option<&str> {
self.inner
@@ -592,7 +586,6 @@ impl FromTokioAsyncReadN3Reader {
/// }
/// }
/// assert_eq!(2, count);
-/// # Result::<_,Box>::Ok(())
/// ```
pub struct LowLevelN3Reader {
parser: Parser,
@@ -644,7 +637,6 @@ impl LowLevelN3Reader {
///
/// reader.read_next().unwrap()?; // We read the first triple
/// assert_eq!(reader.prefixes()["schema"], "http://schema.org/"); // There are now prefixes
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn prefixes(&self) -> &HashMap> {
&self.parser.context.prefixes
@@ -666,7 +658,6 @@ impl LowLevelN3Reader {
///
/// reader.read_next().unwrap()?; // We read the first triple
/// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn base_iri(&self) -> Option<&str> {
self.parser
diff --git a/lib/oxttl/src/nquads.rs b/lib/oxttl/src/nquads.rs
index 9be1843b..1f99f67f 100644
--- a/lib/oxttl/src/nquads.rs
+++ b/lib/oxttl/src/nquads.rs
@@ -33,7 +33,6 @@ use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};
/// }
/// }
/// assert_eq!(2, count);
-/// # Result::<_,Box>::Ok(())
/// ```
#[derive(Default)]
#[must_use]
@@ -90,7 +89,6 @@ impl NQuadsParser {
/// }
/// }
/// assert_eq!(2, count);
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn parse_read(self, read: R) -> FromReadNQuadsReader {
FromReadNQuadsReader {
@@ -169,7 +167,6 @@ impl NQuadsParser {
/// }
/// }
/// assert_eq!(2, count);
- /// # Result::<_,Box>::Ok(())
/// ```
#[allow(clippy::unused_self)]
pub fn parse(self) -> LowLevelNQuadsReader {
@@ -205,7 +202,6 @@ impl NQuadsParser {
/// }
/// }
/// assert_eq!(2, count);
-/// # Result::<_,Box>::Ok(())
/// ```
#[must_use]
pub struct FromReadNQuadsReader {
@@ -295,7 +291,6 @@ impl FromTokioAsyncReadNQuadsReader {
/// }
/// }
/// assert_eq!(2, count);
-/// # Result::<_,Box>::Ok(())
/// ```
pub struct LowLevelNQuadsReader {
parser: Parser,
@@ -347,7 +342,6 @@ impl LowLevelNQuadsReader {
/// b" .\n",
/// writer.finish().as_slice()
/// );
-/// # Result::<_,Box>::Ok(())
/// ```
#[derive(Default)]
#[must_use]
@@ -377,7 +371,6 @@ impl NQuadsSerializer {
/// b" .\n",
/// writer.finish().as_slice()
/// );
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn serialize_to_write(self, write: W) -> ToWriteNQuadsWriter {
ToWriteNQuadsWriter {
@@ -438,7 +431,6 @@ impl NQuadsSerializer {
/// b" .\n",
/// buf.as_slice()
/// );
- /// # Result::<_,Box>::Ok(())
/// ```
#[allow(clippy::unused_self)]
pub fn serialize(&self) -> LowLevelNQuadsWriter {
@@ -463,7 +455,6 @@ impl NQuadsSerializer {
/// b" .\n",
/// writer.finish().as_slice()
/// );
-/// # Result::<_,Box>::Ok(())
/// ```
#[must_use]
pub struct ToWriteNQuadsWriter {
@@ -547,7 +538,6 @@ impl ToTokioAsyncWriteNQuadsWriter {
/// b" .\n",
/// buf.as_slice()
/// );
-/// # Result::<_,Box>::Ok(())
/// ```
pub struct LowLevelNQuadsWriter;
diff --git a/lib/oxttl/src/ntriples.rs b/lib/oxttl/src/ntriples.rs
index 995643bc..8468b85a 100644
--- a/lib/oxttl/src/ntriples.rs
+++ b/lib/oxttl/src/ntriples.rs
@@ -33,7 +33,6 @@ use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};
/// }
/// }
/// assert_eq!(2, count);
-/// # Result::<_,Box>::Ok(())
/// ```
#[derive(Default)]
#[must_use]
@@ -90,7 +89,6 @@ impl NTriplesParser {
/// }
/// }
/// assert_eq!(2, count);
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn parse_read(self, read: R) -> FromReadNTriplesReader {
FromReadNTriplesReader {
@@ -169,7 +167,6 @@ impl NTriplesParser {
/// }
/// }
/// assert_eq!(2, count);
- /// # Result::<_,Box>::Ok(())
/// ```
#[allow(clippy::unused_self)]
pub fn parse(self) -> LowLevelNTriplesReader {
@@ -205,7 +202,6 @@ impl NTriplesParser {
/// }
/// }
/// assert_eq!(2, count);
-/// # Result::<_,Box>::Ok(())
/// ```
#[must_use]
pub struct FromReadNTriplesReader {
@@ -295,7 +291,6 @@ impl FromTokioAsyncReadNTriplesReader {
/// }
/// }
/// assert_eq!(2, count);
-/// # Result::<_,Box>::Ok(())
/// ```
pub struct LowLevelNTriplesReader {
parser: Parser,
@@ -346,7 +341,6 @@ impl LowLevelNTriplesReader {
/// b" .\n",
/// writer.finish().as_slice()
/// );
-/// # Result::<_,Box>::Ok(())
/// ```
#[derive(Default)]
#[must_use]
@@ -375,7 +369,6 @@ impl NTriplesSerializer {
/// b" .\n",
/// writer.finish().as_slice()
/// );
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn serialize_to_write(self, write: W) -> ToWriteNTriplesWriter {
ToWriteNTriplesWriter {
@@ -434,7 +427,6 @@ impl NTriplesSerializer {
/// b" .\n",
/// buf.as_slice()
/// );
- /// # Result::<_,Box>::Ok(())
/// ```
#[allow(clippy::unused_self)]
pub fn serialize(&self) -> LowLevelNTriplesWriter {
@@ -458,7 +450,6 @@ impl NTriplesSerializer {
/// b" .\n",
/// writer.finish().as_slice()
/// );
-/// # Result::<_,Box>::Ok(())
/// ```
#[must_use]
pub struct ToWriteNTriplesWriter {
@@ -540,7 +531,6 @@ impl ToTokioAsyncWriteNTriplesWriter {
/// b" .\n",
/// buf.as_slice()
/// );
-/// # Result::<_,Box>::Ok(())
/// ```
pub struct LowLevelNTriplesWriter;
diff --git a/lib/oxttl/src/trig.rs b/lib/oxttl/src/trig.rs
index 70d3edb6..c738cbc1 100644
--- a/lib/oxttl/src/trig.rs
+++ b/lib/oxttl/src/trig.rs
@@ -38,7 +38,6 @@ use tokio::io::{AsyncRead, AsyncWrite, AsyncWriteExt};
/// }
/// }
/// assert_eq!(2, count);
-/// # Result::<_,Box>::Ok(())
/// ```
#[derive(Default)]
#[must_use]
@@ -116,7 +115,6 @@ impl TriGParser {
/// }
/// }
/// assert_eq!(2, count);
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn parse_read(self, read: R) -> FromReadTriGReader {
FromReadTriGReader {
@@ -197,7 +195,6 @@ impl TriGParser {
/// }
/// }
/// assert_eq!(2, count);
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn parse(self) -> LowLevelTriGReader {
LowLevelTriGReader {
@@ -236,7 +233,6 @@ impl TriGParser {
/// }
/// }
/// assert_eq!(2, count);
-/// # Result::<_,Box>::Ok(())
/// ```
#[must_use]
pub struct FromReadTriGReader {
@@ -263,7 +259,6 @@ impl FromReadTriGReader {
///
/// reader.next().unwrap()?; // We read the first triple
/// assert_eq!(reader.prefixes()["schema"], "http://schema.org/"); // There are now prefixes
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn prefixes(&self) -> &HashMap> {
&self.inner.parser.context.prefixes
@@ -284,7 +279,6 @@ impl FromReadTriGReader {
///
/// reader.next().unwrap()?; // We read the first triple
/// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI.
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn base_iri(&self) -> Option<&str> {
self.inner
@@ -440,7 +434,6 @@ impl FromTokioAsyncReadTriGReader {
/// }
/// }
/// assert_eq!(2, count);
-/// # Result::<_,Box>::Ok(())
/// ```
pub struct LowLevelTriGReader {
parser: Parser,
@@ -492,7 +485,6 @@ impl LowLevelTriGReader {
///
/// reader.read_next().unwrap()?; // We read the first triple
/// assert_eq!(reader.prefixes()["schema"], "http://schema.org/"); // There are now prefixes
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn prefixes(&self) -> &HashMap> {
&self.parser.context.prefixes
@@ -514,7 +506,6 @@ impl LowLevelTriGReader {
///
/// reader.read_next().unwrap()?; // We read the first triple
/// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn base_iri(&self) -> Option<&str> {
self.parser
@@ -545,7 +536,6 @@ impl LowLevelTriGReader {
/// b" {\n\t .\n}\n",
/// writer.finish()?.as_slice()
/// );
-/// # Result::<_,Box>::Ok(())
/// ```
#[derive(Default)]
#[must_use]
@@ -575,7 +565,6 @@ impl TriGSerializer {
/// b" {\n\t .\n}\n",
/// writer.finish()?.as_slice()
/// );
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn serialize_to_write(self, write: W) -> ToWriteTriGWriter {
ToWriteTriGWriter {
@@ -637,7 +626,6 @@ impl TriGSerializer {
/// b" {\n\t .\n}\n",
/// buf.as_slice()
/// );
- /// # Result::<_,Box>::Ok(())
/// ```
#[allow(clippy::unused_self)]
pub fn serialize(&self) -> LowLevelTriGWriter {
@@ -665,7 +653,6 @@ impl TriGSerializer {
/// b" {\n\t .\n}\n",
/// writer.finish()?.as_slice()
/// );
-/// # Result::<_,Box>::Ok(())
/// ```
#[must_use]
pub struct ToWriteTriGWriter {
@@ -754,7 +741,6 @@ impl ToTokioAsyncWriteTriGWriter {
/// b" {\n\t .\n}\n",
/// buf.as_slice()
/// );
-/// # Result::<_,Box>::Ok(())
/// ```
pub struct LowLevelTriGWriter {
current_graph_name: GraphName,
diff --git a/lib/oxttl/src/turtle.rs b/lib/oxttl/src/turtle.rs
index 0e225611..e2a31c86 100644
--- a/lib/oxttl/src/turtle.rs
+++ b/lib/oxttl/src/turtle.rs
@@ -40,7 +40,6 @@ use tokio::io::{AsyncRead, AsyncWrite};
/// }
/// }
/// assert_eq!(2, count);
-/// # Result::<_,Box>::Ok(())
/// ```
#[derive(Default)]
#[must_use]
@@ -118,7 +117,6 @@ impl TurtleParser {
/// }
/// }
/// assert_eq!(2, count);
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn parse_read(self, read: R) -> FromReadTurtleReader {
FromReadTurtleReader {
@@ -199,7 +197,6 @@ impl TurtleParser {
/// }
/// }
/// assert_eq!(2, count);
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn parse(self) -> LowLevelTurtleReader {
LowLevelTurtleReader {
@@ -238,7 +235,6 @@ impl TurtleParser {
/// }
/// }
/// assert_eq!(2, count);
-/// # Result::<_,Box>::Ok(())
/// ```
#[must_use]
pub struct FromReadTurtleReader {
@@ -265,7 +261,6 @@ impl FromReadTurtleReader {
///
/// reader.next().unwrap()?; // We read the first triple
/// assert_eq!(reader.prefixes()["schema"], "http://schema.org/"); // There are now prefixes
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn prefixes(&self) -> &HashMap> {
&self.inner.parser.context.prefixes
@@ -286,7 +281,6 @@ impl FromReadTurtleReader {
///
/// reader.next().unwrap()?; // We read the first triple
/// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI.
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn base_iri(&self) -> Option<&str> {
self.inner
@@ -442,7 +436,6 @@ impl FromTokioAsyncReadTurtleReader {
/// }
/// }
/// assert_eq!(2, count);
-/// # Result::<_,Box>::Ok(())
/// ```
pub struct LowLevelTurtleReader {
parser: Parser,
@@ -494,7 +487,6 @@ impl LowLevelTurtleReader {
///
/// reader.read_next().unwrap()?; // We read the first triple
/// assert_eq!(reader.prefixes()["schema"], "http://schema.org/"); // There are now prefixes
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn prefixes(&self) -> &HashMap> {
&self.parser.context.prefixes
@@ -516,7 +508,6 @@ impl LowLevelTurtleReader {
///
/// reader.read_next().unwrap()?; // We read the first triple
/// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn base_iri(&self) -> Option<&str> {
self.parser
@@ -546,7 +537,6 @@ impl LowLevelTurtleReader {
/// b" .\n",
/// writer.finish()?.as_slice()
/// );
-/// # Result::<_,Box>::Ok(())
/// ```
#[derive(Default)]
#[must_use]
@@ -577,7 +567,6 @@ impl TurtleSerializer {
/// b" .\n",
/// writer.finish()?.as_slice()
/// );
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn serialize_to_write(self, write: W) -> ToWriteTurtleWriter {
ToWriteTurtleWriter {
@@ -634,7 +623,6 @@ impl TurtleSerializer {
/// b" .\n",
/// buf.as_slice()
/// );
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn serialize(&self) -> LowLevelTurtleWriter {
LowLevelTurtleWriter {
@@ -659,7 +647,6 @@ impl TurtleSerializer {
/// b" .\n",
/// writer.finish()?.as_slice()
/// );
-/// # Result::<_,Box>::Ok(())
/// ```
#[must_use]
pub struct ToWriteTurtleWriter {
@@ -739,7 +726,6 @@ impl ToTokioAsyncWriteTurtleWriter {
/// b" .\n",
/// buf.as_slice()
/// );
-/// # Result::<_,Box>::Ok(())
/// ```
pub struct LowLevelTurtleWriter {
inner: LowLevelTriGWriter,
diff --git a/lib/sparesults/src/parser.rs b/lib/sparesults/src/parser.rs
index 0a826e96..23eee870 100644
--- a/lib/sparesults/src/parser.rs
+++ b/lib/sparesults/src/parser.rs
@@ -32,7 +32,6 @@ use std::sync::Arc;
/// assert_eq!(solution?.iter().collect::>(), vec![(&Variable::new_unchecked("foo"), &Literal::from("test").into())]);
/// }
/// }
-/// # Result::<(),sparesults::ParseError>::Ok(())
/// ```
pub struct QueryResultsParser {
format: QueryResultsFormat,
@@ -68,7 +67,6 @@ impl QueryResultsParser {
/// assert_eq!(solution?.iter().collect::>(), vec![(&Variable::new_unchecked("foo"), &Literal::from("test").into())]);
/// }
/// }
- /// # Result::<(),sparesults::ParseError>::Ok(())
/// ```
pub fn parse_read(
&self,
@@ -147,7 +145,6 @@ impl From for QueryResultsParser {
/// assert_eq!(solution?.iter().collect::>(), vec![(&Variable::new_unchecked("foo"), &Literal::from("test").into())]);
/// }
/// }
-/// # Result::<(),sparesults::ParseError>::Ok(())
/// ```
pub enum FromReadQueryResultsReader {
Solutions(FromReadSolutionsReader),
@@ -170,7 +167,6 @@ pub enum FromReadQueryResultsReader {
/// assert_eq!(solution?.iter().collect::>(), vec![(&Variable::new_unchecked("foo"), &Literal::from("test").into())]);
/// }
/// }
-/// # Result::<(),sparesults::ParseError>::Ok(())
/// ```
pub struct FromReadSolutionsReader {
variables: Arc<[Variable]>,
@@ -195,7 +191,6 @@ impl FromReadSolutionsReader {
/// if let FromReadQueryResultsReader::Solutions(solutions) = json_parser.parse_read(b"?foo\t?bar\n\"ex1\"\t\"ex2\"".as_slice())? {
/// assert_eq!(solutions.variables(), &[Variable::new_unchecked("foo"), Variable::new_unchecked("bar")]);
/// }
- /// # Result::<(),sparesults::ParseError>::Ok(())
/// ```
#[inline]
pub fn variables(&self) -> &[Variable] {
diff --git a/lib/sparesults/src/serializer.rs b/lib/sparesults/src/serializer.rs
index 9a4ba143..0b7cab67 100644
--- a/lib/sparesults/src/serializer.rs
+++ b/lib/sparesults/src/serializer.rs
@@ -43,7 +43,6 @@ use tokio::io::AsyncWrite;
/// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test"))))?;
/// writer.finish()?;
/// assert_eq!(buffer, b"{\"head\":{\"vars\":[\"foo\",\"bar\"]},\"results\":{\"bindings\":[{\"foo\":{\"type\":\"literal\",\"value\":\"test\"}}]}}");
-/// # std::io::Result::Ok(())
/// ```
pub struct QueryResultsSerializer {
format: QueryResultsFormat,
@@ -66,7 +65,6 @@ impl QueryResultsSerializer {
/// let mut buffer = Vec::new();
/// xml_serializer.serialize_boolean_to_write(&mut buffer, true)?;
/// assert_eq!(buffer, b"true");
- /// # std::io::Result::Ok(())
/// ```
pub fn serialize_boolean_to_write(&self, write: W, value: bool) -> io::Result {
match self.format {
@@ -135,7 +133,6 @@ impl QueryResultsSerializer {
/// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test"))))?;
/// writer.finish()?;
/// assert_eq!(buffer, b"test");
- /// # std::io::Result::Ok(())
/// ```
pub fn serialize_solutions_to_write(
&self,
@@ -251,7 +248,6 @@ impl From for QueryResultsSerializer {
/// writer.write(once((VariableRef::new_unchecked("foo"), LiteralRef::from("test"))))?;
/// writer.finish()?;
/// assert_eq!(buffer, b"?foo\t?bar\n\"test\"\t\n");
-/// # std::io::Result::Ok(())
/// ```
#[must_use]
pub struct ToWriteSolutionsWriter {
@@ -281,7 +277,6 @@ impl ToWriteSolutionsWriter {
/// writer.write(&QuerySolution::from((vec![Variable::new_unchecked("bar")], vec![Some(Literal::from("test").into())])))?;
/// writer.finish()?;
/// assert_eq!(buffer, b"{\"head\":{\"vars\":[\"foo\",\"bar\"]},\"results\":{\"bindings\":[{\"foo\":{\"type\":\"literal\",\"value\":\"test\"}},{\"bar\":{\"type\":\"literal\",\"value\":\"test\"}}]}}");
- /// # std::io::Result::Ok(())
/// ```
pub fn write<'a>(
&mut self,
diff --git a/lib/spargebra/src/query.rs b/lib/spargebra/src/query.rs
index 6cb7e57b..4f482ef8 100644
--- a/lib/spargebra/src/query.rs
+++ b/lib/spargebra/src/query.rs
@@ -14,7 +14,6 @@ use std::str::FromStr;
/// let query = Query::parse(query_str, None)?;
/// assert_eq!(query.to_string(), query_str);
/// assert_eq!(query.to_sse(), "(project (?s ?p ?o) (bgp (triple ?s ?p ?o)))");
-/// # Ok::<_, spargebra::ParseError>(())
/// ```
#[derive(Eq, PartialEq, Debug, Clone, Hash)]
pub enum Query {
diff --git a/lib/spargebra/src/term.rs b/lib/spargebra/src/term.rs
index 6a33294c..24dfb0cc 100644
--- a/lib/spargebra/src/term.rs
+++ b/lib/spargebra/src/term.rs
@@ -151,7 +151,6 @@ impl TryFrom for GroundTerm {
/// object: NamedNode::new("http://example.com/o")?.into(),
/// }.to_string()
/// );
-/// # Result::<_,oxrdf::IriParseError>::Ok(())
/// ```
#[derive(Eq, PartialEq, Debug, Clone, Hash)]
pub struct GroundTriple {
@@ -246,7 +245,6 @@ impl TryFrom for GraphName {
/// graph_name: NamedNode::new("http://example.com/g")?.into(),
/// }.to_string()
/// );
-/// # Result::<_,oxrdf::IriParseError>::Ok(())
/// ```
#[derive(Eq, PartialEq, Debug, Clone, Hash)]
pub struct Quad {
@@ -321,7 +319,6 @@ impl TryFrom for Quad {
/// graph_name: NamedNode::new("http://example.com/g")?.into(),
/// }.to_string()
/// );
-/// # Result::<_,oxrdf::IriParseError>::Ok(())
/// ```
#[derive(Eq, PartialEq, Debug, Clone, Hash)]
pub struct GroundQuad {
diff --git a/lib/spargebra/src/update.rs b/lib/spargebra/src/update.rs
index e73b234e..234cf701 100644
--- a/lib/spargebra/src/update.rs
+++ b/lib/spargebra/src/update.rs
@@ -14,7 +14,6 @@ use std::str::FromStr;
/// let update = Update::parse(update_str, None)?;
/// assert_eq!(update.to_string().trim(), update_str);
/// assert_eq!(update.to_sse(), "(update (clear all))");
-/// # Ok::<_, spargebra::ParseError>(())
/// ```
#[derive(Eq, PartialEq, Debug, Clone, Hash)]
pub struct Update {
diff --git a/lib/src/io/read.rs b/lib/src/io/read.rs
index 33065615..35f9d4ec 100644
--- a/lib/src/io/read.rs
+++ b/lib/src/io/read.rs
@@ -25,7 +25,6 @@ use std::io::Read;
///
/// assert_eq!(triples.len(), 1);
/// assert_eq!(triples[0].subject.to_string(), "");
-/// # std::io::Result::Ok(())
/// ```
#[deprecated(note = "use RdfParser instead", since = "0.4.0")]
pub struct GraphParser {
@@ -55,7 +54,6 @@ impl GraphParser {
///
/// assert_eq!(triples.len(), 1);
/// assert_eq!(triples[0].subject.to_string(), "");
- /// # Result::<_,Box>::Ok(())
/// ```
#[inline]
pub fn with_base_iri(self, base_iri: impl Into) -> Result {
@@ -85,7 +83,6 @@ impl GraphParser {
///
/// assert_eq!(triples.len(), 1);
/// assert_eq!(triples[0].subject.to_string(), "");
-/// # std::io::Result::Ok(())
/// ```
#[must_use]
pub struct TripleReader {
@@ -116,7 +113,6 @@ impl Iterator for TripleReader {
///
/// assert_eq!(quads.len(), 1);
/// assert_eq!(quads[0].subject.to_string(), "");
-/// # std::io::Result::Ok(())
/// ```
#[deprecated(note = "use RdfParser instead", since = "0.4.0")]
pub struct DatasetParser {
@@ -144,7 +140,6 @@ impl DatasetParser {
///
/// assert_eq!(triples.len(), 1);
/// assert_eq!(triples[0].subject.to_string(), "");
- /// # Result::<_,Box>::Ok(())
/// ```
#[inline]
pub fn with_base_iri(self, base_iri: impl Into) -> Result {
@@ -174,7 +169,6 @@ impl DatasetParser {
///
/// assert_eq!(quads.len(), 1);
/// assert_eq!(quads[0].subject.to_string(), "");
-/// # std::io::Result::Ok(())
/// ```
#[must_use]
pub struct QuadReader {
diff --git a/lib/src/io/write.rs b/lib/src/io/write.rs
index 7a9007c0..8ece59c5 100644
--- a/lib/src/io/write.rs
+++ b/lib/src/io/write.rs
@@ -28,7 +28,6 @@ use std::io::{self, Write};
/// writer.finish()?;
///
/// assert_eq!(buffer.as_slice(), " .\n".as_bytes());
-/// # Result::<_,Box>::Ok(())
/// ```
#[deprecated(note = "use RdfSerializer instead", since = "0.4.0")]
pub struct GraphSerializer {
@@ -73,7 +72,6 @@ impl GraphSerializer {
/// writer.finish()?;
///
/// assert_eq!(buffer.as_slice(), " .\n".as_bytes());
-/// # Result::<_,Box>::Ok(())
/// ```
#[must_use]
pub struct TripleWriter {
@@ -113,7 +111,6 @@ impl TripleWriter {
/// writer.finish()?;
///
/// assert_eq!(buffer.as_slice(), " .\n".as_bytes());
-/// # Result::<_,Box>::Ok(())
/// ```
#[deprecated(note = "use RdfSerializer instead", since = "0.4.0")]
pub struct DatasetSerializer {
@@ -159,7 +156,6 @@ impl DatasetSerializer {
/// writer.finish()?;
///
/// assert_eq!(buffer.as_slice(), " .\n".as_bytes());
-/// # Result::<_,Box>::Ok(())
/// ```
#[must_use]
pub struct QuadWriter {
diff --git a/lib/src/sparql/algebra.rs b/lib/src/sparql/algebra.rs
index d83241c6..b3e9b9dd 100644
--- a/lib/src/sparql/algebra.rs
+++ b/lib/src/sparql/algebra.rs
@@ -24,7 +24,6 @@ use std::str::FromStr;
/// let default = vec![NamedNode::new("http://example.com")?.into()];
/// query.dataset_mut().set_default_graph(default.clone());
/// assert_eq!(query.dataset().default_graph_graphs(), Some(default.as_slice()));
-/// # Ok::<_, Box>(())
/// ```
#[derive(Eq, PartialEq, Debug, Clone, Hash)]
pub struct Query {
@@ -110,7 +109,6 @@ impl From for Query {
/// let update = Update::parse(update_str, None)?;
///
/// assert_eq!(update.to_string().trim(), update_str);
-/// # Ok::<_, oxigraph::sparql::ParseError>(())
/// ```
#[derive(Eq, PartialEq, Debug, Clone, Hash)]
pub struct Update {
@@ -220,7 +218,6 @@ impl QueryDataset {
/// assert!(Query::parse("SELECT ?s ?p ?o WHERE { ?s ?p ?o . }", None)?.dataset().is_default_dataset());
/// assert!(!Query::parse("SELECT ?s ?p ?o FROM WHERE { ?s ?p ?o . }", None)?.dataset().is_default_dataset());
///
- /// # Ok::<_, Box>(())
/// ```
pub fn is_default_dataset(&self) -> bool {
self.default
@@ -252,7 +249,6 @@ impl QueryDataset {
/// query.dataset_mut().set_default_graph(default.clone());
/// assert_eq!(query.dataset().default_graph_graphs(), Some(default.as_slice()));
///
- /// # Ok::<_, Box>(())
/// ```
pub fn set_default_graph(&mut self, graphs: Vec) {
self.default = Some(graphs)
@@ -274,7 +270,6 @@ impl QueryDataset {
/// query.dataset_mut().set_available_named_graphs(named.clone());
/// assert_eq!(query.dataset().available_named_graphs(), Some(named.as_slice()));
///
- /// # Ok::<_, Box>(())
/// ```
pub fn set_available_named_graphs(&mut self, named_graphs: Vec) {
self.named = Some(named_graphs);
diff --git a/lib/src/sparql/mod.rs b/lib/src/sparql/mod.rs
index f48ea908..c1571262 100644
--- a/lib/src/sparql/mod.rs
+++ b/lib/src/sparql/mod.rs
@@ -152,7 +152,6 @@ pub(crate) fn evaluate_query(
/// "SELECT * WHERE { SERVICE {} }",
/// QueryOptions::default().without_service_handler()
/// )?;
-/// # Result::<_,Box>::Ok(())
/// ```
#[derive(Clone, Default)]
pub struct QueryOptions {
@@ -224,7 +223,6 @@ impl QueryOptions {
/// )? {
/// assert_eq!(solutions.next().unwrap()?.get("nt"), Some(&Literal::from("\"1\"^^").into()));
/// }
- /// # Result::<_,Box>::Ok(())
/// ```
#[inline]
#[must_use]
diff --git a/lib/src/sparql/model.rs b/lib/src/sparql/model.rs
index 326e7603..84db7ea1 100644
--- a/lib/src/sparql/model.rs
+++ b/lib/src/sparql/model.rs
@@ -44,7 +44,6 @@ impl QueryResults {
/// let mut results = Vec::new();
/// store.query("SELECT ?s WHERE { ?s ?p ?o }")?.write(&mut results, QueryResultsFormat::Json)?;
/// assert_eq!(results, "{\"head\":{\"vars\":[\"s\"]},\"results\":{\"bindings\":[{\"s\":{\"type\":\"uri\",\"value\":\"http://example.com\"}}]}}".as_bytes());
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn write(
self,
@@ -116,7 +115,6 @@ impl QueryResults {
/// let mut results = Vec::new();
/// store.query("CONSTRUCT WHERE { ?s ?p ?o }")?.write_graph(&mut results, RdfFormat::NTriples)?;
/// assert_eq!(results, graph.as_bytes());
- /// # Result::<_,Box>::Ok(())
/// ```
pub fn write_graph(
self,
@@ -168,7 +166,6 @@ impl From> for QueryResults {
/// println!("{:?}", solution?.get("s"));
/// }
/// }
-/// # Result::<_,Box>::Ok(())
/// ```
pub struct QuerySolutionIter {
variables: Arc<[Variable]>,
@@ -200,7 +197,6 @@ impl QuerySolutionIter {
/// if let QueryResults::Solutions(solutions) = store.query("SELECT ?s ?o WHERE { ?s ?p ?o }")? {
/// assert_eq!(solutions.variables(), &[Variable::new("s")?, Variable::new("o")?]);
/// }
- /// # Result::<_,Box>::Ok(())
/// ```
#[inline]
pub fn variables(&self) -> &[Variable] {
@@ -243,7 +239,6 @@ impl Iterator for QuerySolutionIter {
/// println!("{}", triple?);
/// }
/// }
-/// # Result::<_,Box>::Ok(())
/// ```
pub struct QueryTripleIter {
pub(crate) iter: Box>>,
diff --git a/lib/src/sparql/service.rs b/lib/src/sparql/service.rs
index 4db172e4..8d5ec8c7 100644
--- a/lib/src/sparql/service.rs
+++ b/lib/src/sparql/service.rs
@@ -46,7 +46,6 @@ use std::time::Duration;
/// )? {
/// assert_eq!(solutions.next().unwrap()?.get("s"), Some(&ex.into()));
/// }
-/// # Result::<_,Box>::Ok(())
/// ```
pub trait ServiceHandler: Send + Sync {
/// The service evaluation error.
diff --git a/lib/src/store.rs b/lib/src/store.rs
index 9b448141..57ae8a75 100644
--- a/lib/src/store.rs
+++ b/lib/src/store.rs
@@ -80,7 +80,6 @@ use std::{fmt, str};
/// #
/// # };
/// # remove_dir_all("example.db")?;
-/// # Result::<_, Box>::Ok(())
/// ```
#[derive(Clone)]
pub struct Store {
@@ -174,7 +173,6 @@ impl Store {
/// if let QueryResults::Solutions(mut solutions) = store.query("SELECT ?s WHERE { ?s ?p ?o }")? {
/// assert_eq!(solutions.next().unwrap()?.get("s"), Some(&ex.into_owned().into()));
/// }
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn query(
&self,
@@ -201,7 +199,6 @@ impl Store {
/// )? {
/// assert_eq!(solutions.next().unwrap()?.get("nt"), Some(&Literal::from("\"1\"^^").into()));
/// }
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn query_opt(
&self,
@@ -230,7 +227,6 @@ impl Store {
/// let mut buf = Vec::new();
/// explanation.write_in_json(&mut buf)?;
/// }
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn explain_query_opt(
&self,
@@ -258,7 +254,6 @@ impl Store {
/// // quad filter by object
/// let results = store.quads_for_pattern(None, None, Some((&ex).into()), None).collect::,_>>()?;
/// assert_eq!(vec![quad], results);
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn quads_for_pattern(
&self,
@@ -296,7 +291,6 @@ impl Store {
/// // quad filter by object
/// let results = store.iter().collect::,_>>()?;
/// assert_eq!(vec![quad], results);
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn iter(&self) -> QuadIter {
self.quads_for_pattern(None, None, None, None)
@@ -317,7 +311,6 @@ impl Store {
///
/// store.insert(quad)?;
/// assert!(store.contains(quad)?);
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn contains<'a>(&self, quad: impl Into>) -> Result {
let quad = EncodedQuad::from(quad.into());
@@ -338,7 +331,6 @@ impl Store {
/// store.insert(QuadRef::new(ex, ex, ex, ex))?;
/// store.insert(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?;
/// assert_eq!(2, store.len()?);
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn len(&self) -> Result {
self.storage.snapshot().len()
@@ -357,7 +349,6 @@ impl Store {
/// let ex = NamedNodeRef::new("http://example.com")?;
/// store.insert(QuadRef::new(ex, ex, ex, ex))?;
/// assert!(!store.is_empty()?);
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn is_empty(&self) -> Result {
self.storage.snapshot().is_empty()
@@ -386,7 +377,6 @@ impl Store {
/// }
/// Result::<_, StorageError>::Ok(())
/// })?;
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn transaction<'a, 'b: 'a, T, E: Error + 'static + From>(
&'b self,
@@ -410,7 +400,6 @@ impl Store {
/// // we inspect the store contents
/// let ex = NamedNodeRef::new("http://example.com")?;
/// assert!(store.contains(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?);
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn update(
&self,
@@ -434,7 +423,6 @@ impl Store {
/// |args| args.get(0).map(|t| Literal::from(t.to_string()).into())
/// )
/// )?;
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn update_opt(
&self,
@@ -478,7 +466,6 @@ impl Store {
/// let ex = NamedNodeRef::new("http://example.com")?;
/// assert!(store.contains(QuadRef::new(ex, ex, ex, NamedNodeRef::new("http://example.com/g")?))?);
/// assert!(store.contains(QuadRef::new(ex, ex, ex, NamedNodeRef::new("http://example.com/g2")?))?);
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn load_from_read(
&self,
@@ -517,7 +504,6 @@ impl Store {
/// // we inspect the store contents
/// let ex = NamedNodeRef::new("http://example.com")?;
/// assert!(store.contains(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?);
- /// # Result::<_, Box>::Ok(())
/// ```
#[deprecated(note = "use Store.load_from_read instead", since = "0.4.0")]
pub fn load_graph(
@@ -560,7 +546,6 @@ impl Store {
/// // we inspect the store contents
/// let ex = NamedNodeRef::new("http://example.com")?;
/// assert!(store.contains(QuadRef::new(ex, ex, ex, ex))?);
- /// # Result::<_, Box>::Ok(())
/// ```
#[deprecated(note = "use Store.load_from_read instead", since = "0.4.0")]
pub fn load_dataset(
@@ -598,7 +583,6 @@ impl Store {
/// assert!(!store.insert(quad)?);
///
/// assert!(store.contains(quad)?);
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn insert<'a>(&self, quad: impl Into>) -> Result {
let quad = quad.into();
@@ -636,7 +620,6 @@ impl Store {
/// assert!(!store.remove(quad)?);
///
/// assert!(!store.contains(quad)?);
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn remove<'a>(&self, quad: impl Into>) -> Result {
let quad = quad.into();
@@ -656,7 +639,6 @@ impl Store {
///
/// let buffer = store.dump_to_write(RdfFormat::NQuads, Vec::new())?;
/// assert_eq!(file, buffer.as_slice());
- /// # std::io::Result::Ok(())
/// ```
pub fn dump_to_write(
&self,
@@ -690,7 +672,6 @@ impl Store {
/// let mut buffer = Vec::new();
/// store.dump_graph_to_write(GraphNameRef::DefaultGraph, RdfFormat::NTriples, &mut buffer)?;
/// assert_eq!(file, buffer.as_slice());
- /// # std::io::Result::Ok(())
/// ```
pub fn dump_graph_to_write<'a, W: Write>(
&self,
@@ -721,7 +702,6 @@ impl Store {
/// let mut buffer = Vec::new();
/// store.dump_graph(&mut buffer, RdfFormat::NTriples, GraphNameRef::DefaultGraph)?;
/// assert_eq!(file, buffer.as_slice());
- /// # std::io::Result::Ok(())
/// ```
#[deprecated(note = "use Store.dump_graph_to_write instead", since = "0.4.0")]
pub fn dump_graph<'a, W: Write>(
@@ -746,7 +726,6 @@ impl Store {
///
/// let buffer = store.dump_dataset(Vec::new(), RdfFormat::NQuads)?;
/// assert_eq!(file, buffer.as_slice());
- /// # std::io::Result::Ok(())
/// ```
#[deprecated(note = "use Store.dump_to_write instead", since = "0.4.0")]
pub fn dump_dataset(
@@ -769,7 +748,6 @@ impl Store {
/// store.insert(QuadRef::new(&ex, &ex, &ex, &ex))?;
/// store.insert(QuadRef::new(&ex, &ex, &ex, GraphNameRef::DefaultGraph))?;
/// assert_eq!(vec![NamedOrBlankNode::from(ex)], store.named_graphs().collect::,_>>()?);
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn named_graphs(&self) -> GraphNameIter {
let reader = self.storage.snapshot();
@@ -790,7 +768,6 @@ impl Store {
/// let store = Store::new()?;
/// store.insert(QuadRef::new(&ex, &ex, &ex, &ex))?;
/// assert!(store.contains_named_graph(&ex)?);
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn contains_named_graph<'a>(
&self,
@@ -814,7 +791,6 @@ impl Store {
/// store.insert_named_graph(ex)?;
///
/// assert_eq!(store.named_graphs().collect::,_>>()?, vec![ex.into_owned().into()]);
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn insert_named_graph<'a>(
&self,
@@ -840,7 +816,6 @@ impl Store {
/// store.clear_graph(ex)?;
/// assert!(store.is_empty()?);
/// assert_eq!(1, store.named_graphs().count());
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn clear_graph<'a>(
&self,
@@ -868,7 +843,6 @@ impl Store {
/// assert!(store.remove_named_graph(ex)?);
/// assert!(store.is_empty()?);
/// assert_eq!(0, store.named_graphs().count());
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn remove_named_graph<'a>(
&self,
@@ -893,7 +867,6 @@ impl Store {
///
/// store.clear()?;
/// assert!(store.is_empty()?);
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn clear(&self) -> Result<(), StorageError> {
self.transaction(|mut t| t.clear())
@@ -957,7 +930,6 @@ impl Store {
/// // we inspect the store contents
/// let ex = NamedNodeRef::new("http://example.com")?;
/// assert!(store.contains(QuadRef::new(ex, ex, ex, ex))?);
- /// # Result::<_, Box>::Ok(())
/// ```
#[cfg(not(target_family = "wasm"))]
pub fn bulk_loader(&self) -> BulkLoader {
@@ -1010,7 +982,6 @@ impl<'a> Transaction<'a> {
/// }
/// Result::<_, EvaluationError>::Ok(())
/// })?;
- /// # Result::<_, EvaluationError>::Ok(())
/// ```
pub fn query(
&self,
@@ -1045,7 +1016,6 @@ impl<'a> Transaction<'a> {
/// }
/// Result::<_, EvaluationError>::Ok(())
/// })?;
- /// # Result::<_, EvaluationError>::Ok(())
/// ```
pub fn query_opt(
&self,
@@ -1075,7 +1045,6 @@ impl<'a> Transaction<'a> {
/// }
/// Result::<_, StorageError>::Ok(())
/// })?;
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn quads_for_pattern(
&self,
@@ -1137,7 +1106,6 @@ impl<'a> Transaction<'a> {
/// assert!(transaction.contains(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?);
/// Result::<_, EvaluationError>::Ok(())
/// })?;
- /// # Result::<_, EvaluationError>::Ok(())
/// ```
pub fn update(
&mut self,
@@ -1193,7 +1161,6 @@ impl<'a> Transaction<'a> {
/// let ex = NamedNodeRef::new("http://example.com")?;
/// assert!(store.contains(QuadRef::new(ex, ex, ex, NamedNodeRef::new("http://example.com/g")?))?);
/// assert!(store.contains(QuadRef::new(ex, ex, ex, NamedNodeRef::new("http://example.com/g2")?))?);
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn load_from_read(
&mut self,
@@ -1225,7 +1192,6 @@ impl<'a> Transaction<'a> {
/// // we inspect the store contents
/// let ex = NamedNodeRef::new_unchecked("http://example.com");
/// assert!(store.contains(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?);
- /// # Result::<_,oxigraph::store::LoaderError>::Ok(())
/// ```
#[deprecated(note = "use Transaction.load_from_read instead", since = "0.4.0")]
pub fn load_graph(
@@ -1268,7 +1234,6 @@ impl<'a> Transaction<'a> {
/// // we inspect the store contents
/// let ex = NamedNodeRef::new_unchecked("http://example.com");
/// assert!(store.contains(QuadRef::new(ex, ex, ex, ex))?);
- /// # Result::<_,oxigraph::store::LoaderError>::Ok(())
/// ```
#[deprecated(note = "use Transaction.load_from_read instead", since = "0.4.0")]
pub fn load_dataset(
@@ -1306,7 +1271,6 @@ impl<'a> Transaction<'a> {
/// transaction.insert(quad)
/// })?;
/// assert!(store.contains(quad)?);
- /// # Result::<_,oxigraph::store::StorageError>::Ok(())
/// ```
pub fn insert<'b>(&mut self, quad: impl Into>) -> Result {
self.writer.insert(quad.into())
@@ -1340,7 +1304,6 @@ impl<'a> Transaction<'a> {
/// transaction.remove(quad)
/// })?;
/// assert!(!store.contains(quad)?);
- /// # Result::<_,oxigraph::store::StorageError>::Ok(())
/// ```
pub fn remove<'b>(&mut self, quad: impl Into>) -> Result {
self.writer.remove(quad.into())
@@ -1380,7 +1343,6 @@ impl<'a> Transaction<'a> {
/// transaction.insert_named_graph(ex)
/// })?;
/// assert_eq!(store.named_graphs().collect::,_>>()?, vec![ex.into_owned().into()]);
- /// # Result::<_,oxigraph::store::StorageError>::Ok(())
/// ```
pub fn insert_named_graph<'b>(
&mut self,
@@ -1405,7 +1367,6 @@ impl<'a> Transaction<'a> {
/// })?;
/// assert!(store.is_empty()?);
/// assert_eq!(1, store.named_graphs().count());
- /// # Result::<_,oxigraph::store::StorageError>::Ok(())
/// ```
pub fn clear_graph<'b>(
&mut self,
@@ -1432,7 +1393,6 @@ impl<'a> Transaction<'a> {
/// })?;
/// assert!(store.is_empty()?);
/// assert_eq!(0, store.named_graphs().count());
- /// # Result::<_,oxigraph::store::StorageError>::Ok(())
/// ```
pub fn remove_named_graph<'b>(
&mut self,
@@ -1455,7 +1415,6 @@ impl<'a> Transaction<'a> {
/// transaction.clear()
/// })?;
/// assert!(store.is_empty()?);
- /// # Result::<_,oxigraph::store::StorageError>::Ok(())
/// ```
pub fn clear(&mut self) -> Result<(), StorageError> {
self.writer.clear()
@@ -1531,7 +1490,6 @@ impl Iterator for GraphNameIter {
/// // we inspect the store contents
/// let ex = NamedNodeRef::new("http://example.com")?;
/// assert!(store.contains(QuadRef::new(ex, ex, ex, ex))?);
-/// # Result::<_, Box>::Ok(())
/// ```
#[cfg(not(target_family = "wasm"))]
#[must_use]
@@ -1641,7 +1599,6 @@ impl BulkLoader {
/// let ex = NamedNodeRef::new("http://example.com")?;
/// assert!(store.contains(QuadRef::new(ex, ex, ex, NamedNodeRef::new("http://example.com/g")?))?);
/// assert!(store.contains(QuadRef::new(ex, ex, ex, NamedNodeRef::new("http://example.com/g2")?))?);
- /// # Result::<_, Box>::Ok(())
/// ```
pub fn load_from_read(
&self,
@@ -1697,7 +1654,6 @@ impl BulkLoader {
/// // we inspect the store contents
/// let ex = NamedNodeRef::new("http://example.com")?;
/// assert!(store.contains(QuadRef::new(ex, ex, ex, ex))?);
- /// # Result::<_, Box>::Ok(())
/// ```
#[deprecated(note = "use BulkLoader.load_from_read instead", since = "0.4.0")]
pub fn load_dataset(
@@ -1758,7 +1714,6 @@ impl BulkLoader {
/// // we inspect the store contents
/// let ex = NamedNodeRef::new("http://example.com")?;
/// assert!(store.contains(QuadRef::new(ex, ex, ex, GraphNameRef::DefaultGraph))?);
- /// # Result::<_, Box>::Ok(())
/// ```
#[deprecated(note = "use BulkLoader.load_from_read instead", since = "0.4.0")]
pub fn load_graph(