diff --git a/js/src/model.rs b/js/src/model.rs
index 753ab66e..323b5978 100644
--- a/js/src/model.rs
+++ b/js/src/model.rs
@@ -56,7 +56,7 @@ pub fn literal(
 
 #[wasm_bindgen(js_name = defaultGraph)]
 pub fn default_graph() -> JsDefaultGraph {
-    JsDefaultGraph {}
+    JsDefaultGraph
 }
 
 #[wasm_bindgen(js_name = variable)]
@@ -532,7 +532,7 @@ impl From<GraphName> for JsTerm {
         match name {
             GraphName::NamedNode(node) => node.into(),
             GraphName::BlankNode(node) => node.into(),
-            GraphName::DefaultGraph => Self::DefaultGraph(JsDefaultGraph {}),
+            GraphName::DefaultGraph => Self::DefaultGraph(JsDefaultGraph),
         }
     }
 }
@@ -744,7 +744,7 @@ impl FromJsConverter {
                         ))
                     }
                 }
-                "DefaultGraph" => Ok(JsTerm::DefaultGraph(JsDefaultGraph {})),
+                "DefaultGraph" => Ok(JsTerm::DefaultGraph(JsDefaultGraph)),
                 "Variable" => Ok(Variable::new(
                     Reflect::get(value, &self.value)?
                         .as_string()
@@ -754,8 +754,7 @@ impl FromJsConverter {
                 .into()),
                 "Quad" => Ok(self.to_quad(value)?.into()),
                 _ => Err(format_err!(
-                    "The termType {} is not supported by Oxigraph",
-                    term_type
+                    "The termType {term_type} is not supported by Oxigraph"
                 )),
             }
         } else if term_type.is_undefined() {
diff --git a/js/src/store.rs b/js/src/store.rs
index adee6eef..12d4a039 100644
--- a/js/src/store.rs
+++ b/js/src/store.rs
@@ -8,7 +8,6 @@ use oxigraph::io::{DatasetFormat, GraphFormat};
 use oxigraph::model::*;
 use oxigraph::sparql::QueryResults;
 use oxigraph::store::Store;
-use std::io::Cursor;
 use wasm_bindgen::prelude::*;
 
 #[wasm_bindgen(js_name = Store)]
@@ -171,7 +170,7 @@ impl JsStore {
         if let Some(graph_format) = GraphFormat::from_media_type(mime_type) {
             self.store
                 .load_graph(
-                    Cursor::new(data),
+                    data.as_bytes(),
                     graph_format,
                     &to_graph_name.unwrap_or(GraphName::DefaultGraph),
                     base_iri.as_deref(),
@@ -184,10 +183,10 @@ impl JsStore {
                 ));
             }
             self.store
-                .load_dataset(Cursor::new(data), dataset_format, base_iri.as_deref())
+                .load_dataset(data.as_bytes(), dataset_format, base_iri.as_deref())
                 .map_err(to_err)
         } else {
-            Err(format_err!("Not supported MIME type: {}", mime_type))
+            Err(format_err!("Not supported MIME type: {mime_type}"))
         }
     }
 
@@ -218,7 +217,7 @@ impl JsStore {
                 .dump_dataset(&mut buffer, dataset_format)
                 .map_err(to_err)?;
         } else {
-            return Err(format_err!("Not supported MIME type: {}", mime_type));
+            return Err(format_err!("Not supported MIME type: {mime_type}"));
        }
         String::from_utf8(buffer).map_err(to_err)
     }
diff --git a/lib/oxrdf/src/blank_node.rs b/lib/oxrdf/src/blank_node.rs
index 5b9172c2..0b485beb 100644
--- a/lib/oxrdf/src/blank_node.rs
+++ b/lib/oxrdf/src/blank_node.rs
@@ -264,7 +264,7 @@ impl IdStr {
 
 fn validate_blank_node_identifier(id: &str) -> Result<(), BlankNodeIdParseError> {
     let mut chars = id.chars();
-    let front = chars.next().ok_or(BlankNodeIdParseError {})?;
+    let front = chars.next().ok_or(BlankNodeIdParseError)?;
     match front {
         '0'..='9'
         | '_'
@@ -283,7 +283,7 @@ fn validate_blank_node_identifier(id: &str) -> Result<(), BlankNodeIdParseError>
         | '\u{F900}'..='\u{FDCF}'
         | '\u{FDF0}'..='\u{FFFD}'
         | '\u{10000}'..='\u{EFFFF}' => (),
-        _ => return Err(BlankNodeIdParseError {}),
+        _ => return Err(BlankNodeIdParseError),
     }
     for c in chars {
         match c {
@@ -309,13 +309,13 @@ fn validate_blank_node_identifier(id: &str) -> Result<(), BlankNodeIdParseError>
             | '\u{F900}'..='\u{FDCF}'
             | '\u{FDF0}'..='\u{FFFD}'
             | '\u{10000}'..='\u{EFFFF}' => (),
-            _ => return Err(BlankNodeIdParseError {}),
+            _ => return Err(BlankNodeIdParseError),
         }
     }
 
     // Could not end with a dot
     if id.ends_with('.') {
-        Err(BlankNodeIdParseError {})
+        Err(BlankNodeIdParseError)
     } else {
         Ok(())
     }
diff --git a/lib/oxrdf/src/variable.rs b/lib/oxrdf/src/variable.rs
index af055bb9..7b9cd732 100644
--- a/lib/oxrdf/src/variable.rs
+++ b/lib/oxrdf/src/variable.rs
@@ -169,7 +169,7 @@ impl PartialOrd<VariableRef<'_>> for Variable {
 
 fn validate_variable_identifier(id: &str) -> Result<(), VariableNameParseError> {
     let mut chars = id.chars();
-    let front = chars.next().ok_or(VariableNameParseError {})?;
+    let front = chars.next().ok_or(VariableNameParseError)?;
     match front {
         '0'..='9'
         | '_'
@@ -188,7 +188,7 @@ fn validate_variable_identifier(id: &str) -> Result<(), VariableNameParseError>
         | '\u{F900}'..='\u{FDCF}'
         | '\u{FDF0}'..='\u{FFFD}'
         | '\u{10000}'..='\u{EFFFF}' => (),
-        _ => return Err(VariableNameParseError {}),
+        _ => return Err(VariableNameParseError),
     }
     for c in chars {
         match c {
@@ -211,7 +211,7 @@ fn validate_variable_identifier(id: &str) -> Result<(), VariableNameParseError>
             | '\u{F900}'..='\u{FDCF}'
             | '\u{FDF0}'..='\u{FFFD}'
             | '\u{10000}'..='\u{EFFFF}' => (),
-            _ => return Err(VariableNameParseError {}),
+            _ => return Err(VariableNameParseError),
         }
     }
     Ok(())
diff --git a/lib/sparesults/src/csv.rs b/lib/sparesults/src/csv.rs
index b365c4ac..7d737e5b 100644
--- a/lib/sparesults/src/csv.rs
+++ b/lib/sparesults/src/csv.rs
@@ -375,7 +375,6 @@ impl TsvSolutionsReader {
 mod tests {
     use super::*;
     use std::error::Error;
-    use std::io::Cursor;
     use std::rc::Rc;
     use std::str;
 
@@ -466,7 +465,7 @@ mod tests {
         if let TsvQueryResultsReader::Solutions {
             solutions: mut solutions_iter,
             variables: actual_variables,
-        } = TsvQueryResultsReader::read(Cursor::new(result))?
+        } = TsvQueryResultsReader::read(result.as_slice())?
         {
             assert_eq!(actual_variables.as_slice(), variables.as_slice());
             let mut rows = Vec::new();
@@ -499,7 +498,7 @@ mod tests {
         bad_tsvs.push(&a_lot_of_strings);
         for bad_tsv in bad_tsvs {
             if let Ok(TsvQueryResultsReader::Solutions { mut solutions, .. }) =
-                TsvQueryResultsReader::read(Cursor::new(bad_tsv))
+                TsvQueryResultsReader::read(bad_tsv.as_bytes())
             {
                 while let Ok(Some(_)) = solutions.read_next() {}
             }
diff --git a/lib/src/sparql/http/dummy.rs b/lib/src/sparql/http/dummy.rs
index 3eb47e62..dc8516a6 100644
--- a/lib/src/sparql/http/dummy.rs
+++ b/lib/src/sparql/http/dummy.rs
@@ -7,7 +7,7 @@ pub struct Client;
 
 impl Client {
     pub fn new(_timeout: Option<Duration>, _redirection_limit: usize) -> Self {
-        Self {}
+        Self
     }
 
     #[allow(clippy::unused_self)]
diff --git a/lib/src/sparql/model.rs b/lib/src/sparql/model.rs
index 9525a260..b5b1a650 100644
--- a/lib/src/sparql/model.rs
+++ b/lib/src/sparql/model.rs
@@ -98,16 +98,15 @@ impl QueryResults {
     /// use oxigraph::store::Store;
     /// use oxigraph::io::GraphFormat;
     /// use oxigraph::model::*;
-    /// use std::io::Cursor;
     ///
-    /// let graph = "<http://example.com> <http://example.com> <http://example.com> .\n".as_bytes();
+    /// let graph = "<http://example.com> <http://example.com> <http://example.com> .\n";
     ///
     /// let store = Store::new()?;
-    /// store.load_graph(Cursor::new(graph), GraphFormat::NTriples, GraphNameRef::DefaultGraph, None)?;
+    /// store.load_graph(graph.as_bytes(), GraphFormat::NTriples, GraphNameRef::DefaultGraph, None)?;
     ///
     /// let mut results = Vec::new();
     /// store.query("CONSTRUCT WHERE { ?s ?p ?o }")?.write_graph(&mut results, GraphFormat::NTriples)?;
-    /// assert_eq!(results, graph);
+    /// assert_eq!(results, graph.as_bytes());
     /// # Result::<_,Box<dyn std::error::Error>>::Ok(())
     /// ```
     pub fn write_graph(
diff --git a/lib/src/storage/binary_encoder.rs b/lib/src/storage/binary_encoder.rs
index 4e888c2f..cd2272dd 100644
--- a/lib/src/storage/binary_encoder.rs
+++ b/lib/src/storage/binary_encoder.rs
@@ -2,7 +2,7 @@ use crate::storage::error::{CorruptionError, StorageError};
 use crate::storage::numeric_encoder::{EncodedQuad, EncodedTerm, EncodedTriple, StrHash};
 use crate::storage::small_string::SmallString;
 use oxsdatatypes::*;
-use std::io::{Cursor, Read};
+use std::io::Read;
 use std::mem::size_of;
 
 #[cfg(not(target_family = "wasm"))]
@@ -62,24 +62,23 @@ pub enum QuadEncoding {
 }
 
 impl QuadEncoding {
-    pub fn decode(self, buffer: &[u8]) -> Result<EncodedQuad, StorageError> {
-        let mut cursor = Cursor::new(&buffer);
+    pub fn decode(self, mut buffer: &[u8]) -> Result<EncodedQuad, StorageError> {
         match self {
-            Self::Spog => cursor.read_spog_quad(),
-            Self::Posg => cursor.read_posg_quad(),
-            Self::Ospg => cursor.read_ospg_quad(),
-            Self::Gspo => cursor.read_gspo_quad(),
-            Self::Gpos => cursor.read_gpos_quad(),
-            Self::Gosp => cursor.read_gosp_quad(),
-            Self::Dspo => cursor.read_dspo_quad(),
-            Self::Dpos => cursor.read_dpos_quad(),
-            Self::Dosp => cursor.read_dosp_quad(),
+            Self::Spog => buffer.read_spog_quad(),
+            Self::Posg => buffer.read_posg_quad(),
+            Self::Ospg => buffer.read_ospg_quad(),
+            Self::Gspo => buffer.read_gspo_quad(),
+            Self::Gpos => buffer.read_gpos_quad(),
+            Self::Gosp => buffer.read_gosp_quad(),
+            Self::Dspo => buffer.read_dspo_quad(),
+            Self::Dpos => buffer.read_dpos_quad(),
+            Self::Dosp => buffer.read_dosp_quad(),
         }
     }
 }
 
-pub fn decode_term(buffer: &[u8]) -> Result<EncodedTerm, StorageError> {
-    Cursor::new(&buffer).read_term()
+pub fn decode_term(mut buffer: &[u8]) -> Result<EncodedTerm, StorageError> {
+    buffer.read_term()
 }
 
 pub trait TermReader {
@@ -740,7 +739,7 @@ mod tests {
 
             let mut buffer = Vec::new();
             write_term(&mut buffer, &encoded);
-            assert_eq!(encoded, Cursor::new(&buffer).read_term().unwrap());
+            assert_eq!(encoded, buffer.as_slice().read_term().unwrap());
         }
     }
 }
diff --git a/lib/src/storage/numeric_encoder.rs b/lib/src/storage/numeric_encoder.rs
index 92fc1b5b..19632624 100644
--- a/lib/src/storage/numeric_encoder.rs
+++ b/lib/src/storage/numeric_encoder.rs
@@ -717,13 +717,13 @@ pub fn insert_term<F: FnMut(&StrHash, &str) -> Result<(), StorageError>>(
             if let EncodedTerm::NamedNode { iri_id } = encoded {
                 insert_str(iri_id, node.as_str())
             } else {
-                unreachable!("Invalid term encoding {:?} for {}", encoded, term)
+                unreachable!("Invalid term encoding {encoded:?} for {term}")
             }
         }
         TermRef::BlankNode(node) => match encoded {
             EncodedTerm::BigBlankNode { id_id } => insert_str(id_id, node.as_str()),
             EncodedTerm::SmallBlankNode(..) | EncodedTerm::NumericalBlankNode { .. } => Ok(()),
-            _ => unreachable!("Invalid term encoding {:?} for {}", encoded, term),
+            _ => unreachable!("Invalid term encoding {encoded:?} for {term}"),
         },
         TermRef::Literal(literal) => match encoded {
             EncodedTerm::BigStringLiteral { value_id }
@@ -734,7 +734,7 @@ pub fn insert_term<F: FnMut(&StrHash, &str) -> Result<(), StorageError>>(
                 if let Some(language) = literal.language() {
                     insert_str(language_id, language)
                 } else {
-                    unreachable!("Invalid term encoding {:?} for {}", encoded, term)
+                    unreachable!("Invalid term encoding {encoded:?} for {term}")
                 }
             }
             EncodedTerm::BigBigLangStringLiteral {
@@ -745,7 +745,7 @@ pub fn insert_term<F: FnMut(&StrHash, &str) -> Result<(), StorageError>>(
                 if let Some(language) = literal.language() {
                     insert_str(language_id, language)
                 } else {
-                    unreachable!("Invalid term encoding {:?} for {}", encoded, term)
+                    unreachable!("Invalid term encoding {encoded:?} for {term}")
                 }
             }
             EncodedTerm::SmallTypedLiteral { datatype_id, .. } => {
@@ -776,7 +776,7 @@ pub fn insert_term<F: FnMut(&StrHash, &str) -> Result<(), StorageError>>(
             | EncodedTerm::DurationLiteral(..)
             | EncodedTerm::YearMonthDurationLiteral(..)
             | EncodedTerm::DayTimeDurationLiteral(..) => Ok(()),
-            _ => unreachable!("Invalid term encoding {:?} for {}", encoded, term),
+            _ => unreachable!("Invalid term encoding {encoded:?} for {term}"),
         },
         TermRef::Triple(triple) => {
             if let EncodedTerm::Triple(encoded) = encoded {
@@ -788,7 +788,7 @@ pub fn insert_term<F: FnMut(&StrHash, &str) -> Result<(), StorageError>>(
                 )?;
                 insert_term(triple.object.as_ref(), &encoded.object, insert_str)
             } else {
-                unreachable!("Invalid term encoding {:?} for {}", encoded, term)
+                unreachable!("Invalid term encoding {encoded:?} for {term}")
             }
         }
     }
diff --git a/lib/tests/store.rs b/lib/tests/store.rs
index 5f8a6809..7328ef4a 100644
--- a/lib/tests/store.rs
+++ b/lib/tests/store.rs
@@ -9,7 +9,6 @@ use std::env::temp_dir;
 use std::error::Error;
 #[cfg(not(target_family = "wasm"))]
 use std::fs::{create_dir, remove_dir_all, File};
-use std::io::Cursor;
 #[cfg(not(target_family = "wasm"))]
 use std::io::Write;
 #[cfg(target_os = "linux")]
@@ -109,7 +108,7 @@ fn quads(graph_name: impl Into<GraphNameRef<'static>>) -> Vec<QuadRef<'static>>
 fn test_load_graph() -> Result<(), Box<dyn Error>> {
     let store = Store::new()?;
     store.load_graph(
-        Cursor::new(DATA),
+        DATA.as_bytes(),
         GraphFormat::Turtle,
         GraphNameRef::DefaultGraph,
         None,
@@ -126,7 +125,7 @@ fn test_load_graph() -> Result<(), Box<dyn Error>> {
 fn test_bulk_load_graph() -> Result<(), Box<dyn Error>> {
     let store = Store::new()?;
     store.bulk_loader().load_graph(
-        Cursor::new(DATA),
+        DATA.as_bytes(),
         GraphFormat::Turtle,
         GraphNameRef::DefaultGraph,
         None,
@@ -143,7 +142,7 @@ fn test_bulk_load_graph() -> Result<(), Box<dyn Error>> {
 fn test_bulk_load_graph_lenient() -> Result<(), Box<dyn Error>> {
     let store = Store::new()?;
     store.bulk_loader().on_parse_error(|_| Ok(())).load_graph(
-        Cursor::new(b" .\n ."),
+        b" .\n .".as_slice(),
         GraphFormat::NTriples,
         GraphNameRef::DefaultGraph,
         None,
@@ -162,7 +161,7 @@ fn test_bulk_load_graph_lenient() -> Result<(), Box<dyn Error>> {
 #[test]
 fn test_load_dataset() -> Result<(), Box<dyn Error>> {
     let store = Store::new()?;
-    store.load_dataset(Cursor::new(GRAPH_DATA), DatasetFormat::TriG, None)?;
+    store.load_dataset(GRAPH_DATA.as_bytes(), DatasetFormat::TriG, None)?;
     for q in quads(NamedNodeRef::new_unchecked(
         "http://www.wikidata.org/wiki/Special:EntityData/Q90",
     )) {
@@ -178,7 +177,7 @@ fn test_bulk_load_dataset() -> Result<(), Box<dyn Error>> {
     let store = Store::new()?;
     store
         .bulk_loader()
-        .load_dataset(Cursor::new(GRAPH_DATA), DatasetFormat::TriG, None)?;
+        .load_dataset(GRAPH_DATA.as_bytes(), DatasetFormat::TriG, None)?;
     let graph_name =
         NamedNodeRef::new_unchecked("http://www.wikidata.org/wiki/Special:EntityData/Q90");
     for q in quads(graph_name) {
@@ -194,7 +193,7 @@ fn test_load_graph_generates_new_blank_nodes() -> Result<(), Box<dyn Error>> {
     let store = Store::new()?;
     for _ in 0..2 {
         store.load_graph(
-            Cursor::new("_:a ."),
+            "_:a .".as_bytes(),
            GraphFormat::NTriples,
             GraphNameRef::DefaultGraph,
             None,
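
Note on the Cursor removals above: they rely on the standard library's `impl Read for &[u8]`, so a plain byte slice can be passed wherever `std::io::Read` is expected, and wrapping it in `std::io::Cursor` only adds value when `Seek` is also needed. A minimal standalone sketch of the idea (the helper name `read_all` is illustrative and not part of this patch):

    use std::io::Read;

    // Any reader works here; a plain byte slice is enough because `&[u8]: Read`.
    fn read_all(mut reader: impl Read) -> std::io::Result<Vec<u8>> {
        let mut buf = Vec::new();
        reader.read_to_end(&mut buf)?;
        Ok(buf)
    }

    fn main() -> std::io::Result<()> {
        let data = "<http://example.com> <http://example.com> <http://example.com> .\n";
        // Passing `data.as_bytes()` directly replaces `Cursor::new(data)`;
        // reading advances the slice reference in place.
        let bytes = read_all(data.as_bytes())?;
        assert_eq!(bytes, data.as_bytes());
        Ok(())
    }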