Improves Clippy lint list

pull/489/head
Authored by Tpt 2 years ago; committed by Thomas Tanon
parent a2d8bcaaa3
commit 7175784356
  1. 68
      .cargo/config.toml
  2. 2
      js/src/model.rs
  3. 8
      js/src/store.rs
  4. 4
      lib/oxrdf/src/dataset.rs
  5. 9
      lib/oxrdf/src/interning.rs
  6. 1
      lib/oxrdf/src/lib.rs
  7. 2
      lib/oxsdatatypes/src/decimal.rs
  8. 6
      lib/oxsdatatypes/src/duration.rs
  9. 10
      lib/oxsdatatypes/src/integer.rs
  10. 2
      lib/oxsdatatypes/src/lib.rs
  11. 14
      lib/sparesults/src/csv.rs
  12. 4
      lib/sparesults/src/lib.rs
  13. 3
      lib/sparesults/src/solution.rs
  14. 22
      lib/sparesults/src/xml.rs
  15. 1
      lib/spargebra/src/lib.rs
  16. 19
      lib/spargebra/src/parser.rs
  17. 12
      lib/src/io/error.rs
  18. 2
      lib/src/lib.rs
  19. 2
      lib/src/sparql/dataset.rs
  20. 366
      lib/src/sparql/eval.rs
  21. 7
      lib/src/sparql/model.rs
  22. 26
      lib/src/sparql/plan.rs
  23. 165
      lib/src/sparql/plan_builder.rs
  24. 17
      lib/src/sparql/update.rs
  25. 27
      lib/src/storage/backend/fallback.rs
  26. 60
      lib/src/storage/backend/rocksdb.rs
  27. 5
      lib/src/storage/mod.rs
  28. 4
      lib/src/store.rs
  29. 2
      lib/tests/store.rs
  30. 12
      python/src/io.rs
  31. 2
      python/src/sparql.rs
  32. 8
      python/src/store.rs
  33. 6
      server/src/main.rs
  34. 2
      testsuite/src/manifest.rs
  35. 26
      testsuite/src/sparql_evaluator.rs

@ -2,94 +2,124 @@
rustflags = [
"-Wtrivial-casts",
"-Wtrivial-numeric-casts",
"-Wunsafe_code",
"-Wunsafe-code",
"-Wunused-lifetimes",
"-Wunused-qualifications",
# TODO: 1.63+ "-Wclippy::as-underscore",
# TODO: 1.65+ "-Wclippy::bool-to-int-with-if",
"-Wclippy::borrow-as-ptr",
"-Wclippy::case-sensitive-file-extension-comparisons",
"-Wclippy::cast-lossless",
"-Wclippy::cast-possible-truncation",
"-Wclippy::cast-possible-wrap",
"-Wclippy::cast-precision-loss",
"-Wclippy::cast-ptr-alignment",
"-Wclippy::cast-sign-loss",
"-Wclippy::checked-conversions",
"-Wclippy::clone-on-ref-ptr",
"-Wclippy::cloned-instead-of-copied",
"-Wclippy::copy-iterator",
"-Wclippy::dbg-macro",
"-Wclippy::debug-assert-with-mut-call",
"-Wclippy::decimal-literal-representation",
"-Wclippy::empty-line-after-outer-attr",
"-Wclippy::default-trait-access",
"-Wclippy::default-union-representation",
# TODO: 1.61+ "-Wclippy::deref-by-slicing",
# TODO: 1.63+ "-Wclippy::doc-link-with-quotes",
# TODO: 1.62+ "-Wclippy::empty-drop",
"-Wclippy::empty-enum",
# TODO: on major version "-Wclippy::empty-structs-with-brackets",
"-Wclippy::enum-glob-use",
"-Wclippy::exit",
"-Wclippy::expect-used",
"-Wclippy::expl-impl-clone-on-copy",
"-Wclippy::explicit-deref-methods",
"-Wclippy::explicit-into-iter-loop",
"-Wclippy::explicit-iter-loop",
"-Wclippy::fallible-impl-from",
"-Wclippy::filter-map-next",
"-Wclippy::flat-map-option",
"-Wclippy::fn-to-numeric-cast-any",
# TODO: 1.62+ "-Wclippy::format-push-string",
"-Wclippy::from-iter-instead-of-collect",
"-Wclippy::get-unwrap",
"-Wclippy::if-not-else",
"-Wclippy::if-then-some-else-none",
"-Wclippy::implicit-clone",
"-Wclippy::implicit-saturating-sub",
"-Wclippy::imprecise-flops",
"-Wclippy::inconsistent-struct-constructor",
"-Wclippy::index-refutable-slice",
"-Wclippy::inefficient-to-string",
"-Wclippy::inline-always",
"-Wclippy::inline-asm-x86-att-syntax",
"-Wclippy::inline-asm-x86-intel-syntax",
"-Wclippy::invalid-upcast-comparisons",
"-Wclippy::items-after-statements",
"-Wclippy::large-digit-groups",
# TODO: 1.68+ "-Wclippy::large-futures",
"-Wclippy::large-stack-arrays",
"-Wclippy::large-types-passed-by-value",
"-Wclippy::let-underscore-must-use",
"-Wclippy::let-unit-value",
"-Wclippy::linkedlist",
"-Wclippy::lossy-float-literal",
"-Wclippy::macro-use-imports",
"-Wclippy::manual-assert",
# TODO: 1.65+ "-Wclippy::manual-instant-elapsed",
# TODO: 1.67+ "-Wclippy::manual-let-else",
"-Wclippy::manual-ok-or",
"-Wclippy::map-flatten",
# TODO: 1.65+ "-Wclippy::manual-string-new",
"-Wclippy::many-single-char-names",
"-Wclippy::map-unwrap-or",
"-Wclippy::match-bool",
"-Wclippy::match-same-arms",
"-Wclippy::match-wildcard-for-single-variants",
"-Wclippy::maybe-infinite-iter",
"-Wclippy::mem-forget",
# TODO: 1.63+ "-Wclippy::mismatching-type-param-order",
"-Wclippy::multiple-inherent-impl",
"-Wclippy::mut-mut",
"-Wclippy::mutex-integer",
"-Wclippy::mutex-atomic",
"-Wclippy::naive-bytecount",
"-Wclippy::needless-bitwise-bool",
"-Wclippy::needless-continue",
"-Wclippy::needless-pass-by-value",
"-Wclippy::no-effect-underscore-binding",
# TODO: 1.69+ "-Wclippy::no-mangle-with-rust-abi",
"-Wclippy::non-ascii-literal",
"-Wclippy::nonstandard-macro-braces",
"-Wclippy::path-buf-push-overwrite",
"-Wclippy::print-stderr",
"-Wclippy::print-stdout",
"-Wclippy::ptr-as-ptr",
"-Wclippy::range-minus-one",
"-Wclippy::range-plus-one",
"-Wclippy::rc-buffer",
"-Wclippy::rc-mutex",
"-Wclippy::enum-variant-names",
"-Wclippy::redundant-closure-for-method-calls",
"-Wclippy::redundant-else",
"-Wclippy::redundant-pub-crate",
"-Wclippy::redundant-feature-names",
"-Wclippy::ref-binding-to-reference",
"-Wclippy::ref-option-ref",
"-Wclippy::rest-pat-in-fully-bound-structs",
"-Wclippy::return-self-not-must-use",
"-Wclippy::same-functions-in-if-condition",
# TODO: strange failure on 1.60 "-Wclippy::same-name-method",
# TODO: 1.68+ "-Wclippy::semicolon-outside-block",
"-Wclippy::single-match-else",
"-Wclippy::stable-sort-primitive",
"-Wclippy::str-to-string",
"-Wclippy::string-add",
"-Wclippy::string-add-assign",
"-Wclippy::string-lit-as-bytes",
"-Wclippy::string-to-string",
"-Wclippy::suboptimal-flops",
"-Wclippy::suspicious-operation-groupings",
# TODO: 1.67+ "-Wclippy::suspicious-xor-used-as-pow",
"-Wclippy::todo",
"-Wclippy::trait-duplication-in-bounds",
"-Wclippy::transmute-ptr-to-ptr",
"-Wclippy::trivial-regex",
"-Wclippy::trivially-copy-pass-by-ref",
"-Wclippy::type-repetition-in-bounds",
"-Wclippy::try-err",
"-Wclippy::unicode-not-nfc",
"-Wclippy::unimplemented",
# TODO: 1.66+ "-Wclippy::uninlined-format-args",
# TODO: 1.70+ "-Wclippy::unnecessary-box-returns",
# TODO: 1.61+ "-Wclippy::unnecessary-join",
# TODO: 1.67+ "-Wclippy::unnecessary-safety-comment",
# TODO: 1.67+ "-Wclippy::unnecessary-safety-doc",
"-Wclippy::unnecessary-self-imports",
"-Wclippy::unnecessary-wraps",
"-Wclippy::unneeded-field-pattern",
@ -99,13 +129,9 @@ rustflags = [
"-Wclippy::unused-async",
"-Wclippy::unused-self",
"-Wclippy::use-debug",
"-Wclippy::use-self",
"-Wclippy::used-underscore-binding",
"-Wclippy::useless-let-if-seq",
"-Wclippy::useless-transmute",
"-Wclippy::verbose-bit-mask",
"-Wclippy::verbose-file-reads",
"-Wclippy::wildcard-dependencies",
"-Wclippy::zero-sized-map-values",
"-Wclippy::wrong-self-convention",
]

@ -19,7 +19,7 @@ thread_local! {
#[wasm_bindgen(js_name = namedNode)]
pub fn named_node(value: String) -> Result<JsNamedNode, JsValue> {
NamedNode::new(value)
.map(|v| v.into())
.map(Into::into)
.map_err(|v| UriError::new(&v.to_string()).into())
}

@ -76,28 +76,28 @@ impl JsStore {
None
}
.as_ref()
.map(|t: &NamedOrBlankNode| t.into()),
.map(<&Subject>::into),
if let Some(predicate) = FROM_JS.with(|c| c.to_optional_term(predicate))? {
Some(NamedNode::try_from(predicate)?)
} else {
None
}
.as_ref()
.map(|t: &NamedNode| t.into()),
.map(<&NamedNode>::into),
if let Some(object) = FROM_JS.with(|c| c.to_optional_term(object))? {
Some(object.try_into()?)
} else {
None
}
.as_ref()
.map(|t: &Term| t.into()),
.map(<&Term>::into),
if let Some(graph_name) = FROM_JS.with(|c| c.to_optional_term(graph_name))? {
Some(graph_name.try_into()?)
} else {
None
}
.as_ref()
.map(|t: &GraphName| t.into()),
.map(<&GraphName>::into),
)
.map(|v| v.map(|v| JsQuad::from(v).into()))
.collect::<Result<Vec<_>, _>>()

@ -705,9 +705,7 @@ impl Dataset {
InternedTerm,
InternedGraphName,
)> {
let b_prime = partition
.iter()
.find_map(|(_, b)| if b.len() > 1 { Some(b) } else { None });
let b_prime = partition.iter().find_map(|(_, b)| (b.len() > 1).then(|| b));
if let Some(b_prime) = b_prime {
b_prime
.iter()

@ -463,11 +463,10 @@ impl InternedTriple {
predicate: InternedNamedNode::encoded_from(triple.predicate, interner)?,
object: InternedTerm::encoded_from(triple.object, interner)?,
};
if interner.triples.contains_key(&interned_triple) {
Some(interned_triple)
} else {
None
}
interner
.triples
.contains_key(&interned_triple)
.then(|| interned_triple)
}
pub fn next(&self) -> Self {

@ -1,5 +1,4 @@
#![doc = include_str!("../README.md")]
#![deny(unsafe_code)]
#![doc(test(attr(deny(warnings))))]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc(html_favicon_url = "https://raw.githubusercontent.com/oxigraph/oxigraph/main/logo.svg")]

@ -380,7 +380,7 @@ impl FromStr for Decimal {
};
let mut value = 0_i128;
let with_before_dot = input.first().map_or(false, |c| c.is_ascii_digit());
let with_before_dot = input.first().map_or(false, u8::is_ascii_digit);
while let Some(c) = input.first() {
if c.is_ascii_digit() {
value = value

@ -170,8 +170,10 @@ impl fmt::Display for Duration {
let h = (s_int % 86400) / 3600;
let m = (s_int % 3600) / 60;
let s = ss
.checked_sub(Decimal::try_from(d * 86400 + h * 3600 + m * 60).unwrap())
.unwrap(); //could not fail
.checked_sub(
Decimal::try_from(d * 86400 + h * 3600 + m * 60).map_err(|_| fmt::Error)?,
)
.ok_or(fmt::Error)?;
if d != 0 {
write!(f, "{d}D")?;

@ -258,9 +258,9 @@ mod tests {
assert!(Integer::try_from(Float::from(f32::MIN)).is_err());
assert!(Integer::try_from(Float::from(f32::MAX)).is_err());
assert!(
Integer::try_from(Float::from(1_672_507_302_466.))
Integer::try_from(Float::from(1_672_507_300_000.))
.unwrap()
.checked_sub(Integer::from_str("1672507302466")?)
.checked_sub(Integer::from_str("1672507300000")?)
.unwrap()
.abs()
< Integer::from(1_000_000)
@ -283,12 +283,12 @@ mod tests {
Some(Integer::from_str("-123")?)
);
assert!(
Integer::try_from(Double::from(1_672_507_302_466.))
Integer::try_from(Double::from(1_672_507_300_000.))
.unwrap()
.checked_sub(Integer::from_str("1672507302466").unwrap())
.checked_sub(Integer::from_str("1672507300000").unwrap())
.unwrap()
.abs()
< Integer::from(1)
< Integer::from(10)
);
assert!(Integer::try_from(Double::from(f64::NAN)).is_err());
assert!(Integer::try_from(Double::from(f64::INFINITY)).is_err());

@ -1,9 +1,9 @@
#![doc = include_str!("../README.md")]
#![deny(unsafe_code)]
#![doc(test(attr(deny(warnings))))]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc(html_favicon_url = "https://raw.githubusercontent.com/oxigraph/oxigraph/main/logo.svg")]
#![doc(html_logo_url = "https://raw.githubusercontent.com/oxigraph/oxigraph/main/logo.svg")]
#![allow(clippy::return_self_not_must_use)]
mod boolean;
mod date_time;

@ -160,7 +160,7 @@ fn write_tsv_term<'a>(term: impl Into<TermRef<'a>>, sink: &mut impl Write) -> io
let value = literal.value();
if let Some(language) = literal.language() {
write_tsv_quoted_str(value, sink)?;
write!(sink, "@{}", language)
write!(sink, "@{language}")
} else {
match literal.datatype() {
xsd::BOOLEAN if is_turtle_boolean(value) => sink.write_all(value.as_bytes()),
@ -216,7 +216,7 @@ fn is_turtle_integer(value: &str) -> bool {
} else if let Some(v) = value.strip_prefix(b"-") {
value = v;
}
!value.is_empty() && value.iter().all(|c| c.is_ascii_digit())
!value.is_empty() && value.iter().all(u8::is_ascii_digit)
}
fn is_turtle_decimal(value: &str) -> bool {
@ -227,7 +227,7 @@ fn is_turtle_decimal(value: &str) -> bool {
} else if let Some(v) = value.strip_prefix(b"-") {
value = v;
}
while value.first().map_or(false, |c| c.is_ascii_digit()) {
while value.first().map_or(false, u8::is_ascii_digit) {
value = &value[1..];
}
if let Some(v) = value.strip_prefix(b".") {
@ -235,7 +235,7 @@ fn is_turtle_decimal(value: &str) -> bool {
} else {
return false;
}
!value.is_empty() && value.iter().all(|c| c.is_ascii_digit())
!value.is_empty() && value.iter().all(u8::is_ascii_digit)
}
fn is_turtle_double(value: &str) -> bool {
@ -248,14 +248,14 @@ fn is_turtle_double(value: &str) -> bool {
value = v;
}
let mut with_before = false;
while value.first().map_or(false, |c| c.is_ascii_digit()) {
while value.first().map_or(false, u8::is_ascii_digit) {
value = &value[1..];
with_before = true;
}
let mut with_after = false;
if let Some(v) = value.strip_prefix(b".") {
value = v;
while value.first().map_or(false, |c| c.is_ascii_digit()) {
while value.first().map_or(false, u8::is_ascii_digit) {
value = &value[1..];
with_after = true;
}
@ -272,7 +272,7 @@ fn is_turtle_double(value: &str) -> bool {
} else if let Some(v) = value.strip_prefix(b"-") {
value = v;
}
(with_before || with_after) && !value.is_empty() && value.iter().all(|c| c.is_ascii_digit())
(with_before || with_after) && !value.is_empty() && value.iter().all(u8::is_ascii_digit)
}
pub enum TsvQueryResultsReader<R: BufRead> {

@ -1,5 +1,4 @@
#![doc = include_str!("../README.md")]
#![deny(unsafe_code)]
#![doc(test(attr(deny(warnings))))]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc(html_favicon_url = "https://raw.githubusercontent.com/oxigraph/oxigraph/main/logo.svg")]
@ -276,6 +275,7 @@ pub enum QueryResultsReader<R: BufRead> {
/// }
/// # Result::<(),sparesults::ParseError>::Ok(())
/// ```
#[allow(clippy::rc_buffer)]
pub struct SolutionsReader<R: BufRead> {
variables: Rc<Vec<Variable>>,
solutions: SolutionsReaderKind<R>,
@ -318,7 +318,7 @@ impl<R: BufRead> Iterator for SolutionsReader<R> {
SolutionsReaderKind::Tsv(reader) => reader.read_next(),
}
.transpose()?
.map(|values| (self.variables.clone(), values).into()),
.map(|values| (Rc::clone(&self.variables), values).into()),
)
}
}

@ -18,6 +18,7 @@ use std::rc::Rc;
/// assert_eq!(solution.get("foo"), Some(&Literal::from(1).into())); // Get the value of the variable ?foo if it exists (here yes).
/// assert_eq!(solution.get(1), None); // Get the value of the second column if it exists (here no).
/// ```
#[allow(clippy::rc_buffer)]
pub struct QuerySolution {
variables: Rc<Vec<Variable>>,
values: Vec<Option<Term>>,
@ -69,7 +70,7 @@ impl QuerySolution {
/// ```
#[inline]
pub fn is_empty(&self) -> bool {
self.values.iter().all(|v| v.is_none())
self.values.iter().all(Option::is_none)
}
/// Returns an iterator over bound variables.

@ -482,20 +482,31 @@ impl<R: BufRead> XmlSolutionsReader<R> {
}
state = State::Triple;
}
State::Uri => state = self.stack.pop().unwrap(),
State::Uri => {
state = self
.stack
.pop()
.ok_or_else(|| SyntaxError::msg("Empty stack"))?
}
State::BNode => {
if term.is_none() {
//We default to a random bnode
term = Some(BlankNode::default().into())
}
state = self.stack.pop().unwrap()
state = self
.stack
.pop()
.ok_or_else(|| SyntaxError::msg("Empty stack"))?
}
State::Literal => {
if term.is_none() {
//We default to the empty literal
term = Some(build_literal("", lang.take(), datatype.take())?.into())
}
state = self.stack.pop().unwrap();
state = self
.stack
.pop()
.ok_or_else(|| SyntaxError::msg("Empty stack"))?;
}
State::Triple => {
#[cfg(feature = "rdf-star")]
@ -530,7 +541,10 @@ impl<R: BufRead> XmlSolutionsReader<R> {
)
.into(),
);
state = self.stack.pop().unwrap();
state = self
.stack
.pop()
.ok_or_else(|| SyntaxError::msg("Empty stack"))?;
} else {
return Err(
SyntaxError::msg("A <triple> should contain a <subject>, a <predicate> and an <object>").into()

@ -1,5 +1,4 @@
#![doc = include_str!("../README.md")]
#![deny(unsafe_code)]
#![doc(test(attr(deny(warnings))))]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc(html_favicon_url = "https://raw.githubusercontent.com/oxigraph/oxigraph/main/logo.svg")]

@ -352,7 +352,7 @@ impl<F, T: From<F>> From<FocusedTriplePattern<F>> for FocusedTripleOrPathPattern
fn from(input: FocusedTriplePattern<F>) -> Self {
Self {
focus: input.focus.into(),
patterns: input.patterns.into_iter().map(|p| p.into()).collect(),
patterns: input.patterns.into_iter().map(Into::into).collect(),
}
}
}
@ -736,7 +736,7 @@ impl ParserState {
let aggregates = self.aggregates.last_mut().ok_or("Unexpected aggregate")?;
Ok(aggregates
.iter()
.find_map(|(v, a)| if a == &agg { Some(v) } else { None })
.find_map(|(v, a)| (a == &agg).then(|| v))
.cloned()
.unwrap_or_else(|| {
let new_var = variable();
@ -884,13 +884,14 @@ impl<'a> Iterator for UnescapeCharsIterator<'a> {
}
match self.iter.next()? {
'\\' => match self.iter.next() {
Some(ch) => match self.replacement.get(ch) {
Some(replace) => Some(replace),
None => {
Some(ch) => {
if let Some(replace) = self.replacement.get(ch) {
Some(replace)
} else {
self.buffer = Some(ch);
Some('\\')
}
},
}
None => Some('\\'),
},
c => Some(c),
@ -1590,7 +1591,7 @@ parser! {
//[74]
rule ConstructTriples() -> Vec<TriplePattern> = p:ConstructTriples_item() ** ("." _) "."? {
p.into_iter().flat_map(|c| c.into_iter()).collect()
p.into_iter().flatten().collect()
}
rule ConstructTriples_item() -> Vec<TriplePattern> = t:TriplesSameSubject() _ { t }
@ -1701,7 +1702,7 @@ parser! {
//[83]
rule PropertyListPathNotEmpty() -> FocusedTripleOrPathPattern<Vec<(VariableOrPropertyPath,Vec<AnnotatedTermPath>)>> = hp:(VerbPath() / VerbSimple()) _ ho:ObjectListPath() _ t:PropertyListPathNotEmpty_item()* {
t.into_iter().flat_map(|e| e.into_iter()).fold(FocusedTripleOrPathPattern {
t.into_iter().flatten().fold(FocusedTripleOrPathPattern {
focus: vec![(hp, ho.focus)],
patterns: ho.patterns
}, |mut a, b| {
@ -2036,7 +2037,7 @@ parser! {
//[121]
rule BuiltInCall() -> Expression =
a:Aggregate() {? state.new_aggregation(a).map(|v| v.into()) } /
a:Aggregate() {? state.new_aggregation(a).map(Into::into) } /
i("STR") _ "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Str, vec![e]) } /
i("LANG") _ "(" _ e:Expression() _ ")" { Expression::FunctionCall(Function::Lang, vec![e]) } /
i("LANGMATCHES") _ "(" _ a:Expression() _ "," _ b:Expression() _ ")" { Expression::FunctionCall(Function::LangMatches, vec![a, b]) } /

@ -45,12 +45,14 @@ impl Error for ParseError {
}
}
#[allow(clippy::fallible_impl_from)]
impl From<TurtleError> for ParseError {
#[inline]
fn from(error: TurtleError) -> Self {
let error = io::Error::from(error);
if error.get_ref().map_or(false, |e| e.is::<TurtleError>()) {
if error.get_ref().map_or(
false,
<(dyn Error + Send + Sync + 'static)>::is::<TurtleError>,
) {
Self::Syntax(SyntaxError {
inner: SyntaxErrorKind::Turtle(*error.into_inner().unwrap().downcast().unwrap()),
})
@ -60,12 +62,14 @@ impl From<TurtleError> for ParseError {
}
}
#[allow(clippy::fallible_impl_from)]
impl From<RdfXmlError> for ParseError {
#[inline]
fn from(error: RdfXmlError) -> Self {
let error = io::Error::from(error);
if error.get_ref().map_or(false, |e| e.is::<RdfXmlError>()) {
if error.get_ref().map_or(
false,
<(dyn Error + Send + Sync + 'static)>::is::<RdfXmlError>,
) {
Self::Syntax(SyntaxError {
inner: SyntaxErrorKind::RdfXml(*error.into_inner().unwrap().downcast().unwrap()),
})

@ -3,7 +3,7 @@
#![doc(html_logo_url = "https://raw.githubusercontent.com/oxigraph/oxigraph/main/logo.svg")]
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc(test(attr(deny(warnings))))]
#![deny(unsafe_code)]
#![allow(clippy::return_self_not_must_use)]
pub mod io;
pub mod sparql;

@ -40,7 +40,7 @@ impl DatasetView {
) -> impl Iterator<Item = Result<EncodedQuad, EvaluationError>> + 'static {
self.reader
.quads_for_pattern(subject, predicate, object, graph_name)
.map(|t| t.map_err(|e| e.into()))
.map(|t| t.map_err(Into::into))
}
#[allow(clippy::needless_collect)]

File diff suppressed because it is too large. (Load Diff)

@ -160,6 +160,7 @@ impl<R: BufRead + 'static> From<QueryResultsReader<R>> for QueryResults {
/// }
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ```
#[allow(clippy::rc_buffer)]
pub struct QuerySolutionIter {
variables: Rc<Vec<Variable>>,
iter: Box<dyn Iterator<Item = Result<QuerySolution, EvaluationError>>>,
@ -171,8 +172,10 @@ impl QuerySolutionIter {
iter: impl Iterator<Item = Result<Vec<Option<Term>>, EvaluationError>> + 'static,
) -> Self {
Self {
variables: variables.clone(),
iter: Box::new(iter.map(move |t| t.map(|values| (variables.clone(), values).into()))),
variables: Rc::clone(&variables),
iter: Box::new(
iter.map(move |t| t.map(|values| (Rc::clone(&variables), values).into())),
),
}
}

@ -22,7 +22,7 @@ pub enum PlanNode {
},
Service {
service_name: PatternValue,
variables: Rc<Vec<Variable>>,
variables: Rc<[Variable]>,
child: Rc<Self>,
graph_pattern: Rc<GraphPattern>,
silent: bool,
@ -71,7 +71,7 @@ pub enum PlanNode {
ForLoopLeftJoin {
left: Rc<Self>,
right: Rc<Self>,
possible_problem_vars: Rc<Vec<usize>>, //Variables that should not be part of the entry of the left join
possible_problem_vars: Rc<[usize]>, //Variables that should not be part of the entry of the left join
},
Extend {
child: Rc<Self>,
@ -99,13 +99,13 @@ pub enum PlanNode {
},
Project {
child: Rc<Self>,
mapping: Rc<Vec<(PlanVariable, PlanVariable)>>, // pairs of (variable key in child, variable key in output)
mapping: Rc<[(PlanVariable, PlanVariable)]>, // pairs of (variable key in child, variable key in output)
},
Aggregate {
// By definition the group by key are the range 0..key_mapping.len()
child: Rc<Self>,
key_variables: Rc<Vec<PlanVariable>>,
aggregates: Rc<Vec<(PlanAggregation, PlanVariable)>>,
key_variables: Rc<[PlanVariable]>,
aggregates: Rc<[(PlanAggregation, PlanVariable)]>,
},
}
@ -236,7 +236,10 @@ impl PlanNode {
match self {
Self::StaticBindings { encoded_tuples, .. } => {
let mut variables = BTreeMap::default(); // value true iff always bound
let max_tuple_length = encoded_tuples.iter().map(|t| t.capacity()).fold(0, max);
let max_tuple_length = encoded_tuples
.iter()
.map(EncodedTuple::capacity)
.fold(0, max);
for tuple in encoded_tuples {
for key in 0..max_tuple_length {
match variables.entry(key) {
@ -649,6 +652,7 @@ impl PlanExpression {
}
impl fmt::Display for PlanExpression {
#[allow(clippy::many_single_char_names)]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Variable(v) => {
@ -838,7 +842,7 @@ pub enum PlanAggregationFunction {
Max,
Avg,
Sample,
GroupConcat { separator: Rc<String> },
GroupConcat { separator: Rc<str> },
}
#[derive(Debug, Clone)]
@ -850,7 +854,7 @@ pub enum PlanPropertyPath {
ZeroOrMore(Rc<Self>),
OneOrMore(Rc<Self>),
ZeroOrOne(Rc<Self>),
NegatedPropertySet(Rc<Vec<PlanTerm<NamedNode>>>),
NegatedPropertySet(Rc<[PlanTerm<NamedNode>]>),
}
impl fmt::Display for PlanPropertyPath {
@ -1046,7 +1050,7 @@ impl PlanNodeWithStats {
"Aggregate({})",
key_variables
.iter()
.map(|c| c.to_string())
.map(ToString::to_string)
.chain(aggregates.iter().map(|(agg, v)| format!("{agg} -> {v}")))
.collect::<Vec<_>>()
.join(", ")
@ -1107,7 +1111,7 @@ impl PlanNodeWithStats {
format!(
"Sort({})",
by.iter()
.map(|c| c.to_string())
.map(ToString::to_string)
.collect::<Vec<_>>()
.join(", ")
)
@ -1117,7 +1121,7 @@ impl PlanNodeWithStats {
"StaticBindings({})",
variables
.iter()
.map(|v| v.to_string())
.map(ToString::to_string)
.collect::<Vec<_>>()
.join(", ")
)

@ -122,7 +122,7 @@ impl<'a> PlanBuilder<'a> {
PlanNode::ForLoopLeftJoin {
left: Rc::new(left),
right: Rc::new(right),
possible_problem_vars: Rc::new(possible_problem_vars.into_iter().collect()),
possible_problem_vars: possible_problem_vars.into_iter().collect(),
}
} else {
PlanNode::HashLeftJoin {
@ -191,7 +191,7 @@ impl<'a> PlanBuilder<'a> {
let service_name = self.pattern_value_from_named_node_or_variable(name, variables);
PlanNode::Service {
service_name,
variables: Rc::new(variables.clone()),
variables: Rc::from(variables.as_slice()),
child: Rc::new(child),
graph_pattern: Rc::new(inner.as_ref().clone()),
silent: *silent,
@ -203,22 +203,19 @@ impl<'a> PlanBuilder<'a> {
aggregates,
} => PlanNode::Aggregate {
child: Rc::new(self.build_for_graph_pattern(inner, variables, graph_name)?),
key_variables: Rc::new(
by.iter()
.map(|k| build_plan_variable(variables, k))
.collect(),
),
aggregates: Rc::new(
aggregates
.iter()
.map(|(v, a)| {
Ok((
self.build_for_aggregate(a, variables, graph_name)?,
build_plan_variable(variables, v),
))
})
.collect::<Result<Vec<_>, EvaluationError>>()?,
),
key_variables: by
.iter()
.map(|k| build_plan_variable(variables, k))
.collect(),
aggregates: aggregates
.iter()
.map(|(v, a)| {
Ok((
self.build_for_aggregate(a, variables, graph_name)?,
build_plan_variable(variables, v),
))
})
.collect::<Result<_, EvaluationError>>()?,
},
GraphPattern::Values {
variables: table_variables,
@ -283,21 +280,19 @@ impl<'a> PlanBuilder<'a> {
&mut inner_variables,
&inner_graph_name,
)?),
mapping: Rc::new(
projection
.iter()
.enumerate()
.map(|(new_variable, variable)| {
(
PlanVariable {
encoded: new_variable,
plain: variable.clone(),
},
build_plan_variable(variables, variable),
)
})
.collect(),
),
mapping: projection
.iter()
.enumerate()
.map(|(new_variable, variable)| {
(
PlanVariable {
encoded: new_variable,
plain: variable.clone(),
},
build_plan_variable(variables, variable),
)
})
.collect(),
}
}
GraphPattern::Distinct { inner } => PlanNode::HashDeduplicate {
@ -378,16 +373,14 @@ impl<'a> PlanBuilder<'a> {
PropertyPathExpression::ZeroOrOne(p) => {
PlanPropertyPath::ZeroOrOne(Rc::new(self.build_for_path(p)))
}
PropertyPathExpression::NegatedPropertySet(p) => {
PlanPropertyPath::NegatedPropertySet(Rc::new(
p.iter()
.map(|p| PlanTerm {
encoded: self.build_term(p),
plain: p.clone(),
})
.collect(),
))
}
PropertyPathExpression::NegatedPropertySet(p) => PlanPropertyPath::NegatedPropertySet(
p.iter()
.map(|p| PlanTerm {
encoded: self.build_term(p),
plain: p.clone(),
})
.collect(),
),
}
}
@ -1084,7 +1077,7 @@ impl<'a> PlanBuilder<'a> {
separator,
} => Ok(PlanAggregation {
function: PlanAggregationFunction::GroupConcat {
separator: Rc::new(separator.clone().unwrap_or_else(|| " ".to_owned())),
separator: Rc::from(separator.as_deref().unwrap_or(" ")),
},
parameter: Some(self.build_for_expression(expr, variables, graph_name)?),
distinct: *distinct,
@ -1219,13 +1212,11 @@ impl<'a> PlanBuilder<'a> {
}
fn convert_plan_variable(from_variable: &PlanVariable, to: &mut Vec<Variable>) -> PlanVariable {
let encoded = if let Some(to_id) = to.iter().enumerate().find_map(|(to_id, var)| {
if *var == from_variable.plain {
Some(to_id)
} else {
None
}
}) {
let encoded = if let Some(to_id) = to
.iter()
.enumerate()
.find_map(|(to_id, var)| (*var == from_variable.plain).then(|| to_id))
{
to_id
} else {
to.push(Variable::new_unchecked(format!("{:x}", random::<u128>())));
@ -1423,25 +1414,25 @@ impl<'a> PlanBuilder<'a> {
if filter_variables.iter().all(|v| left.is_variable_bound(*v)) {
if filter_variables.iter().all(|v| right.is_variable_bound(*v)) {
PlanNode::HashJoin {
left: Rc::new(self.push_filter(left.clone(), filter.clone())),
right: Rc::new(self.push_filter(right.clone(), filter)),
left: Rc::new(self.push_filter(Rc::clone(left), filter.clone())),
right: Rc::new(self.push_filter(Rc::clone(right), filter)),
}
} else {
PlanNode::HashJoin {
left: Rc::new(self.push_filter(left.clone(), filter)),
right: right.clone(),
left: Rc::new(self.push_filter(Rc::clone(left), filter)),
right: Rc::clone(right),
}
}
} else if filter_variables.iter().all(|v| right.is_variable_bound(*v)) {
PlanNode::HashJoin {
left: left.clone(),
right: Rc::new(self.push_filter(right.clone(), filter)),
left: Rc::clone(left),
right: Rc::new(self.push_filter(Rc::clone(right), filter)),
}
} else {
PlanNode::Filter {
child: Rc::new(PlanNode::HashJoin {
left: left.clone(),
right: right.clone(),
left: Rc::clone(left),
right: Rc::clone(right),
}),
expression: filter,
}
@ -1450,20 +1441,20 @@ impl<'a> PlanBuilder<'a> {
PlanNode::ForLoopJoin { left, right } => {
if filter_variables.iter().all(|v| left.is_variable_bound(*v)) {
PlanNode::ForLoopJoin {
left: Rc::new(self.push_filter(left.clone(), filter)),
right: right.clone(),
left: Rc::new(self.push_filter(Rc::clone(left), filter)),
right: Rc::clone(right),
}
} else if filter_variables.iter().all(|v| right.is_variable_bound(*v)) {
PlanNode::ForLoopJoin {
//TODO: should we do that always?
left: left.clone(),
right: Rc::new(self.push_filter(right.clone(), filter)),
left: Rc::clone(left),
right: Rc::new(self.push_filter(Rc::clone(right), filter)),
}
} else {
PlanNode::Filter {
child: Rc::new(PlanNode::HashJoin {
left: left.clone(),
right: right.clone(),
left: Rc::clone(left),
right: Rc::clone(right),
}),
expression: filter,
}
@ -1477,14 +1468,14 @@ impl<'a> PlanBuilder<'a> {
//TODO: handle the case where the filter generates an expression variable
if filter_variables.iter().all(|v| child.is_variable_bound(*v)) {
PlanNode::Extend {
child: Rc::new(self.push_filter(child.clone(), filter)),
child: Rc::new(self.push_filter(Rc::clone(child), filter)),
expression: expression.clone(),
variable: variable.clone(),
}
} else {
PlanNode::Filter {
child: Rc::new(PlanNode::Extend {
child: child.clone(),
child: Rc::clone(child),
expression: expression.clone(),
variable: variable.clone(),
}),
@ -1495,12 +1486,12 @@ impl<'a> PlanBuilder<'a> {
PlanNode::Filter { child, expression } => {
if filter_variables.iter().all(|v| child.is_variable_bound(*v)) {
PlanNode::Filter {
child: Rc::new(self.push_filter(child.clone(), filter)),
child: Rc::new(self.push_filter(Rc::clone(child), filter)),
expression: expression.clone(),
}
} else {
PlanNode::Filter {
child: child.clone(),
child: Rc::clone(child),
expression: Box::new(PlanExpression::And(expression.clone(), filter)),
}
}
@ -1508,7 +1499,7 @@ impl<'a> PlanBuilder<'a> {
PlanNode::Union { children } => PlanNode::Union {
children: children
.iter()
.map(|c| Rc::new(self.push_filter(c.clone(), filter.clone())))
.map(|c| Rc::new(self.push_filter(Rc::clone(c), filter.clone())))
.collect(),
},
_ => PlanNode::Filter {
@ -1541,12 +1532,11 @@ impl<'a> PlanBuilder<'a> {
}
fn build_plan_variable(variables: &mut Vec<Variable>, variable: &Variable) -> PlanVariable {
let encoded = match slice_key(variables, variable) {
Some(key) => key,
None => {
variables.push(variable.clone());
variables.len() - 1
}
let encoded = if let Some(key) = slice_key(variables, variable) {
key
} else {
variables.push(variable.clone());
variables.len() - 1
};
PlanVariable {
plain: variable.clone(),
@ -1555,12 +1545,11 @@ fn build_plan_variable(variables: &mut Vec<Variable>, variable: &Variable) -> Pl
}
fn bnode_key(blank_nodes: &mut Vec<BlankNode>, blank_node: &BlankNode) -> usize {
match slice_key(blank_nodes, blank_node) {
Some(key) => key,
None => {
blank_nodes.push(blank_node.clone());
blank_nodes.len() - 1
}
if let Some(key) = slice_key(blank_nodes, blank_node) {
key
} else {
blank_nodes.push(blank_node.clone());
blank_nodes.len() - 1
}
}
@ -1673,21 +1662,13 @@ fn compile_static_pattern_if_exists(
options: Option<&Expression>,
) -> Option<Regex> {
let static_pattern = if let Expression::Literal(pattern) = pattern {
if pattern.datatype() == xsd::STRING {
Some(pattern.value())
} else {
None
}
(pattern.datatype() == xsd::STRING).then(|| pattern.value())
} else {
None
};
let static_options = if let Some(options) = options {
if let Expression::Literal(options) = options {
if options.datatype() == xsd::STRING {
Some(Some(options.value()))
} else {
None
}
(options.datatype() == xsd::STRING).then(|| Some(options.value()))
} else {
None
}

@ -71,7 +71,14 @@ impl<'a, 'b: 'a> SimpleUpdateEvaluator<'a, 'b> {
insert,
pattern,
..
} => self.eval_delete_insert(delete, insert, using_dataset.as_ref().unwrap(), pattern),
} => self.eval_delete_insert(
delete,
insert,
using_dataset
.as_ref()
.ok_or_else(|| EvaluationError::msg("No dataset"))?,
pattern,
),
GraphUpdateOperation::Load {
silent,
source,
@ -119,14 +126,14 @@ impl<'a, 'b: 'a> SimpleUpdateEvaluator<'a, 'b> {
) -> Result<(), EvaluationError> {
let dataset = Rc::new(DatasetView::new(self.transaction.reader(), using));
let (plan, variables) = PlanBuilder::build(
dataset.as_ref(),
&dataset,
algebra,
false,
&self.options.query_options.custom_functions,
!self.options.query_options.without_optimizations,
)?;
let evaluator = SimpleEvaluator::new(
dataset.clone(),
Rc::clone(&dataset),
self.base_iri.clone(),
self.options.query_options.service_handler(),
Rc::new(self.options.query_options.custom_functions.clone()),
@ -374,7 +381,7 @@ impl<'a, 'b: 'a> SimpleUpdateEvaluator<'a, 'b> {
TermPattern::Literal(term) => Some(term.clone().into()),
TermPattern::Triple(triple) => {
Self::convert_triple_pattern(triple, variables, values, dataset, bnodes)?
.map(|t| t.into())
.map(Into::into)
}
TermPattern::Variable(v) => Self::lookup_variable(v, variables, values)
.map(|node| dataset.decode_term(&node))
@ -507,7 +514,7 @@ impl<'a, 'b: 'a> SimpleUpdateEvaluator<'a, 'b> {
GroundTermPattern::Literal(term) => Some(term.clone().into()),
GroundTermPattern::Triple(triple) => {
Self::convert_ground_triple_pattern(triple, variables, values, dataset)?
.map(|t| t.into())
.map(Into::into)
}
GroundTermPattern::Variable(v) => Self::lookup_variable(v, variables, values)
.map(|node| dataset.decode_term(&node))

@ -29,20 +29,18 @@ impl Db {
Ok(Self(Arc::new(RwLock::new(trees))))
}
#[allow(clippy::unwrap_in_result)]
pub fn column_family(&self, name: &'static str) -> Option<ColumnFamily> {
let name = ColumnFamily(name);
if self.0.read().unwrap().contains_key(&name) {
Some(name)
} else {
None
}
(self.0.read().unwrap().contains_key(&name)).then(|| name)
}
#[must_use]
pub fn snapshot(&self) -> Reader {
Reader(InnerReader::Simple(self.0.clone()))
Reader(InnerReader::Simple(Arc::clone(&self.0)))
}
#[allow(clippy::unwrap_in_result)]
pub fn transaction<'a, 'b: 'a, T, E: Error + 'static + From<StorageError>>(
&'b self,
f: impl Fn(Transaction<'a>) -> Result<T, E>,
@ -64,6 +62,7 @@ enum InnerReader {
}
impl Reader {
#[allow(clippy::unwrap_in_result)]
pub fn get(
&self,
column_family: &ColumnFamily,
@ -90,6 +89,7 @@ impl Reader {
}
}
#[allow(clippy::unwrap_in_result)]
pub fn contains_key(
&self,
column_family: &ColumnFamily,
@ -120,6 +120,7 @@ impl Reader {
self.scan_prefix(column_family, &[])
}
#[allow(clippy::unwrap_in_result)]
pub fn scan_prefix(
&self,
column_family: &ColumnFamily,
@ -176,19 +177,20 @@ impl Reader {
Ok(Iter { iter, current })
}
#[allow(clippy::unwrap_in_result)]
pub fn len(&self, column_family: &ColumnFamily) -> Result<usize, StorageError> {
match &self.0 {
InnerReader::Simple(reader) => Ok(reader
.read()
.unwrap()
.get(column_family)
.map_or(0, |tree| tree.len())),
.map_or(0, BTreeMap::len)),
InnerReader::Transaction(reader) => {
if let Some(reader) = reader.upgrade() {
Ok((*reader)
.borrow()
.get(column_family)
.map_or(0, |tree| tree.len()))
.map_or(0, BTreeMap::len))
} else {
Err(StorageError::Other(
"The transaction is already ended".into(),
@ -198,19 +200,20 @@ impl Reader {
}
}
#[allow(clippy::unwrap_in_result)]
pub fn is_empty(&self, column_family: &ColumnFamily) -> Result<bool, StorageError> {
match &self.0 {
InnerReader::Simple(reader) => Ok(reader
.read()
.unwrap()
.get(column_family)
.map_or(true, |tree| tree.is_empty())),
.map_or(true, BTreeMap::is_empty)),
InnerReader::Transaction(reader) => {
if let Some(reader) = reader.upgrade() {
Ok((*reader)
.borrow()
.get(column_family)
.map_or(true, |tree| tree.is_empty()))
.map_or(true, BTreeMap::is_empty))
} else {
Err(StorageError::Other(
"The transaction is already ended".into(),
@ -246,7 +249,7 @@ impl Transaction<'_> {
.map_or(false, |cf| cf.contains_key(key)))
}
#[allow(clippy::unnecessary_wraps)]
#[allow(clippy::unnecessary_wraps, clippy::unwrap_in_result)]
pub fn insert(
&mut self,
column_family: &ColumnFamily,
@ -269,7 +272,7 @@ impl Transaction<'_> {
self.insert(column_family, key, &[])
}
#[allow(clippy::unnecessary_wraps)]
#[allow(clippy::unnecessary_wraps, clippy::unwrap_in_result)]
pub fn remove(&mut self, column_family: &ColumnFamily, key: &[u8]) -> Result<(), StorageError> {
self.0
.borrow_mut()

@ -4,7 +4,7 @@
use crate::storage::error::{CorruptionError, StorageError};
use lazy_static::lazy_static;
use libc::{self, c_char, c_void, free};
use libc::{self, c_void, free};
use oxrocksdb_sys::*;
use rand::random;
use std::borrow::Borrow;
@ -241,7 +241,7 @@ impl Db {
.map(|cf| cf.as_ptr())
.collect::<Vec<_>>()
.as_ptr(),
cf_options.as_ptr() as *const *const rocksdb_options_t,
cf_options.as_ptr().cast(),
cf_handles.as_mut_ptr(),
))
.map_err(|e| {
@ -359,7 +359,7 @@ impl Db {
.map(|cf| cf.as_ptr())
.collect::<Vec<_>>()
.as_ptr(),
cf_options.as_ptr() as *const *const rocksdb_options_t,
cf_options.as_ptr().cast(),
cf_handles.as_mut_ptr(),
))
.map_err(|e| {
@ -393,11 +393,7 @@ impl Db {
cf_handles,
cf_options,
is_secondary: true,
path_to_remove: if in_memory {
Some(secondary_path)
} else {
None
},
path_to_remove: in_memory.then(|| secondary_path),
})),
})
}
@ -424,7 +420,7 @@ impl Db {
.map(|cf| cf.as_ptr())
.collect::<Vec<_>>()
.as_ptr(),
cf_options.as_ptr() as *const *const rocksdb_options_t,
cf_options.as_ptr().cast(),
cf_handles.as_mut_ptr(),
0, // false
))
@ -580,7 +576,7 @@ impl Db {
}
let options = rocksdb_readoptions_create_copy(db.read_options);
Reader {
inner: InnerReader::PlainDb(db.clone()),
inner: InnerReader::PlainDb(Arc::clone(db)),
options,
}
}
@ -594,7 +590,7 @@ impl Db {
rocksdb_readoptions_set_snapshot(options, snapshot);
Reader {
inner: InnerReader::TransactionalSnapshot(Rc::new(TransactionalSnapshot {
db: db.clone(),
db: Arc::clone(db),
snapshot,
})),
options,
@ -698,7 +694,7 @@ impl Db {
db.db,
db.read_options,
column_family.0,
key.as_ptr() as *const c_char,
key.as_ptr().cast(),
key.len(),
))
}
@ -707,7 +703,7 @@ impl Db {
db.db,
db.read_options,
column_family.0,
key.as_ptr() as *const c_char,
key.as_ptr().cast(),
key.len()
))
}
@ -740,9 +736,9 @@ impl Db {
db.db,
db.write_options,
column_family.0,
key.as_ptr() as *const c_char,
key.as_ptr().cast(),
key.len(),
value.as_ptr() as *const c_char,
value.as_ptr().cast(),
value.len(),
))
}?;
@ -940,7 +936,7 @@ impl Reader {
inner.db.db,
self.options,
column_family.0,
key.as_ptr() as *const c_char,
key.as_ptr().cast(),
key.len()
))
}
@ -950,7 +946,7 @@ impl Reader {
*inner,
self.options,
column_family.0,
key.as_ptr() as *const c_char,
key.as_ptr().cast(),
key.len()
))
} else {
@ -964,7 +960,7 @@ impl Reader {
inner.db,
self.options,
column_family.0,
key.as_ptr() as *const c_char,
key.as_ptr().cast(),
key.len()
))
}
@ -1005,11 +1001,7 @@ impl Reader {
break;
}
}
if found {
Some(bound)
} else {
None
}
found.then(|| bound)
};
unsafe {
@ -1021,7 +1013,7 @@ impl Reader {
if let Some(upper_bound) = &upper_bound {
rocksdb_readoptions_set_iterate_upper_bound(
options,
upper_bound.as_ptr() as *const c_char,
upper_bound.as_ptr().cast(),
upper_bound.len(),
);
}
@ -1046,7 +1038,7 @@ impl Reader {
if prefix.is_empty() {
rocksdb_iter_seek_to_first(iter);
} else {
rocksdb_iter_seek(iter, prefix.as_ptr() as *const c_char, prefix.len());
rocksdb_iter_seek(iter, prefix.as_ptr().cast(), prefix.len());
}
let is_currently_valid = rocksdb_iter_valid(iter) != 0;
Ok(Iter {
@ -1101,7 +1093,7 @@ impl Transaction<'_> {
*self.transaction,
self.read_options,
column_family.0,
key.as_ptr() as *const c_char,
key.as_ptr().cast(),
key.len()
))?;
Ok(if slice.is_null() {
@ -1130,9 +1122,9 @@ impl Transaction<'_> {
ffi_result!(rocksdb_transaction_put_cf_with_status(
*self.transaction,
column_family.0,
key.as_ptr() as *const c_char,
key.as_ptr().cast(),
key.len(),
value.as_ptr() as *const c_char,
value.as_ptr().cast(),
value.len(),
))?;
}
@ -1152,7 +1144,7 @@ impl Transaction<'_> {
ffi_result!(rocksdb_transaction_delete_cf_with_status(
*self.transaction,
column_family.0,
key.as_ptr() as *const c_char,
key.as_ptr().cast(),
key.len(),
))?;
}
@ -1177,7 +1169,7 @@ impl Deref for PinnableSlice {
unsafe {
let mut len = 0;
let val = rocksdb_pinnableslice_value(self.0, &mut len);
slice::from_raw_parts(val as *const u8, len)
slice::from_raw_parts(val.cast(), len)
}
}
}
@ -1208,7 +1200,7 @@ pub struct Buffer {
impl Drop for Buffer {
fn drop(&mut self) {
unsafe {
free(self.base as *mut c_void);
free(self.base.cast());
}
}
}
@ -1285,7 +1277,7 @@ impl Iter {
unsafe {
let mut len = 0;
let val = rocksdb_iter_key(self.iter, &mut len);
Some(slice::from_raw_parts(val as *const u8, len))
Some(slice::from_raw_parts(val.cast(), len))
}
} else {
None
@ -1311,9 +1303,9 @@ impl SstFileWriter {
unsafe {
ffi_result!(rocksdb_sstfilewriter_put_with_status(
self.writer,
key.as_ptr() as *const c_char,
key.as_ptr().cast(),
key.len(),
value.as_ptr() as *const c_char,
value.as_ptr().cast(),
value.len(),
))?;
}

@ -1,3 +1,4 @@
#![allow(clippy::same_name_method)]
#[cfg(not(target_family = "wasm"))]
use crate::model::Quad;
use crate::model::{GraphNameRef, NamedOrBlankNodeRef, QuadRef, TermRef};
@ -181,7 +182,7 @@ impl Storage {
]
}
#[allow(clippy::unnecessary_wraps)]
#[allow(clippy::unnecessary_wraps, clippy::unwrap_in_result)]
fn setup(db: Db) -> Result<Self, StorageError> {
let this = Self {
#[cfg(not(target_family = "wasm"))]
@ -1305,7 +1306,7 @@ impl StorageBulkLoader {
let mut buffer_to_load = Vec::with_capacity(batch_size);
swap(buffer, &mut buffer_to_load);
let storage = self.storage.clone();
let done_counter_clone = done_counter.clone();
let done_counter_clone = Arc::clone(done_counter);
threads.push_back(spawn(move || {
FileBulkLoader::new(storage, batch_size).load(buffer_to_load, &done_counter_clone)
}));

@ -565,7 +565,7 @@ impl Store {
&self,
quads: impl IntoIterator<Item = impl Into<Quad>>,
) -> Result<(), StorageError> {
let quads = quads.into_iter().map(|q| q.into()).collect::<Vec<_>>();
let quads = quads.into_iter().map(Into::into).collect::<Vec<_>>();
self.transaction(move |mut t| t.extend(&quads))
}
@ -1569,7 +1569,7 @@ impl BulkLoader {
quads: impl IntoIterator<Item = Result<impl Into<Quad>, EI>>,
) -> Result<(), EO> {
self.storage
.load(quads.into_iter().map(|q| q.map(|q| q.into())))
.load(quads.into_iter().map(|q| q.map(Into::into)))
}
}

@ -175,7 +175,7 @@ fn test_load_dataset() -> Result<(), Box<dyn Error>> {
#[test]
#[cfg(not(target_family = "wasm"))]
fn test_bulk_load_dataset() -> Result<(), Box<dyn Error>> {
let store = Store::new().unwrap();
let store = Store::new()?;
store
.bulk_loader()
.load_dataset(Cursor::new(GRAPH_DATA), DatasetFormat::TriG, None)?;

@ -9,6 +9,7 @@ use pyo3::exceptions::{PyIOError, PySyntaxError, PyValueError};
use pyo3::prelude::*;
use pyo3::types::PyBytes;
use pyo3::wrap_pyfunction;
use std::error::Error;
use std::fs::File;
use std::io::{self, BufRead, BufReader, BufWriter, Cursor, Read, Write};
@ -289,11 +290,7 @@ impl Read for PyIo {
.map_err(to_io_err)?;
let bytes = read
.extract::<&[u8]>(py)
.or_else(|e| {
read.extract::<&str>(py)
.map(|s| s.as_bytes())
.map_err(|_| e)
})
.or_else(|e| read.extract::<&str>(py).map(str::as_bytes).map_err(|_| e))
.map_err(to_io_err)?;
buf.write_all(bytes)?;
Ok(bytes.len())
@ -325,7 +322,10 @@ fn to_io_err(error: impl Into<PyErr>) -> io::Error {
}
pub fn map_io_err(error: io::Error) -> PyErr {
if error.get_ref().map_or(false, |s| s.is::<PyErr>()) {
if error
.get_ref()
.map_or(false, <(dyn Error + Send + Sync + 'static)>::is::<PyErr>)
{
*error.into_inner().unwrap().downcast().unwrap()
} else {
PyIOError::new_err(error.to_string())

@ -225,7 +225,7 @@ impl PyQueryTriples {
Ok(allow_threads_unsafe(|| self.inner.next())
.transpose()
.map_err(map_evaluation_error)?
.map(|t| t.into()))
.map(Into::into))
}
}

@ -228,10 +228,10 @@ impl PyStore {
extract_quads_pattern(subject, predicate, object, graph_name)?;
Ok(QuadIter {
inner: self.inner.quads_for_pattern(
subject.as_ref().map(|p| p.into()),
predicate.as_ref().map(|p| p.into()),
object.as_ref().map(|p| p.into()),
graph_name.as_ref().map(|p| p.into()),
subject.as_ref().map(Into::into),
predicate.as_ref().map(Into::into),
object.as_ref().map(Into::into),
graph_name.as_ref().map(Into::into),
),
})
}

@ -591,7 +591,7 @@ pub fn main() -> anyhow::Result<()> {
let mut file = BufWriter::new(File::create(&explain_file)?);
match explain_file
.extension()
.and_then(|e| e.to_str()) {
.and_then(OsStr::to_str) {
Some("json") => {
explanation.write_in_json(file)?;
},
@ -734,7 +734,7 @@ fn format_from_path<T>(
path: &Path,
from_extension: impl FnOnce(&str) -> anyhow::Result<T>,
) -> anyhow::Result<T> {
if let Some(ext) = path.extension().and_then(|ext| ext.to_str()) {
if let Some(ext) = path.extension().and_then(OsStr::to_str) {
from_extension(ext).map_err(|e| {
e.context(format!(
"Not able to guess the file format from file name extension '{ext}'"
@ -1636,7 +1636,7 @@ impl<O: 'static, U: (Fn(O) -> io::Result<Option<O>>) + 'static> ReadForWrite<O,
) -> Result<Response, HttpError> {
let buffer = Rc::new(RefCell::new(Vec::new()));
let state = initial_state_builder(ReadForWriteWriter {
buffer: buffer.clone(),
buffer: Rc::clone(&buffer),
})
.map_err(internal_server_error)?;
Ok(Response::builder(Status::OK)

@ -347,7 +347,7 @@ impl<'a> Iterator for RdfListIterator<'a> {
let result = self
.graph
.object_for_subject_predicate(current, rdf::FIRST)
.map(|v| v.into_owned());
.map(TermRef::into_owned);
self.current_node =
match self.graph.object_for_subject_predicate(current, rdf::REST) {
Some(TermRef::NamedNode(n)) if n == rdf::NIL => None,

@ -287,21 +287,11 @@ fn evaluate_update_evaluation_test(test: &Test) -> Result<()> {
}
fn load_sparql_query_result(url: &str) -> Result<StaticQueryResults> {
if url.ends_with(".srx") {
StaticQueryResults::from_query_results(
QueryResults::read(read_file(url)?, QueryResultsFormat::Xml)?,
false,
)
} else if url.ends_with(".srj") {
StaticQueryResults::from_query_results(
QueryResults::read(read_file(url)?, QueryResultsFormat::Json)?,
false,
)
} else if url.ends_with(".tsv") {
StaticQueryResults::from_query_results(
QueryResults::read(read_file(url)?, QueryResultsFormat::Tsv)?,
false,
)
if let Some(format) = url
.rsplit_once('.')
.and_then(|(_, extension)| QueryResultsFormat::from_extension(extension))
{
StaticQueryResults::from_query_results(QueryResults::read(read_file(url)?, format)?, false)
} else {
StaticQueryResults::from_graph(&load_graph(url, guess_graph_format(url)?)?)
}
@ -505,7 +495,7 @@ impl StaticQueryResults {
fn from_graph(graph: &Graph) -> Result<Self> {
// Hack to normalize literals
let store = Store::new().unwrap();
let store = Store::new()?;
for t in graph.iter() {
store
.insert(t.in_graph(GraphNameRef::DefaultGraph))
@ -617,12 +607,12 @@ fn results_diff(expected: StaticQueryResults, actual: StaticQueryResults) -> Str
format_diff(
&expected_variables
.iter()
.map(|v| v.to_string())
.map(ToString::to_string)
.collect::<Vec<_>>()
.join("\n"),
&actual_variables
.iter()
.map(|v| v.to_string())
.map(ToString::to_string)
.collect::<Vec<_>>()
.join("\n"),
"variables",

Loading…
Cancel
Save