Applies the oxigraph crate clippy lints to the complete project

pull/475/head
Tpt 2 years ago committed by Thomas Tanon
parent 1d02098b70
commit 3e0f6b5405
  1. 111
      .cargo/config.toml
  2. 25
      js/src/model.rs
  3. 3
      js/src/store.rs
  4. 1
      js/src/utils.rs
  5. 4
      lib/benches/store.rs
  6. 32
      lib/oxrdf/src/interning.rs
  7. 10
      lib/oxrdf/src/literal.rs
  8. 6
      lib/oxsdatatypes/src/date_time.rs
  9. 11
      lib/oxsdatatypes/src/decimal.rs
  10. 1
      lib/oxsdatatypes/src/double.rs
  11. 2
      lib/oxsdatatypes/src/float.rs
  12. 4
      lib/oxsdatatypes/src/integer.rs
  13. 4
      lib/sparesults/src/csv.rs
  14. 4
      lib/sparesults/src/error.rs
  15. 71
      lib/sparesults/src/json.rs
  16. 28
      lib/sparesults/src/lib.rs
  17. 2
      lib/sparesults/src/solution.rs
  18. 6
      lib/sparesults/src/xml.rs
  19. 277
      lib/spargebra/src/algebra.rs
  20. 19
      lib/spargebra/src/query.rs
  21. 4
      lib/spargebra/src/term.rs
  22. 31
      lib/spargebra/src/update.rs
  23. 126
      lib/src/lib.rs
  24. 10
      lib/tests/store.rs
  25. 6
      oxrocksdb-sys/build.rs
  26. 32
      python/src/io.rs
  27. 6
      python/src/lib.rs
  28. 2
      python/src/model.rs
  29. 8
      python/src/sparql.rs
  30. 27
      python/src/store.rs
  31. 64
      server/src/main.rs
  32. 1
      testsuite/src/main.rs
  33. 28
      testsuite/src/manifest.rs
  34. 25
      testsuite/src/sparql_evaluator.rs
  35. 20
      testsuite/tests/sparql.rs

@ -0,0 +1,111 @@
# Cargo build configuration: lint flags applied to every crate in the workspace.
# All lints are set to warn (-W) rather than deny so that new findings from a
# toolchain update do not break the build.
[build]
rustflags = [
    # rustc lints.
    # Lint names accept '-' as an alias for '_' on the command line; kebab-case
    # is used consistently here (previously "-Wunsafe_code" was the lone
    # underscore spelling).
    "-Wtrivial-casts",
    "-Wtrivial-numeric-casts",
    "-Wunsafe-code",
    "-Wunused-lifetimes",
    "-Wunused-qualifications",
    # clippy lints, kept in alphabetical order to make additions and
    # duplicate detection easy.
    "-Wclippy::cast-lossless",
    "-Wclippy::cast-possible-truncation",
    "-Wclippy::cast-possible-wrap",
    "-Wclippy::cast-precision-loss",
    "-Wclippy::cast-sign-loss",
    "-Wclippy::checked-conversions",
    "-Wclippy::cloned-instead-of-copied",
    "-Wclippy::copy-iterator",
    "-Wclippy::dbg-macro",
    "-Wclippy::debug-assert-with-mut-call",
    "-Wclippy::decimal-literal-representation",
    "-Wclippy::empty-enum",
    "-Wclippy::empty-line-after-outer-attr",
    "-Wclippy::enum-glob-use",
    "-Wclippy::enum-variant-names",
    "-Wclippy::expect-used",
    "-Wclippy::expl-impl-clone-on-copy",
    "-Wclippy::explicit-deref-methods",
    "-Wclippy::explicit-into-iter-loop",
    "-Wclippy::explicit-iter-loop",
    "-Wclippy::fallible-impl-from",
    "-Wclippy::filter-map-next",
    "-Wclippy::flat-map-option",
    "-Wclippy::from-iter-instead-of-collect",
    "-Wclippy::get-unwrap",
    "-Wclippy::if-not-else",
    "-Wclippy::implicit-clone",
    "-Wclippy::implicit-saturating-sub",
    "-Wclippy::imprecise-flops",
    "-Wclippy::inconsistent-struct-constructor",
    "-Wclippy::inefficient-to-string",
    "-Wclippy::inline-always",
    "-Wclippy::invalid-upcast-comparisons",
    "-Wclippy::items-after-statements",
    "-Wclippy::large-digit-groups",
    "-Wclippy::large-stack-arrays",
    "-Wclippy::large-types-passed-by-value",
    "-Wclippy::let-underscore-must-use",
    "-Wclippy::let-unit-value",
    "-Wclippy::linkedlist",
    "-Wclippy::macro-use-imports",
    "-Wclippy::manual-ok-or",
    "-Wclippy::map-flatten",
    "-Wclippy::map-unwrap-or",
    "-Wclippy::match-bool",
    "-Wclippy::match-same-arms",
    "-Wclippy::match-wildcard-for-single-variants",
    "-Wclippy::maybe-infinite-iter",
    "-Wclippy::mem-forget",
    "-Wclippy::multiple-inherent-impl",
    "-Wclippy::mut-mut",
    "-Wclippy::mutex-integer",
    "-Wclippy::naive-bytecount",
    "-Wclippy::needless-bitwise-bool",
    "-Wclippy::needless-continue",
    "-Wclippy::needless-pass-by-value",
    "-Wclippy::non-ascii-literal",
    "-Wclippy::nonstandard-macro-braces",
    "-Wclippy::path-buf-push-overwrite",
    "-Wclippy::print-stderr",
    "-Wclippy::print-stdout",
    "-Wclippy::range-minus-one",
    "-Wclippy::range-plus-one",
    "-Wclippy::rc-mutex",
    "-Wclippy::redundant-else",
    "-Wclippy::redundant-pub-crate",
    "-Wclippy::ref-binding-to-reference",
    "-Wclippy::ref-option-ref",
    "-Wclippy::rest-pat-in-fully-bound-structs",
    "-Wclippy::same-functions-in-if-condition",
    "-Wclippy::str-to-string",
    "-Wclippy::string-add",
    "-Wclippy::string-add-assign",
    "-Wclippy::string-lit-as-bytes",
    "-Wclippy::string-to-string",
    "-Wclippy::suboptimal-flops",
    "-Wclippy::suspicious-operation-groupings",
    "-Wclippy::todo",
    "-Wclippy::trait-duplication-in-bounds",
    "-Wclippy::transmute-ptr-to-ptr",
    "-Wclippy::trivial-regex",
    "-Wclippy::trivially-copy-pass-by-ref",
    "-Wclippy::type-repetition-in-bounds",
    "-Wclippy::unicode-not-nfc",
    "-Wclippy::unimplemented",
    "-Wclippy::unnecessary-self-imports",
    "-Wclippy::unnecessary-wraps",
    "-Wclippy::unneeded-field-pattern",
    "-Wclippy::unnested-or-patterns",
    "-Wclippy::unreadable-literal",
    "-Wclippy::unseparated-literal-suffix",
    "-Wclippy::unused-async",
    "-Wclippy::unused-self",
    "-Wclippy::use-debug",
    "-Wclippy::use-self",
    "-Wclippy::used-underscore-binding",
    "-Wclippy::useless-let-if-seq",
    "-Wclippy::useless-transmute",
    "-Wclippy::verbose-bit-mask",
    "-Wclippy::verbose-file-reads",
    "-Wclippy::wildcard-dependencies",
    "-Wclippy::wrong-self-convention",
    "-Wclippy::zero-sized-map-values",
]

@ -1,4 +1,9 @@
#![allow(dead_code, clippy::inherent_to_string)] #![allow(
dead_code,
clippy::inherent_to_string,
clippy::unused_self,
clippy::use_self
)]
use crate::format_err; use crate::format_err;
use crate::utils::to_err; use crate::utils::to_err;
@ -313,7 +318,7 @@ impl JsDefaultGraph {
#[wasm_bindgen(js_name = toString)] #[wasm_bindgen(js_name = toString)]
pub fn to_string(&self) -> String { pub fn to_string(&self) -> String {
"DEFAULT".to_string() "DEFAULT".to_owned()
} }
pub fn equals(&self, other: &JsValue) -> bool { pub fn equals(&self, other: &JsValue) -> bool {
@ -476,19 +481,19 @@ impl From<JsTerm> for JsValue {
impl From<NamedNode> for JsTerm { impl From<NamedNode> for JsTerm {
fn from(node: NamedNode) -> Self { fn from(node: NamedNode) -> Self {
JsTerm::NamedNode(node.into()) Self::NamedNode(node.into())
} }
} }
impl From<BlankNode> for JsTerm { impl From<BlankNode> for JsTerm {
fn from(node: BlankNode) -> Self { fn from(node: BlankNode) -> Self {
JsTerm::BlankNode(node.into()) Self::BlankNode(node.into())
} }
} }
impl From<Literal> for JsTerm { impl From<Literal> for JsTerm {
fn from(literal: Literal) -> Self { fn from(literal: Literal) -> Self {
JsTerm::Literal(literal.into()) Self::Literal(literal.into())
} }
} }
@ -527,20 +532,20 @@ impl From<GraphName> for JsTerm {
match name { match name {
GraphName::NamedNode(node) => node.into(), GraphName::NamedNode(node) => node.into(),
GraphName::BlankNode(node) => node.into(), GraphName::BlankNode(node) => node.into(),
GraphName::DefaultGraph => JsTerm::DefaultGraph(JsDefaultGraph {}), GraphName::DefaultGraph => Self::DefaultGraph(JsDefaultGraph {}),
} }
} }
} }
impl From<Variable> for JsTerm { impl From<Variable> for JsTerm {
fn from(variable: Variable) -> Self { fn from(variable: Variable) -> Self {
JsTerm::Variable(variable.into()) Self::Variable(variable.into())
} }
} }
impl From<Triple> for JsTerm { impl From<Triple> for JsTerm {
fn from(triple: Triple) -> Self { fn from(triple: Triple) -> Self {
JsTerm::Quad(triple.into()) Self::Quad(triple.into())
} }
} }
@ -552,7 +557,7 @@ impl From<Box<Triple>> for JsTerm {
impl From<Quad> for JsTerm { impl From<Quad> for JsTerm {
fn from(quad: Quad) -> Self { fn from(quad: Quad) -> Self {
JsTerm::Quad(quad.into()) Self::Quad(quad.into())
} }
} }
@ -660,7 +665,7 @@ impl TryFrom<JsTerm> for GraphName {
"The literal {} is not a possible graph name", "The literal {} is not a possible graph name",
literal.inner literal.inner
)), )),
JsTerm::DefaultGraph(_) => Ok(GraphName::DefaultGraph), JsTerm::DefaultGraph(_) => Ok(Self::DefaultGraph),
JsTerm::Variable(variable) => Err(format_err!( JsTerm::Variable(variable) => Err(format_err!(
"The variable {} is not a possible RDF term", "The variable {} is not a possible RDF term",
variable.inner variable.inner

@ -1,3 +1,5 @@
#![allow(clippy::use_self)]
use crate::format_err; use crate::format_err;
use crate::model::*; use crate::model::*;
use crate::utils::to_err; use crate::utils::to_err;
@ -17,6 +19,7 @@ pub struct JsStore {
#[wasm_bindgen(js_class = Store)] #[wasm_bindgen(js_class = Store)]
impl JsStore { impl JsStore {
#[wasm_bindgen(constructor)] #[wasm_bindgen(constructor)]
#[allow(clippy::use_self)]
pub fn new(quads: Option<Box<[JsValue]>>) -> Result<JsStore, JsValue> { pub fn new(quads: Option<Box<[JsValue]>>) -> Result<JsStore, JsValue> {
console_error_panic_hook::set_once(); console_error_panic_hook::set_once();

@ -11,6 +11,7 @@ macro_rules! format_err {
}; };
} }
#[allow(clippy::needless_pass_by_value)]
pub fn to_err(e: impl ToString) -> JsValue { pub fn to_err(e: impl ToString) -> JsValue {
JsValue::from(Error::new(&e.to_string())) JsValue::from(Error::new(&e.to_string()))
} }

@ -175,8 +175,8 @@ fn sparql_parsing(c: &mut Criterion) {
let kind = parts.next().unwrap(); let kind = parts.next().unwrap();
let operation = parts.next().unwrap(); let operation = parts.next().unwrap();
match kind { match kind {
"query" => RawOperation::Query(operation.to_string()), "query" => RawOperation::Query(operation.to_owned()),
"update" => RawOperation::Update(operation.to_string()), "update" => RawOperation::Update(operation.to_owned()),
_ => panic!("Unexpected operation kind {kind}"), _ => panic!("Unexpected operation kind {kind}"),
} }
}) })

@ -59,10 +59,8 @@ impl Interner {
} }
} }
fn resolve(&self, key: &Key) -> &str { fn resolve(&self, key: Key) -> &str {
self.string_for_hash &self.string_for_hash[&key.0]
.get(&key.0)
.expect("Interned key not found")
} }
} }
@ -79,7 +77,7 @@ impl Key {
} }
fn impossible() -> Self { fn impossible() -> Self {
Key(u64::MAX) Self(u64::MAX)
} }
} }
@ -101,8 +99,8 @@ impl InternedNamedNode {
}) })
} }
pub fn decode_from<'a>(&self, interner: &'a Interner) -> NamedNodeRef<'a> { pub fn decode_from(self, interner: &Interner) -> NamedNodeRef {
NamedNodeRef::new_unchecked(interner.resolve(&self.id)) NamedNodeRef::new_unchecked(interner.resolve(self.id))
} }
pub fn first() -> Self { pub fn first() -> Self {
@ -138,8 +136,8 @@ impl InternedBlankNode {
}) })
} }
pub fn decode_from<'a>(&self, interner: &'a Interner) -> BlankNodeRef<'a> { pub fn decode_from(self, interner: &Interner) -> BlankNodeRef {
BlankNodeRef::new_unchecked(interner.resolve(&self.id)) BlankNodeRef::new_unchecked(interner.resolve(self.id))
} }
pub fn next(self) -> Self { pub fn next(self) -> Self {
@ -203,18 +201,18 @@ impl InternedLiteral {
pub fn decode_from<'a>(&self, interner: &'a Interner) -> LiteralRef<'a> { pub fn decode_from<'a>(&self, interner: &'a Interner) -> LiteralRef<'a> {
match self { match self {
InternedLiteral::String { value_id } => { Self::String { value_id } => {
LiteralRef::new_simple_literal(interner.resolve(value_id)) LiteralRef::new_simple_literal(interner.resolve(*value_id))
} }
InternedLiteral::LanguageTaggedString { Self::LanguageTaggedString {
value_id, value_id,
language_id, language_id,
} => LiteralRef::new_language_tagged_literal_unchecked( } => LiteralRef::new_language_tagged_literal_unchecked(
interner.resolve(value_id), interner.resolve(*value_id),
interner.resolve(language_id), interner.resolve(*language_id),
), ),
InternedLiteral::TypedLiteral { value_id, datatype } => LiteralRef::new_typed_literal( Self::TypedLiteral { value_id, datatype } => LiteralRef::new_typed_literal(
interner.resolve(value_id), interner.resolve(*value_id),
datatype.decode_from(interner), datatype.decode_from(interner),
), ),
} }
@ -503,7 +501,7 @@ impl Hasher for IdentityHasher {
} }
fn write(&mut self, _bytes: &[u8]) { fn write(&mut self, _bytes: &[u8]) {
unimplemented!() unreachable!("Should only be used on u64 values")
} }
fn write_u64(&mut self, i: u64) { fn write_u64(&mut self, i: u64) {

@ -268,9 +268,9 @@ impl From<f32> for Literal {
fn from(value: f32) -> Self { fn from(value: f32) -> Self {
Self(LiteralContent::TypedLiteral { Self(LiteralContent::TypedLiteral {
value: if value == f32::INFINITY { value: if value == f32::INFINITY {
"INF".to_string() "INF".to_owned()
} else if value == f32::NEG_INFINITY { } else if value == f32::NEG_INFINITY {
"-INF".to_string() "-INF".to_owned()
} else { } else {
value.to_string() value.to_string()
}, },
@ -284,9 +284,9 @@ impl From<f64> for Literal {
fn from(value: f64) -> Self { fn from(value: f64) -> Self {
Self(LiteralContent::TypedLiteral { Self(LiteralContent::TypedLiteral {
value: if value == f64::INFINITY { value: if value == f64::INFINITY {
"INF".to_string() "INF".to_owned()
} else if value == f64::NEG_INFINITY { } else if value == f64::NEG_INFINITY {
"-INF".to_string() "-INF".to_owned()
} else { } else {
value.to_string() value.to_string()
}, },
@ -616,7 +616,7 @@ impl PartialEq<LiteralRef<'_>> for Literal {
} }
#[inline] #[inline]
pub(crate) fn print_quoted_str(string: &str, f: &mut impl Write) -> fmt::Result { pub fn print_quoted_str(string: &str, f: &mut impl Write) -> fmt::Result {
f.write_char('"')?; f.write_char('"')?;
for c in string.chars() { for c in string.chars() {
match c { match c {

@ -1299,7 +1299,11 @@ impl TryFrom<DayTimeDuration> for TimezoneOffset {
#[inline] #[inline]
fn try_from(value: DayTimeDuration) -> Result<Self, DateTimeError> { fn try_from(value: DayTimeDuration) -> Result<Self, DateTimeError> {
let result = Self::new((value.minutes() + value.hours() * 60) as i16)?; let result = Self::new(
(value.minutes() + value.hours() * 60)
.try_into()
.map_err(|_| DATE_TIME_OVERFLOW)?,
)?;
if DayTimeDuration::from(result) == value { if DayTimeDuration::from(result) == value {
Ok(result) Ok(result)
} else { } else {

@ -314,6 +314,7 @@ impl TryFrom<Double> for Decimal {
type Error = DecimalOverflowError; type Error = DecimalOverflowError;
#[inline] #[inline]
#[allow(clippy::cast_precision_loss, clippy::cast_possible_truncation)]
fn try_from(value: Double) -> Result<Self, DecimalOverflowError> { fn try_from(value: Double) -> Result<Self, DecimalOverflowError> {
let shifted = value * Double::from(DECIMAL_PART_POW as f64); let shifted = value * Double::from(DECIMAL_PART_POW as f64);
if shifted.is_finite() if shifted.is_finite()
@ -331,6 +332,7 @@ impl TryFrom<Double> for Decimal {
impl From<Decimal> for Float { impl From<Decimal> for Float {
#[inline] #[inline]
#[allow(clippy::cast_precision_loss)]
fn from(value: Decimal) -> Self { fn from(value: Decimal) -> Self {
((value.value as f32) / (DECIMAL_PART_POW as f32)).into() ((value.value as f32) / (DECIMAL_PART_POW as f32)).into()
} }
@ -338,6 +340,7 @@ impl From<Decimal> for Float {
impl From<Decimal> for Double { impl From<Decimal> for Double {
#[inline] #[inline]
#[allow(clippy::cast_precision_loss)]
fn from(value: Decimal) -> Self { fn from(value: Decimal) -> Self {
((value.value as f64) / (DECIMAL_PART_POW as f64)).into() ((value.value as f64) / (DECIMAL_PART_POW as f64)).into()
} }
@ -769,8 +772,8 @@ mod tests {
#[test] #[test]
fn from_bool() { fn from_bool() {
assert_eq!(Decimal::from(false), Decimal::from(0u8)); assert_eq!(Decimal::from(false), Decimal::from(0_u8));
assert_eq!(Decimal::from(true), Decimal::from(1u8)); assert_eq!(Decimal::from(true), Decimal::from(1_u8));
} }
#[test] #[test]
@ -793,7 +796,7 @@ mod tests {
assert!(Decimal::try_from(Float::from(f32::MIN)).is_err()); assert!(Decimal::try_from(Float::from(f32::MIN)).is_err());
assert!(Decimal::try_from(Float::from(f32::MAX)).is_err()); assert!(Decimal::try_from(Float::from(f32::MAX)).is_err());
assert!( assert!(
Decimal::try_from(Float::from(1672507302466.)) Decimal::try_from(Float::from(1_672_507_302_466.))
.unwrap() .unwrap()
.checked_sub(Decimal::from_str("1672507302466")?) .checked_sub(Decimal::from_str("1672507302466")?)
.unwrap() .unwrap()
@ -818,7 +821,7 @@ mod tests {
Some(Decimal::from_str("-123.1")?) Some(Decimal::from_str("-123.1")?)
); );
assert!( assert!(
Decimal::try_from(Double::from(1672507302466.)) Decimal::try_from(Double::from(1_672_507_302_466.))
.unwrap() .unwrap()
.checked_sub(Decimal::from_str("1672507302466")?) .checked_sub(Decimal::from_str("1672507302466")?)
.unwrap() .unwrap()

@ -156,6 +156,7 @@ impl From<Boolean> for Double {
impl From<Integer> for Double { impl From<Integer> for Double {
#[inline] #[inline]
#[allow(clippy::cast_precision_loss)]
fn from(value: Integer) -> Self { fn from(value: Integer) -> Self {
(i64::from(value) as f64).into() (i64::from(value) as f64).into()
} }

@ -136,6 +136,7 @@ impl From<Boolean> for Float {
impl From<Integer> for Float { impl From<Integer> for Float {
#[inline] #[inline]
#[allow(clippy::cast_precision_loss)]
fn from(value: Integer) -> Self { fn from(value: Integer) -> Self {
(i64::from(value) as f32).into() (i64::from(value) as f32).into()
} }
@ -143,6 +144,7 @@ impl From<Integer> for Float {
impl From<Double> for Float { impl From<Double> for Float {
#[inline] #[inline]
#[allow(clippy::cast_possible_truncation)]
fn from(value: Double) -> Self { fn from(value: Double) -> Self {
Self { Self {
value: f64::from(value) as f32, value: f64::from(value) as f32,

@ -258,7 +258,7 @@ mod tests {
assert!(Integer::try_from(Float::from(f32::MIN)).is_err()); assert!(Integer::try_from(Float::from(f32::MIN)).is_err());
assert!(Integer::try_from(Float::from(f32::MAX)).is_err()); assert!(Integer::try_from(Float::from(f32::MAX)).is_err());
assert!( assert!(
Integer::try_from(Float::from(1672507302466.)) Integer::try_from(Float::from(1_672_507_302_466.))
.unwrap() .unwrap()
.checked_sub(Integer::from_str("1672507302466")?) .checked_sub(Integer::from_str("1672507302466")?)
.unwrap() .unwrap()
@ -283,7 +283,7 @@ mod tests {
Some(Integer::from_str("-123")?) Some(Integer::from_str("-123")?)
); );
assert!( assert!(
Integer::try_from(Double::from(1672507302466.)) Integer::try_from(Double::from(1_672_507_302_466.))
.unwrap() .unwrap()
.checked_sub(Integer::from_str("1672507302466").unwrap()) .checked_sub(Integer::from_str("1672507302466").unwrap())
.unwrap() .unwrap()

@ -529,7 +529,7 @@ mod tests {
if let TsvQueryResultsReader::Solutions { if let TsvQueryResultsReader::Solutions {
mut solutions, mut solutions,
variables, variables,
} = TsvQueryResultsReader::read("\n\n".as_bytes())? } = TsvQueryResultsReader::read(b"\n\n".as_slice())?
{ {
assert_eq!(variables, Vec::<Variable>::new()); assert_eq!(variables, Vec::<Variable>::new());
assert_eq!(solutions.read_next()?, Some(Vec::new())); assert_eq!(solutions.read_next()?, Some(Vec::new()));
@ -561,7 +561,7 @@ mod tests {
if let TsvQueryResultsReader::Solutions { if let TsvQueryResultsReader::Solutions {
mut solutions, mut solutions,
variables, variables,
} = TsvQueryResultsReader::read("?a\n".as_bytes())? } = TsvQueryResultsReader::read(b"?a\n".as_slice())?
{ {
assert_eq!(variables, vec![Variable::new_unchecked("a")]); assert_eq!(variables, vec![Variable::new_unchecked("a")]);
assert_eq!(solutions.read_next()?, None); assert_eq!(solutions.read_next()?, None);

@ -74,7 +74,7 @@ pub struct SyntaxError {
} }
#[derive(Debug)] #[derive(Debug)]
pub(crate) enum SyntaxErrorKind { pub enum SyntaxErrorKind {
Xml(quick_xml::Error), Xml(quick_xml::Error),
Term(TermParseError), Term(TermParseError),
Msg { msg: String }, Msg { msg: String },
@ -117,7 +117,7 @@ impl From<SyntaxError> for io::Error {
fn from(error: SyntaxError) -> Self { fn from(error: SyntaxError) -> Self {
match error.inner { match error.inner {
SyntaxErrorKind::Xml(error) => match error { SyntaxErrorKind::Xml(error) => match error {
quick_xml::Error::Io(error) => io::Error::new(error.kind(), error), quick_xml::Error::Io(error) => Self::new(error.kind(), error),
quick_xml::Error::UnexpectedEof(error) => { quick_xml::Error::UnexpectedEof(error) => {
Self::new(io::ErrorKind::UnexpectedEof, error) Self::new(io::ErrorKind::UnexpectedEof, error)
} }

@ -30,14 +30,14 @@ pub struct JsonSolutionsWriter<W: Write> {
} }
impl<W: Write> JsonSolutionsWriter<W> { impl<W: Write> JsonSolutionsWriter<W> {
pub fn start(sink: W, variables: Vec<Variable>) -> io::Result<Self> { pub fn start(sink: W, variables: &[Variable]) -> io::Result<Self> {
let mut writer = JsonWriter::from_writer(sink); let mut writer = JsonWriter::from_writer(sink);
writer.write_event(JsonEvent::StartObject)?; writer.write_event(JsonEvent::StartObject)?;
writer.write_event(JsonEvent::ObjectKey("head"))?; writer.write_event(JsonEvent::ObjectKey("head"))?;
writer.write_event(JsonEvent::StartObject)?; writer.write_event(JsonEvent::StartObject)?;
writer.write_event(JsonEvent::ObjectKey("vars"))?; writer.write_event(JsonEvent::ObjectKey("vars"))?;
writer.write_event(JsonEvent::StartArray)?; writer.write_event(JsonEvent::StartArray)?;
for variable in &variables { for variable in variables {
writer.write_event(JsonEvent::String(variable.as_str()))?; writer.write_event(JsonEvent::String(variable.as_str()))?;
} }
writer.write_event(JsonEvent::EndArray)?; writer.write_event(JsonEvent::EndArray)?;
@ -155,7 +155,7 @@ impl<R: BufRead> JsonQueryResultsReader<R> {
if let Some(buffered_bindings) = buffered_bindings.take() { if let Some(buffered_bindings) = buffered_bindings.take() {
let mut mapping = BTreeMap::default(); let mut mapping = BTreeMap::default();
for (i, var) in extracted_variables.iter().enumerate() { for (i, var) in extracted_variables.iter().enumerate() {
mapping.insert(var.as_str().to_string(), i); mapping.insert(var.as_str().to_owned(), i);
} }
output_iter = Some(Self::Solutions { output_iter = Some(Self::Solutions {
variables: extracted_variables, variables: extracted_variables,
@ -192,7 +192,7 @@ impl<R: BufRead> JsonQueryResultsReader<R> {
if let Some(variables) = variables { if let Some(variables) = variables {
let mut mapping = BTreeMap::default(); let mut mapping = BTreeMap::default();
for (i, var) in variables.iter().enumerate() { for (i, var) in variables.iter().enumerate() {
mapping.insert(var.as_str().to_string(), i); mapping.insert(var.as_str().to_owned(), i);
} }
return Ok(Self::Solutions { return Ok(Self::Solutions {
variables, variables,
@ -201,34 +201,32 @@ impl<R: BufRead> JsonQueryResultsReader<R> {
mapping, mapping,
}, },
}); });
} else { }
// We buffer all results before being able to read the header // We buffer all results before being able to read the header
let mut bindings = Vec::new(); let mut bindings = Vec::new();
let mut variables = Vec::new(); let mut variables = Vec::new();
let mut values = Vec::new(); let mut values = Vec::new();
loop { loop {
match reader.read_event(&mut buffer)? { match reader.read_event(&mut buffer)? {
JsonEvent::StartObject => (), JsonEvent::StartObject => (),
JsonEvent::EndObject => { JsonEvent::EndObject => {
bindings.push((take(&mut variables), take(&mut values))); bindings.push((take(&mut variables), take(&mut values)));
} }
JsonEvent::EndArray | JsonEvent::Eof => { JsonEvent::EndArray | JsonEvent::Eof => {
buffered_bindings = Some(bindings); buffered_bindings = Some(bindings);
break; break;
} }
JsonEvent::ObjectKey(key) => { JsonEvent::ObjectKey(key) => {
variables.push(key.to_string()); variables.push(key.to_owned());
values.push(read_value(&mut reader, &mut buffer, 0)?); values.push(read_value(&mut reader, &mut buffer, 0)?);
} }
_ => { _ => {
return Err(SyntaxError::msg( return Err(
"Invalid result serialization", SyntaxError::msg("Invalid result serialization").into()
) )
.into())
}
} }
} }
}; }
} }
"boolean" => { "boolean" => {
return if let JsonEvent::Boolean(v) = reader.read_event(&mut buffer)? { return if let JsonEvent::Boolean(v) = reader.read_event(&mut buffer)? {
@ -323,12 +321,6 @@ fn read_value<R: BufRead>(
buffer: &mut Vec<u8>, buffer: &mut Vec<u8>,
number_of_recursive_calls: usize, number_of_recursive_calls: usize,
) -> Result<Term, ParseError> { ) -> Result<Term, ParseError> {
if number_of_recursive_calls == MAX_NUMBER_OF_NESTED_TRIPLES {
return Err(SyntaxError::msg(format!(
"Too many nested triples ({MAX_NUMBER_OF_NESTED_TRIPLES}). The parser fails here to avoid a stack overflow."
))
.into());
}
enum Type { enum Type {
Uri, Uri,
BNode, BNode,
@ -343,6 +335,13 @@ fn read_value<R: BufRead>(
Lang, Lang,
Datatype, Datatype,
} }
if number_of_recursive_calls == MAX_NUMBER_OF_NESTED_TRIPLES {
return Err(SyntaxError::msg(format!(
"Too many nested triples ({MAX_NUMBER_OF_NESTED_TRIPLES}). The parser fails here to avoid a stack overflow."
))
.into());
}
let mut state = None; let mut state = None;
let mut t = None; let mut t = None;
let mut value = None; let mut value = None;

@ -45,10 +45,10 @@ impl QueryResultsFormat {
#[inline] #[inline]
pub fn iri(self) -> &'static str { pub fn iri(self) -> &'static str {
match self { match self {
QueryResultsFormat::Xml => "http://www.w3.org/ns/formats/SPARQL_Results_XML", Self::Xml => "http://www.w3.org/ns/formats/SPARQL_Results_XML",
QueryResultsFormat::Json => "http://www.w3.org/ns/formats/SPARQL_Results_JSON", Self::Json => "http://www.w3.org/ns/formats/SPARQL_Results_JSON",
QueryResultsFormat::Csv => "http://www.w3.org/ns/formats/SPARQL_Results_CSV", Self::Csv => "http://www.w3.org/ns/formats/SPARQL_Results_CSV",
QueryResultsFormat::Tsv => "http://www.w3.org/ns/formats/SPARQL_Results_TSV", Self::Tsv => "http://www.w3.org/ns/formats/SPARQL_Results_TSV",
} }
} }
/// The format [IANA media type](https://tools.ietf.org/html/rfc2046). /// The format [IANA media type](https://tools.ietf.org/html/rfc2046).
@ -61,10 +61,10 @@ impl QueryResultsFormat {
#[inline] #[inline]
pub fn media_type(self) -> &'static str { pub fn media_type(self) -> &'static str {
match self { match self {
QueryResultsFormat::Xml => "application/sparql-results+xml", Self::Xml => "application/sparql-results+xml",
QueryResultsFormat::Json => "application/sparql-results+json", Self::Json => "application/sparql-results+json",
QueryResultsFormat::Csv => "text/csv; charset=utf-8", Self::Csv => "text/csv; charset=utf-8",
QueryResultsFormat::Tsv => "text/tab-separated-values; charset=utf-8", Self::Tsv => "text/tab-separated-values; charset=utf-8",
} }
} }
@ -78,10 +78,10 @@ impl QueryResultsFormat {
#[inline] #[inline]
pub fn file_extension(self) -> &'static str { pub fn file_extension(self) -> &'static str {
match self { match self {
QueryResultsFormat::Xml => "srx", Self::Xml => "srx",
QueryResultsFormat::Json => "srj", Self::Json => "srj",
QueryResultsFormat::Csv => "csv", Self::Csv => "csv",
QueryResultsFormat::Tsv => "tsv", Self::Tsv => "tsv",
} }
} }
@ -408,10 +408,10 @@ impl QueryResultsSerializer {
Ok(SolutionsWriter { Ok(SolutionsWriter {
formatter: match self.format { formatter: match self.format {
QueryResultsFormat::Xml => { QueryResultsFormat::Xml => {
SolutionsWriterKind::Xml(XmlSolutionsWriter::start(writer, variables)?) SolutionsWriterKind::Xml(XmlSolutionsWriter::start(writer, &variables)?)
} }
QueryResultsFormat::Json => { QueryResultsFormat::Json => {
SolutionsWriterKind::Json(JsonSolutionsWriter::start(writer, variables)?) SolutionsWriterKind::Json(JsonSolutionsWriter::start(writer, &variables)?)
} }
QueryResultsFormat::Csv => { QueryResultsFormat::Csv => {
SolutionsWriterKind::Csv(CsvSolutionsWriter::start(writer, variables)?) SolutionsWriterKind::Csv(CsvSolutionsWriter::start(writer, variables)?)

@ -118,7 +118,7 @@ impl QuerySolution {
impl<V: Into<Rc<Vec<Variable>>>, S: Into<Vec<Option<Term>>>> From<(V, S)> for QuerySolution { impl<V: Into<Rc<Vec<Variable>>>, S: Into<Vec<Option<Term>>>> From<(V, S)> for QuerySolution {
#[inline] #[inline]
fn from((v, s): (V, S)) -> Self { fn from((v, s): (V, S)) -> Self {
QuerySolution { Self {
variables: v.into(), variables: v.into(),
values: s.into(), values: s.into(),
} }

@ -37,11 +37,11 @@ pub struct XmlSolutionsWriter<W: Write> {
} }
impl<W: Write> XmlSolutionsWriter<W> { impl<W: Write> XmlSolutionsWriter<W> {
pub fn start(sink: W, variables: Vec<Variable>) -> io::Result<Self> { pub fn start(sink: W, variables: &[Variable]) -> io::Result<Self> {
Self::do_start(sink, variables).map_err(map_xml_error) Self::do_start(sink, variables).map_err(map_xml_error)
} }
fn do_start(sink: W, variables: Vec<Variable>) -> Result<Self, quick_xml::Error> { fn do_start(sink: W, variables: &[Variable]) -> Result<Self, quick_xml::Error> {
let mut writer = Writer::new(sink); let mut writer = Writer::new(sink);
writer.write_event(Event::Decl(BytesDecl::new("1.0", None, None)))?; writer.write_event(Event::Decl(BytesDecl::new("1.0", None, None)))?;
let mut sparql_open = BytesStart::new("sparql"); let mut sparql_open = BytesStart::new("sparql");
@ -50,7 +50,7 @@ impl<W: Write> XmlSolutionsWriter<W> {
writer writer
.create_element("head") .create_element("head")
.write_inner_content(|writer| { .write_inner_content(|writer| {
for variable in &variables { for variable in variables {
writer writer
.create_element("variable") .create_element("variable")
.with_attribute(("name", variable.as_str())) .with_attribute(("name", variable.as_str()))

@ -591,6 +591,138 @@ pub enum GraphPattern {
}, },
} }
// Serializes the algebra pattern back to SPARQL query syntax (the
// S-Expression form is produced separately by `fmt_sse`).
impl fmt::Display for GraphPattern {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            // Basic graph pattern: each triple pattern is terminated by " .".
            Self::Bgp { patterns } => {
                for pattern in patterns {
                    write!(f, "{pattern} .")?
                }
                Ok(())
            }
            // Property path triple pattern.
            Self::Path {
                subject,
                path,
                object,
            } => write!(f, "{subject} {path} {object} ."),
            Self::Join { left, right } => {
                #[allow(clippy::match_same_arms)]
                match right.as_ref() {
                    Self::LeftJoin { .. }
                    | Self::Minus { .. }
                    | Self::Extend { .. }
                    | Self::Filter { .. } => {
                        // The second block might be considered as a modification of the first one.
                        // Braces keep the right-hand side in its own group.
                        write!(f, "{left} {{ {right} }}")
                    }
                    #[cfg(feature = "sep-0006")]
                    Self::Lateral { .. } => {
                        write!(f, "{left} {{ {right} }}")
                    }
                    _ => write!(f, "{left} {right}"),
                }
            }
            Self::LeftJoin {
                left,
                right,
                expression,
            } => {
                // OPTIONAL, with the join condition embedded as a FILTER when present.
                if let Some(expr) = expression {
                    write!(f, "{left} OPTIONAL {{ {right} FILTER({expr}) }}")
                } else {
                    write!(f, "{left} OPTIONAL {{ {right} }}")
                }
            }
            // SPARQL SEP-0006 lateral join extension.
            #[cfg(feature = "sep-0006")]
            Self::Lateral { left, right } => {
                write!(f, "{left} LATERAL {{ {right} }}")
            }
            Self::Filter { expr, inner } => {
                write!(f, "{inner} FILTER({expr})")
            }
            Self::Union { left, right } => write!(f, "{{ {left} }} UNION {{ {right} }}",),
            Self::Graph { name, inner } => {
                write!(f, "GRAPH {name} {{ {inner} }}")
            }
            Self::Extend {
                inner,
                variable,
                expression,
            } => write!(f, "{inner} BIND({expression} AS {variable})"),
            Self::Minus { left, right } => write!(f, "{left} MINUS {{ {right} }}"),
            Self::Service {
                name,
                inner,
                silent,
            } => {
                if *silent {
                    write!(f, "SERVICE SILENT {name} {{ {inner} }}")
                } else {
                    write!(f, "SERVICE {name} {{ {inner} }}")
                }
            }
            Self::Values {
                variables,
                bindings,
            } => {
                // VALUES ( ?v... ) { ( val... ) ... }; absent values print as UNDEF.
                write!(f, "VALUES ( ")?;
                for var in variables {
                    write!(f, "{var} ")?;
                }
                write!(f, ") {{ ")?;
                for row in bindings {
                    write!(f, "( ")?;
                    for val in row {
                        match val {
                            Some(val) => write!(f, "{val} "),
                            None => write!(f, "UNDEF "),
                        }?;
                    }
                    write!(f, ") ")?;
                }
                write!(f, " }}")
            }
            Self::Group {
                inner,
                variables,
                aggregates,
            } => {
                // Aggregation has no inline form: render a nested sub-SELECT,
                // adding a GROUP BY clause only when grouping variables exist.
                write!(f, "{{SELECT")?;
                for (a, v) in aggregates {
                    write!(f, " ({v} AS {a})")?;
                }
                for b in variables {
                    write!(f, " {b}")?;
                }
                write!(f, " WHERE {{ {inner} }}")?;
                if !variables.is_empty() {
                    write!(f, " GROUP BY")?;
                    for v in variables {
                        write!(f, " {v}")?;
                    }
                }
                write!(f, "}}")
            }
            // Remaining variants have no direct inline SPARQL form: wrap them
            // in a nested group and delegate to `SparqlGraphRootPattern`.
            p => write!(
                f,
                "{{ {} }}",
                SparqlGraphRootPattern {
                    pattern: p,
                    dataset: None
                }
            ),
        }
    }
}
impl Default for GraphPattern {
    fn default() -> Self {
        // The default pattern is an empty basic graph pattern.
        Self::Bgp {
            patterns: Vec::new(),
        }
    }
}
impl GraphPattern { impl GraphPattern {
/// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html).
pub(crate) fn fmt_sse(&self, f: &mut impl fmt::Write) -> fmt::Result { pub(crate) fn fmt_sse(&self, f: &mut impl fmt::Write) -> fmt::Result {
@ -793,146 +925,14 @@ impl GraphPattern {
} }
} }
} }
}
// Serializes the algebra pattern back to SPARQL query syntax (the
// S-Expression form is produced separately by `fmt_sse`).
impl fmt::Display for GraphPattern {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            // Basic graph pattern: each triple pattern is terminated by " .".
            Self::Bgp { patterns } => {
                for pattern in patterns {
                    write!(f, "{pattern} .")?
                }
                Ok(())
            }
            // Property path triple pattern.
            Self::Path {
                subject,
                path,
                object,
            } => write!(f, "{subject} {path} {object} ."),
            Self::Join { left, right } => {
                match right.as_ref() {
                    Self::LeftJoin { .. }
                    | Self::Minus { .. }
                    | Self::Extend { .. }
                    | Self::Filter { .. } => {
                        // The second block might be considered as a modification of the first one.
                        // Braces keep the right-hand side in its own group.
                        write!(f, "{left} {{ {right} }}")
                    }
                    #[cfg(feature = "sep-0006")]
                    Self::Lateral { .. } => {
                        write!(f, "{left} {{ {right} }}")
                    }
                    _ => write!(f, "{left} {right}"),
                }
            }
            Self::LeftJoin {
                left,
                right,
                expression,
            } => {
                // OPTIONAL, with the join condition embedded as a FILTER when present.
                if let Some(expr) = expression {
                    write!(f, "{left} OPTIONAL {{ {right} FILTER({expr}) }}")
                } else {
                    write!(f, "{left} OPTIONAL {{ {right} }}")
                }
            }
            // SPARQL SEP-0006 lateral join extension.
            #[cfg(feature = "sep-0006")]
            Self::Lateral { left, right } => {
                write!(f, "{left} LATERAL {{ {right} }}")
            }
            Self::Filter { expr, inner } => {
                write!(f, "{inner} FILTER({expr})")
            }
            Self::Union { left, right } => write!(f, "{{ {left} }} UNION {{ {right} }}",),
            Self::Graph { name, inner } => {
                write!(f, "GRAPH {name} {{ {inner} }}")
            }
            Self::Extend {
                inner,
                variable,
                expression,
            } => write!(f, "{inner} BIND({expression} AS {variable})"),
            Self::Minus { left, right } => write!(f, "{left} MINUS {{ {right} }}"),
            Self::Service {
                name,
                inner,
                silent,
            } => {
                if *silent {
                    write!(f, "SERVICE SILENT {name} {{ {inner} }}")
                } else {
                    write!(f, "SERVICE {name} {{ {inner} }}")
                }
            }
            Self::Values {
                variables,
                bindings,
            } => {
                // VALUES ( ?v... ) { ( val... ) ... }; absent values print as UNDEF.
                write!(f, "VALUES ( ")?;
                for var in variables {
                    write!(f, "{var} ")?;
                }
                write!(f, ") {{ ")?;
                for row in bindings {
                    write!(f, "( ")?;
                    for val in row {
                        match val {
                            Some(val) => write!(f, "{val} "),
                            None => write!(f, "UNDEF "),
                        }?;
                    }
                    write!(f, ") ")?;
                }
                write!(f, " }}")
            }
            Self::Group {
                inner,
                variables,
                aggregates,
            } => {
                // Aggregation has no inline form: render a nested sub-SELECT,
                // adding a GROUP BY clause only when grouping variables exist.
                write!(f, "{{SELECT")?;
                for (a, v) in aggregates {
                    write!(f, " ({v} AS {a})")?;
                }
                for b in variables {
                    write!(f, " {b}")?;
                }
                write!(f, " WHERE {{ {inner} }}")?;
                if !variables.is_empty() {
                    write!(f, " GROUP BY")?;
                    for v in variables {
                        write!(f, " {v}")?;
                    }
                }
                write!(f, "}}")
            }
            // Remaining variants have no direct inline SPARQL form: wrap them
            // in a nested group and delegate to `SparqlGraphRootPattern`.
            p => write!(
                f,
                "{{ {} }}",
                SparqlGraphRootPattern {
                    pattern: p,
                    dataset: None
                }
            ),
        }
    }
}
impl Default for GraphPattern {
    /// Builds the neutral pattern: a basic graph pattern with no triples.
    fn default() -> Self {
        Self::Bgp { patterns: vec![] }
    }
}
impl GraphPattern {
/// Calls `callback` on each [in-scope variable](https://www.w3.org/TR/sparql11-query/#variableScope) occurrence. /// Calls `callback` on each [in-scope variable](https://www.w3.org/TR/sparql11-query/#variableScope) occurrence.
pub fn on_in_scope_variable<'a>(&'a self, mut callback: impl FnMut(&'a Variable)) { pub fn on_in_scope_variable<'a>(&'a self, mut callback: impl FnMut(&'a Variable)) {
self.lookup_in_scope_variables(&mut callback) self.lookup_in_scope_variables(&mut callback)
} }
fn lookup_in_scope_variables<'a>(&'a self, callback: &mut impl FnMut(&'a Variable)) { fn lookup_in_scope_variables<'a>(&'a self, callback: &mut impl FnMut(&'a Variable)) {
#[allow(clippy::match_same_arms)]
match self { match self {
Self::Bgp { patterns } => { Self::Bgp { patterns } => {
for pattern in patterns { for pattern in patterns {
@ -981,7 +981,6 @@ impl GraphPattern {
inner.lookup_in_scope_variables(callback); inner.lookup_in_scope_variables(callback);
} }
Self::Minus { left, .. } => left.lookup_in_scope_variables(callback), Self::Minus { left, .. } => left.lookup_in_scope_variables(callback),
Self::Service { inner, .. } => inner.lookup_in_scope_variables(callback),
Self::Group { Self::Group {
variables, variables,
aggregates, aggregates,
@ -994,17 +993,13 @@ impl GraphPattern {
callback(v); callback(v);
} }
} }
Self::Values { variables, .. } => { Self::Values { variables, .. } | Self::Project { variables, .. } => {
for v in variables {
callback(v);
}
}
Self::Project { variables, .. } => {
for v in variables { for v in variables {
callback(v); callback(v);
} }
} }
Self::Filter { inner, .. } Self::Service { inner, .. }
| Self::Filter { inner, .. }
| Self::OrderBy { inner, .. } | Self::OrderBy { inner, .. }
| Self::Distinct { inner } | Self::Distinct { inner }
| Self::Reduced { inner } | Self::Reduced { inner }

@ -67,15 +67,14 @@ impl Query {
/// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html).
pub fn to_sse(&self) -> String { pub fn to_sse(&self) -> String {
let mut buffer = String::new(); let mut buffer = String::new();
self.fmt_sse(&mut buffer) self.fmt_sse(&mut buffer).unwrap();
.expect("Unexpected error during SSE formatting");
buffer buffer
} }
/// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html).
fn fmt_sse(&self, f: &mut impl fmt::Write) -> fmt::Result { fn fmt_sse(&self, f: &mut impl fmt::Write) -> fmt::Result {
match self { match self {
Query::Select { Self::Select {
dataset, dataset,
pattern, pattern,
base_iri, base_iri,
@ -97,7 +96,7 @@ impl Query {
} }
Ok(()) Ok(())
} }
Query::Construct { Self::Construct {
template, template,
dataset, dataset,
pattern, pattern,
@ -129,7 +128,7 @@ impl Query {
} }
Ok(()) Ok(())
} }
Query::Describe { Self::Describe {
dataset, dataset,
pattern, pattern,
base_iri, base_iri,
@ -153,7 +152,7 @@ impl Query {
} }
Ok(()) Ok(())
} }
Query::Ask { Self::Ask {
dataset, dataset,
pattern, pattern,
base_iri, base_iri,
@ -184,7 +183,7 @@ impl Query {
impl fmt::Display for Query { impl fmt::Display for Query {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self { match self {
Query::Select { Self::Select {
dataset, dataset,
pattern, pattern,
base_iri, base_iri,
@ -201,7 +200,7 @@ impl fmt::Display for Query {
} }
) )
} }
Query::Construct { Self::Construct {
template, template,
dataset, dataset,
pattern, pattern,
@ -227,7 +226,7 @@ impl fmt::Display for Query {
} }
) )
} }
Query::Describe { Self::Describe {
dataset, dataset,
pattern, pattern,
base_iri, base_iri,
@ -248,7 +247,7 @@ impl fmt::Display for Query {
} }
) )
} }
Query::Ask { Self::Ask {
dataset, dataset,
pattern, pattern,
base_iri, base_iri,

@ -764,9 +764,7 @@ impl TriplePattern {
object: object.into(), object: object.into(),
} }
} }
}
impl TriplePattern {
/// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html).
pub(crate) fn fmt_sse(&self, f: &mut impl Write) -> fmt::Result { pub(crate) fn fmt_sse(&self, f: &mut impl Write) -> fmt::Result {
write!(f, "(triple ")?; write!(f, "(triple ")?;
@ -885,9 +883,7 @@ impl QuadPattern {
graph_name: graph_name.into(), graph_name: graph_name.into(),
} }
} }
}
impl QuadPattern {
/// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html).
pub(crate) fn fmt_sse(&self, f: &mut impl Write) -> fmt::Result { pub(crate) fn fmt_sse(&self, f: &mut impl Write) -> fmt::Result {
if self.graph_name != GraphNamePattern::DefaultGraph { if self.graph_name != GraphNamePattern::DefaultGraph {

@ -33,8 +33,7 @@ impl Update {
/// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html).
pub fn to_sse(&self) -> String { pub fn to_sse(&self) -> String {
let mut buffer = String::new(); let mut buffer = String::new();
self.fmt_sse(&mut buffer) self.fmt_sse(&mut buffer).unwrap();
.expect("Unexpected error during SSE formatting");
buffer buffer
} }
@ -124,7 +123,7 @@ impl GraphUpdateOperation {
/// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html). /// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html).
fn fmt_sse(&self, f: &mut impl fmt::Write) -> fmt::Result { fn fmt_sse(&self, f: &mut impl fmt::Write) -> fmt::Result {
match self { match self {
GraphUpdateOperation::InsertData { data } => { Self::InsertData { data } => {
write!(f, "(insertData (")?; write!(f, "(insertData (")?;
for (i, t) in data.iter().enumerate() { for (i, t) in data.iter().enumerate() {
if i > 0 { if i > 0 {
@ -134,7 +133,7 @@ impl GraphUpdateOperation {
} }
write!(f, "))") write!(f, "))")
} }
GraphUpdateOperation::DeleteData { data } => { Self::DeleteData { data } => {
write!(f, "(deleteData (")?; write!(f, "(deleteData (")?;
for (i, t) in data.iter().enumerate() { for (i, t) in data.iter().enumerate() {
if i > 0 { if i > 0 {
@ -144,7 +143,7 @@ impl GraphUpdateOperation {
} }
write!(f, "))") write!(f, "))")
} }
GraphUpdateOperation::DeleteInsert { Self::DeleteInsert {
delete, delete,
insert, insert,
using, using,
@ -182,7 +181,7 @@ impl GraphUpdateOperation {
} }
write!(f, ")") write!(f, ")")
} }
GraphUpdateOperation::Load { Self::Load {
silent, silent,
source, source,
destination, destination,
@ -195,7 +194,7 @@ impl GraphUpdateOperation {
destination.fmt_sse(f)?; destination.fmt_sse(f)?;
write!(f, ")") write!(f, ")")
} }
GraphUpdateOperation::Clear { silent, graph } => { Self::Clear { silent, graph } => {
write!(f, "(clear ")?; write!(f, "(clear ")?;
if *silent { if *silent {
write!(f, "silent ")?; write!(f, "silent ")?;
@ -203,14 +202,14 @@ impl GraphUpdateOperation {
graph.fmt_sse(f)?; graph.fmt_sse(f)?;
write!(f, ")") write!(f, ")")
} }
GraphUpdateOperation::Create { silent, graph } => { Self::Create { silent, graph } => {
write!(f, "(create ")?; write!(f, "(create ")?;
if *silent { if *silent {
write!(f, "silent ")?; write!(f, "silent ")?;
} }
write!(f, "{graph})") write!(f, "{graph})")
} }
GraphUpdateOperation::Drop { silent, graph } => { Self::Drop { silent, graph } => {
write!(f, "(drop ")?; write!(f, "(drop ")?;
if *silent { if *silent {
write!(f, "silent ")?; write!(f, "silent ")?;
@ -225,17 +224,17 @@ impl GraphUpdateOperation {
impl fmt::Display for GraphUpdateOperation { impl fmt::Display for GraphUpdateOperation {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self { match self {
GraphUpdateOperation::InsertData { data } => { Self::InsertData { data } => {
writeln!(f, "INSERT DATA {{")?; writeln!(f, "INSERT DATA {{")?;
write_quads(data, f)?; write_quads(data, f)?;
write!(f, "}}") write!(f, "}}")
} }
GraphUpdateOperation::DeleteData { data } => { Self::DeleteData { data } => {
writeln!(f, "DELETE DATA {{")?; writeln!(f, "DELETE DATA {{")?;
write_ground_quads(data, f)?; write_ground_quads(data, f)?;
write!(f, "}}") write!(f, "}}")
} }
GraphUpdateOperation::DeleteInsert { Self::DeleteInsert {
delete, delete,
insert, insert,
using, using,
@ -274,7 +273,7 @@ impl fmt::Display for GraphUpdateOperation {
} }
) )
} }
GraphUpdateOperation::Load { Self::Load {
silent, silent,
source, source,
destination, destination,
@ -289,21 +288,21 @@ impl fmt::Display for GraphUpdateOperation {
} }
Ok(()) Ok(())
} }
GraphUpdateOperation::Clear { silent, graph } => { Self::Clear { silent, graph } => {
write!(f, "CLEAR ")?; write!(f, "CLEAR ")?;
if *silent { if *silent {
write!(f, "SILENT ")?; write!(f, "SILENT ")?;
} }
write!(f, "{graph}") write!(f, "{graph}")
} }
GraphUpdateOperation::Create { silent, graph } => { Self::Create { silent, graph } => {
write!(f, "CREATE ")?; write!(f, "CREATE ")?;
if *silent { if *silent {
write!(f, "SILENT ")?; write!(f, "SILENT ")?;
} }
write!(f, "GRAPH {graph}") write!(f, "GRAPH {graph}")
} }
GraphUpdateOperation::Drop { silent, graph } => { Self::Drop { silent, graph } => {
write!(f, "DROP ")?; write!(f, "DROP ")?;
if *silent { if *silent {
write!(f, "SILENT ")?; write!(f, "SILENT ")?;

@ -4,132 +4,6 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))] #![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc(test(attr(deny(warnings))))] #![doc(test(attr(deny(warnings))))]
#![deny(unsafe_code)] #![deny(unsafe_code)]
#![warn(
trivial_casts,
trivial_numeric_casts,
unused_lifetimes,
unused_qualifications,
clippy::cast_lossless,
clippy::cast_possible_truncation,
clippy::cast_possible_wrap,
clippy::cast_precision_loss,
clippy::cast_sign_loss,
clippy::checked_conversions,
clippy::cloned_instead_of_copied,
clippy::copy_iterator,
clippy::dbg_macro,
clippy::debug_assert_with_mut_call,
clippy::decimal_literal_representation,
//TODO clippy::doc_markdown,
// clippy::else_if_without_else,
clippy::empty_line_after_outer_attr,
clippy::empty_enum,
clippy::enum_glob_use,
clippy::expect_used,
clippy::expl_impl_clone_on_copy,
clippy::explicit_deref_methods,
clippy::explicit_into_iter_loop,
clippy::explicit_iter_loop,
clippy::fallible_impl_from,
clippy::filter_map_next,
clippy::flat_map_option,
clippy::from_iter_instead_of_collect,
clippy::get_unwrap,
clippy::if_not_else,
// clippy::if_then_some_else_none,
clippy::implicit_clone,
clippy::implicit_saturating_sub,
clippy::imprecise_flops,
clippy::inconsistent_struct_constructor,
// clippy::indexing_slicing,
clippy::inefficient_to_string,
clippy::inline_always,
clippy::invalid_upcast_comparisons,
clippy::items_after_statements,
clippy::large_digit_groups,
clippy::large_stack_arrays,
clippy::large_types_passed_by_value,
clippy::let_underscore_must_use,
clippy::let_unit_value,
clippy::linkedlist,
clippy::macro_use_imports,
clippy::manual_ok_or,
//TODO clippy::map_err_ignore,
clippy::map_flatten,
clippy::map_unwrap_or,
clippy::match_bool,
// clippy::match_on_vec_items,
clippy::match_same_arms,
clippy::match_wildcard_for_single_variants,
clippy::maybe_infinite_iter,
clippy::mem_forget,
//TODO clippy::missing_const_for_fn,
//TODO clippy::module_name_repetitions,
clippy::multiple_crate_versions,
clippy::multiple_inherent_impl,
//TODO clippy::must_use_candidate,
clippy::mut_mut,
clippy::mutex_integer,
clippy::naive_bytecount,
clippy::needless_bitwise_bool,
clippy::needless_continue,
clippy::needless_pass_by_value,
clippy::non_ascii_literal,
clippy::nonstandard_macro_braces,
//TODO clippy::option_if_let_else,
// clippy::panic, clippy::panic_in_result_fn, does not work well with tests
clippy::path_buf_push_overwrite,
clippy::print_stderr,
clippy::print_stdout,
clippy::range_minus_one,
clippy::range_plus_one,
clippy::rc_mutex,
clippy::enum_variant_names,
//TODO clippy::redundant_closure_for_method_calls,
clippy::redundant_else,
clippy::redundant_pub_crate,
clippy::ref_binding_to_reference,
clippy::ref_option_ref,
clippy::rest_pat_in_fully_bound_structs,
clippy::same_functions_in_if_condition,
// clippy::shadow_reuse,
// clippy::shadow_same,
// clippy::shadow_unrelated,
// clippy::single_match_else,
clippy::str_to_string,
clippy::string_add,
clippy::string_add_assign,
clippy::string_lit_as_bytes,
clippy::string_to_string,
clippy::suboptimal_flops,
clippy::suspicious_operation_groupings,
clippy::todo,
clippy::trait_duplication_in_bounds,
clippy::transmute_ptr_to_ptr,
clippy::trivial_regex,
clippy::trivially_copy_pass_by_ref,
clippy::type_repetition_in_bounds,
clippy::unicode_not_nfc,
clippy::unimplemented,
clippy::unnecessary_self_imports,
clippy::unnecessary_wraps,
clippy::unneeded_field_pattern,
clippy::unnested_or_patterns,
clippy::unreadable_literal,
clippy::unseparated_literal_suffix,
clippy::unused_async,
clippy::unused_self,
clippy::use_debug,
clippy::use_self,
clippy::used_underscore_binding,
clippy::useless_let_if_seq,
clippy::useless_transmute,
clippy::verbose_bit_mask,
clippy::verbose_file_reads,
clippy::wildcard_dependencies,
clippy::zero_sized_map_values,
clippy::wrong_self_convention,
)]
pub mod io; pub mod io;
pub mod sparql; pub mod sparql;

@ -18,6 +18,8 @@ use std::iter::once;
use std::path::{Path, PathBuf}; use std::path::{Path, PathBuf};
#[cfg(target_os = "linux")] #[cfg(target_os = "linux")]
use std::process::Command; use std::process::Command;
#[allow(clippy::non_ascii_literal)]
const DATA: &str = r#" const DATA: &str = r#"
@prefix schema: <http://schema.org/> . @prefix schema: <http://schema.org/> .
@prefix wd: <http://www.wikidata.org/entity/> . @prefix wd: <http://www.wikidata.org/entity/> .
@ -31,6 +33,8 @@ wd:Q90 a schema:City ;
schema:url "https://www.paris.fr/"^^xsd:anyURI ; schema:url "https://www.paris.fr/"^^xsd:anyURI ;
schema:postalCode "75001" . schema:postalCode "75001" .
"#; "#;
#[allow(clippy::non_ascii_literal)]
const GRAPH_DATA: &str = r#" const GRAPH_DATA: &str = r#"
@prefix schema: <http://schema.org/> . @prefix schema: <http://schema.org/> .
@prefix wd: <http://www.wikidata.org/entity/> . @prefix wd: <http://www.wikidata.org/entity/> .
@ -70,7 +74,7 @@ fn quads(graph_name: impl Into<GraphNameRef<'static>>) -> Vec<QuadRef<'static>>
QuadRef::new( QuadRef::new(
paris, paris,
name, name,
LiteralRef::new_language_tagged_literal_unchecked("la ville lumière", "fr"), LiteralRef::new_language_tagged_literal_unchecked("la ville lumi\u{e8}re", "fr"),
graph_name, graph_name,
), ),
QuadRef::new(paris, country, france, graph_name), QuadRef::new(paris, country, france, graph_name),
@ -534,6 +538,8 @@ impl AsRef<Path> for TempDir {
#[cfg(not(target_family = "wasm"))] #[cfg(not(target_family = "wasm"))]
impl Drop for TempDir { impl Drop for TempDir {
fn drop(&mut self) { fn drop(&mut self) {
let _ = remove_dir_all(&self.0); if self.0.is_dir() {
remove_dir_all(&self.0).unwrap();
}
} }
} }

@ -25,9 +25,9 @@ fn bindgen_rocksdb() {
.allowlist_type("rocksdb_.*") .allowlist_type("rocksdb_.*")
.allowlist_var("rocksdb_.*") .allowlist_var("rocksdb_.*")
.generate() .generate()
.expect("unable to generate rocksdb bindings") .unwrap()
.write_to_file(PathBuf::from(var("OUT_DIR").unwrap()).join("bindings.rs")) .write_to_file(PathBuf::from(var("OUT_DIR").unwrap()).join("bindings.rs"))
.expect("unable to write rocksdb bindings"); .unwrap();
} }
fn build_rocksdb() { fn build_rocksdb() {
@ -138,7 +138,7 @@ fn build_rocksdb() {
// Remove POSIX-specific sources // Remove POSIX-specific sources
lib_sources = lib_sources lib_sources = lib_sources
.iter() .iter()
.cloned() .copied()
.filter(|file| { .filter(|file| {
!matches!( !matches!(
*file, *file,

@ -54,11 +54,10 @@ pub fn parse(
py: Python<'_>, py: Python<'_>,
) -> PyResult<PyObject> { ) -> PyResult<PyObject> {
let input = if let Ok(path) = input.extract::<&str>(py) { let input = if let Ok(path) = input.extract::<&str>(py) {
PyReadable::from_file(path, py) PyReadable::from_file(path, py).map_err(map_io_err)?
} else { } else {
PyReadable::from_data(input, py) PyReadable::from_data(input, py)
} };
.map_err(map_io_err)?;
if let Some(graph_format) = GraphFormat::from_media_type(mime_type) { if let Some(graph_format) = GraphFormat::from_media_type(mime_type) {
let mut parser = GraphParser::from_format(graph_format); let mut parser = GraphParser::from_format(graph_format);
if let Some(base_iri) = base_iri { if let Some(base_iri) = base_iri {
@ -119,11 +118,10 @@ pub fn parse(
#[pyfunction] #[pyfunction]
pub fn serialize(input: &PyAny, output: PyObject, mime_type: &str, py: Python<'_>) -> PyResult<()> { pub fn serialize(input: &PyAny, output: PyObject, mime_type: &str, py: Python<'_>) -> PyResult<()> {
let output = if let Ok(path) = output.extract::<&str>(py) { let output = if let Ok(path) = output.extract::<&str>(py) {
PyWritable::from_file(path, py) PyWritable::from_file(path, py).map_err(map_io_err)?
} else { } else {
PyWritable::from_data(output) PyWritable::from_data(output)
} };
.map_err(map_io_err)?;
if let Some(graph_format) = GraphFormat::from_media_type(mime_type) { if let Some(graph_format) = GraphFormat::from_media_type(mime_type) {
let mut writer = GraphSerializer::from_format(graph_format) let mut writer = GraphSerializer::from_format(graph_format)
.triple_writer(output) .triple_writer(output)
@ -195,7 +193,7 @@ impl PyQuadReader {
} }
} }
pub(crate) enum PyReadable { pub enum PyReadable {
Bytes(Cursor<Vec<u8>>), Bytes(Cursor<Vec<u8>>),
Io(BufReader<PyIo>), Io(BufReader<PyIo>),
File(BufReader<File>), File(BufReader<File>),
@ -208,14 +206,14 @@ impl PyReadable {
))) )))
} }
pub fn from_data(data: PyObject, py: Python<'_>) -> io::Result<Self> { pub fn from_data(data: PyObject, py: Python<'_>) -> Self {
Ok(if let Ok(bytes) = data.extract::<Vec<u8>>(py) { if let Ok(bytes) = data.extract::<Vec<u8>>(py) {
Self::Bytes(Cursor::new(bytes)) Self::Bytes(Cursor::new(bytes))
} else if let Ok(string) = data.extract::<String>(py) { } else if let Ok(string) = data.extract::<String>(py) {
Self::Bytes(Cursor::new(string.into_bytes())) Self::Bytes(Cursor::new(string.into_bytes()))
} else { } else {
Self::Io(BufReader::new(PyIo(data))) Self::Io(BufReader::new(PyIo(data)))
}) }
} }
} }
@ -247,7 +245,7 @@ impl BufRead for PyReadable {
} }
} }
pub(crate) enum PyWritable { pub enum PyWritable {
Io(BufWriter<PyIo>), Io(BufWriter<PyIo>),
File(BufWriter<File>), File(BufWriter<File>),
} }
@ -259,8 +257,8 @@ impl PyWritable {
))) )))
} }
pub fn from_data(data: PyObject) -> io::Result<Self> { pub fn from_data(data: PyObject) -> Self {
Ok(Self::Io(BufWriter::new(PyIo(data)))) Self::Io(BufWriter::new(PyIo(data)))
} }
} }
@ -280,7 +278,7 @@ impl Write for PyWritable {
} }
} }
pub(crate) struct PyIo(PyObject); pub struct PyIo(PyObject);
impl Read for PyIo { impl Read for PyIo {
fn read(&mut self, mut buf: &mut [u8]) -> io::Result<usize> { fn read(&mut self, mut buf: &mut [u8]) -> io::Result<usize> {
@ -326,7 +324,7 @@ fn to_io_err(error: impl Into<PyErr>) -> io::Error {
io::Error::new(io::ErrorKind::Other, error.into()) io::Error::new(io::ErrorKind::Other, error.into())
} }
pub(crate) fn map_io_err(error: io::Error) -> PyErr { pub fn map_io_err(error: io::Error) -> PyErr {
if error.get_ref().map_or(false, |s| s.is::<PyErr>()) { if error.get_ref().map_or(false, |s| s.is::<PyErr>()) {
*error.into_inner().unwrap().downcast().unwrap() *error.into_inner().unwrap().downcast().unwrap()
} else { } else {
@ -334,7 +332,7 @@ pub(crate) fn map_io_err(error: io::Error) -> PyErr {
} }
} }
pub(crate) fn map_parse_error(error: ParseError) -> PyErr { pub fn map_parse_error(error: ParseError) -> PyErr {
match error { match error {
ParseError::Syntax(error) => PySyntaxError::new_err(error.to_string()), ParseError::Syntax(error) => PySyntaxError::new_err(error.to_string()),
ParseError::Io(error) => map_io_err(error), ParseError::Io(error) => map_io_err(error),
@ -346,7 +344,7 @@ pub(crate) fn map_parse_error(error: ParseError) -> PyErr {
/// ///
/// Code from pyo3: https://github.com/PyO3/pyo3/blob/a67180c8a42a0bc0fdc45b651b62c0644130cf47/src/python.rs#L366 /// Code from pyo3: https://github.com/PyO3/pyo3/blob/a67180c8a42a0bc0fdc45b651b62c0644130cf47/src/python.rs#L366
#[allow(unsafe_code)] #[allow(unsafe_code)]
pub(crate) fn allow_threads_unsafe<T>(f: impl FnOnce() -> T) -> T { pub fn allow_threads_unsafe<T>(f: impl FnOnce() -> T) -> T {
struct RestoreGuard { struct RestoreGuard {
tstate: *mut pyo3::ffi::PyThreadState, tstate: *mut pyo3::ffi::PyThreadState,
} }

@ -1,3 +1,9 @@
#![allow(
clippy::redundant_pub_crate,
clippy::used_underscore_binding,
clippy::unused_self,
clippy::trivially_copy_pass_by_ref
)]
mod io; mod io;
mod model; mod model;
mod sparql; mod sparql;

@ -434,7 +434,7 @@ pub struct PyDefaultGraph {}
impl From<PyDefaultGraph> for GraphName { impl From<PyDefaultGraph> for GraphName {
fn from(_: PyDefaultGraph) -> Self { fn from(_: PyDefaultGraph) -> Self {
GraphName::DefaultGraph Self::DefaultGraph
} }
} }

@ -61,12 +61,12 @@ pub fn parse_query(
Ok(query) Ok(query)
} }
pub fn query_results_to_python(py: Python<'_>, results: QueryResults) -> PyResult<PyObject> { pub fn query_results_to_python(py: Python<'_>, results: QueryResults) -> PyObject {
Ok(match results { match results {
QueryResults::Solutions(inner) => PyQuerySolutions { inner }.into_py(py), QueryResults::Solutions(inner) => PyQuerySolutions { inner }.into_py(py),
QueryResults::Graph(inner) => PyQueryTriples { inner }.into_py(py), QueryResults::Graph(inner) => PyQueryTriples { inner }.into_py(py),
QueryResults::Boolean(b) => b.into_py(py), QueryResults::Boolean(b) => b.into_py(py),
}) }
} }
/// Tuple associating variables and terms that are the result of a SPARQL ``SELECT`` query. /// Tuple associating variables and terms that are the result of a SPARQL ``SELECT`` query.
@ -229,7 +229,7 @@ impl PyQueryTriples {
} }
} }
pub(crate) fn map_evaluation_error(error: EvaluationError) -> PyErr { pub fn map_evaluation_error(error: EvaluationError) -> PyErr {
match error { match error {
EvaluationError::Parsing(error) => PySyntaxError::new_err(error.to_string()), EvaluationError::Parsing(error) => PySyntaxError::new_err(error.to_string()),
EvaluationError::Storage(error) => map_storage_error(error), EvaluationError::Storage(error) => map_storage_error(error),

@ -295,7 +295,7 @@ impl PyStore {
)?; )?;
let results = let results =
allow_threads_unsafe(|| self.inner.query(query)).map_err(map_evaluation_error)?; allow_threads_unsafe(|| self.inner.query(query)).map_err(map_evaluation_error)?;
query_results_to_python(py, results) Ok(query_results_to_python(py, results))
} }
/// Executes a `SPARQL 1.1 update <https://www.w3.org/TR/sparql11-update/>`_. /// Executes a `SPARQL 1.1 update <https://www.w3.org/TR/sparql11-update/>`_.
@ -392,11 +392,10 @@ impl PyStore {
None None
}; };
let input = if let Ok(path) = input.extract::<&str>(py) { let input = if let Ok(path) = input.extract::<&str>(py) {
PyReadable::from_file(path, py) PyReadable::from_file(path, py).map_err(map_io_err)?
} else { } else {
PyReadable::from_data(input, py) PyReadable::from_data(input, py)
} };
.map_err(map_io_err)?;
py.allow_threads(|| { py.allow_threads(|| {
if let Some(graph_format) = GraphFormat::from_media_type(mime_type) { if let Some(graph_format) = GraphFormat::from_media_type(mime_type) {
self.inner self.inner
@ -475,11 +474,10 @@ impl PyStore {
None None
}; };
let input = if let Ok(path) = input.extract::<&str>(py) { let input = if let Ok(path) = input.extract::<&str>(py) {
PyReadable::from_file(path, py) PyReadable::from_file(path, py).map_err(map_io_err)?
} else { } else {
PyReadable::from_data(input, py) PyReadable::from_data(input, py)
} };
.map_err(map_io_err)?;
py.allow_threads(|| { py.allow_threads(|| {
if let Some(graph_format) = GraphFormat::from_media_type(mime_type) { if let Some(graph_format) = GraphFormat::from_media_type(mime_type) {
self.inner self.inner
@ -548,11 +546,10 @@ impl PyStore {
py: Python<'_>, py: Python<'_>,
) -> PyResult<()> { ) -> PyResult<()> {
let output = if let Ok(path) = output.extract::<&str>(py) { let output = if let Ok(path) = output.extract::<&str>(py) {
PyWritable::from_file(path, py) PyWritable::from_file(path, py).map_err(map_io_err)?
} else { } else {
PyWritable::from_data(output) PyWritable::from_data(output)
} };
.map_err(map_io_err)?;
let from_graph_name = if let Some(graph_name) = from_graph { let from_graph_name = if let Some(graph_name) = from_graph {
Some(GraphName::from(&PyGraphNameRef::try_from(graph_name)?)) Some(GraphName::from(&PyGraphNameRef::try_from(graph_name)?))
} else { } else {
@ -757,8 +754,8 @@ impl PyStore {
self.inner.len().map_err(map_storage_error) self.inner.len().map_err(map_storage_error)
} }
fn __contains__(&self, quad: PyQuad) -> PyResult<bool> { fn __contains__(&self, quad: &PyQuad) -> PyResult<bool> {
self.inner.contains(&quad).map_err(map_storage_error) self.inner.contains(quad).map_err(map_storage_error)
} }
fn __iter__(&self) -> QuadIter { fn __iter__(&self) -> QuadIter {
@ -845,21 +842,21 @@ pub fn extract_quads_pattern<'a>(
)) ))
} }
pub(crate) fn map_storage_error(error: StorageError) -> PyErr { pub fn map_storage_error(error: StorageError) -> PyErr {
match error { match error {
StorageError::Io(error) => PyIOError::new_err(error.to_string()), StorageError::Io(error) => PyIOError::new_err(error.to_string()),
_ => PyRuntimeError::new_err(error.to_string()), _ => PyRuntimeError::new_err(error.to_string()),
} }
} }
pub(crate) fn map_loader_error(error: LoaderError) -> PyErr { pub fn map_loader_error(error: LoaderError) -> PyErr {
match error { match error {
LoaderError::Storage(error) => map_storage_error(error), LoaderError::Storage(error) => map_storage_error(error),
LoaderError::Parsing(error) => map_parse_error(error), LoaderError::Parsing(error) => map_parse_error(error),
} }
} }
pub(crate) fn map_serializer_error(error: SerializerError) -> PyErr { pub fn map_serializer_error(error: SerializerError) -> PyErr {
match error { match error {
SerializerError::Storage(error) => map_storage_error(error), SerializerError::Storage(error) => map_storage_error(error),
SerializerError::Io(error) => PyIOError::new_err(error.to_string()), SerializerError::Io(error) => PyIOError::new_err(error.to_string()),

@ -1,3 +1,4 @@
#![allow(clippy::print_stderr, clippy::cast_precision_loss, clippy::use_debug)]
use anyhow::{anyhow, bail, Context, Error}; use anyhow::{anyhow, bail, Context, Error};
use clap::{Parser, Subcommand}; use clap::{Parser, Subcommand};
use flate2::read::MultiGzDecoder; use flate2::read::MultiGzDecoder;
@ -27,7 +28,7 @@ use std::time::{Duration, Instant};
use std::{fmt, fs, str}; use std::{fmt, fs, str};
use url::form_urlencoded; use url::form_urlencoded;
const MAX_SPARQL_BODY_SIZE: u64 = 1_048_576; const MAX_SPARQL_BODY_SIZE: u64 = 0x0010_0000;
const HTTP_TIMEOUT: Duration = Duration::from_secs(60); const HTTP_TIMEOUT: Duration = Duration::from_secs(60);
const HTML_ROOT_PAGE: &str = include_str!("../templates/query.html"); const HTML_ROOT_PAGE: &str = include_str!("../templates/query.html");
const LOGO: &str = include_str!("../logo.svg"); const LOGO: &str = include_str!("../logo.svg");
@ -302,6 +303,7 @@ pub fn main() -> anyhow::Result<()> {
} else { } else {
None None
}; };
#[allow(clippy::cast_precision_loss)]
if file.is_empty() { if file.is_empty() {
// We read from stdin // We read from stdin
let start = Instant::now(); let start = Instant::now();
@ -320,7 +322,7 @@ pub fn main() -> anyhow::Result<()> {
}) })
} }
bulk_load( bulk_load(
loader, &loader,
stdin().lock(), stdin().lock(),
format.ok_or_else(|| { format.ok_or_else(|| {
anyhow!("The --format option must be set when loading from stdin") anyhow!("The --format option must be set when loading from stdin")
@ -370,7 +372,7 @@ pub fn main() -> anyhow::Result<()> {
if let Err(error) = { if let Err(error) = {
if file.extension().map_or(false, |e| e == OsStr::new("gz")) { if file.extension().map_or(false, |e| e == OsStr::new("gz")) {
bulk_load( bulk_load(
loader, &loader,
BufReader::new(MultiGzDecoder::new(fp)), BufReader::new(MultiGzDecoder::new(fp)),
format.unwrap_or_else(|| { format.unwrap_or_else(|| {
GraphOrDatasetFormat::from_path( GraphOrDatasetFormat::from_path(
@ -383,7 +385,7 @@ pub fn main() -> anyhow::Result<()> {
) )
} else { } else {
bulk_load( bulk_load(
loader, &loader,
BufReader::new(fp), BufReader::new(fp),
format.unwrap_or_else(|| { format.unwrap_or_else(|| {
GraphOrDatasetFormat::from_path(&file).unwrap() GraphOrDatasetFormat::from_path(&file).unwrap()
@ -515,6 +517,7 @@ pub fn main() -> anyhow::Result<()> {
for solution in solutions { for solution in solutions {
writer.write(&solution?)?; writer.write(&solution?)?;
} }
#[allow(clippy::let_underscore_must_use)]
let _ = writer.finish()?; let _ = writer.finish()?;
} }
} }
@ -542,6 +545,7 @@ pub fn main() -> anyhow::Result<()> {
result, result,
)?; )?;
} else { } else {
#[allow(clippy::let_underscore_must_use)]
let _ = QueryResultsSerializer::from_format(format) let _ = QueryResultsSerializer::from_format(format)
.write_boolean_result(stdout().lock(), result)?; .write_boolean_result(stdout().lock(), result)?;
} }
@ -641,7 +645,7 @@ pub fn main() -> anyhow::Result<()> {
} }
fn bulk_load( fn bulk_load(
loader: BulkLoader, loader: &BulkLoader,
reader: impl BufRead, reader: impl BufRead,
format: GraphOrDatasetFormat, format: GraphOrDatasetFormat,
base_iri: Option<&str>, base_iri: Option<&str>,
@ -700,8 +704,8 @@ impl GraphOrDatasetFormat {
fn from_extension(name: &str) -> anyhow::Result<Self> { fn from_extension(name: &str) -> anyhow::Result<Self> {
Ok(match (GraphFormat::from_extension(name), DatasetFormat::from_extension(name)) { Ok(match (GraphFormat::from_extension(name), DatasetFormat::from_extension(name)) {
(Some(g), Some(d)) => bail!("The file extension '{name}' can be resolved to both '{}' and '{}', not sure what to pick", g.file_extension(), d.file_extension()), (Some(g), Some(d)) => bail!("The file extension '{name}' can be resolved to both '{}' and '{}', not sure what to pick", g.file_extension(), d.file_extension()),
(Some(g), None) => GraphOrDatasetFormat::Graph(g), (Some(g), None) => Self::Graph(g),
(None, Some(d)) => GraphOrDatasetFormat::Dataset(d), (None, Some(d)) => Self::Dataset(d),
(None, None) => (None, None) =>
bail!("The file extension '{name}' is unknown") bail!("The file extension '{name}' is unknown")
}) })
@ -718,8 +722,8 @@ impl GraphOrDatasetFormat {
g.file_extension(), g.file_extension(),
d.file_extension() d.file_extension()
), ),
(Some(g), None) => GraphOrDatasetFormat::Graph(g), (Some(g), None) => Self::Graph(g),
(None, Some(d)) => GraphOrDatasetFormat::Dataset(d), (None, Some(d)) => Self::Dataset(d),
(None, None) => bail!("The media type '{name}' is unknown"), (None, None) => bail!("The media type '{name}' is unknown"),
}, },
) )
@ -846,7 +850,7 @@ fn handle_request(
.unwrap() .unwrap()
.with_body(LOGO)), .with_body(LOGO)),
("/query", "GET") => { ("/query", "GET") => {
configure_and_evaluate_sparql_query(store, &[url_query(request)], None, request) configure_and_evaluate_sparql_query(&store, &[url_query(request)], None, request)
} }
("/query", "POST") => { ("/query", "POST") => {
let content_type = let content_type =
@ -859,7 +863,7 @@ fn handle_request(
.read_to_string(&mut buffer) .read_to_string(&mut buffer)
.map_err(bad_request)?; .map_err(bad_request)?;
configure_and_evaluate_sparql_query( configure_and_evaluate_sparql_query(
store, &store,
&[url_query(request)], &[url_query(request)],
Some(buffer), Some(buffer),
request, request,
@ -872,7 +876,7 @@ fn handle_request(
.read_to_end(&mut buffer) .read_to_end(&mut buffer)
.map_err(bad_request)?; .map_err(bad_request)?;
configure_and_evaluate_sparql_query( configure_and_evaluate_sparql_query(
store, &store,
&[url_query(request), &buffer], &[url_query(request), &buffer],
None, None,
request, request,
@ -895,7 +899,7 @@ fn handle_request(
.read_to_string(&mut buffer) .read_to_string(&mut buffer)
.map_err(bad_request)?; .map_err(bad_request)?;
configure_and_evaluate_sparql_update( configure_and_evaluate_sparql_update(
store, &store,
&[url_query(request)], &[url_query(request)],
Some(buffer), Some(buffer),
request, request,
@ -908,7 +912,7 @@ fn handle_request(
.read_to_end(&mut buffer) .read_to_end(&mut buffer)
.map_err(bad_request)?; .map_err(bad_request)?;
configure_and_evaluate_sparql_update( configure_and_evaluate_sparql_update(
store, &store,
&[url_query(request), &buffer], &[url_query(request), &buffer],
None, None,
request, request,
@ -1127,7 +1131,7 @@ fn url_query_parameter<'a>(request: &'a Request, param: &str) -> Option<Cow<'a,
} }
fn configure_and_evaluate_sparql_query( fn configure_and_evaluate_sparql_query(
store: Store, store: &Store,
encoded: &[&[u8]], encoded: &[&[u8]],
mut query: Option<String>, mut query: Option<String>,
request: &Request, request: &Request,
@ -1154,7 +1158,7 @@ fn configure_and_evaluate_sparql_query(
let query = query.ok_or_else(|| bad_request("You should set the 'query' parameter"))?; let query = query.ok_or_else(|| bad_request("You should set the 'query' parameter"))?;
evaluate_sparql_query( evaluate_sparql_query(
store, store,
query, &query,
use_default_graph_as_union, use_default_graph_as_union,
default_graph_uris, default_graph_uris,
named_graph_uris, named_graph_uris,
@ -1163,14 +1167,14 @@ fn configure_and_evaluate_sparql_query(
} }
fn evaluate_sparql_query( fn evaluate_sparql_query(
store: Store, store: &Store,
query: String, query: &str,
use_default_graph_as_union: bool, use_default_graph_as_union: bool,
default_graph_uris: Vec<String>, default_graph_uris: Vec<String>,
named_graph_uris: Vec<String>, named_graph_uris: Vec<String>,
request: &Request, request: &Request,
) -> Result<Response, HttpError> { ) -> Result<Response, HttpError> {
let mut query = Query::parse(&query, Some(&base_url(request))).map_err(bad_request)?; let mut query = Query::parse(query, Some(&base_url(request))).map_err(bad_request)?;
if use_default_graph_as_union { if use_default_graph_as_union {
if !default_graph_uris.is_empty() || !named_graph_uris.is_empty() { if !default_graph_uris.is_empty() || !named_graph_uris.is_empty() {
@ -1256,7 +1260,7 @@ fn evaluate_sparql_query(
} }
fn configure_and_evaluate_sparql_update( fn configure_and_evaluate_sparql_update(
store: Store, store: &Store,
encoded: &[&[u8]], encoded: &[&[u8]],
mut update: Option<String>, mut update: Option<String>,
request: &Request, request: &Request,
@ -1283,7 +1287,7 @@ fn configure_and_evaluate_sparql_update(
let update = update.ok_or_else(|| bad_request("You should set the 'update' parameter"))?; let update = update.ok_or_else(|| bad_request("You should set the 'update' parameter"))?;
evaluate_sparql_update( evaluate_sparql_update(
store, store,
update, &update,
use_default_graph_as_union, use_default_graph_as_union,
default_graph_uris, default_graph_uris,
named_graph_uris, named_graph_uris,
@ -1292,15 +1296,15 @@ fn configure_and_evaluate_sparql_update(
} }
fn evaluate_sparql_update( fn evaluate_sparql_update(
store: Store, store: &Store,
update: String, update: &str,
use_default_graph_as_union: bool, use_default_graph_as_union: bool,
default_graph_uris: Vec<String>, default_graph_uris: Vec<String>,
named_graph_uris: Vec<String>, named_graph_uris: Vec<String>,
request: &Request, request: &Request,
) -> Result<Response, HttpError> { ) -> Result<Response, HttpError> {
let mut update = let mut update =
Update::parse(&update, Some(base_url(request).as_str())).map_err(bad_request)?; Update::parse(update, Some(base_url(request).as_str())).map_err(bad_request)?;
if use_default_graph_as_union { if use_default_graph_as_union {
if !default_graph_uris.is_empty() || !named_graph_uris.is_empty() { if !default_graph_uris.is_empty() || !named_graph_uris.is_empty() {
@ -1461,7 +1465,7 @@ fn content_negotiation<F>(
.ok_or_else(|| internal_server_error("Unknown media type")); .ok_or_else(|| internal_server_error("Unknown media type"));
} }
let mut result = None; let mut result = None;
let mut result_score = 0f32; let mut result_score = 0_f32;
for possible in header.split(',') { for possible in header.split(',') {
let (possible, parameters) = possible.split_once(';').unwrap_or((possible, "")); let (possible, parameters) = possible.split_once(';').unwrap_or((possible, ""));
@ -1730,7 +1734,7 @@ mod tests {
Ok(store_dir) Ok(store_dir)
} }
fn assert_cli_state(store_dir: TempDir, data: &'static str) -> Result<()> { fn assert_cli_state(store_dir: &TempDir, data: &'static str) -> Result<()> {
cli_command()? cli_command()?
.arg("dump") .arg("dump")
.arg("--location") .arg("--location")
@ -1961,7 +1965,7 @@ mod tests {
.success(); .success();
assert_cli_state( assert_cli_state(
store_dir, &store_dir,
"<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n", "<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n",
) )
} }
@ -2043,7 +2047,7 @@ mod tests {
.assert() .assert()
.success(); .success();
assert_cli_state( assert_cli_state(
store_dir, &store_dir,
"<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n", "<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n",
) )
} }
@ -2061,7 +2065,7 @@ mod tests {
.assert() .assert()
.success(); .success();
assert_cli_state( assert_cli_state(
store_dir, &store_dir,
"<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n", "<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n",
) )
} }
@ -2082,7 +2086,7 @@ mod tests {
.assert() .assert()
.success(); .success();
assert_cli_state( assert_cli_state(
store_dir, &store_dir,
"<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n", "<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n",
) )
} }

@ -1,3 +1,4 @@
#![allow(clippy::print_stdout)]
use anyhow::Result; use anyhow::Result;
use clap::Parser; use clap::Parser;
use oxigraph_testsuite::evaluator::TestEvaluator; use oxigraph_testsuite::evaluator::TestEvaluator;

@ -55,19 +55,6 @@ pub struct TestManifest {
manifests_to_do: VecDeque<String>, manifests_to_do: VecDeque<String>,
} }
impl TestManifest {
pub fn new<S: ToString>(manifest_urls: impl IntoIterator<Item = S>) -> Self {
Self {
graph: Graph::new(),
tests_to_do: VecDeque::new(),
manifests_to_do: manifest_urls
.into_iter()
.map(|url| url.to_string())
.collect(),
}
}
}
impl Iterator for TestManifest { impl Iterator for TestManifest {
type Item = Result<Test>; type Item = Result<Test>;
@ -84,6 +71,17 @@ impl Iterator for TestManifest {
} }
impl TestManifest { impl TestManifest {
pub fn new<S: ToString>(manifest_urls: impl IntoIterator<Item = S>) -> Self {
Self {
graph: Graph::new(),
tests_to_do: VecDeque::new(),
manifests_to_do: manifest_urls
.into_iter()
.map(|url| url.to_string())
.collect(),
}
}
fn next_test(&mut self) -> Result<Option<Test>> { fn next_test(&mut self) -> Result<Option<Test>> {
loop { loop {
let test_node = if let Some(test_node) = self.tests_to_do.pop_front() { let test_node = if let Some(test_node) = self.tests_to_do.pop_front() {
@ -107,7 +105,7 @@ impl TestManifest {
.graph .graph
.object_for_subject_predicate(&test_node, mf::NAME) .object_for_subject_predicate(&test_node, mf::NAME)
{ {
Some(c.value().to_string()) Some(c.value().to_owned())
} else { } else {
None None
}; };
@ -126,7 +124,7 @@ impl TestManifest {
.graph .graph
.object_for_subject_predicate(&test_node, rdfs::COMMENT) .object_for_subject_predicate(&test_node, rdfs::COMMENT)
{ {
Some(c.value().to_string()) Some(c.value().to_owned())
} else { } else {
None None
}; };

@ -101,9 +101,8 @@ fn evaluate_positive_json_result_syntax_test(test: &Test) -> Result<()> {
fn evaluate_negative_json_result_syntax_test(test: &Test) -> Result<()> { fn evaluate_negative_json_result_syntax_test(test: &Test) -> Result<()> {
if result_syntax_check(test, QueryResultsFormat::Json).is_ok() { if result_syntax_check(test, QueryResultsFormat::Json).is_ok() {
bail!("Oxigraph parses even if it should not {test}.") bail!("Oxigraph parses even if it should not {test}.")
} else {
Ok(())
} }
Ok(())
} }
fn evaluate_positive_xml_result_syntax_test(test: &Test) -> Result<()> { fn evaluate_positive_xml_result_syntax_test(test: &Test) -> Result<()> {
@ -113,17 +112,15 @@ fn evaluate_positive_xml_result_syntax_test(test: &Test) -> Result<()> {
fn evaluate_negative_xml_result_syntax_test(test: &Test) -> Result<()> { fn evaluate_negative_xml_result_syntax_test(test: &Test) -> Result<()> {
if result_syntax_check(test, QueryResultsFormat::Xml).is_ok() { if result_syntax_check(test, QueryResultsFormat::Xml).is_ok() {
bail!("Oxigraph parses even if it should not {test}.") bail!("Oxigraph parses even if it should not {test}.")
} else {
Ok(())
} }
Ok(())
} }
fn evaluate_negative_tsv_result_syntax_test(test: &Test) -> Result<()> { fn evaluate_negative_tsv_result_syntax_test(test: &Test) -> Result<()> {
if result_syntax_check(test, QueryResultsFormat::Tsv).is_ok() { if result_syntax_check(test, QueryResultsFormat::Tsv).is_ok() {
bail!("Oxigraph parses even if it should not {test}.") bail!("Oxigraph parses even if it should not {test}.")
} else {
Ok(())
} }
Ok(())
} }
fn result_syntax_check(test: &Test, format: QueryResultsFormat) -> Result<()> { fn result_syntax_check(test: &Test, format: QueryResultsFormat) -> Result<()> {
@ -306,7 +303,7 @@ fn load_sparql_query_result(url: &str) -> Result<StaticQueryResults> {
false, false,
) )
} else { } else {
StaticQueryResults::from_graph(load_graph(url, guess_graph_format(url)?)?) StaticQueryResults::from_graph(&load_graph(url, guess_graph_format(url)?)?)
} }
} }
@ -502,11 +499,11 @@ enum StaticQueryResults {
} }
impl StaticQueryResults { impl StaticQueryResults {
fn from_query_results(results: QueryResults, with_order: bool) -> Result<StaticQueryResults> { fn from_query_results(results: QueryResults, with_order: bool) -> Result<Self> {
Self::from_graph(to_graph(results, with_order)?) Self::from_graph(&to_graph(results, with_order)?)
} }
fn from_graph(graph: Graph) -> Result<StaticQueryResults> { fn from_graph(graph: &Graph) -> Result<Self> {
// Hack to normalize literals // Hack to normalize literals
let store = Store::new().unwrap(); let store = Store::new().unwrap();
for t in graph.iter() { for t in graph.iter() {
@ -519,9 +516,7 @@ impl StaticQueryResults {
if let Some(result_set) = graph.subject_for_predicate_object(rdf::TYPE, rs::RESULT_SET) { if let Some(result_set) = graph.subject_for_predicate_object(rdf::TYPE, rs::RESULT_SET) {
if let Some(bool) = graph.object_for_subject_predicate(result_set, rs::BOOLEAN) { if let Some(bool) = graph.object_for_subject_predicate(result_set, rs::BOOLEAN) {
// Boolean query // Boolean query
Ok(StaticQueryResults::Boolean( Ok(Self::Boolean(bool == Literal::from(true).as_ref().into()))
bool == Literal::from(true).as_ref().into(),
))
} else { } else {
// Regular query // Regular query
let mut variables: Vec<Variable> = graph let mut variables: Vec<Variable> = graph
@ -584,7 +579,7 @@ impl StaticQueryResults {
let ordered = solutions.iter().all(|(_, index)| index.is_some()); let ordered = solutions.iter().all(|(_, index)| index.is_some());
Ok(StaticQueryResults::Solutions { Ok(Self::Solutions {
variables, variables,
solutions: solutions solutions: solutions
.into_iter() .into_iter()
@ -595,7 +590,7 @@ impl StaticQueryResults {
} }
} else { } else {
graph.canonicalize(); graph.canonicalize();
Ok(StaticQueryResults::Graph(graph)) Ok(Self::Graph(graph))
} }
} }
} }

@ -3,7 +3,7 @@ use oxigraph_testsuite::evaluator::TestEvaluator;
use oxigraph_testsuite::manifest::TestManifest; use oxigraph_testsuite::manifest::TestManifest;
use oxigraph_testsuite::sparql_evaluator::register_sparql_tests; use oxigraph_testsuite::sparql_evaluator::register_sparql_tests;
fn run_testsuite(manifest_url: &str, ignored_tests: Vec<&str>) -> Result<()> { fn run_testsuite(manifest_url: &str, ignored_tests: &[&str]) -> Result<()> {
let mut evaluator = TestEvaluator::default(); let mut evaluator = TestEvaluator::default();
register_sparql_tests(&mut evaluator); register_sparql_tests(&mut evaluator);
let manifest = TestManifest::new(vec![manifest_url]); let manifest = TestManifest::new(vec![manifest_url]);
@ -31,7 +31,7 @@ fn run_testsuite(manifest_url: &str, ignored_tests: Vec<&str>) -> Result<()> {
fn sparql10_w3c_query_syntax_testsuite() -> Result<()> { fn sparql10_w3c_query_syntax_testsuite() -> Result<()> {
run_testsuite( run_testsuite(
"https://w3c.github.io/rdf-tests/sparql/sparql10/manifest-syntax.ttl", "https://w3c.github.io/rdf-tests/sparql/sparql10/manifest-syntax.ttl",
vec![ &[
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/syntax-sparql3/manifest#syn-bad-26", // tokenizer "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/syntax-sparql3/manifest#syn-bad-26", // tokenizer
], ],
) )
@ -39,7 +39,7 @@ fn sparql10_w3c_query_syntax_testsuite() -> Result<()> {
#[test] #[test]
fn sparql10_w3c_query_evaluation_testsuite() -> Result<()> { fn sparql10_w3c_query_evaluation_testsuite() -> Result<()> {
run_testsuite("https://w3c.github.io/rdf-tests/sparql/sparql10/manifest-evaluation.ttl", vec![ run_testsuite("https://w3c.github.io/rdf-tests/sparql/sparql10/manifest-evaluation.ttl", &[
//Multiple writing of the same xsd:integer. Our system does strong normalization. //Multiple writing of the same xsd:integer. Our system does strong normalization.
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/distinct/manifest#distinct-1", "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/distinct/manifest#distinct-1",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/distinct/manifest#distinct-9", "http://www.w3.org/2001/sw/DataAccess/tests/data-r2/distinct/manifest#distinct-9",
@ -75,7 +75,7 @@ fn sparql10_w3c_query_evaluation_testsuite() -> Result<()> {
fn sparql11_query_w3c_evaluation_testsuite() -> Result<()> { fn sparql11_query_w3c_evaluation_testsuite() -> Result<()> {
run_testsuite( run_testsuite(
"https://w3c.github.io/rdf-tests/sparql/sparql11/manifest-sparql11-query.ttl", "https://w3c.github.io/rdf-tests/sparql/sparql11/manifest-sparql11-query.ttl",
vec![ &[
//BNODE() scope is currently wrong //BNODE() scope is currently wrong
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/functions/manifest#bnode01", "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/functions/manifest#bnode01",
//SERVICE name from a BGP //SERVICE name from a BGP
@ -88,7 +88,7 @@ fn sparql11_query_w3c_evaluation_testsuite() -> Result<()> {
fn sparql11_federation_w3c_evaluation_testsuite() -> Result<()> { fn sparql11_federation_w3c_evaluation_testsuite() -> Result<()> {
run_testsuite( run_testsuite(
"https://w3c.github.io/rdf-tests/sparql/sparql11/manifest-sparql11-fed.ttl", "https://w3c.github.io/rdf-tests/sparql/sparql11/manifest-sparql11-fed.ttl",
vec![ &[
// Problem during service evaluation order // Problem during service evaluation order
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/service/manifest#service5", "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/service/manifest#service5",
], ],
@ -99,7 +99,7 @@ fn sparql11_federation_w3c_evaluation_testsuite() -> Result<()> {
fn sparql11_update_w3c_evaluation_testsuite() -> Result<()> { fn sparql11_update_w3c_evaluation_testsuite() -> Result<()> {
run_testsuite( run_testsuite(
"https://w3c.github.io/rdf-tests/sparql/sparql11/manifest-sparql11-update.ttl", "https://w3c.github.io/rdf-tests/sparql/sparql11/manifest-sparql11-update.ttl",
vec![ &[
// We allow multiple INSERT DATA with the same blank nodes // We allow multiple INSERT DATA with the same blank nodes
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/syntax-update-1/manifest#test_54", "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/syntax-update-1/manifest#test_54",
], ],
@ -110,7 +110,7 @@ fn sparql11_update_w3c_evaluation_testsuite() -> Result<()> {
fn sparql11_json_w3c_evaluation_testsuite() -> Result<()> { fn sparql11_json_w3c_evaluation_testsuite() -> Result<()> {
run_testsuite( run_testsuite(
"https://w3c.github.io/rdf-tests/sparql/sparql11/json-res/manifest.ttl", "https://w3c.github.io/rdf-tests/sparql/sparql11/json-res/manifest.ttl",
vec![], &[],
) )
} }
@ -118,7 +118,7 @@ fn sparql11_json_w3c_evaluation_testsuite() -> Result<()> {
fn sparql11_tsv_w3c_evaluation_testsuite() -> Result<()> { fn sparql11_tsv_w3c_evaluation_testsuite() -> Result<()> {
run_testsuite( run_testsuite(
"https://w3c.github.io/rdf-tests/sparql/sparql11/csv-tsv-res/manifest.ttl", "https://w3c.github.io/rdf-tests/sparql/sparql11/csv-tsv-res/manifest.ttl",
vec![ &[
// We do not run CSVResultFormatTest tests yet // We do not run CSVResultFormatTest tests yet
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/csv-tsv-res/manifest#csv01", "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/csv-tsv-res/manifest#csv01",
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/csv-tsv-res/manifest#csv02", "http://www.w3.org/2009/sparql/docs/tests/data-sparql11/csv-tsv-res/manifest#csv02",
@ -131,7 +131,7 @@ fn sparql11_tsv_w3c_evaluation_testsuite() -> Result<()> {
fn sparql_star_syntax_testsuite() -> Result<()> { fn sparql_star_syntax_testsuite() -> Result<()> {
run_testsuite( run_testsuite(
"https://w3c.github.io/rdf-star/tests/sparql/syntax/manifest.ttl", "https://w3c.github.io/rdf-star/tests/sparql/syntax/manifest.ttl",
vec![], &[],
) )
} }
@ -139,6 +139,6 @@ fn sparql_star_syntax_testsuite() -> Result<()> {
fn sparql_star_eval_testsuite() -> Result<()> { fn sparql_star_eval_testsuite() -> Result<()> {
run_testsuite( run_testsuite(
"https://w3c.github.io/rdf-star/tests/sparql/eval/manifest.ttl", "https://w3c.github.io/rdf-star/tests/sparql/eval/manifest.ttl",
vec![], &[],
) )
} }

Loading…
Cancel
Save