Applies the oxigraph crate's Clippy lint configuration to the complete project

pull/475/head
Authored by Tpt 2 years ago; committed by Thomas Tanon
parent 1d02098b70
commit 3e0f6b5405
  1. 111
      .cargo/config.toml
  2. 25
      js/src/model.rs
  3. 3
      js/src/store.rs
  4. 1
      js/src/utils.rs
  5. 4
      lib/benches/store.rs
  6. 32
      lib/oxrdf/src/interning.rs
  7. 10
      lib/oxrdf/src/literal.rs
  8. 6
      lib/oxsdatatypes/src/date_time.rs
  9. 11
      lib/oxsdatatypes/src/decimal.rs
  10. 1
      lib/oxsdatatypes/src/double.rs
  11. 2
      lib/oxsdatatypes/src/float.rs
  12. 4
      lib/oxsdatatypes/src/integer.rs
  13. 4
      lib/sparesults/src/csv.rs
  14. 4
      lib/sparesults/src/error.rs
  15. 71
      lib/sparesults/src/json.rs
  16. 28
      lib/sparesults/src/lib.rs
  17. 2
      lib/sparesults/src/solution.rs
  18. 6
      lib/sparesults/src/xml.rs
  19. 277
      lib/spargebra/src/algebra.rs
  20. 19
      lib/spargebra/src/query.rs
  21. 4
      lib/spargebra/src/term.rs
  22. 31
      lib/spargebra/src/update.rs
  23. 126
      lib/src/lib.rs
  24. 10
      lib/tests/store.rs
  25. 6
      oxrocksdb-sys/build.rs
  26. 32
      python/src/io.rs
  27. 6
      python/src/lib.rs
  28. 2
      python/src/model.rs
  29. 8
      python/src/sparql.rs
  30. 27
      python/src/store.rs
  31. 64
      server/src/main.rs
  32. 1
      testsuite/src/main.rs
  33. 28
      testsuite/src/manifest.rs
  34. 25
      testsuite/src/sparql_evaluator.rs
  35. 20
      testsuite/tests/sparql.rs

@ -0,0 +1,111 @@
[build]
# Extra rustc and Clippy warning flags applied to every crate in the workspace.
# All lint names use the hyphenated spelling (rustc accepts both `-` and `_`,
# normalizing hyphens to underscores internally); keep new entries consistent
# with that style and with the rustc-first, then-alphabetical Clippy grouping.
rustflags = [
    # rustc lints
    "-Wtrivial-casts",
    "-Wtrivial-numeric-casts",
    "-Wunsafe-code",
    "-Wunused-lifetimes",
    "-Wunused-qualifications",
    # Clippy lints (pedantic/restriction selections), alphabetical
    "-Wclippy::cast-lossless",
    "-Wclippy::cast-possible-truncation",
    "-Wclippy::cast-possible-wrap",
    "-Wclippy::cast-precision-loss",
    "-Wclippy::cast-sign-loss",
    "-Wclippy::checked-conversions",
    "-Wclippy::cloned-instead-of-copied",
    "-Wclippy::copy-iterator",
    "-Wclippy::dbg-macro",
    "-Wclippy::debug-assert-with-mut-call",
    "-Wclippy::decimal-literal-representation",
    "-Wclippy::empty-line-after-outer-attr",
    "-Wclippy::empty-enum",
    "-Wclippy::enum-glob-use",
    "-Wclippy::enum-variant-names",
    "-Wclippy::expect-used",
    "-Wclippy::expl-impl-clone-on-copy",
    "-Wclippy::explicit-deref-methods",
    "-Wclippy::explicit-into-iter-loop",
    "-Wclippy::explicit-iter-loop",
    "-Wclippy::fallible-impl-from",
    "-Wclippy::filter-map-next",
    "-Wclippy::flat-map-option",
    "-Wclippy::from-iter-instead-of-collect",
    "-Wclippy::get-unwrap",
    "-Wclippy::if-not-else",
    "-Wclippy::implicit-clone",
    "-Wclippy::implicit-saturating-sub",
    "-Wclippy::imprecise-flops",
    "-Wclippy::inconsistent-struct-constructor",
    "-Wclippy::inefficient-to-string",
    "-Wclippy::inline-always",
    "-Wclippy::invalid-upcast-comparisons",
    "-Wclippy::items-after-statements",
    "-Wclippy::large-digit-groups",
    "-Wclippy::large-stack-arrays",
    "-Wclippy::large-types-passed-by-value",
    "-Wclippy::let-underscore-must-use",
    "-Wclippy::let-unit-value",
    "-Wclippy::linkedlist",
    "-Wclippy::macro-use-imports",
    "-Wclippy::manual-ok-or",
    "-Wclippy::map-flatten",
    "-Wclippy::map-unwrap-or",
    "-Wclippy::match-bool",
    "-Wclippy::match-same-arms",
    "-Wclippy::match-wildcard-for-single-variants",
    "-Wclippy::maybe-infinite-iter",
    "-Wclippy::mem-forget",
    "-Wclippy::multiple-inherent-impl",
    "-Wclippy::mut-mut",
    "-Wclippy::mutex-integer",
    "-Wclippy::naive-bytecount",
    "-Wclippy::needless-bitwise-bool",
    "-Wclippy::needless-continue",
    "-Wclippy::needless-pass-by-value",
    "-Wclippy::non-ascii-literal",
    "-Wclippy::nonstandard-macro-braces",
    "-Wclippy::path-buf-push-overwrite",
    "-Wclippy::print-stderr",
    "-Wclippy::print-stdout",
    "-Wclippy::range-minus-one",
    "-Wclippy::range-plus-one",
    "-Wclippy::rc-mutex",
    "-Wclippy::redundant-else",
    "-Wclippy::redundant-pub-crate",
    "-Wclippy::ref-binding-to-reference",
    "-Wclippy::ref-option-ref",
    "-Wclippy::rest-pat-in-fully-bound-structs",
    "-Wclippy::same-functions-in-if-condition",
    "-Wclippy::str-to-string",
    "-Wclippy::string-add",
    "-Wclippy::string-add-assign",
    "-Wclippy::string-lit-as-bytes",
    "-Wclippy::string-to-string",
    "-Wclippy::suboptimal-flops",
    "-Wclippy::suspicious-operation-groupings",
    "-Wclippy::todo",
    "-Wclippy::trait-duplication-in-bounds",
    "-Wclippy::transmute-ptr-to-ptr",
    "-Wclippy::trivial-regex",
    "-Wclippy::trivially-copy-pass-by-ref",
    "-Wclippy::type-repetition-in-bounds",
    "-Wclippy::unicode-not-nfc",
    "-Wclippy::unimplemented",
    "-Wclippy::unnecessary-self-imports",
    "-Wclippy::unnecessary-wraps",
    "-Wclippy::unneeded-field-pattern",
    "-Wclippy::unnested-or-patterns",
    "-Wclippy::unreadable-literal",
    "-Wclippy::unseparated-literal-suffix",
    "-Wclippy::unused-async",
    "-Wclippy::unused-self",
    "-Wclippy::use-debug",
    "-Wclippy::use-self",
    "-Wclippy::used-underscore-binding",
    "-Wclippy::useless-let-if-seq",
    "-Wclippy::useless-transmute",
    "-Wclippy::verbose-bit-mask",
    "-Wclippy::verbose-file-reads",
    "-Wclippy::wildcard-dependencies",
    "-Wclippy::wrong-self-convention",
    "-Wclippy::zero-sized-map-values",
]

@ -1,4 +1,9 @@
#![allow(dead_code, clippy::inherent_to_string)]
#![allow(
dead_code,
clippy::inherent_to_string,
clippy::unused_self,
clippy::use_self
)]
use crate::format_err;
use crate::utils::to_err;
@ -313,7 +318,7 @@ impl JsDefaultGraph {
#[wasm_bindgen(js_name = toString)]
pub fn to_string(&self) -> String {
"DEFAULT".to_string()
"DEFAULT".to_owned()
}
pub fn equals(&self, other: &JsValue) -> bool {
@ -476,19 +481,19 @@ impl From<JsTerm> for JsValue {
impl From<NamedNode> for JsTerm {
fn from(node: NamedNode) -> Self {
JsTerm::NamedNode(node.into())
Self::NamedNode(node.into())
}
}
impl From<BlankNode> for JsTerm {
fn from(node: BlankNode) -> Self {
JsTerm::BlankNode(node.into())
Self::BlankNode(node.into())
}
}
impl From<Literal> for JsTerm {
fn from(literal: Literal) -> Self {
JsTerm::Literal(literal.into())
Self::Literal(literal.into())
}
}
@ -527,20 +532,20 @@ impl From<GraphName> for JsTerm {
match name {
GraphName::NamedNode(node) => node.into(),
GraphName::BlankNode(node) => node.into(),
GraphName::DefaultGraph => JsTerm::DefaultGraph(JsDefaultGraph {}),
GraphName::DefaultGraph => Self::DefaultGraph(JsDefaultGraph {}),
}
}
}
impl From<Variable> for JsTerm {
fn from(variable: Variable) -> Self {
JsTerm::Variable(variable.into())
Self::Variable(variable.into())
}
}
impl From<Triple> for JsTerm {
fn from(triple: Triple) -> Self {
JsTerm::Quad(triple.into())
Self::Quad(triple.into())
}
}
@ -552,7 +557,7 @@ impl From<Box<Triple>> for JsTerm {
impl From<Quad> for JsTerm {
fn from(quad: Quad) -> Self {
JsTerm::Quad(quad.into())
Self::Quad(quad.into())
}
}
@ -660,7 +665,7 @@ impl TryFrom<JsTerm> for GraphName {
"The literal {} is not a possible graph name",
literal.inner
)),
JsTerm::DefaultGraph(_) => Ok(GraphName::DefaultGraph),
JsTerm::DefaultGraph(_) => Ok(Self::DefaultGraph),
JsTerm::Variable(variable) => Err(format_err!(
"The variable {} is not a possible RDF term",
variable.inner

@ -1,3 +1,5 @@
#![allow(clippy::use_self)]
use crate::format_err;
use crate::model::*;
use crate::utils::to_err;
@ -17,6 +19,7 @@ pub struct JsStore {
#[wasm_bindgen(js_class = Store)]
impl JsStore {
#[wasm_bindgen(constructor)]
#[allow(clippy::use_self)]
pub fn new(quads: Option<Box<[JsValue]>>) -> Result<JsStore, JsValue> {
console_error_panic_hook::set_once();

@ -11,6 +11,7 @@ macro_rules! format_err {
};
}
#[allow(clippy::needless_pass_by_value)]
pub fn to_err(e: impl ToString) -> JsValue {
JsValue::from(Error::new(&e.to_string()))
}

@ -175,8 +175,8 @@ fn sparql_parsing(c: &mut Criterion) {
let kind = parts.next().unwrap();
let operation = parts.next().unwrap();
match kind {
"query" => RawOperation::Query(operation.to_string()),
"update" => RawOperation::Update(operation.to_string()),
"query" => RawOperation::Query(operation.to_owned()),
"update" => RawOperation::Update(operation.to_owned()),
_ => panic!("Unexpected operation kind {kind}"),
}
})

@ -59,10 +59,8 @@ impl Interner {
}
}
fn resolve(&self, key: &Key) -> &str {
self.string_for_hash
.get(&key.0)
.expect("Interned key not found")
fn resolve(&self, key: Key) -> &str {
&self.string_for_hash[&key.0]
}
}
@ -79,7 +77,7 @@ impl Key {
}
fn impossible() -> Self {
Key(u64::MAX)
Self(u64::MAX)
}
}
@ -101,8 +99,8 @@ impl InternedNamedNode {
})
}
pub fn decode_from<'a>(&self, interner: &'a Interner) -> NamedNodeRef<'a> {
NamedNodeRef::new_unchecked(interner.resolve(&self.id))
pub fn decode_from(self, interner: &Interner) -> NamedNodeRef {
NamedNodeRef::new_unchecked(interner.resolve(self.id))
}
pub fn first() -> Self {
@ -138,8 +136,8 @@ impl InternedBlankNode {
})
}
pub fn decode_from<'a>(&self, interner: &'a Interner) -> BlankNodeRef<'a> {
BlankNodeRef::new_unchecked(interner.resolve(&self.id))
pub fn decode_from(self, interner: &Interner) -> BlankNodeRef {
BlankNodeRef::new_unchecked(interner.resolve(self.id))
}
pub fn next(self) -> Self {
@ -203,18 +201,18 @@ impl InternedLiteral {
pub fn decode_from<'a>(&self, interner: &'a Interner) -> LiteralRef<'a> {
match self {
InternedLiteral::String { value_id } => {
LiteralRef::new_simple_literal(interner.resolve(value_id))
Self::String { value_id } => {
LiteralRef::new_simple_literal(interner.resolve(*value_id))
}
InternedLiteral::LanguageTaggedString {
Self::LanguageTaggedString {
value_id,
language_id,
} => LiteralRef::new_language_tagged_literal_unchecked(
interner.resolve(value_id),
interner.resolve(language_id),
interner.resolve(*value_id),
interner.resolve(*language_id),
),
InternedLiteral::TypedLiteral { value_id, datatype } => LiteralRef::new_typed_literal(
interner.resolve(value_id),
Self::TypedLiteral { value_id, datatype } => LiteralRef::new_typed_literal(
interner.resolve(*value_id),
datatype.decode_from(interner),
),
}
@ -503,7 +501,7 @@ impl Hasher for IdentityHasher {
}
fn write(&mut self, _bytes: &[u8]) {
unimplemented!()
unreachable!("Should only be used on u64 values")
}
fn write_u64(&mut self, i: u64) {

@ -268,9 +268,9 @@ impl From<f32> for Literal {
fn from(value: f32) -> Self {
Self(LiteralContent::TypedLiteral {
value: if value == f32::INFINITY {
"INF".to_string()
"INF".to_owned()
} else if value == f32::NEG_INFINITY {
"-INF".to_string()
"-INF".to_owned()
} else {
value.to_string()
},
@ -284,9 +284,9 @@ impl From<f64> for Literal {
fn from(value: f64) -> Self {
Self(LiteralContent::TypedLiteral {
value: if value == f64::INFINITY {
"INF".to_string()
"INF".to_owned()
} else if value == f64::NEG_INFINITY {
"-INF".to_string()
"-INF".to_owned()
} else {
value.to_string()
},
@ -616,7 +616,7 @@ impl PartialEq<LiteralRef<'_>> for Literal {
}
#[inline]
pub(crate) fn print_quoted_str(string: &str, f: &mut impl Write) -> fmt::Result {
pub fn print_quoted_str(string: &str, f: &mut impl Write) -> fmt::Result {
f.write_char('"')?;
for c in string.chars() {
match c {

@ -1299,7 +1299,11 @@ impl TryFrom<DayTimeDuration> for TimezoneOffset {
#[inline]
fn try_from(value: DayTimeDuration) -> Result<Self, DateTimeError> {
let result = Self::new((value.minutes() + value.hours() * 60) as i16)?;
let result = Self::new(
(value.minutes() + value.hours() * 60)
.try_into()
.map_err(|_| DATE_TIME_OVERFLOW)?,
)?;
if DayTimeDuration::from(result) == value {
Ok(result)
} else {

@ -314,6 +314,7 @@ impl TryFrom<Double> for Decimal {
type Error = DecimalOverflowError;
#[inline]
#[allow(clippy::cast_precision_loss, clippy::cast_possible_truncation)]
fn try_from(value: Double) -> Result<Self, DecimalOverflowError> {
let shifted = value * Double::from(DECIMAL_PART_POW as f64);
if shifted.is_finite()
@ -331,6 +332,7 @@ impl TryFrom<Double> for Decimal {
impl From<Decimal> for Float {
#[inline]
#[allow(clippy::cast_precision_loss)]
fn from(value: Decimal) -> Self {
((value.value as f32) / (DECIMAL_PART_POW as f32)).into()
}
@ -338,6 +340,7 @@ impl From<Decimal> for Float {
impl From<Decimal> for Double {
#[inline]
#[allow(clippy::cast_precision_loss)]
fn from(value: Decimal) -> Self {
((value.value as f64) / (DECIMAL_PART_POW as f64)).into()
}
@ -769,8 +772,8 @@ mod tests {
#[test]
fn from_bool() {
assert_eq!(Decimal::from(false), Decimal::from(0u8));
assert_eq!(Decimal::from(true), Decimal::from(1u8));
assert_eq!(Decimal::from(false), Decimal::from(0_u8));
assert_eq!(Decimal::from(true), Decimal::from(1_u8));
}
#[test]
@ -793,7 +796,7 @@ mod tests {
assert!(Decimal::try_from(Float::from(f32::MIN)).is_err());
assert!(Decimal::try_from(Float::from(f32::MAX)).is_err());
assert!(
Decimal::try_from(Float::from(1672507302466.))
Decimal::try_from(Float::from(1_672_507_302_466.))
.unwrap()
.checked_sub(Decimal::from_str("1672507302466")?)
.unwrap()
@ -818,7 +821,7 @@ mod tests {
Some(Decimal::from_str("-123.1")?)
);
assert!(
Decimal::try_from(Double::from(1672507302466.))
Decimal::try_from(Double::from(1_672_507_302_466.))
.unwrap()
.checked_sub(Decimal::from_str("1672507302466")?)
.unwrap()

@ -156,6 +156,7 @@ impl From<Boolean> for Double {
impl From<Integer> for Double {
#[inline]
#[allow(clippy::cast_precision_loss)]
fn from(value: Integer) -> Self {
(i64::from(value) as f64).into()
}

@ -136,6 +136,7 @@ impl From<Boolean> for Float {
impl From<Integer> for Float {
#[inline]
#[allow(clippy::cast_precision_loss)]
fn from(value: Integer) -> Self {
(i64::from(value) as f32).into()
}
@ -143,6 +144,7 @@ impl From<Integer> for Float {
impl From<Double> for Float {
#[inline]
#[allow(clippy::cast_possible_truncation)]
fn from(value: Double) -> Self {
Self {
value: f64::from(value) as f32,

@ -258,7 +258,7 @@ mod tests {
assert!(Integer::try_from(Float::from(f32::MIN)).is_err());
assert!(Integer::try_from(Float::from(f32::MAX)).is_err());
assert!(
Integer::try_from(Float::from(1672507302466.))
Integer::try_from(Float::from(1_672_507_302_466.))
.unwrap()
.checked_sub(Integer::from_str("1672507302466")?)
.unwrap()
@ -283,7 +283,7 @@ mod tests {
Some(Integer::from_str("-123")?)
);
assert!(
Integer::try_from(Double::from(1672507302466.))
Integer::try_from(Double::from(1_672_507_302_466.))
.unwrap()
.checked_sub(Integer::from_str("1672507302466").unwrap())
.unwrap()

@ -529,7 +529,7 @@ mod tests {
if let TsvQueryResultsReader::Solutions {
mut solutions,
variables,
} = TsvQueryResultsReader::read("\n\n".as_bytes())?
} = TsvQueryResultsReader::read(b"\n\n".as_slice())?
{
assert_eq!(variables, Vec::<Variable>::new());
assert_eq!(solutions.read_next()?, Some(Vec::new()));
@ -561,7 +561,7 @@ mod tests {
if let TsvQueryResultsReader::Solutions {
mut solutions,
variables,
} = TsvQueryResultsReader::read("?a\n".as_bytes())?
} = TsvQueryResultsReader::read(b"?a\n".as_slice())?
{
assert_eq!(variables, vec![Variable::new_unchecked("a")]);
assert_eq!(solutions.read_next()?, None);

@ -74,7 +74,7 @@ pub struct SyntaxError {
}
#[derive(Debug)]
pub(crate) enum SyntaxErrorKind {
pub enum SyntaxErrorKind {
Xml(quick_xml::Error),
Term(TermParseError),
Msg { msg: String },
@ -117,7 +117,7 @@ impl From<SyntaxError> for io::Error {
fn from(error: SyntaxError) -> Self {
match error.inner {
SyntaxErrorKind::Xml(error) => match error {
quick_xml::Error::Io(error) => io::Error::new(error.kind(), error),
quick_xml::Error::Io(error) => Self::new(error.kind(), error),
quick_xml::Error::UnexpectedEof(error) => {
Self::new(io::ErrorKind::UnexpectedEof, error)
}

@ -30,14 +30,14 @@ pub struct JsonSolutionsWriter<W: Write> {
}
impl<W: Write> JsonSolutionsWriter<W> {
pub fn start(sink: W, variables: Vec<Variable>) -> io::Result<Self> {
pub fn start(sink: W, variables: &[Variable]) -> io::Result<Self> {
let mut writer = JsonWriter::from_writer(sink);
writer.write_event(JsonEvent::StartObject)?;
writer.write_event(JsonEvent::ObjectKey("head"))?;
writer.write_event(JsonEvent::StartObject)?;
writer.write_event(JsonEvent::ObjectKey("vars"))?;
writer.write_event(JsonEvent::StartArray)?;
for variable in &variables {
for variable in variables {
writer.write_event(JsonEvent::String(variable.as_str()))?;
}
writer.write_event(JsonEvent::EndArray)?;
@ -155,7 +155,7 @@ impl<R: BufRead> JsonQueryResultsReader<R> {
if let Some(buffered_bindings) = buffered_bindings.take() {
let mut mapping = BTreeMap::default();
for (i, var) in extracted_variables.iter().enumerate() {
mapping.insert(var.as_str().to_string(), i);
mapping.insert(var.as_str().to_owned(), i);
}
output_iter = Some(Self::Solutions {
variables: extracted_variables,
@ -192,7 +192,7 @@ impl<R: BufRead> JsonQueryResultsReader<R> {
if let Some(variables) = variables {
let mut mapping = BTreeMap::default();
for (i, var) in variables.iter().enumerate() {
mapping.insert(var.as_str().to_string(), i);
mapping.insert(var.as_str().to_owned(), i);
}
return Ok(Self::Solutions {
variables,
@ -201,34 +201,32 @@ impl<R: BufRead> JsonQueryResultsReader<R> {
mapping,
},
});
} else {
// We buffer all results before being able to read the header
let mut bindings = Vec::new();
let mut variables = Vec::new();
let mut values = Vec::new();
loop {
match reader.read_event(&mut buffer)? {
JsonEvent::StartObject => (),
JsonEvent::EndObject => {
bindings.push((take(&mut variables), take(&mut values)));
}
JsonEvent::EndArray | JsonEvent::Eof => {
buffered_bindings = Some(bindings);
break;
}
JsonEvent::ObjectKey(key) => {
variables.push(key.to_string());
values.push(read_value(&mut reader, &mut buffer, 0)?);
}
_ => {
return Err(SyntaxError::msg(
"Invalid result serialization",
)
.into())
}
}
// We buffer all results before being able to read the header
let mut bindings = Vec::new();
let mut variables = Vec::new();
let mut values = Vec::new();
loop {
match reader.read_event(&mut buffer)? {
JsonEvent::StartObject => (),
JsonEvent::EndObject => {
bindings.push((take(&mut variables), take(&mut values)));
}
JsonEvent::EndArray | JsonEvent::Eof => {
buffered_bindings = Some(bindings);
break;
}
JsonEvent::ObjectKey(key) => {
variables.push(key.to_owned());
values.push(read_value(&mut reader, &mut buffer, 0)?);
}
_ => {
return Err(
SyntaxError::msg("Invalid result serialization").into()
)
}
}
};
}
}
"boolean" => {
return if let JsonEvent::Boolean(v) = reader.read_event(&mut buffer)? {
@ -323,12 +321,6 @@ fn read_value<R: BufRead>(
buffer: &mut Vec<u8>,
number_of_recursive_calls: usize,
) -> Result<Term, ParseError> {
if number_of_recursive_calls == MAX_NUMBER_OF_NESTED_TRIPLES {
return Err(SyntaxError::msg(format!(
"Too many nested triples ({MAX_NUMBER_OF_NESTED_TRIPLES}). The parser fails here to avoid a stack overflow."
))
.into());
}
enum Type {
Uri,
BNode,
@ -343,6 +335,13 @@ fn read_value<R: BufRead>(
Lang,
Datatype,
}
if number_of_recursive_calls == MAX_NUMBER_OF_NESTED_TRIPLES {
return Err(SyntaxError::msg(format!(
"Too many nested triples ({MAX_NUMBER_OF_NESTED_TRIPLES}). The parser fails here to avoid a stack overflow."
))
.into());
}
let mut state = None;
let mut t = None;
let mut value = None;

@ -45,10 +45,10 @@ impl QueryResultsFormat {
#[inline]
pub fn iri(self) -> &'static str {
match self {
QueryResultsFormat::Xml => "http://www.w3.org/ns/formats/SPARQL_Results_XML",
QueryResultsFormat::Json => "http://www.w3.org/ns/formats/SPARQL_Results_JSON",
QueryResultsFormat::Csv => "http://www.w3.org/ns/formats/SPARQL_Results_CSV",
QueryResultsFormat::Tsv => "http://www.w3.org/ns/formats/SPARQL_Results_TSV",
Self::Xml => "http://www.w3.org/ns/formats/SPARQL_Results_XML",
Self::Json => "http://www.w3.org/ns/formats/SPARQL_Results_JSON",
Self::Csv => "http://www.w3.org/ns/formats/SPARQL_Results_CSV",
Self::Tsv => "http://www.w3.org/ns/formats/SPARQL_Results_TSV",
}
}
/// The format [IANA media type](https://tools.ietf.org/html/rfc2046).
@ -61,10 +61,10 @@ impl QueryResultsFormat {
#[inline]
pub fn media_type(self) -> &'static str {
match self {
QueryResultsFormat::Xml => "application/sparql-results+xml",
QueryResultsFormat::Json => "application/sparql-results+json",
QueryResultsFormat::Csv => "text/csv; charset=utf-8",
QueryResultsFormat::Tsv => "text/tab-separated-values; charset=utf-8",
Self::Xml => "application/sparql-results+xml",
Self::Json => "application/sparql-results+json",
Self::Csv => "text/csv; charset=utf-8",
Self::Tsv => "text/tab-separated-values; charset=utf-8",
}
}
@ -78,10 +78,10 @@ impl QueryResultsFormat {
#[inline]
pub fn file_extension(self) -> &'static str {
match self {
QueryResultsFormat::Xml => "srx",
QueryResultsFormat::Json => "srj",
QueryResultsFormat::Csv => "csv",
QueryResultsFormat::Tsv => "tsv",
Self::Xml => "srx",
Self::Json => "srj",
Self::Csv => "csv",
Self::Tsv => "tsv",
}
}
@ -408,10 +408,10 @@ impl QueryResultsSerializer {
Ok(SolutionsWriter {
formatter: match self.format {
QueryResultsFormat::Xml => {
SolutionsWriterKind::Xml(XmlSolutionsWriter::start(writer, variables)?)
SolutionsWriterKind::Xml(XmlSolutionsWriter::start(writer, &variables)?)
}
QueryResultsFormat::Json => {
SolutionsWriterKind::Json(JsonSolutionsWriter::start(writer, variables)?)
SolutionsWriterKind::Json(JsonSolutionsWriter::start(writer, &variables)?)
}
QueryResultsFormat::Csv => {
SolutionsWriterKind::Csv(CsvSolutionsWriter::start(writer, variables)?)

@ -118,7 +118,7 @@ impl QuerySolution {
impl<V: Into<Rc<Vec<Variable>>>, S: Into<Vec<Option<Term>>>> From<(V, S)> for QuerySolution {
#[inline]
fn from((v, s): (V, S)) -> Self {
QuerySolution {
Self {
variables: v.into(),
values: s.into(),
}

@ -37,11 +37,11 @@ pub struct XmlSolutionsWriter<W: Write> {
}
impl<W: Write> XmlSolutionsWriter<W> {
pub fn start(sink: W, variables: Vec<Variable>) -> io::Result<Self> {
pub fn start(sink: W, variables: &[Variable]) -> io::Result<Self> {
Self::do_start(sink, variables).map_err(map_xml_error)
}
fn do_start(sink: W, variables: Vec<Variable>) -> Result<Self, quick_xml::Error> {
fn do_start(sink: W, variables: &[Variable]) -> Result<Self, quick_xml::Error> {
let mut writer = Writer::new(sink);
writer.write_event(Event::Decl(BytesDecl::new("1.0", None, None)))?;
let mut sparql_open = BytesStart::new("sparql");
@ -50,7 +50,7 @@ impl<W: Write> XmlSolutionsWriter<W> {
writer
.create_element("head")
.write_inner_content(|writer| {
for variable in &variables {
for variable in variables {
writer
.create_element("variable")
.with_attribute(("name", variable.as_str()))

@ -591,6 +591,138 @@ pub enum GraphPattern {
},
}
impl fmt::Display for GraphPattern {
    /// Serializes this algebra pattern back to SPARQL surface syntax.
    ///
    /// Each variant is written out as the SPARQL construct it was parsed from;
    /// the catch-all arm delegates to `SparqlGraphRootPattern`, which is
    /// presumably responsible for solution-modifier patterns such as
    /// `Project`/`OrderBy`/`Slice` — TODO confirm against its definition.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            // Basic graph pattern: triple patterns joined by " ."
            Self::Bgp { patterns } => {
                for pattern in patterns {
                    write!(f, "{pattern} .")?
                }
                Ok(())
            }
            // Property-path triple pattern.
            Self::Path {
                subject,
                path,
                object,
            } => write!(f, "{subject} {path} {object} ."),
            Self::Join { left, right } => {
                // Some right-hand patterns (OPTIONAL/MINUS/BIND/FILTER and,
                // when enabled, LATERAL) would otherwise attach themselves to
                // `left`, so they are wrapped in an explicit group.
                #[allow(clippy::match_same_arms)]
                match right.as_ref() {
                    Self::LeftJoin { .. }
                    | Self::Minus { .. }
                    | Self::Extend { .. }
                    | Self::Filter { .. } => {
                        // The second block might be considered as a modification of the first one.
                        write!(f, "{left} {{ {right} }}")
                    }
                    // Same wrapping, kept as a separate arm because it is
                    // feature-gated behind SPARQL SEP-0006 (lateral joins).
                    #[cfg(feature = "sep-0006")]
                    Self::Lateral { .. } => {
                        write!(f, "{left} {{ {right} }}")
                    }
                    _ => write!(f, "{left} {right}"),
                }
            }
            // OPTIONAL, with the join condition (if any) emitted as an inner FILTER.
            Self::LeftJoin {
                left,
                right,
                expression,
            } => {
                if let Some(expr) = expression {
                    write!(f, "{left} OPTIONAL {{ {right} FILTER({expr}) }}")
                } else {
                    write!(f, "{left} OPTIONAL {{ {right} }}")
                }
            }
            #[cfg(feature = "sep-0006")]
            Self::Lateral { left, right } => {
                write!(f, "{left} LATERAL {{ {right} }}")
            }
            Self::Filter { expr, inner } => {
                write!(f, "{inner} FILTER({expr})")
            }
            Self::Union { left, right } => write!(f, "{{ {left} }} UNION {{ {right} }}",),
            Self::Graph { name, inner } => {
                write!(f, "GRAPH {name} {{ {inner} }}")
            }
            // BIND(expr AS ?var) appended after the inner pattern.
            Self::Extend {
                inner,
                variable,
                expression,
            } => write!(f, "{inner} BIND({expression} AS {variable})"),
            Self::Minus { left, right } => write!(f, "{left} MINUS {{ {right} }}"),
            Self::Service {
                name,
                inner,
                silent,
            } => {
                if *silent {
                    write!(f, "SERVICE SILENT {name} {{ {inner} }}")
                } else {
                    write!(f, "SERVICE {name} {{ {inner} }}")
                }
            }
            // VALUES ( ?v1 ?v2 ) { ( t11 t12 ) ( t21 t22 ) }; unbound cells
            // are serialized as UNDEF.
            Self::Values {
                variables,
                bindings,
            } => {
                write!(f, "VALUES ( ")?;
                for var in variables {
                    write!(f, "{var} ")?;
                }
                write!(f, ") {{ ")?;
                for row in bindings {
                    write!(f, "( ")?;
                    for val in row {
                        match val {
                            Some(val) => write!(f, "{val} "),
                            None => write!(f, "UNDEF "),
                        }?;
                    }
                    write!(f, ") ")?;
                }
                write!(f, " }}")
            }
            // GROUP BY has no stand-alone SPARQL form, so it is rendered as a
            // nested sub-SELECT projecting the aggregates and group variables.
            Self::Group {
                inner,
                variables,
                aggregates,
            } => {
                write!(f, "{{SELECT")?;
                for (a, v) in aggregates {
                    write!(f, " ({v} AS {a})")?;
                }
                for b in variables {
                    write!(f, " {b}")?;
                }
                write!(f, " WHERE {{ {inner} }}")?;
                // GROUP BY is omitted when there are no grouping variables
                // (i.e. an implicit single-group aggregation).
                if !variables.is_empty() {
                    write!(f, " GROUP BY")?;
                    for v in variables {
                        write!(f, " {v}")?;
                    }
                }
                write!(f, "}}")
            }
            // Everything else is wrapped in a group and handed to the
            // root-pattern serializer with no dataset clause.
            p => write!(
                f,
                "{{ {} }}",
                SparqlGraphRootPattern {
                    pattern: p,
                    dataset: None
                }
            ),
        }
    }
}
impl Default for GraphPattern {
    /// The default graph pattern is an empty basic graph pattern
    /// (a BGP containing no triple patterns).
    fn default() -> Self {
        let patterns = Vec::default();
        Self::Bgp { patterns }
    }
}
impl GraphPattern {
/// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html).
pub(crate) fn fmt_sse(&self, f: &mut impl fmt::Write) -> fmt::Result {
@ -793,146 +925,14 @@ impl GraphPattern {
}
}
}
}
impl fmt::Display for GraphPattern {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Self::Bgp { patterns } => {
for pattern in patterns {
write!(f, "{pattern} .")?
}
Ok(())
}
Self::Path {
subject,
path,
object,
} => write!(f, "{subject} {path} {object} ."),
Self::Join { left, right } => {
match right.as_ref() {
Self::LeftJoin { .. }
| Self::Minus { .. }
| Self::Extend { .. }
| Self::Filter { .. } => {
// The second block might be considered as a modification of the first one.
write!(f, "{left} {{ {right} }}")
}
#[cfg(feature = "sep-0006")]
Self::Lateral { .. } => {
write!(f, "{left} {{ {right} }}")
}
_ => write!(f, "{left} {right}"),
}
}
Self::LeftJoin {
left,
right,
expression,
} => {
if let Some(expr) = expression {
write!(f, "{left} OPTIONAL {{ {right} FILTER({expr}) }}")
} else {
write!(f, "{left} OPTIONAL {{ {right} }}")
}
}
#[cfg(feature = "sep-0006")]
Self::Lateral { left, right } => {
write!(f, "{left} LATERAL {{ {right} }}")
}
Self::Filter { expr, inner } => {
write!(f, "{inner} FILTER({expr})")
}
Self::Union { left, right } => write!(f, "{{ {left} }} UNION {{ {right} }}",),
Self::Graph { name, inner } => {
write!(f, "GRAPH {name} {{ {inner} }}")
}
Self::Extend {
inner,
variable,
expression,
} => write!(f, "{inner} BIND({expression} AS {variable})"),
Self::Minus { left, right } => write!(f, "{left} MINUS {{ {right} }}"),
Self::Service {
name,
inner,
silent,
} => {
if *silent {
write!(f, "SERVICE SILENT {name} {{ {inner} }}")
} else {
write!(f, "SERVICE {name} {{ {inner} }}")
}
}
Self::Values {
variables,
bindings,
} => {
write!(f, "VALUES ( ")?;
for var in variables {
write!(f, "{var} ")?;
}
write!(f, ") {{ ")?;
for row in bindings {
write!(f, "( ")?;
for val in row {
match val {
Some(val) => write!(f, "{val} "),
None => write!(f, "UNDEF "),
}?;
}
write!(f, ") ")?;
}
write!(f, " }}")
}
Self::Group {
inner,
variables,
aggregates,
} => {
write!(f, "{{SELECT")?;
for (a, v) in aggregates {
write!(f, " ({v} AS {a})")?;
}
for b in variables {
write!(f, " {b}")?;
}
write!(f, " WHERE {{ {inner} }}")?;
if !variables.is_empty() {
write!(f, " GROUP BY")?;
for v in variables {
write!(f, " {v}")?;
}
}
write!(f, "}}")
}
p => write!(
f,
"{{ {} }}",
SparqlGraphRootPattern {
pattern: p,
dataset: None
}
),
}
}
}
impl Default for GraphPattern {
fn default() -> Self {
Self::Bgp {
patterns: Vec::default(),
}
}
}
impl GraphPattern {
/// Calls `callback` on each [in-scope variable](https://www.w3.org/TR/sparql11-query/#variableScope) occurrence.
pub fn on_in_scope_variable<'a>(&'a self, mut callback: impl FnMut(&'a Variable)) {
self.lookup_in_scope_variables(&mut callback)
}
fn lookup_in_scope_variables<'a>(&'a self, callback: &mut impl FnMut(&'a Variable)) {
#[allow(clippy::match_same_arms)]
match self {
Self::Bgp { patterns } => {
for pattern in patterns {
@ -981,7 +981,6 @@ impl GraphPattern {
inner.lookup_in_scope_variables(callback);
}
Self::Minus { left, .. } => left.lookup_in_scope_variables(callback),
Self::Service { inner, .. } => inner.lookup_in_scope_variables(callback),
Self::Group {
variables,
aggregates,
@ -994,17 +993,13 @@ impl GraphPattern {
callback(v);
}
}
Self::Values { variables, .. } => {
for v in variables {
callback(v);
}
}
Self::Project { variables, .. } => {
Self::Values { variables, .. } | Self::Project { variables, .. } => {
for v in variables {
callback(v);
}
}
Self::Filter { inner, .. }
Self::Service { inner, .. }
| Self::Filter { inner, .. }
| Self::OrderBy { inner, .. }
| Self::Distinct { inner }
| Self::Reduced { inner }

@ -67,15 +67,14 @@ impl Query {
/// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html).
pub fn to_sse(&self) -> String {
let mut buffer = String::new();
self.fmt_sse(&mut buffer)
.expect("Unexpected error during SSE formatting");
self.fmt_sse(&mut buffer).unwrap();
buffer
}
/// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html).
fn fmt_sse(&self, f: &mut impl fmt::Write) -> fmt::Result {
match self {
Query::Select {
Self::Select {
dataset,
pattern,
base_iri,
@ -97,7 +96,7 @@ impl Query {
}
Ok(())
}
Query::Construct {
Self::Construct {
template,
dataset,
pattern,
@ -129,7 +128,7 @@ impl Query {
}
Ok(())
}
Query::Describe {
Self::Describe {
dataset,
pattern,
base_iri,
@ -153,7 +152,7 @@ impl Query {
}
Ok(())
}
Query::Ask {
Self::Ask {
dataset,
pattern,
base_iri,
@ -184,7 +183,7 @@ impl Query {
impl fmt::Display for Query {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
Query::Select {
Self::Select {
dataset,
pattern,
base_iri,
@ -201,7 +200,7 @@ impl fmt::Display for Query {
}
)
}
Query::Construct {
Self::Construct {
template,
dataset,
pattern,
@ -227,7 +226,7 @@ impl fmt::Display for Query {
}
)
}
Query::Describe {
Self::Describe {
dataset,
pattern,
base_iri,
@ -248,7 +247,7 @@ impl fmt::Display for Query {
}
)
}
Query::Ask {
Self::Ask {
dataset,
pattern,
base_iri,

@ -764,9 +764,7 @@ impl TriplePattern {
object: object.into(),
}
}
}
impl TriplePattern {
/// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html).
pub(crate) fn fmt_sse(&self, f: &mut impl Write) -> fmt::Result {
write!(f, "(triple ")?;
@ -885,9 +883,7 @@ impl QuadPattern {
graph_name: graph_name.into(),
}
}
}
impl QuadPattern {
/// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html).
pub(crate) fn fmt_sse(&self, f: &mut impl Write) -> fmt::Result {
if self.graph_name != GraphNamePattern::DefaultGraph {

@ -33,8 +33,7 @@ impl Update {
/// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html).
pub fn to_sse(&self) -> String {
let mut buffer = String::new();
self.fmt_sse(&mut buffer)
.expect("Unexpected error during SSE formatting");
self.fmt_sse(&mut buffer).unwrap();
buffer
}
@ -124,7 +123,7 @@ impl GraphUpdateOperation {
/// Formats using the [SPARQL S-Expression syntax](https://jena.apache.org/documentation/notes/sse.html).
fn fmt_sse(&self, f: &mut impl fmt::Write) -> fmt::Result {
match self {
GraphUpdateOperation::InsertData { data } => {
Self::InsertData { data } => {
write!(f, "(insertData (")?;
for (i, t) in data.iter().enumerate() {
if i > 0 {
@ -134,7 +133,7 @@ impl GraphUpdateOperation {
}
write!(f, "))")
}
GraphUpdateOperation::DeleteData { data } => {
Self::DeleteData { data } => {
write!(f, "(deleteData (")?;
for (i, t) in data.iter().enumerate() {
if i > 0 {
@ -144,7 +143,7 @@ impl GraphUpdateOperation {
}
write!(f, "))")
}
GraphUpdateOperation::DeleteInsert {
Self::DeleteInsert {
delete,
insert,
using,
@ -182,7 +181,7 @@ impl GraphUpdateOperation {
}
write!(f, ")")
}
GraphUpdateOperation::Load {
Self::Load {
silent,
source,
destination,
@ -195,7 +194,7 @@ impl GraphUpdateOperation {
destination.fmt_sse(f)?;
write!(f, ")")
}
GraphUpdateOperation::Clear { silent, graph } => {
Self::Clear { silent, graph } => {
write!(f, "(clear ")?;
if *silent {
write!(f, "silent ")?;
@ -203,14 +202,14 @@ impl GraphUpdateOperation {
graph.fmt_sse(f)?;
write!(f, ")")
}
GraphUpdateOperation::Create { silent, graph } => {
Self::Create { silent, graph } => {
write!(f, "(create ")?;
if *silent {
write!(f, "silent ")?;
}
write!(f, "{graph})")
}
GraphUpdateOperation::Drop { silent, graph } => {
Self::Drop { silent, graph } => {
write!(f, "(drop ")?;
if *silent {
write!(f, "silent ")?;
@ -225,17 +224,17 @@ impl GraphUpdateOperation {
impl fmt::Display for GraphUpdateOperation {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
GraphUpdateOperation::InsertData { data } => {
Self::InsertData { data } => {
writeln!(f, "INSERT DATA {{")?;
write_quads(data, f)?;
write!(f, "}}")
}
GraphUpdateOperation::DeleteData { data } => {
Self::DeleteData { data } => {
writeln!(f, "DELETE DATA {{")?;
write_ground_quads(data, f)?;
write!(f, "}}")
}
GraphUpdateOperation::DeleteInsert {
Self::DeleteInsert {
delete,
insert,
using,
@ -274,7 +273,7 @@ impl fmt::Display for GraphUpdateOperation {
}
)
}
GraphUpdateOperation::Load {
Self::Load {
silent,
source,
destination,
@ -289,21 +288,21 @@ impl fmt::Display for GraphUpdateOperation {
}
Ok(())
}
GraphUpdateOperation::Clear { silent, graph } => {
Self::Clear { silent, graph } => {
write!(f, "CLEAR ")?;
if *silent {
write!(f, "SILENT ")?;
}
write!(f, "{graph}")
}
GraphUpdateOperation::Create { silent, graph } => {
Self::Create { silent, graph } => {
write!(f, "CREATE ")?;
if *silent {
write!(f, "SILENT ")?;
}
write!(f, "GRAPH {graph}")
}
GraphUpdateOperation::Drop { silent, graph } => {
Self::Drop { silent, graph } => {
write!(f, "DROP ")?;
if *silent {
write!(f, "SILENT ")?;

@ -4,132 +4,6 @@
#![cfg_attr(docsrs, feature(doc_auto_cfg))]
#![doc(test(attr(deny(warnings))))]
#![deny(unsafe_code)]
#![warn(
trivial_casts,
trivial_numeric_casts,
unused_lifetimes,
unused_qualifications,
clippy::cast_lossless,
clippy::cast_possible_truncation,
clippy::cast_possible_wrap,
clippy::cast_precision_loss,
clippy::cast_sign_loss,
clippy::checked_conversions,
clippy::cloned_instead_of_copied,
clippy::copy_iterator,
clippy::dbg_macro,
clippy::debug_assert_with_mut_call,
clippy::decimal_literal_representation,
//TODO clippy::doc_markdown,
// clippy::else_if_without_else,
clippy::empty_line_after_outer_attr,
clippy::empty_enum,
clippy::enum_glob_use,
clippy::expect_used,
clippy::expl_impl_clone_on_copy,
clippy::explicit_deref_methods,
clippy::explicit_into_iter_loop,
clippy::explicit_iter_loop,
clippy::fallible_impl_from,
clippy::filter_map_next,
clippy::flat_map_option,
clippy::from_iter_instead_of_collect,
clippy::get_unwrap,
clippy::if_not_else,
// clippy::if_then_some_else_none,
clippy::implicit_clone,
clippy::implicit_saturating_sub,
clippy::imprecise_flops,
clippy::inconsistent_struct_constructor,
// clippy::indexing_slicing,
clippy::inefficient_to_string,
clippy::inline_always,
clippy::invalid_upcast_comparisons,
clippy::items_after_statements,
clippy::large_digit_groups,
clippy::large_stack_arrays,
clippy::large_types_passed_by_value,
clippy::let_underscore_must_use,
clippy::let_unit_value,
clippy::linkedlist,
clippy::macro_use_imports,
clippy::manual_ok_or,
//TODO clippy::map_err_ignore,
clippy::map_flatten,
clippy::map_unwrap_or,
clippy::match_bool,
// clippy::match_on_vec_items,
clippy::match_same_arms,
clippy::match_wildcard_for_single_variants,
clippy::maybe_infinite_iter,
clippy::mem_forget,
//TODO clippy::missing_const_for_fn,
//TODO clippy::module_name_repetitions,
clippy::multiple_crate_versions,
clippy::multiple_inherent_impl,
//TODO clippy::must_use_candidate,
clippy::mut_mut,
clippy::mutex_integer,
clippy::naive_bytecount,
clippy::needless_bitwise_bool,
clippy::needless_continue,
clippy::needless_pass_by_value,
clippy::non_ascii_literal,
clippy::nonstandard_macro_braces,
//TODO clippy::option_if_let_else,
// clippy::panic, clippy::panic_in_result_fn, does not work well with tests
clippy::path_buf_push_overwrite,
clippy::print_stderr,
clippy::print_stdout,
clippy::range_minus_one,
clippy::range_plus_one,
clippy::rc_mutex,
clippy::enum_variant_names,
//TODO clippy::redundant_closure_for_method_calls,
clippy::redundant_else,
clippy::redundant_pub_crate,
clippy::ref_binding_to_reference,
clippy::ref_option_ref,
clippy::rest_pat_in_fully_bound_structs,
clippy::same_functions_in_if_condition,
// clippy::shadow_reuse,
// clippy::shadow_same,
// clippy::shadow_unrelated,
// clippy::single_match_else,
clippy::str_to_string,
clippy::string_add,
clippy::string_add_assign,
clippy::string_lit_as_bytes,
clippy::string_to_string,
clippy::suboptimal_flops,
clippy::suspicious_operation_groupings,
clippy::todo,
clippy::trait_duplication_in_bounds,
clippy::transmute_ptr_to_ptr,
clippy::trivial_regex,
clippy::trivially_copy_pass_by_ref,
clippy::type_repetition_in_bounds,
clippy::unicode_not_nfc,
clippy::unimplemented,
clippy::unnecessary_self_imports,
clippy::unnecessary_wraps,
clippy::unneeded_field_pattern,
clippy::unnested_or_patterns,
clippy::unreadable_literal,
clippy::unseparated_literal_suffix,
clippy::unused_async,
clippy::unused_self,
clippy::use_debug,
clippy::use_self,
clippy::used_underscore_binding,
clippy::useless_let_if_seq,
clippy::useless_transmute,
clippy::verbose_bit_mask,
clippy::verbose_file_reads,
clippy::wildcard_dependencies,
clippy::zero_sized_map_values,
clippy::wrong_self_convention,
)]
pub mod io;
pub mod sparql;

@ -18,6 +18,8 @@ use std::iter::once;
use std::path::{Path, PathBuf};
#[cfg(target_os = "linux")]
use std::process::Command;
#[allow(clippy::non_ascii_literal)]
const DATA: &str = r#"
@prefix schema: <http://schema.org/> .
@prefix wd: <http://www.wikidata.org/entity/> .
@ -31,6 +33,8 @@ wd:Q90 a schema:City ;
schema:url "https://www.paris.fr/"^^xsd:anyURI ;
schema:postalCode "75001" .
"#;
#[allow(clippy::non_ascii_literal)]
const GRAPH_DATA: &str = r#"
@prefix schema: <http://schema.org/> .
@prefix wd: <http://www.wikidata.org/entity/> .
@ -70,7 +74,7 @@ fn quads(graph_name: impl Into<GraphNameRef<'static>>) -> Vec<QuadRef<'static>>
QuadRef::new(
paris,
name,
LiteralRef::new_language_tagged_literal_unchecked("la ville lumière", "fr"),
LiteralRef::new_language_tagged_literal_unchecked("la ville lumi\u{e8}re", "fr"),
graph_name,
),
QuadRef::new(paris, country, france, graph_name),
@ -534,6 +538,8 @@ impl AsRef<Path> for TempDir {
#[cfg(not(target_family = "wasm"))]
impl Drop for TempDir {
fn drop(&mut self) {
let _ = remove_dir_all(&self.0);
if self.0.is_dir() {
remove_dir_all(&self.0).unwrap();
}
}
}

@ -25,9 +25,9 @@ fn bindgen_rocksdb() {
.allowlist_type("rocksdb_.*")
.allowlist_var("rocksdb_.*")
.generate()
.expect("unable to generate rocksdb bindings")
.unwrap()
.write_to_file(PathBuf::from(var("OUT_DIR").unwrap()).join("bindings.rs"))
.expect("unable to write rocksdb bindings");
.unwrap();
}
fn build_rocksdb() {
@ -138,7 +138,7 @@ fn build_rocksdb() {
// Remove POSIX-specific sources
lib_sources = lib_sources
.iter()
.cloned()
.copied()
.filter(|file| {
!matches!(
*file,

@ -54,11 +54,10 @@ pub fn parse(
py: Python<'_>,
) -> PyResult<PyObject> {
let input = if let Ok(path) = input.extract::<&str>(py) {
PyReadable::from_file(path, py)
PyReadable::from_file(path, py).map_err(map_io_err)?
} else {
PyReadable::from_data(input, py)
}
.map_err(map_io_err)?;
};
if let Some(graph_format) = GraphFormat::from_media_type(mime_type) {
let mut parser = GraphParser::from_format(graph_format);
if let Some(base_iri) = base_iri {
@ -119,11 +118,10 @@ pub fn parse(
#[pyfunction]
pub fn serialize(input: &PyAny, output: PyObject, mime_type: &str, py: Python<'_>) -> PyResult<()> {
let output = if let Ok(path) = output.extract::<&str>(py) {
PyWritable::from_file(path, py)
PyWritable::from_file(path, py).map_err(map_io_err)?
} else {
PyWritable::from_data(output)
}
.map_err(map_io_err)?;
};
if let Some(graph_format) = GraphFormat::from_media_type(mime_type) {
let mut writer = GraphSerializer::from_format(graph_format)
.triple_writer(output)
@ -195,7 +193,7 @@ impl PyQuadReader {
}
}
pub(crate) enum PyReadable {
pub enum PyReadable {
Bytes(Cursor<Vec<u8>>),
Io(BufReader<PyIo>),
File(BufReader<File>),
@ -208,14 +206,14 @@ impl PyReadable {
)))
}
pub fn from_data(data: PyObject, py: Python<'_>) -> io::Result<Self> {
Ok(if let Ok(bytes) = data.extract::<Vec<u8>>(py) {
pub fn from_data(data: PyObject, py: Python<'_>) -> Self {
if let Ok(bytes) = data.extract::<Vec<u8>>(py) {
Self::Bytes(Cursor::new(bytes))
} else if let Ok(string) = data.extract::<String>(py) {
Self::Bytes(Cursor::new(string.into_bytes()))
} else {
Self::Io(BufReader::new(PyIo(data)))
})
}
}
}
@ -247,7 +245,7 @@ impl BufRead for PyReadable {
}
}
pub(crate) enum PyWritable {
pub enum PyWritable {
Io(BufWriter<PyIo>),
File(BufWriter<File>),
}
@ -259,8 +257,8 @@ impl PyWritable {
)))
}
pub fn from_data(data: PyObject) -> io::Result<Self> {
Ok(Self::Io(BufWriter::new(PyIo(data))))
pub fn from_data(data: PyObject) -> Self {
Self::Io(BufWriter::new(PyIo(data)))
}
}
@ -280,7 +278,7 @@ impl Write for PyWritable {
}
}
pub(crate) struct PyIo(PyObject);
pub struct PyIo(PyObject);
impl Read for PyIo {
fn read(&mut self, mut buf: &mut [u8]) -> io::Result<usize> {
@ -326,7 +324,7 @@ fn to_io_err(error: impl Into<PyErr>) -> io::Error {
io::Error::new(io::ErrorKind::Other, error.into())
}
pub(crate) fn map_io_err(error: io::Error) -> PyErr {
pub fn map_io_err(error: io::Error) -> PyErr {
if error.get_ref().map_or(false, |s| s.is::<PyErr>()) {
*error.into_inner().unwrap().downcast().unwrap()
} else {
@ -334,7 +332,7 @@ pub(crate) fn map_io_err(error: io::Error) -> PyErr {
}
}
pub(crate) fn map_parse_error(error: ParseError) -> PyErr {
pub fn map_parse_error(error: ParseError) -> PyErr {
match error {
ParseError::Syntax(error) => PySyntaxError::new_err(error.to_string()),
ParseError::Io(error) => map_io_err(error),
@ -346,7 +344,7 @@ pub(crate) fn map_parse_error(error: ParseError) -> PyErr {
///
/// Code from pyo3: https://github.com/PyO3/pyo3/blob/a67180c8a42a0bc0fdc45b651b62c0644130cf47/src/python.rs#L366
#[allow(unsafe_code)]
pub(crate) fn allow_threads_unsafe<T>(f: impl FnOnce() -> T) -> T {
pub fn allow_threads_unsafe<T>(f: impl FnOnce() -> T) -> T {
struct RestoreGuard {
tstate: *mut pyo3::ffi::PyThreadState,
}

@ -1,3 +1,9 @@
#![allow(
clippy::redundant_pub_crate,
clippy::used_underscore_binding,
clippy::unused_self,
clippy::trivially_copy_pass_by_ref
)]
mod io;
mod model;
mod sparql;

@ -434,7 +434,7 @@ pub struct PyDefaultGraph {}
impl From<PyDefaultGraph> for GraphName {
fn from(_: PyDefaultGraph) -> Self {
GraphName::DefaultGraph
Self::DefaultGraph
}
}

@ -61,12 +61,12 @@ pub fn parse_query(
Ok(query)
}
pub fn query_results_to_python(py: Python<'_>, results: QueryResults) -> PyResult<PyObject> {
Ok(match results {
pub fn query_results_to_python(py: Python<'_>, results: QueryResults) -> PyObject {
match results {
QueryResults::Solutions(inner) => PyQuerySolutions { inner }.into_py(py),
QueryResults::Graph(inner) => PyQueryTriples { inner }.into_py(py),
QueryResults::Boolean(b) => b.into_py(py),
})
}
}
/// Tuple associating variables and terms that are the result of a SPARQL ``SELECT`` query.
@ -229,7 +229,7 @@ impl PyQueryTriples {
}
}
pub(crate) fn map_evaluation_error(error: EvaluationError) -> PyErr {
pub fn map_evaluation_error(error: EvaluationError) -> PyErr {
match error {
EvaluationError::Parsing(error) => PySyntaxError::new_err(error.to_string()),
EvaluationError::Storage(error) => map_storage_error(error),

@ -295,7 +295,7 @@ impl PyStore {
)?;
let results =
allow_threads_unsafe(|| self.inner.query(query)).map_err(map_evaluation_error)?;
query_results_to_python(py, results)
Ok(query_results_to_python(py, results))
}
/// Executes a `SPARQL 1.1 update <https://www.w3.org/TR/sparql11-update/>`_.
@ -392,11 +392,10 @@ impl PyStore {
None
};
let input = if let Ok(path) = input.extract::<&str>(py) {
PyReadable::from_file(path, py)
PyReadable::from_file(path, py).map_err(map_io_err)?
} else {
PyReadable::from_data(input, py)
}
.map_err(map_io_err)?;
};
py.allow_threads(|| {
if let Some(graph_format) = GraphFormat::from_media_type(mime_type) {
self.inner
@ -475,11 +474,10 @@ impl PyStore {
None
};
let input = if let Ok(path) = input.extract::<&str>(py) {
PyReadable::from_file(path, py)
PyReadable::from_file(path, py).map_err(map_io_err)?
} else {
PyReadable::from_data(input, py)
}
.map_err(map_io_err)?;
};
py.allow_threads(|| {
if let Some(graph_format) = GraphFormat::from_media_type(mime_type) {
self.inner
@ -548,11 +546,10 @@ impl PyStore {
py: Python<'_>,
) -> PyResult<()> {
let output = if let Ok(path) = output.extract::<&str>(py) {
PyWritable::from_file(path, py)
PyWritable::from_file(path, py).map_err(map_io_err)?
} else {
PyWritable::from_data(output)
}
.map_err(map_io_err)?;
};
let from_graph_name = if let Some(graph_name) = from_graph {
Some(GraphName::from(&PyGraphNameRef::try_from(graph_name)?))
} else {
@ -757,8 +754,8 @@ impl PyStore {
self.inner.len().map_err(map_storage_error)
}
fn __contains__(&self, quad: PyQuad) -> PyResult<bool> {
self.inner.contains(&quad).map_err(map_storage_error)
fn __contains__(&self, quad: &PyQuad) -> PyResult<bool> {
self.inner.contains(quad).map_err(map_storage_error)
}
fn __iter__(&self) -> QuadIter {
@ -845,21 +842,21 @@ pub fn extract_quads_pattern<'a>(
))
}
pub(crate) fn map_storage_error(error: StorageError) -> PyErr {
pub fn map_storage_error(error: StorageError) -> PyErr {
match error {
StorageError::Io(error) => PyIOError::new_err(error.to_string()),
_ => PyRuntimeError::new_err(error.to_string()),
}
}
pub(crate) fn map_loader_error(error: LoaderError) -> PyErr {
pub fn map_loader_error(error: LoaderError) -> PyErr {
match error {
LoaderError::Storage(error) => map_storage_error(error),
LoaderError::Parsing(error) => map_parse_error(error),
}
}
pub(crate) fn map_serializer_error(error: SerializerError) -> PyErr {
pub fn map_serializer_error(error: SerializerError) -> PyErr {
match error {
SerializerError::Storage(error) => map_storage_error(error),
SerializerError::Io(error) => PyIOError::new_err(error.to_string()),

@ -1,3 +1,4 @@
#![allow(clippy::print_stderr, clippy::cast_precision_loss, clippy::use_debug)]
use anyhow::{anyhow, bail, Context, Error};
use clap::{Parser, Subcommand};
use flate2::read::MultiGzDecoder;
@ -27,7 +28,7 @@ use std::time::{Duration, Instant};
use std::{fmt, fs, str};
use url::form_urlencoded;
const MAX_SPARQL_BODY_SIZE: u64 = 1_048_576;
const MAX_SPARQL_BODY_SIZE: u64 = 0x0010_0000;
const HTTP_TIMEOUT: Duration = Duration::from_secs(60);
const HTML_ROOT_PAGE: &str = include_str!("../templates/query.html");
const LOGO: &str = include_str!("../logo.svg");
@ -302,6 +303,7 @@ pub fn main() -> anyhow::Result<()> {
} else {
None
};
#[allow(clippy::cast_precision_loss)]
if file.is_empty() {
// We read from stdin
let start = Instant::now();
@ -320,7 +322,7 @@ pub fn main() -> anyhow::Result<()> {
})
}
bulk_load(
loader,
&loader,
stdin().lock(),
format.ok_or_else(|| {
anyhow!("The --format option must be set when loading from stdin")
@ -370,7 +372,7 @@ pub fn main() -> anyhow::Result<()> {
if let Err(error) = {
if file.extension().map_or(false, |e| e == OsStr::new("gz")) {
bulk_load(
loader,
&loader,
BufReader::new(MultiGzDecoder::new(fp)),
format.unwrap_or_else(|| {
GraphOrDatasetFormat::from_path(
@ -383,7 +385,7 @@ pub fn main() -> anyhow::Result<()> {
)
} else {
bulk_load(
loader,
&loader,
BufReader::new(fp),
format.unwrap_or_else(|| {
GraphOrDatasetFormat::from_path(&file).unwrap()
@ -515,6 +517,7 @@ pub fn main() -> anyhow::Result<()> {
for solution in solutions {
writer.write(&solution?)?;
}
#[allow(clippy::let_underscore_must_use)]
let _ = writer.finish()?;
}
}
@ -542,6 +545,7 @@ pub fn main() -> anyhow::Result<()> {
result,
)?;
} else {
#[allow(clippy::let_underscore_must_use)]
let _ = QueryResultsSerializer::from_format(format)
.write_boolean_result(stdout().lock(), result)?;
}
@ -641,7 +645,7 @@ pub fn main() -> anyhow::Result<()> {
}
fn bulk_load(
loader: BulkLoader,
loader: &BulkLoader,
reader: impl BufRead,
format: GraphOrDatasetFormat,
base_iri: Option<&str>,
@ -700,8 +704,8 @@ impl GraphOrDatasetFormat {
fn from_extension(name: &str) -> anyhow::Result<Self> {
Ok(match (GraphFormat::from_extension(name), DatasetFormat::from_extension(name)) {
(Some(g), Some(d)) => bail!("The file extension '{name}' can be resolved to both '{}' and '{}', not sure what to pick", g.file_extension(), d.file_extension()),
(Some(g), None) => GraphOrDatasetFormat::Graph(g),
(None, Some(d)) => GraphOrDatasetFormat::Dataset(d),
(Some(g), None) => Self::Graph(g),
(None, Some(d)) => Self::Dataset(d),
(None, None) =>
bail!("The file extension '{name}' is unknown")
})
@ -718,8 +722,8 @@ impl GraphOrDatasetFormat {
g.file_extension(),
d.file_extension()
),
(Some(g), None) => GraphOrDatasetFormat::Graph(g),
(None, Some(d)) => GraphOrDatasetFormat::Dataset(d),
(Some(g), None) => Self::Graph(g),
(None, Some(d)) => Self::Dataset(d),
(None, None) => bail!("The media type '{name}' is unknown"),
},
)
@ -846,7 +850,7 @@ fn handle_request(
.unwrap()
.with_body(LOGO)),
("/query", "GET") => {
configure_and_evaluate_sparql_query(store, &[url_query(request)], None, request)
configure_and_evaluate_sparql_query(&store, &[url_query(request)], None, request)
}
("/query", "POST") => {
let content_type =
@ -859,7 +863,7 @@ fn handle_request(
.read_to_string(&mut buffer)
.map_err(bad_request)?;
configure_and_evaluate_sparql_query(
store,
&store,
&[url_query(request)],
Some(buffer),
request,
@ -872,7 +876,7 @@ fn handle_request(
.read_to_end(&mut buffer)
.map_err(bad_request)?;
configure_and_evaluate_sparql_query(
store,
&store,
&[url_query(request), &buffer],
None,
request,
@ -895,7 +899,7 @@ fn handle_request(
.read_to_string(&mut buffer)
.map_err(bad_request)?;
configure_and_evaluate_sparql_update(
store,
&store,
&[url_query(request)],
Some(buffer),
request,
@ -908,7 +912,7 @@ fn handle_request(
.read_to_end(&mut buffer)
.map_err(bad_request)?;
configure_and_evaluate_sparql_update(
store,
&store,
&[url_query(request), &buffer],
None,
request,
@ -1127,7 +1131,7 @@ fn url_query_parameter<'a>(request: &'a Request, param: &str) -> Option<Cow<'a,
}
fn configure_and_evaluate_sparql_query(
store: Store,
store: &Store,
encoded: &[&[u8]],
mut query: Option<String>,
request: &Request,
@ -1154,7 +1158,7 @@ fn configure_and_evaluate_sparql_query(
let query = query.ok_or_else(|| bad_request("You should set the 'query' parameter"))?;
evaluate_sparql_query(
store,
query,
&query,
use_default_graph_as_union,
default_graph_uris,
named_graph_uris,
@ -1163,14 +1167,14 @@ fn configure_and_evaluate_sparql_query(
}
fn evaluate_sparql_query(
store: Store,
query: String,
store: &Store,
query: &str,
use_default_graph_as_union: bool,
default_graph_uris: Vec<String>,
named_graph_uris: Vec<String>,
request: &Request,
) -> Result<Response, HttpError> {
let mut query = Query::parse(&query, Some(&base_url(request))).map_err(bad_request)?;
let mut query = Query::parse(query, Some(&base_url(request))).map_err(bad_request)?;
if use_default_graph_as_union {
if !default_graph_uris.is_empty() || !named_graph_uris.is_empty() {
@ -1256,7 +1260,7 @@ fn evaluate_sparql_query(
}
fn configure_and_evaluate_sparql_update(
store: Store,
store: &Store,
encoded: &[&[u8]],
mut update: Option<String>,
request: &Request,
@ -1283,7 +1287,7 @@ fn configure_and_evaluate_sparql_update(
let update = update.ok_or_else(|| bad_request("You should set the 'update' parameter"))?;
evaluate_sparql_update(
store,
update,
&update,
use_default_graph_as_union,
default_graph_uris,
named_graph_uris,
@ -1292,15 +1296,15 @@ fn configure_and_evaluate_sparql_update(
}
fn evaluate_sparql_update(
store: Store,
update: String,
store: &Store,
update: &str,
use_default_graph_as_union: bool,
default_graph_uris: Vec<String>,
named_graph_uris: Vec<String>,
request: &Request,
) -> Result<Response, HttpError> {
let mut update =
Update::parse(&update, Some(base_url(request).as_str())).map_err(bad_request)?;
Update::parse(update, Some(base_url(request).as_str())).map_err(bad_request)?;
if use_default_graph_as_union {
if !default_graph_uris.is_empty() || !named_graph_uris.is_empty() {
@ -1461,7 +1465,7 @@ fn content_negotiation<F>(
.ok_or_else(|| internal_server_error("Unknown media type"));
}
let mut result = None;
let mut result_score = 0f32;
let mut result_score = 0_f32;
for possible in header.split(',') {
let (possible, parameters) = possible.split_once(';').unwrap_or((possible, ""));
@ -1730,7 +1734,7 @@ mod tests {
Ok(store_dir)
}
fn assert_cli_state(store_dir: TempDir, data: &'static str) -> Result<()> {
fn assert_cli_state(store_dir: &TempDir, data: &'static str) -> Result<()> {
cli_command()?
.arg("dump")
.arg("--location")
@ -1961,7 +1965,7 @@ mod tests {
.success();
assert_cli_state(
store_dir,
&store_dir,
"<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n",
)
}
@ -2043,7 +2047,7 @@ mod tests {
.assert()
.success();
assert_cli_state(
store_dir,
&store_dir,
"<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n",
)
}
@ -2061,7 +2065,7 @@ mod tests {
.assert()
.success();
assert_cli_state(
store_dir,
&store_dir,
"<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n",
)
}
@ -2082,7 +2086,7 @@ mod tests {
.assert()
.success();
assert_cli_state(
store_dir,
&store_dir,
"<http://example.com/s> <http://example.com/p> <http://example.com/o> .\n",
)
}

@ -1,3 +1,4 @@
#![allow(clippy::print_stdout)]
use anyhow::Result;
use clap::Parser;
use oxigraph_testsuite::evaluator::TestEvaluator;

@ -55,19 +55,6 @@ pub struct TestManifest {
manifests_to_do: VecDeque<String>,
}
impl TestManifest {
pub fn new<S: ToString>(manifest_urls: impl IntoIterator<Item = S>) -> Self {
Self {
graph: Graph::new(),
tests_to_do: VecDeque::new(),
manifests_to_do: manifest_urls
.into_iter()
.map(|url| url.to_string())
.collect(),
}
}
}
impl Iterator for TestManifest {
type Item = Result<Test>;
@ -84,6 +71,17 @@ impl Iterator for TestManifest {
}
impl TestManifest {
pub fn new<S: ToString>(manifest_urls: impl IntoIterator<Item = S>) -> Self {
Self {
graph: Graph::new(),
tests_to_do: VecDeque::new(),
manifests_to_do: manifest_urls
.into_iter()
.map(|url| url.to_string())
.collect(),
}
}
fn next_test(&mut self) -> Result<Option<Test>> {
loop {
let test_node = if let Some(test_node) = self.tests_to_do.pop_front() {
@ -107,7 +105,7 @@ impl TestManifest {
.graph
.object_for_subject_predicate(&test_node, mf::NAME)
{
Some(c.value().to_string())
Some(c.value().to_owned())
} else {
None
};
@ -126,7 +124,7 @@ impl TestManifest {
.graph
.object_for_subject_predicate(&test_node, rdfs::COMMENT)
{
Some(c.value().to_string())
Some(c.value().to_owned())
} else {
None
};

@ -101,9 +101,8 @@ fn evaluate_positive_json_result_syntax_test(test: &Test) -> Result<()> {
fn evaluate_negative_json_result_syntax_test(test: &Test) -> Result<()> {
if result_syntax_check(test, QueryResultsFormat::Json).is_ok() {
bail!("Oxigraph parses even if it should not {test}.")
} else {
Ok(())
}
Ok(())
}
fn evaluate_positive_xml_result_syntax_test(test: &Test) -> Result<()> {
@ -113,17 +112,15 @@ fn evaluate_positive_xml_result_syntax_test(test: &Test) -> Result<()> {
fn evaluate_negative_xml_result_syntax_test(test: &Test) -> Result<()> {
if result_syntax_check(test, QueryResultsFormat::Xml).is_ok() {
bail!("Oxigraph parses even if it should not {test}.")
} else {
Ok(())
}
Ok(())
}
fn evaluate_negative_tsv_result_syntax_test(test: &Test) -> Result<()> {
if result_syntax_check(test, QueryResultsFormat::Tsv).is_ok() {
bail!("Oxigraph parses even if it should not {test}.")
} else {
Ok(())
}
Ok(())
}
fn result_syntax_check(test: &Test, format: QueryResultsFormat) -> Result<()> {
@ -306,7 +303,7 @@ fn load_sparql_query_result(url: &str) -> Result<StaticQueryResults> {
false,
)
} else {
StaticQueryResults::from_graph(load_graph(url, guess_graph_format(url)?)?)
StaticQueryResults::from_graph(&load_graph(url, guess_graph_format(url)?)?)
}
}
@ -502,11 +499,11 @@ enum StaticQueryResults {
}
impl StaticQueryResults {
fn from_query_results(results: QueryResults, with_order: bool) -> Result<StaticQueryResults> {
Self::from_graph(to_graph(results, with_order)?)
fn from_query_results(results: QueryResults, with_order: bool) -> Result<Self> {
Self::from_graph(&to_graph(results, with_order)?)
}
fn from_graph(graph: Graph) -> Result<StaticQueryResults> {
fn from_graph(graph: &Graph) -> Result<Self> {
// Hack to normalize literals
let store = Store::new().unwrap();
for t in graph.iter() {
@ -519,9 +516,7 @@ impl StaticQueryResults {
if let Some(result_set) = graph.subject_for_predicate_object(rdf::TYPE, rs::RESULT_SET) {
if let Some(bool) = graph.object_for_subject_predicate(result_set, rs::BOOLEAN) {
// Boolean query
Ok(StaticQueryResults::Boolean(
bool == Literal::from(true).as_ref().into(),
))
Ok(Self::Boolean(bool == Literal::from(true).as_ref().into()))
} else {
// Regular query
let mut variables: Vec<Variable> = graph
@ -584,7 +579,7 @@ impl StaticQueryResults {
let ordered = solutions.iter().all(|(_, index)| index.is_some());
Ok(StaticQueryResults::Solutions {
Ok(Self::Solutions {
variables,
solutions: solutions
.into_iter()
@ -595,7 +590,7 @@ impl StaticQueryResults {
}
} else {
graph.canonicalize();
Ok(StaticQueryResults::Graph(graph))
Ok(Self::Graph(graph))
}
}
}

@ -3,7 +3,7 @@ use oxigraph_testsuite::evaluator::TestEvaluator;
use oxigraph_testsuite::manifest::TestManifest;
use oxigraph_testsuite::sparql_evaluator::register_sparql_tests;
fn run_testsuite(manifest_url: &str, ignored_tests: Vec<&str>) -> Result<()> {
fn run_testsuite(manifest_url: &str, ignored_tests: &[&str]) -> Result<()> {
let mut evaluator = TestEvaluator::default();
register_sparql_tests(&mut evaluator);
let manifest = TestManifest::new(vec![manifest_url]);
@ -31,7 +31,7 @@ fn run_testsuite(manifest_url: &str, ignored_tests: Vec<&str>) -> Result<()> {
fn sparql10_w3c_query_syntax_testsuite() -> Result<()> {
run_testsuite(
"https://w3c.github.io/rdf-tests/sparql/sparql10/manifest-syntax.ttl",
vec![
&[
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/syntax-sparql3/manifest#syn-bad-26", // tokenizer
],
)
@ -39,7 +39,7 @@ fn sparql10_w3c_query_syntax_testsuite() -> Result<()> {
#[test]
fn sparql10_w3c_query_evaluation_testsuite() -> Result<()> {
run_testsuite("https://w3c.github.io/rdf-tests/sparql/sparql10/manifest-evaluation.ttl", vec![
run_testsuite("https://w3c.github.io/rdf-tests/sparql/sparql10/manifest-evaluation.ttl", &[
//Multiple writing of the same xsd:integer. Our system does strong normalization.
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/distinct/manifest#distinct-1",
"http://www.w3.org/2001/sw/DataAccess/tests/data-r2/distinct/manifest#distinct-9",
@ -75,7 +75,7 @@ fn sparql10_w3c_query_evaluation_testsuite() -> Result<()> {
fn sparql11_query_w3c_evaluation_testsuite() -> Result<()> {
run_testsuite(
"https://w3c.github.io/rdf-tests/sparql/sparql11/manifest-sparql11-query.ttl",
vec![
&[
//BNODE() scope is currently wrong
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/functions/manifest#bnode01",
//SERVICE name from a BGP
@ -88,7 +88,7 @@ fn sparql11_query_w3c_evaluation_testsuite() -> Result<()> {
fn sparql11_federation_w3c_evaluation_testsuite() -> Result<()> {
run_testsuite(
"https://w3c.github.io/rdf-tests/sparql/sparql11/manifest-sparql11-fed.ttl",
vec![
&[
// Problem during service evaluation order
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/service/manifest#service5",
],
@ -99,7 +99,7 @@ fn sparql11_federation_w3c_evaluation_testsuite() -> Result<()> {
fn sparql11_update_w3c_evaluation_testsuite() -> Result<()> {
run_testsuite(
"https://w3c.github.io/rdf-tests/sparql/sparql11/manifest-sparql11-update.ttl",
vec![
&[
// We allow multiple INSERT DATA with the same blank nodes
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/syntax-update-1/manifest#test_54",
],
@ -110,7 +110,7 @@ fn sparql11_update_w3c_evaluation_testsuite() -> Result<()> {
fn sparql11_json_w3c_evaluation_testsuite() -> Result<()> {
run_testsuite(
"https://w3c.github.io/rdf-tests/sparql/sparql11/json-res/manifest.ttl",
vec![],
&[],
)
}
@ -118,7 +118,7 @@ fn sparql11_json_w3c_evaluation_testsuite() -> Result<()> {
fn sparql11_tsv_w3c_evaluation_testsuite() -> Result<()> {
run_testsuite(
"https://w3c.github.io/rdf-tests/sparql/sparql11/csv-tsv-res/manifest.ttl",
vec![
&[
// We do not run CSVResultFormatTest tests yet
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/csv-tsv-res/manifest#csv01",
"http://www.w3.org/2009/sparql/docs/tests/data-sparql11/csv-tsv-res/manifest#csv02",
@ -131,7 +131,7 @@ fn sparql11_tsv_w3c_evaluation_testsuite() -> Result<()> {
fn sparql_star_syntax_testsuite() -> Result<()> {
run_testsuite(
"https://w3c.github.io/rdf-star/tests/sparql/syntax/manifest.ttl",
vec![],
&[],
)
}
@ -139,6 +139,6 @@ fn sparql_star_syntax_testsuite() -> Result<()> {
fn sparql_star_eval_testsuite() -> Result<()> {
run_testsuite(
"https://w3c.github.io/rdf-star/tests/sparql/eval/manifest.ttl",
vec![],
&[],
)
}

Loading…
Cancel
Save