Makes Clippy 1.51 happy

pull/90/head
Tpt 4 years ago
parent b69f847097
commit cc43742da1
18 changed files:

  1. js/src/model.rs (2 lines changed)
  2. lib/src/lib.rs (2 lines changed)
  3. lib/src/sparql/algebra.rs (2 lines changed)
  4. lib/src/sparql/csv_results.rs (2 lines changed)
  5. lib/src/sparql/eval.rs (22 lines changed)
  6. lib/src/sparql/model.rs (8 lines changed)
  7. lib/src/sparql/parser.rs (10 lines changed)
  8. lib/src/sparql/plan.rs (44 lines changed)
  9. lib/src/sparql/plan_builder.rs (28 lines changed)
  10. lib/src/sparql/xml_results.rs (34 lines changed)
  11. lib/src/store/binary_encoder.rs (36 lines changed)
  12. lib/src/store/rocksdb.rs (34 lines changed)
  13. lib/src/store/sled.rs (18 lines changed)
  14. python/src/io.rs (4 lines changed)
  15. server/src/main.rs (44 lines changed)
  16. testsuite/benches/sparql_query.rs (6 lines changed)
  17. wikibase/src/loader.rs (4 lines changed)
  18. wikibase/src/main.rs (16 lines changed)

@ -99,7 +99,7 @@ impl JsDataFactory {
#[wasm_bindgen(js_name = fromQuad)]
pub fn convert_quad(&self, original: &JsValue) -> Result<JsQuad, JsValue> {
Ok(self.from_js.to_quad(original)?)
self.from_js.to_quad(original)
}
}
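
A note on the hunk above (and the similar ones later in plan_builder.rs, rocksdb.rs, python/src/io.rs and wikibase/src/loader.rs): wrapping an expression in `Ok(...?)` is redundant when the inner call already returns a `Result` with the same error type, and newer Clippy flags it (presumably `clippy::needless_question_mark`). A minimal sketch of the pattern, with hypothetical names:

```rust
use std::num::ParseIntError;

// Hypothetical helper: the inner call already returns the right Result type,
// so wrapping it in Ok(..?) adds nothing.
fn parse_len(s: &str) -> Result<usize, ParseIntError> {
    // Before: Ok(s.trim().parse::<usize>()?)
    s.trim().parse::<usize>()
}

fn main() {
    assert_eq!(parse_len(" 42 "), Ok(42));
    assert!(parse_len("nope").is_err());
}
```

This shortcut only applies when `?` is not doing an error conversion; if the error types differed, the `?` (or a `map_err`) would still be needed.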

@ -78,7 +78,7 @@
clippy::fallible_impl_from,
clippy::filter_map,
clippy::filter_map_next,
clippy::find_map,
clippy::manual_find_map,
clippy::get_unwrap,
clippy::if_not_else,
clippy::inline_always,

@ -751,6 +751,7 @@ fn write_arg_list(
}
/// A function name
#[allow(clippy::upper_case_acronyms)] //TODO: Fix on the next breaking release
#[derive(Eq, PartialEq, Debug, Clone, Hash)]
pub enum Function {
Str,
@ -857,6 +858,7 @@ impl fmt::Display for Function {
}
/// A SPARQL query [graph pattern](https://www.w3.org/TR/sparql11-query/#sparqlQuery)
#[allow(clippy::upper_case_acronyms)] //TODO: Fix on the next breaking release
#[derive(Eq, PartialEq, Debug, Clone, Hash)]
pub enum GraphPattern {
/// A [basic graph pattern](https://www.w3.org/TR/sparql11-query/#defn_BasicGraphPattern)
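
The two hunks above deal with the `clippy::upper_case_acronyms` lint, apparently new in the Clippy that ships with Rust 1.51. The public `Function` and `GraphPattern` enums keep their variant names and just allow the lint until the next breaking release (hence the //TODO), while private items elsewhere in the commit are simply renamed (`PlanExpression::IRI` to `Iri`, `QuadEncoding::SPOG` to `Spog`, `StaticDBRowIterator` to `StaticDbRowIterator`, and so on). A minimal sketch of the lint and the two remedies, with hypothetical names:

```rust
// Public API: renaming the variants would be a breaking change, so the lint is
// allowed for now (mirroring the //TODO in the diff above).
#[allow(clippy::upper_case_acronyms)]
pub enum PublicFunction {
    IRI,
    UUID,
}

// Internal type: the variants are simply renamed to CamelCase, which is what
// the lint asks for.
enum InternalExpression {
    Iri,
    Uuid,
}

fn main() {
    let _ = (PublicFunction::IRI, InternalExpression::Iri);
    let _ = (PublicFunction::UUID, InternalExpression::Uuid);
}
```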

@ -179,7 +179,7 @@ pub fn read_tsv_results(mut source: impl BufRead + 'static) -> Result<QueryResul
Ok(QueryResults::Solutions(QuerySolutionIter::new(
Rc::new(variables),
Box::new(TsvResultsIterator { buffer, source }),
Box::new(TsvResultsIterator { source, buffer }),
)))
}

@ -1050,7 +1050,7 @@ where
}
PlanExpression::Datatype(e) => self.datatype(self.eval_expression(e, tuple)?),
PlanExpression::Bound(v) => Some(tuple.contains(*v).into()),
PlanExpression::IRI(e) => {
PlanExpression::Iri(e) => {
let e = self.eval_expression(e, tuple)?;
if e.is_named_node() {
Some(e)
@ -1210,7 +1210,7 @@ where
)?;
Some((&arg1).starts_with(arg2.as_str()).into())
}
PlanExpression::EncodeForURI(ltrl) => {
PlanExpression::EncodeForUri(ltrl) => {
let ltlr = self.to_string(self.eval_expression(ltrl, tuple)?)?;
let mut result = Vec::with_capacity(ltlr.len());
for c in ltlr.bytes() {
@ -1344,22 +1344,22 @@ where
}
}
PlanExpression::Now => Some(self.now.into()),
PlanExpression::UUID => {
PlanExpression::Uuid => {
let mut buffer = String::with_capacity(44);
buffer.push_str("urn:uuid:");
generate_uuid(&mut buffer);
self.build_named_node(&buffer)
}
PlanExpression::StrUUID => {
PlanExpression::StrUuid => {
let mut buffer = String::with_capacity(36);
generate_uuid(&mut buffer);
self.build_string_literal(&buffer)
}
PlanExpression::MD5(arg) => self.hash::<Md5>(arg, tuple),
PlanExpression::SHA1(arg) => self.hash::<Sha1>(arg, tuple),
PlanExpression::SHA256(arg) => self.hash::<Sha256>(arg, tuple),
PlanExpression::SHA384(arg) => self.hash::<Sha384>(arg, tuple),
PlanExpression::SHA512(arg) => self.hash::<Sha512>(arg, tuple),
PlanExpression::Md5(arg) => self.hash::<Md5>(arg, tuple),
PlanExpression::Sha1(arg) => self.hash::<Sha1>(arg, tuple),
PlanExpression::Sha256(arg) => self.hash::<Sha256>(arg, tuple),
PlanExpression::Sha384(arg) => self.hash::<Sha384>(arg, tuple),
PlanExpression::Sha512(arg) => self.hash::<Sha512>(arg, tuple),
PlanExpression::Coalesce(l) => {
for e in l {
if let Some(result) = self.eval_expression(e, tuple) {
@ -1381,7 +1381,7 @@ where
self.build_language_id(self.eval_expression(lang_tag, tuple)?)?,
))
}
PlanExpression::StrDT(lexical_form, datatype) => {
PlanExpression::StrDt(lexical_form, datatype) => {
let value = self.to_simple_string(self.eval_expression(lexical_form, tuple)?)?;
let datatype = if let EncodedTerm::NamedNode { iri_id } =
self.eval_expression(datatype, tuple)?
@ -1401,7 +1401,7 @@ where
PlanExpression::SameTerm(a, b) => {
Some((self.eval_expression(a, tuple)? == self.eval_expression(b, tuple)?).into())
}
PlanExpression::IsIRI(e) => {
PlanExpression::IsIri(e) => {
Some(self.eval_expression(e, tuple)?.is_named_node().into())
}
PlanExpression::IsBlank(e) => {

@ -314,13 +314,7 @@ impl QuerySolution {
self.values
.iter()
.enumerate()
.filter_map(move |(i, value)| {
if let Some(value) = value {
Some((&self.variables[i], value))
} else {
None
}
})
.filter_map(move |(i, value)| value.as_ref().map(|value| (&self.variables[i], value)))
}
/// Returns an iterator over all values, bound or not
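
The closure body removed above is the `if let Some(..) { Some(..) } else { None }` shape that this Clippy release starts rewriting (presumably via the new `manual_map` lint): inside a `filter_map` it collapses to `Option::as_ref` plus `map`. A self-contained sketch with made-up data standing in for `QuerySolution`:

```rust
fn main() {
    let names = ["a", "b", "c"];
    let values: Vec<Option<i32>> = vec![Some(1), None, Some(3)];

    // Before (the shape Clippy now flags):
    let verbose: Vec<(&str, &i32)> = values
        .iter()
        .enumerate()
        .filter_map(|(i, value)| {
            if let Some(value) = value {
                Some((names[i], value))
            } else {
                None
            }
        })
        .collect();

    // After: the same thing via Option::as_ref + map. as_ref() is needed
    // because the closure only sees a reference to each Option.
    let concise: Vec<(&str, &i32)> = values
        .iter()
        .enumerate()
        .filter_map(|(i, value)| value.as_ref().map(|value| (names[i], value)))
        .collect();

    assert_eq!(verbose, concise);
}
```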

@ -30,13 +30,9 @@ pub fn parse_query(query: &str, base_iri: Option<&str>) -> Result<Query, ParseEr
aggregates: Vec::default(),
};
Ok(
parser::QueryUnit(&unescape_unicode_codepoints(query), &mut state).map_err(|e| {
ParseError {
inner: ParseErrorKind::Parser(e),
}
})?,
)
parser::QueryUnit(&unescape_unicode_codepoints(query), &mut state).map_err(|e| ParseError {
inner: ParseErrorKind::Parser(e),
})
}
/// Parses a SPARQL update with an optional base IRI to resolve relative IRIs in the query

@ -233,7 +233,7 @@ pub enum PlanExpression<I: StrId> {
LangMatches(Box<PlanExpression<I>>, Box<PlanExpression<I>>),
Datatype(Box<PlanExpression<I>>),
Bound(usize),
IRI(Box<PlanExpression<I>>),
Iri(Box<PlanExpression<I>>),
BNode(Option<Box<PlanExpression<I>>>),
Rand,
Abs(Box<PlanExpression<I>>),
@ -255,7 +255,7 @@ pub enum PlanExpression<I: StrId> {
),
UCase(Box<PlanExpression<I>>),
LCase(Box<PlanExpression<I>>),
EncodeForURI(Box<PlanExpression<I>>),
EncodeForUri(Box<PlanExpression<I>>),
Contains(Box<PlanExpression<I>>, Box<PlanExpression<I>>),
StrStarts(Box<PlanExpression<I>>, Box<PlanExpression<I>>),
StrEnds(Box<PlanExpression<I>>, Box<PlanExpression<I>>),
@ -270,13 +270,13 @@ pub enum PlanExpression<I: StrId> {
Timezone(Box<PlanExpression<I>>),
Tz(Box<PlanExpression<I>>),
Now,
UUID,
StrUUID,
MD5(Box<PlanExpression<I>>),
SHA1(Box<PlanExpression<I>>),
SHA256(Box<PlanExpression<I>>),
SHA384(Box<PlanExpression<I>>),
SHA512(Box<PlanExpression<I>>),
Uuid,
StrUuid,
Md5(Box<PlanExpression<I>>),
Sha1(Box<PlanExpression<I>>),
Sha256(Box<PlanExpression<I>>),
Sha384(Box<PlanExpression<I>>),
Sha512(Box<PlanExpression<I>>),
Coalesce(Vec<PlanExpression<I>>),
If(
Box<PlanExpression<I>>,
@ -284,9 +284,9 @@ pub enum PlanExpression<I: StrId> {
Box<PlanExpression<I>>,
),
StrLang(Box<PlanExpression<I>>, Box<PlanExpression<I>>),
StrDT(Box<PlanExpression<I>>, Box<PlanExpression<I>>),
StrDt(Box<PlanExpression<I>>, Box<PlanExpression<I>>),
SameTerm(Box<PlanExpression<I>>, Box<PlanExpression<I>>),
IsIRI(Box<PlanExpression<I>>),
IsIri(Box<PlanExpression<I>>),
IsBlank(Box<PlanExpression<I>>),
IsLiteral(Box<PlanExpression<I>>),
IsNumeric(Box<PlanExpression<I>>),
@ -318,8 +318,8 @@ impl<I: StrId> PlanExpression<I> {
PlanExpression::Constant(_)
| PlanExpression::Rand
| PlanExpression::Now
| PlanExpression::UUID
| PlanExpression::StrUUID
| PlanExpression::Uuid
| PlanExpression::StrUuid
| PlanExpression::BNode(None) => (),
PlanExpression::UnaryPlus(e)
| PlanExpression::UnaryMinus(e)
@ -328,7 +328,7 @@ impl<I: StrId> PlanExpression<I> {
| PlanExpression::Str(e)
| PlanExpression::Lang(e)
| PlanExpression::Datatype(e)
| PlanExpression::IRI(e)
| PlanExpression::Iri(e)
| PlanExpression::Abs(e)
| PlanExpression::Ceil(e)
| PlanExpression::Floor(e)
@ -336,7 +336,7 @@ impl<I: StrId> PlanExpression<I> {
| PlanExpression::UCase(e)
| PlanExpression::LCase(e)
| PlanExpression::StrLen(e)
| PlanExpression::EncodeForURI(e)
| PlanExpression::EncodeForUri(e)
| PlanExpression::Year(e)
| PlanExpression::Month(e)
| PlanExpression::Day(e)
@ -345,12 +345,12 @@ impl<I: StrId> PlanExpression<I> {
| PlanExpression::Seconds(e)
| PlanExpression::Timezone(e)
| PlanExpression::Tz(e)
| PlanExpression::MD5(e)
| PlanExpression::SHA1(e)
| PlanExpression::SHA256(e)
| PlanExpression::SHA384(e)
| PlanExpression::SHA512(e)
| PlanExpression::IsIRI(e)
| PlanExpression::Md5(e)
| PlanExpression::Sha1(e)
| PlanExpression::Sha256(e)
| PlanExpression::Sha384(e)
| PlanExpression::Sha512(e)
| PlanExpression::IsIri(e)
| PlanExpression::IsBlank(e)
| PlanExpression::IsLiteral(e)
| PlanExpression::IsNumeric(e)
@ -384,7 +384,7 @@ impl<I: StrId> PlanExpression<I> {
| PlanExpression::StrBefore(a, b)
| PlanExpression::StrAfter(a, b)
| PlanExpression::StrLang(a, b)
| PlanExpression::StrDT(a, b)
| PlanExpression::StrDt(a, b)
| PlanExpression::SameTerm(a, b)
| PlanExpression::SubStr(a, b, None)
| PlanExpression::Regex(a, b, None) => {

@ -411,7 +411,7 @@ impl<E: WriteEncoder<Error = EvaluationError>> PlanBuilder<E> {
Function::Datatype => PlanExpression::Datatype(Box::new(
self.build_for_expression(&parameters[0], variables, graph_name)?,
)),
Function::IRI => PlanExpression::IRI(Box::new(self.build_for_expression(
Function::IRI => PlanExpression::Iri(Box::new(self.build_for_expression(
&parameters[0],
variables,
graph_name,
@ -482,7 +482,7 @@ impl<E: WriteEncoder<Error = EvaluationError>> PlanBuilder<E> {
variables,
graph_name,
)?)),
Function::EncodeForURI => PlanExpression::EncodeForURI(Box::new(
Function::EncodeForURI => PlanExpression::EncodeForUri(Box::new(
self.build_for_expression(&parameters[0], variables, graph_name)?,
)),
Function::Contains => PlanExpression::Contains(
@ -544,29 +544,29 @@ impl<E: WriteEncoder<Error = EvaluationError>> PlanBuilder<E> {
graph_name,
)?)),
Function::Now => PlanExpression::Now,
Function::UUID => PlanExpression::UUID,
Function::StrUUID => PlanExpression::StrUUID,
Function::MD5 => PlanExpression::MD5(Box::new(self.build_for_expression(
Function::UUID => PlanExpression::Uuid,
Function::StrUUID => PlanExpression::StrUuid,
Function::MD5 => PlanExpression::Md5(Box::new(self.build_for_expression(
&parameters[0],
variables,
graph_name,
)?)),
Function::SHA1 => PlanExpression::SHA1(Box::new(self.build_for_expression(
Function::SHA1 => PlanExpression::Sha1(Box::new(self.build_for_expression(
&parameters[0],
variables,
graph_name,
)?)),
Function::SHA256 => PlanExpression::SHA256(Box::new(self.build_for_expression(
Function::SHA256 => PlanExpression::Sha256(Box::new(self.build_for_expression(
&parameters[0],
variables,
graph_name,
)?)),
Function::SHA384 => PlanExpression::SHA384(Box::new(self.build_for_expression(
Function::SHA384 => PlanExpression::Sha384(Box::new(self.build_for_expression(
&parameters[0],
variables,
graph_name,
)?)),
Function::SHA512 => PlanExpression::SHA512(Box::new(self.build_for_expression(
Function::SHA512 => PlanExpression::Sha512(Box::new(self.build_for_expression(
&parameters[0],
variables,
graph_name,
@ -575,11 +575,11 @@ impl<E: WriteEncoder<Error = EvaluationError>> PlanBuilder<E> {
Box::new(self.build_for_expression(&parameters[0], variables, graph_name)?),
Box::new(self.build_for_expression(&parameters[1], variables, graph_name)?),
),
Function::StrDT => PlanExpression::StrDT(
Function::StrDT => PlanExpression::StrDt(
Box::new(self.build_for_expression(&parameters[0], variables, graph_name)?),
Box::new(self.build_for_expression(&parameters[1], variables, graph_name)?),
),
Function::IsIRI => PlanExpression::IsIRI(Box::new(self.build_for_expression(
Function::IsIRI => PlanExpression::IsIri(Box::new(self.build_for_expression(
&parameters[0],
variables,
graph_name,
@ -1037,18 +1037,18 @@ impl<E: WriteEncoder<Error = EvaluationError>> PlanBuilder<E> {
&mut self,
node: &NamedNode,
) -> Result<EncodedTerm<E::StrId>, EvaluationError> {
Ok(self.encoder.encode_named_node(node.as_ref())?)
self.encoder.encode_named_node(node.as_ref())
}
fn build_literal(
&mut self,
literal: &Literal,
) -> Result<EncodedTerm<E::StrId>, EvaluationError> {
Ok(self.encoder.encode_literal(literal.as_ref())?)
self.encoder.encode_literal(literal.as_ref())
}
fn build_term(&mut self, term: &Term) -> Result<EncodedTerm<E::StrId>, EvaluationError> {
Ok(self.encoder.encode_term(term.as_ref())?)
self.encoder.encode_term(term.as_ref())
}
}

@ -392,24 +392,22 @@ impl<R: BufRead> ResultsIterator<R> {
} else if event.name() == b"bnode" {
state = State::BNode;
} else if event.name() == b"literal" {
for attr in event.attributes() {
if let Ok(attr) = attr {
if attr.key == b"xml:lang" {
lang = Some(
attr.unescape_and_decode_value(&self.reader)
.map_err(map_xml_error)?,
);
} else if attr.key == b"datatype" {
let iri = attr
.unescape_and_decode_value(&self.reader)
.map_err(map_xml_error)?;
datatype = Some(NamedNode::new(&iri).map_err(|e| {
invalid_data_error(format!(
"Invalid datatype IRI '{}': {}",
iri, e
))
})?);
}
for attr in event.attributes().flatten() {
if attr.key == b"xml:lang" {
lang = Some(
attr.unescape_and_decode_value(&self.reader)
.map_err(map_xml_error)?,
);
} else if attr.key == b"datatype" {
let iri = attr
.unescape_and_decode_value(&self.reader)
.map_err(map_xml_error)?;
datatype = Some(NamedNode::new(&iri).map_err(|e| {
invalid_data_error(format!(
"Invalid datatype IRI '{}': {}",
iri, e
))
})?);
}
}
state = State::Literal;
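
The xml_results.rs hunk above replaces a `for … { if let Ok(…) = … { … } }` loop with `.flatten()`, which is what the `manual_flatten` lint introduced around this Clippy release suggests: when iterating a sequence of `Result`s (or `Option`s) only to unwrap each item, `.flatten()` yields the successful items directly. A small sketch with fabricated attribute data standing in for quick-xml's `Attributes` iterator:

```rust
fn main() {
    // Fabricated attribute data; the real iterator yields Result items.
    let attrs: Vec<Result<(&str, &str), ()>> =
        vec![Ok(("xml:lang", "en")), Err(()), Ok(("datatype", "xsd:string"))];

    let mut collected = Vec::new();

    // Before:
    // for attr in attrs.iter() {
    //     if let Ok(attr) = attr {
    //         collected.push(*attr);
    //     }
    // }

    // After: flatten() keeps only the Ok values and skips the Err ones.
    for attr in attrs.iter().flatten() {
        collected.push(*attr);
    }

    assert_eq!(collected, vec![("xml:lang", "en"), ("datatype", "xsd:string")]);
}
```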

@ -83,30 +83,30 @@ impl StrId for StrHash {}
#[derive(Clone, Copy)]
pub enum QuadEncoding {
SPOG,
POSG,
OSPG,
GSPO,
GPOS,
GOSP,
DSPO,
DPOS,
DOSP,
Spog,
Posg,
Ospg,
Gspo,
Gpos,
Gosp,
Dspo,
Dpos,
Dosp,
}
impl QuadEncoding {
pub fn decode(self, buffer: &[u8]) -> Result<EncodedQuad, io::Error> {
let mut cursor = Cursor::new(&buffer);
match self {
QuadEncoding::SPOG => cursor.read_spog_quad(),
QuadEncoding::POSG => cursor.read_posg_quad(),
QuadEncoding::OSPG => cursor.read_ospg_quad(),
QuadEncoding::GSPO => cursor.read_gspo_quad(),
QuadEncoding::GPOS => cursor.read_gpos_quad(),
QuadEncoding::GOSP => cursor.read_gosp_quad(),
QuadEncoding::DSPO => cursor.read_dspo_quad(),
QuadEncoding::DPOS => cursor.read_dpos_quad(),
QuadEncoding::DOSP => cursor.read_dosp_quad(),
QuadEncoding::Spog => cursor.read_spog_quad(),
QuadEncoding::Posg => cursor.read_posg_quad(),
QuadEncoding::Ospg => cursor.read_ospg_quad(),
QuadEncoding::Gspo => cursor.read_gspo_quad(),
QuadEncoding::Gpos => cursor.read_gpos_quad(),
QuadEncoding::Gosp => cursor.read_gosp_quad(),
QuadEncoding::Dspo => cursor.read_dspo_quad(),
QuadEncoding::Dpos => cursor.read_dpos_quad(),
QuadEncoding::Dosp => cursor.read_dosp_quad(),
}
}
}

@ -314,7 +314,7 @@ impl RocksDbStore {
to_graph_name.into(),
base_iri,
)?;
Ok(transaction.apply()?)
transaction.apply()
}
/// Loads a dataset file (i.e. quads) into the store.
@ -335,7 +335,7 @@ impl RocksDbStore {
) -> Result<(), io::Error> {
let mut transaction = self.auto_batch_writer();
load_dataset(&mut transaction, reader, format, base_iri)?;
Ok(transaction.apply()?)
transaction.apply()
}
/// Adds a quad to this store.
@ -701,39 +701,39 @@ impl RocksDbStore {
}
fn spog_quads(&self, prefix: Vec<u8>) -> DecodingIndexIterator {
self.inner_quads(self.spog_cf(), prefix, QuadEncoding::SPOG)
self.inner_quads(self.spog_cf(), prefix, QuadEncoding::Spog)
}
fn posg_quads(&self, prefix: Vec<u8>) -> DecodingIndexIterator {
self.inner_quads(self.posg_cf(), prefix, QuadEncoding::POSG)
self.inner_quads(self.posg_cf(), prefix, QuadEncoding::Posg)
}
fn ospg_quads(&self, prefix: Vec<u8>) -> DecodingIndexIterator {
self.inner_quads(self.ospg_cf(), prefix, QuadEncoding::OSPG)
self.inner_quads(self.ospg_cf(), prefix, QuadEncoding::Ospg)
}
fn gspo_quads(&self, prefix: Vec<u8>) -> DecodingIndexIterator {
self.inner_quads(self.gspo_cf(), prefix, QuadEncoding::GSPO)
self.inner_quads(self.gspo_cf(), prefix, QuadEncoding::Gspo)
}
fn gpos_quads(&self, prefix: Vec<u8>) -> DecodingIndexIterator {
self.inner_quads(self.gpos_cf(), prefix, QuadEncoding::GPOS)
self.inner_quads(self.gpos_cf(), prefix, QuadEncoding::Gpos)
}
fn gosp_quads(&self, prefix: Vec<u8>) -> DecodingIndexIterator {
self.inner_quads(self.gosp_cf(), prefix, QuadEncoding::GOSP)
self.inner_quads(self.gosp_cf(), prefix, QuadEncoding::Gosp)
}
fn dspo_quads(&self, prefix: Vec<u8>) -> DecodingIndexIterator {
self.inner_quads(self.dspo_cf(), prefix, QuadEncoding::DSPO)
self.inner_quads(self.dspo_cf(), prefix, QuadEncoding::Dspo)
}
fn dpos_quads(&self, prefix: Vec<u8>) -> DecodingIndexIterator {
self.inner_quads(self.dpos_cf(), prefix, QuadEncoding::DPOS)
self.inner_quads(self.dpos_cf(), prefix, QuadEncoding::Dpos)
}
fn dosp_quads(&self, prefix: Vec<u8>) -> DecodingIndexIterator {
self.inner_quads(self.dosp_cf(), prefix, QuadEncoding::DOSP)
self.inner_quads(self.dosp_cf(), prefix, QuadEncoding::Dosp)
}
fn inner_quads(
@ -752,9 +752,9 @@ impl RocksDbStore {
}
#[allow(unsafe_code)]
fn db_iter(&self, cf: &ColumnFamily) -> StaticDBRowIterator {
fn db_iter(&self, cf: &ColumnFamily) -> StaticDbRowIterator {
// Valid because it's the same database so db can't be dropped before iter
unsafe { StaticDBRowIterator::new(self.db.raw_iterator_cf(cf), self.db.clone()) }
unsafe { StaticDbRowIterator::new(self.db.raw_iterator_cf(cf), self.db.clone()) }
}
}
@ -1322,12 +1322,12 @@ fn get_cf<'a>(db: &'a DB, name: &str) -> &'a ColumnFamily {
.expect("A column family that should exist in RocksDB does not exist")
}
struct StaticDBRowIterator {
struct StaticDbRowIterator {
iter: DBRawIterator<'static>,
_db: Arc<DB>, // needed to ensure that DB still lives while iter is used
}
impl StaticDBRowIterator {
impl StaticDbRowIterator {
/// Creates a static iterator from a non static one by keeping a ARC reference to the database
/// Caller must ensure that the iterator belongs to the same database
///
@ -1386,7 +1386,7 @@ impl Iterator for DecodingIndexesIterator {
}
struct DecodingIndexIterator {
iter: StaticDBRowIterator,
iter: StaticDbRowIterator,
prefix: Vec<u8>,
encoding: QuadEncoding,
}
@ -1443,7 +1443,7 @@ impl Iterator for RocksDbQuadIter {
}
pub(crate) struct DecodingGraphIterator {
iter: StaticDBRowIterator,
iter: StaticDbRowIterator,
}
impl Iterator for DecodingGraphIterator {

@ -845,39 +845,39 @@ impl SledStore {
}
fn spog_quads(&self, prefix: Vec<u8>) -> DecodingQuadIterator {
self.inner_quads(&self.spog, prefix, QuadEncoding::SPOG)
self.inner_quads(&self.spog, prefix, QuadEncoding::Spog)
}
fn posg_quads(&self, prefix: Vec<u8>) -> DecodingQuadIterator {
self.inner_quads(&self.posg, prefix, QuadEncoding::POSG)
self.inner_quads(&self.posg, prefix, QuadEncoding::Posg)
}
fn ospg_quads(&self, prefix: Vec<u8>) -> DecodingQuadIterator {
self.inner_quads(&self.ospg, prefix, QuadEncoding::OSPG)
self.inner_quads(&self.ospg, prefix, QuadEncoding::Ospg)
}
fn gspo_quads(&self, prefix: Vec<u8>) -> DecodingQuadIterator {
self.inner_quads(&self.gspo, prefix, QuadEncoding::GSPO)
self.inner_quads(&self.gspo, prefix, QuadEncoding::Gspo)
}
fn gpos_quads(&self, prefix: Vec<u8>) -> DecodingQuadIterator {
self.inner_quads(&self.gpos, prefix, QuadEncoding::GPOS)
self.inner_quads(&self.gpos, prefix, QuadEncoding::Gpos)
}
fn gosp_quads(&self, prefix: Vec<u8>) -> DecodingQuadIterator {
self.inner_quads(&self.gosp, prefix, QuadEncoding::GOSP)
self.inner_quads(&self.gosp, prefix, QuadEncoding::Gosp)
}
fn dspo_quads(&self, prefix: Vec<u8>) -> DecodingQuadIterator {
self.inner_quads(&self.dspo, prefix, QuadEncoding::DSPO)
self.inner_quads(&self.dspo, prefix, QuadEncoding::Dspo)
}
fn dpos_quads(&self, prefix: Vec<u8>) -> DecodingQuadIterator {
self.inner_quads(&self.dpos, prefix, QuadEncoding::DPOS)
self.inner_quads(&self.dpos, prefix, QuadEncoding::Dpos)
}
fn dosp_quads(&self, prefix: Vec<u8>) -> DecodingQuadIterator {
self.inner_quads(&self.dosp, prefix, QuadEncoding::DOSP)
self.inner_quads(&self.dosp, prefix, QuadEncoding::Dosp)
}
fn inner_quads(

@ -211,13 +211,13 @@ impl Write for PyFileLike {
fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
let gil = Python::acquire_gil();
let py = gil.python();
Ok(usize::extract(
usize::extract(
self.inner
.call_method(py, "write", (PyBytes::new(py, buf),), None)
.map_err(|e| to_io_err(e, py))?
.as_ref(py),
)
.map_err(|e| to_io_err(e, py))?)
.map_err(|e| to_io_err(e, py))
}
fn flush(&mut self) -> io::Result<()> {

@ -104,7 +104,7 @@ async fn handle_request(request: Request, store: Store) -> Result<Response> {
.await?;
configure_and_evaluate_sparql_query(store, buffer, None, request)?
} else {
bail_status!(415, "Not supported Content-Type given: {}", content_type)
bail_status!(415, "Not supported Content-Type given: {}", content_type);
}
} else {
bail_status!(400, "No Content-Type given");
@ -136,10 +136,10 @@ async fn handle_request(request: Request, store: Store) -> Result<Response> {
.await?;
configure_and_evaluate_sparql_update(store, buffer, None, request)?
} else {
bail_status!(415, "Not supported Content-Type given: {}", content_type)
bail_status!(415, "Not supported Content-Type given: {}", content_type);
}
} else {
bail_status!(400, "No Content-Type given")
bail_status!(400, "No Content-Type given");
}
}
(path, Method::Get) if path.starts_with("/store") => {
@ -151,7 +151,7 @@ async fn handle_request(request: Request, store: Store) -> Result<Response> {
GraphName::NamedNode(target) => store.contains_named_graph(target)?,
GraphName::BlankNode(target) => store.contains_named_graph(target)?,
} {
bail_status!(404, "The graph {} does not exists", target)
bail_status!(404, "The graph {} does not exists", target);
}
let format = graph_content_negotiation(request)?;
store.dump_graph(&mut body, format, &target)?;
@ -211,7 +211,7 @@ async fn handle_request(request: Request, store: Store) -> Result<Response> {
415,
"No supported content Content-Type given: {}",
content_type
)
);
}
} else if let Some(format) = DatasetFormat::from_media_type(content_type.essence())
{
@ -225,10 +225,10 @@ async fn handle_request(request: Request, store: Store) -> Result<Response> {
415,
"No supported content Content-Type given: {}",
content_type
)
);
}
} else {
bail_status!(400, "No Content-Type given")
bail_status!(400, "No Content-Type given");
}
}
(path, Method::Delete) if path.starts_with("/store") => {
@ -239,14 +239,14 @@ async fn handle_request(request: Request, store: Store) -> Result<Response> {
if store.contains_named_graph(&target)? {
store.remove_named_graph(&target)?;
} else {
bail_status!(404, "The graph {} does not exists", target)
bail_status!(404, "The graph {} does not exists", target);
}
}
GraphName::BlankNode(target) => {
if store.contains_named_graph(&target)? {
store.remove_named_graph(&target)?;
} else {
bail_status!(404, "The graph {} does not exists", target)
bail_status!(404, "The graph {} does not exists", target);
}
}
}
@ -282,7 +282,7 @@ async fn handle_request(request: Request, store: Store) -> Result<Response> {
415,
"No supported content Content-Type given: {}",
content_type
)
);
}
} else if let Some(format) = DatasetFormat::from_media_type(content_type.essence())
{
@ -312,7 +312,7 @@ async fn handle_request(request: Request, store: Store) -> Result<Response> {
415,
"No supported content Content-Type given: {}",
content_type
)
);
}
} else {
bail_status!(400, "No Content-Type given")
@ -325,19 +325,21 @@ async fn handle_request(request: Request, store: Store) -> Result<Response> {
GraphName::NamedNode(target) => store.contains_named_graph(target)?,
GraphName::BlankNode(target) => store.contains_named_graph(target)?,
} {
bail_status!(404, "The graph {} does not exists", target)
bail_status!(404, "The graph {} does not exists", target);
}
Response::new(StatusCode::Ok)
} else {
Response::new(StatusCode::Ok)
}
}
_ => bail_status!(
404,
"{} {} is not supported by this server",
request.method(),
request.url().path()
),
_ => {
bail_status!(
404,
"{} {} is not supported by this server",
request.method(),
request.url().path()
);
}
})
}
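
Throughout server/src/main.rs (and wikibase/src/main.rs below) the diverging `bail_status!` invocations gain explicit blocks and trailing semicolons, presumably to quiet one of the new 1.51 warnings; the diff alone does not say which. The behaviour is unchanged: the macro still returns early, and a block whose last statement diverges can sit in a match arm or `else` branch that otherwise produces a value. A minimal sketch with a hypothetical stand-in macro (not the real `bail_status!`, whose expansion is not shown in this diff):

```rust
// Hypothetical stand-in for bail_status!: expands to an early return, so the
// arm it appears in never produces a value.
macro_rules! bail {
    ($($arg:tt)*) => {
        return Err(format!($($arg)*))
    };
}

fn handle(path: &str) -> Result<&'static str, String> {
    Ok(match path {
        "/query" => "query endpoint",
        // Before: _ => bail!("{} is not supported", path),
        // After: an explicit block with a trailing semicolon; the arm still
        // diverges, so it unifies with the &str arm above.
        _ => {
            bail!("{} is not supported", path);
        }
    })
}

fn main() {
    assert_eq!(handle("/query"), Ok("query endpoint"));
    assert!(handle("/nope").is_err());
}
```
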
@ -510,7 +512,9 @@ fn store_target(request: &Request) -> Result<Option<GraphName>> {
match k.as_ref() {
"graph" => graph = Some(v.into_owned()),
"default" => default = true,
_ => bail_status!(400, "Unexpected parameter: {}", k),
_ => {
bail_status!(400, "Unexpected parameter: {}", k);
}
}
}
Ok(if let Some(graph) = graph {
@ -518,7 +522,7 @@ fn store_target(request: &Request) -> Result<Option<GraphName>> {
bail_status!(
400,
"Both graph and default parameters should not be set at the same time",
)
);
} else {
Some(
NamedNode::new(

@ -18,11 +18,7 @@ fn sparql_w3c_syntax_bench(c: &mut Criterion) {
if test.kind == "http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#PositiveSyntaxTest"
|| test.kind
== "http://www.w3.org/2001/sw/DataAccess/tests/test-manifest#PositiveSyntaxTest11" {
if let Some(query) = test.action {
Some((read_file_to_string(&query).unwrap(), query))
} else {
None
}
test.action.map(|query| (read_file_to_string(&query).unwrap(), query))
} else {
None
}
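
The benchmark hunk above is the same story as the `QuerySolution::iter` change earlier: a standalone `if let Some(x) { Some(f(x)) } else { None }` expression collapses to `Option::map`, presumably under the new `manual_map` lint. A short sketch with a stand-in for `read_file_to_string` and a made-up file name:

```rust
// Stand-in for the real helper: fabricates content instead of reading a file.
fn read_file_to_string(path: &str) -> String {
    format!("contents of {}", path)
}

fn main() {
    let action: Option<String> = Some("query.rq".to_string());

    // Before:
    // let pair = if let Some(query) = action {
    //     Some((read_file_to_string(&query), query))
    // } else {
    //     None
    // };

    // After: Option::map expresses the same thing directly.
    let pair = action.map(|query| (read_file_to_string(&query), query));

    assert_eq!(
        pair,
        Some(("contents of query.rq".to_string(), "query.rq".to_string()))
    );
}
```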

@ -200,12 +200,12 @@ impl WikibaseLoader {
}
fn get_entity_data(&self, id: &str) -> Result<Vec<u8>> {
Ok(self.get_request(
self.get_request(
&self.entity_data_url,
[("id", id), ("format", "nt"), ("flavor", "dump")]
.iter()
.cloned(),
)?)
)
}
fn get_request<K: AsRef<str>, V: AsRef<str>>(

@ -136,18 +136,20 @@ async fn handle_request(request: Request, store: RocksDbStore) -> Result<Respons
.await?;
configure_and_evaluate_sparql_query(store, buffer, None, request)?
} else {
bail_status!(415, "Not supported Content-Type given: {}", content_type)
bail_status!(415, "Not supported Content-Type given: {}", content_type);
}
} else {
bail_status!(400, "No Content-Type given");
}
}
_ => bail_status!(
404,
"{} {} is not supported by this server",
request.method(),
request.url().path()
),
_ => {
bail_status!(
404,
"{} {} is not supported by this server",
request.method(),
request.url().path()
);
}
})
}
