Removes unnecessary qualified paths

Branch: pull/255/head
Authored by Tpt 2 years ago; committed by Thomas Tanon
Parent: cda274873c
Commit: eedc4b3a71
1. lib/sparesults/src/solution.rs (4 lines changed)
2. lib/sparesults/src/xml.rs (4 lines changed)
3. lib/spargebra/src/query.rs (2 lines changed)
4. lib/spargebra/src/update.rs (2 lines changed)
5. lib/src/sparql/algebra.rs (18 lines changed)
6. lib/src/sparql/eval.rs (7 lines changed)
7. lib/src/sparql/mod.rs (2 lines changed)
8. lib/src/store.rs (4 lines changed)
9. server/src/main.rs (22 lines changed)
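
All of the shortened paths in the hunks below rely on the Rust prelude: `Option` and `Result` (together with their `Some`/`None`/`Ok`/`Err` variants) and the `Into` conversion trait are imported into every module automatically, so their associated functions can be named without the `std::option::`, `std::result::` or `std::convert::` prefixes. A minimal standalone sketch of the pattern (hypothetical data, not taken from this diff):

```rust
fn main() {
    let values: Vec<Option<String>> = vec![Some("a".to_owned()), None];

    // `Option::as_ref` resolves without the `std::option::` prefix.
    let bound: Vec<&String> = values.iter().filter_map(Option::as_ref).collect();
    assert_eq!(bound.len(), 1);

    // Likewise `Result::ok` instead of `std::result::Result::ok`.
    let parsed: Vec<i32> = ["1", "x", "3"]
        .iter()
        .map(|s| s.parse::<i32>())
        .filter_map(Result::ok)
        .collect();
    assert_eq!(parsed, vec![1, 3]);
}
```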

lib/sparesults/src/solution.rs
@@ -35,9 +35,7 @@ impl QuerySolution {
     /// ```
     #[inline]
     pub fn get(&self, index: impl VariableSolutionIndex) -> Option<&Term> {
-        self.values
-            .get(index.index(self)?)
-            .and_then(std::option::Option::as_ref)
+        self.values.get(index.index(self)?).and_then(Option::as_ref)
     }
 
     /// The number of variables which could be bound.

lib/sparesults/src/xml.rs
@@ -205,7 +205,7 @@ impl<R: BufRead> XmlQueryResultsReader<R> {
                 State::Head => {
                     if event.name() == b"variable" {
                         let name = event.attributes()
-                            .filter_map(std::result::Result::ok)
+                            .filter_map(Result::ok)
                             .find(|attr| attr.key == b"name")
                             .ok_or_else(|| SyntaxError::msg("No name attribute found for the <variable> tag"))?
                             .unescape_and_decode_value(&reader)?;
@@ -345,7 +345,7 @@ impl<R: BufRead> XmlSolutionsReader<R> {
                     if event.name() == b"binding" {
                         match event
                             .attributes()
-                            .filter_map(std::result::Result::ok)
+                            .filter_map(Result::ok)
                             .find(|attr| attr.key == b"name")
                         {
                             Some(attr) => current_var = Some(attr.unescaped_value()?.to_vec()),

lib/spargebra/src/query.rs
@@ -14,7 +14,7 @@ use std::str::FromStr;
 /// let query = Query::parse(query_str, None)?;
 /// assert_eq!(query.to_string(), query_str);
 /// assert_eq!(query.to_sse(), "(project (?s ?p ?o) (bgp (triple ?s ?p ?o)))");
-/// # Result::Ok::<_, spargebra::ParseError>(())
+/// # Ok::<_, spargebra::ParseError>(())
 /// ```
 #[derive(Eq, PartialEq, Debug, Clone, Hash)]
 pub enum Query {
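
The doc-comment hunks in this commit (the one above, the update.rs hunk below and those in lib/src/sparql/algebra.rs) all touch the same hidden doctest line. In rustdoc examples, lines starting with `#` are compiled and run by `cargo test` but hidden from the rendered documentation, and ending the example with `Ok::<_, E>(())` gives it a `Result` value so that `?` can be used earlier in the snippet. Since `Ok` is re-exported by the prelude, the `Result::Ok` qualification adds nothing. A rough standalone sketch of why the trailing expression makes `?` legal (hypothetical body, not taken from the crate):

```rust
// A doctest that uses `?` behaves roughly like this function: the final
// `Ok::<_, E>(())` pins the error type, so `?` can propagate into it.
fn doctest_body() -> Result<(), std::num::ParseIntError> {
    let n: i32 = "42".parse()?;
    assert_eq!(n, 42);
    Ok::<_, std::num::ParseIntError>(()) // prelude `Ok`; no `Result::` prefix needed
}

fn main() {
    doctest_body().unwrap();
}
```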

lib/spargebra/src/update.rs
@@ -14,7 +14,7 @@ use std::str::FromStr;
 /// let update = Update::parse(update_str, None)?;
 /// assert_eq!(update.to_string().trim(), update_str);
 /// assert_eq!(update.to_sse(), "(update (clear all))");
-/// # Result::Ok::<_, spargebra::ParseError>(())
+/// # Ok::<_, spargebra::ParseError>(())
 /// ```
 #[derive(Eq, PartialEq, Debug, Clone, Hash)]
 pub struct Update {

lib/src/sparql/algebra.rs
@@ -24,7 +24,7 @@ use std::str::FromStr;
 /// let default = vec![NamedNode::new("http://example.com")?.into()];
 /// query.dataset_mut().set_default_graph(default.clone());
 /// assert_eq!(query.dataset().default_graph_graphs(), Some(default.as_slice()));
-/// # Result::Ok::<_, Box<dyn std::error::Error>>(())
+/// # Ok::<_, Box<dyn std::error::Error>>(())
 /// ```
 #[derive(Eq, PartialEq, Debug, Clone, Hash)]
 pub struct Query {
@@ -97,7 +97,7 @@ impl<'a> TryFrom<&'a String> for Query {
 /// let update = Update::parse(update_str, None)?;
 ///
 /// assert_eq!(update.to_string().trim(), update_str);
-/// # Result::Ok::<_, oxigraph::sparql::ParseError>(())
+/// # Ok::<_, oxigraph::sparql::ParseError>(())
 /// ```
 #[derive(Eq, PartialEq, Debug, Clone, Hash)]
 pub struct Update {
@@ -127,16 +127,12 @@ impl Update {
     /// Returns [the query dataset specification](https://www.w3.org/TR/sparql11-query/#specifyingDataset) in [DELETE/INSERT operations](https://www.w3.org/TR/sparql11-update/#deleteInsert).
     pub fn using_datasets(&self) -> impl Iterator<Item = &QueryDataset> {
-        self.using_datasets
-            .iter()
-            .filter_map(std::option::Option::as_ref)
+        self.using_datasets.iter().filter_map(Option::as_ref)
     }
 
     /// Returns [the query dataset specification](https://www.w3.org/TR/sparql11-query/#specifyingDataset) in [DELETE/INSERT operations](https://www.w3.org/TR/sparql11-update/#deleteInsert).
     pub fn using_datasets_mut(&mut self) -> impl Iterator<Item = &mut QueryDataset> {
-        self.using_datasets
-            .iter_mut()
-            .filter_map(std::option::Option::as_mut)
+        self.using_datasets.iter_mut().filter_map(Option::as_mut)
     }
 }
@@ -211,7 +207,7 @@ impl QueryDataset {
     /// assert!(Query::parse("SELECT ?s ?p ?o WHERE { ?s ?p ?o . }", None)?.dataset().is_default_dataset());
     /// assert!(!Query::parse("SELECT ?s ?p ?o FROM <http://example.com> WHERE { ?s ?p ?o . }", None)?.dataset().is_default_dataset());
     ///
-    /// # Result::Ok::<_, Box<dyn std::error::Error>>(())
+    /// # Ok::<_, Box<dyn std::error::Error>>(())
     /// ```
     pub fn is_default_dataset(&self) -> bool {
         self.default
@@ -243,7 +239,7 @@ impl QueryDataset {
     /// query.dataset_mut().set_default_graph(default.clone());
     /// assert_eq!(query.dataset().default_graph_graphs(), Some(default.as_slice()));
     ///
-    /// # Result::Ok::<_, Box<dyn std::error::Error>>(())
+    /// # Ok::<_, Box<dyn std::error::Error>>(())
     /// ```
     pub fn set_default_graph(&mut self, graphs: Vec<GraphName>) {
         self.default = Some(graphs)
@@ -265,7 +261,7 @@ impl QueryDataset {
     /// query.dataset_mut().set_available_named_graphs(named.clone());
     /// assert_eq!(query.dataset().available_named_graphs(), Some(named.as_slice()));
     ///
-    /// # Result::Ok::<_, Box<dyn std::error::Error>>(())
+    /// # Ok::<_, Box<dyn std::error::Error>>(())
     /// ```
     pub fn set_available_named_graphs(&mut self, named_graphs: Vec<NamedOrBlankNode>) {
         self.named = Some(named_graphs);

lib/src/sparql/eval.rs
@@ -345,9 +345,7 @@ impl SimpleEvaluator {
                 let right = self.plan_evaluator(right);
                 if join_keys.is_empty() {
                     Rc::new(move |from| {
-                        let right: Vec<_> = right(from.clone())
-                            .filter_map(std::result::Result::ok)
-                            .collect();
+                        let right: Vec<_> = right(from.clone()).filter_map(Result::ok).collect();
                         Box::new(left(from).filter(move |left_tuple| {
                             if let Ok(left_tuple) = left_tuple {
                                 !right.iter().any(|right_tuple| {
@@ -361,8 +359,7 @@
                 } else {
                     Rc::new(move |from| {
                         let mut right_values = EncodedTupleSet::new(join_keys.clone());
-                        right_values
-                            .extend(right(from.clone()).filter_map(std::result::Result::ok));
+                        right_values.extend(right(from.clone()).filter_map(Result::ok));
                         Box::new(left(from).filter(move |left_tuple| {
                             if let Ok(left_tuple) = left_tuple {
                                 !right_values.get(left_tuple).iter().any(|right_tuple| {
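
The eval.rs hunks above reformat the evaluator's anti-join code: the right-hand side is materialized (keyed on the shared join variables when there are any), and left solutions that have a compatible right solution are filtered out. A much-simplified standalone sketch of that pattern, with plain `HashMap`s standing in for Oxigraph's `EncodedTuple`/`EncodedTupleSet` types (everything below is illustrative, not the crate's actual API):

```rust
use std::collections::HashMap;

// A solution maps variable names to bound values.
type Tuple = HashMap<&'static str, i64>;

// Two solutions are compatible if every variable they share agrees.
fn compatible(a: &Tuple, b: &Tuple) -> bool {
    a.iter().all(|(var, value)| b.get(var).map_or(true, |v| v == value))
}

// Keep only the left tuples with no compatible right tuple,
// indexing the right side by the join key first.
fn anti_join(left: Vec<Tuple>, right: Vec<Tuple>, join_key: &'static str) -> Vec<Tuple> {
    let mut right_index: HashMap<i64, Vec<Tuple>> = HashMap::new();
    for t in right {
        if let Some(v) = t.get(join_key).copied() {
            right_index.entry(v).or_default().push(t);
        }
    }
    left.into_iter()
        .filter(|l| {
            l.get(join_key)
                .and_then(|v| right_index.get(v))
                .map_or(true, |candidates| !candidates.iter().any(|r| compatible(l, r)))
        })
        .collect()
}

fn main() {
    let left = vec![HashMap::from([("s", 1)]), HashMap::from([("s", 2)])];
    let right = vec![HashMap::from([("s", 1)])];
    assert_eq!(anti_join(left, right, "s"), vec![HashMap::from([("s", 2)])]);
}
```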

lib/src/sparql/mod.rs
@@ -37,7 +37,7 @@ pub(crate) fn evaluate_query(
     query: impl TryInto<Query, Error = impl Into<EvaluationError>>,
     options: QueryOptions,
 ) -> Result<QueryResults, EvaluationError> {
-    let query = query.try_into().map_err(std::convert::Into::into)?;
+    let query = query.try_into().map_err(Into::into)?;
     let dataset = DatasetView::new(reader, &query.dataset);
     match query.inner {
         spargebra::Query::Select {
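
The hunk above and the lib/src/store.rs hunks below both shorten `map_err(std::convert::Into::into)` to `map_err(Into::into)`. `Into` is part of the prelude, and passing `Into::into` as a function converts the error through the matching `From` impl without spelling out a closure. A small self-contained sketch with made-up error types (not Oxigraph's):

```rust
#[derive(Debug)]
struct ParseError(String);

#[derive(Debug, PartialEq)]
struct EvaluationError(String);

impl From<ParseError> for EvaluationError {
    fn from(e: ParseError) -> Self {
        EvaluationError(e.0)
    }
}

fn parse(input: &str) -> Result<u32, ParseError> {
    input
        .parse()
        .map_err(|_| ParseError(format!("not a number: {}", input)))
}

fn evaluate(input: &str) -> Result<u32, EvaluationError> {
    // `Into::into` is used here as `fn(ParseError) -> EvaluationError`,
    // resolved through the `From` impl above; no `std::convert::` prefix needed.
    parse(input).map_err(Into::into)
}

fn main() {
    assert_eq!(evaluate("2"), Ok(2));
    assert!(evaluate("x").is_err());
}
```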

lib/src/store.rs
@@ -357,7 +357,7 @@ impl Store {
         update: impl TryInto<Update, Error = impl Into<EvaluationError>>,
         options: impl Into<UpdateOptions>,
     ) -> Result<(), EvaluationError> {
-        let update = update.try_into().map_err(std::convert::Into::into)?;
+        let update = update.try_into().map_err(Into::into)?;
         let options = options.into();
         self.storage
             .transaction(|mut t| evaluate_update(&mut t, &update, &options))
@@ -968,7 +968,7 @@ impl<'a> Transaction<'a> {
     ) -> Result<(), EvaluationError> {
         evaluate_update(
             &mut self.writer,
-            &update.try_into().map_err(std::convert::Into::into)?,
+            &update.try_into().map_err(Into::into)?,
             &options.into(),
         )
     }
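
The server/src/main.rs hunks below drop the `std::io::` prefix from `Result`, `Error` and the other I/O items. That only compiles because the module already imports those names; judging from the surrounding context lines (`io::Result<()>`, `ErrorKind::InvalidInput`, `BufReader::new`), an import along the lines of `use std::io::{self, BufReader, Error, ErrorKind, Read, Write};` is assumed, but the exact list is not visible in this diff. A minimal sketch of the shortened style under that assumption:

```rust
// Assumed import; server/src/main.rs's real `use` list is not shown in the diff.
use std::io::{self, Error, ErrorKind};

// With `io`, `Error` and `ErrorKind` in scope, the fully qualified
// `std::io::Result` / `std::io::Error::new` forms become unnecessary.
fn guess_extension(file: &str) -> io::Result<&str> {
    file.rsplit_once('.')
        .map(|(_, extension)| extension)
        .ok_or_else(|| {
            Error::new(
                ErrorKind::InvalidInput,
                format!("the file name {} has no extension", file),
            )
        })
}

fn main() -> io::Result<()> {
    assert_eq!(guess_extension("data.ttl")?, "ttl");
    Ok(())
}
```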

server/src/main.rs
@@ -61,7 +61,7 @@ enum Command {
     },
 }
 
-pub fn main() -> std::io::Result<()> {
+pub fn main() -> io::Result<()> {
     let matches = Args::parse();
     let store = if let Some(path) = &matches.location {
         Store::open(path)
@@ -139,7 +139,7 @@ pub fn main() -> std::io::Result<()> {
 }
 
 fn bulk_load(loader: BulkLoader, file: &str, reader: impl Read) -> io::Result<()> {
-    let (_, extension) = file.rsplit_once('.').ok_or_else(|| io::Error::new(
+    let (_, extension) = file.rsplit_once('.').ok_or_else(|| Error::new(
         ErrorKind::InvalidInput,
         format!("The server is not able to guess the file format of {} because the file name as no extension", file)))?;
     let reader = BufReader::new(reader);
@@ -150,7 +150,7 @@ fn bulk_load(loader: BulkLoader, file: &str, reader: impl Read) -> io::Result<()
         loader.load_graph(reader, format, GraphNameRef::DefaultGraph, None)?;
         Ok(())
     } else {
-        Err(io::Error::new(
+        Err(Error::new(
             ErrorKind::InvalidInput,
             format!(
                 "The server is not able to guess the file format from the extension {}",
@@ -1002,16 +1002,16 @@ fn internal_server_error(message: impl fmt::Display) -> Response {
 }
 
 /// Hacky tool to allow implementing read on top of a write loop
-struct ReadForWrite<O, U: (Fn(O) -> std::io::Result<Option<O>>)> {
+struct ReadForWrite<O, U: (Fn(O) -> io::Result<Option<O>>)> {
     buffer: Rc<RefCell<Vec<u8>>>,
     position: usize,
     add_more_data: U,
     state: Option<O>,
 }
 
-impl<O: 'static, U: (Fn(O) -> std::io::Result<Option<O>>) + 'static> ReadForWrite<O, U> {
+impl<O: 'static, U: (Fn(O) -> io::Result<Option<O>>) + 'static> ReadForWrite<O, U> {
     fn build_response(
-        initial_state_builder: impl FnOnce(ReadForWriteWriter) -> std::io::Result<O>,
+        initial_state_builder: impl FnOnce(ReadForWriteWriter) -> io::Result<O>,
         add_more_data: U,
         content_type: &'static str,
     ) -> Response {
@@ -1033,8 +1033,8 @@ impl<O: 'static, U: (Fn(O) -> std::io::Result<Option<O>>) + 'static> ReadForWrit
     }
 }
 
-impl<O, U: (Fn(O) -> std::io::Result<Option<O>>)> Read for ReadForWrite<O, U> {
-    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
+impl<O, U: (Fn(O) -> io::Result<Option<O>>)> Read for ReadForWrite<O, U> {
+    fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> {
         while self.position == self.buffer.borrow().len() {
             // We read more data
             if let Some(state) = self.state.take() {
@@ -1067,15 +1067,15 @@ struct ReadForWriteWriter {
 }
 
 impl Write for ReadForWriteWriter {
-    fn write(&mut self, buf: &[u8]) -> std::io::Result<usize> {
+    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
         self.buffer.borrow_mut().write(buf)
     }
 
-    fn flush(&mut self) -> std::io::Result<()> {
+    fn flush(&mut self) -> io::Result<()> {
         Ok(())
     }
 
-    fn write_all(&mut self, buf: &[u8]) -> std::io::Result<()> {
+    fn write_all(&mut self, buf: &[u8]) -> io::Result<()> {
         self.buffer.borrow_mut().write_all(buf)
    }
 }
