diff --git a/Cargo.lock b/Cargo.lock
index f8f74cc6..d293c2c1 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -786,9 +786,9 @@ dependencies = [
 
 [[package]]
 name = "json-event-parser"
-version = "0.1.1"
+version = "0.2.0-alpha.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "32f12e624eaeb74accb9bb48f01cb071427f68115aaafa5689acb372d7e22977"
+checksum = "20a2ad11b373ee8f1d5f9b0632b148a6dc65cf1faa8b2a99c89cbe70411e31a2"
 
 [[package]]
 name = "kernel32-sys"
diff --git a/lib/Cargo.toml b/lib/Cargo.toml
index 87b0715d..1ea01640 100644
--- a/lib/Cargo.toml
+++ b/lib/Cargo.toml
@@ -28,7 +28,7 @@ rocksdb-debug = []
 [dependencies]
 digest = "0.10"
 hex = "0.4"
-json-event-parser = "0.1"
+json-event-parser = "0.2.0-alpha.1"
 md-5 = "0.10"
 oxilangtag = "0.1"
 oxiri = "0.2"
diff --git a/lib/sparesults/Cargo.toml b/lib/sparesults/Cargo.toml
index 3fa9e27e..2516cdfa 100644
--- a/lib/sparesults/Cargo.toml
+++ b/lib/sparesults/Cargo.toml
@@ -19,7 +19,7 @@ default = []
 rdf-star = ["oxrdf/rdf-star"]
 
 [dependencies]
-json-event-parser = "0.1"
+json-event-parser = "0.2.0-alpha.1"
 memchr = "2.5"
 oxrdf = { version = "0.2.0-alpha.1-dev", path="../oxrdf" }
 quick-xml = ">=0.29, <0.31"
diff --git a/lib/sparesults/src/error.rs b/lib/sparesults/src/error.rs
index 40510663..1096b393 100644
--- a/lib/sparesults/src/error.rs
+++ b/lib/sparesults/src/error.rs
@@ -56,6 +56,15 @@ impl From<ParseError> for io::Error {
     }
 }
 
+impl From<json_event_parser::ParseError> for ParseError {
+    fn from(error: json_event_parser::ParseError) -> Self {
+        match error {
+            json_event_parser::ParseError::Syntax(error) => SyntaxError::from(error).into(),
+            json_event_parser::ParseError::Io(error) => error.into(),
+        }
+    }
+}
+
 impl From<quick_xml::Error> for ParseError {
     #[inline]
     fn from(error: quick_xml::Error) -> Self {
@@ -79,6 +88,7 @@ pub struct SyntaxError {
 
 #[derive(Debug)]
 pub(crate) enum SyntaxErrorKind {
+    Json(json_event_parser::SyntaxError),
     Xml(quick_xml::Error),
     Term { error: TermParseError, term: String },
     Msg { msg: String },
@@ -98,6 +108,7 @@ impl fmt::Display for SyntaxError {
     #[inline]
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match &self.inner {
+            SyntaxErrorKind::Json(e) => e.fmt(f),
             SyntaxErrorKind::Xml(e) => e.fmt(f),
             SyntaxErrorKind::Term { error, term } => write!(f, "{error}: {term}"),
             SyntaxErrorKind::Msg { msg } => f.write_str(msg),
@@ -109,6 +120,7 @@ impl Error for SyntaxError {
     #[inline]
     fn source(&self) -> Option<&(dyn Error + 'static)> {
         match &self.inner {
+            SyntaxErrorKind::Json(e) => Some(e),
             SyntaxErrorKind::Xml(e) => Some(e),
             SyntaxErrorKind::Term { error, .. } => Some(error),
             SyntaxErrorKind::Msg { .. } => None,
@@ -120,6 +132,7 @@ impl From<SyntaxError> for io::Error {
     #[inline]
     fn from(error: SyntaxError) -> Self {
         match error.inner {
+            SyntaxErrorKind::Json(error) => Self::new(io::ErrorKind::InvalidData, error),
             SyntaxErrorKind::Xml(error) => match error {
                 quick_xml::Error::Io(error) => match Arc::try_unwrap(error) {
                     Ok(error) => error,
                }
                 _ => Self::new(io::ErrorKind::InvalidData, error),
             },
-            SyntaxErrorKind::Term { .. } => {
-                Self::new(io::ErrorKind::InvalidData, error.to_string())
-            }
+            SyntaxErrorKind::Term { ..
} => Self::new(io::ErrorKind::InvalidData, error), SyntaxErrorKind::Msg { msg } => Self::new(io::ErrorKind::InvalidData, msg), } } } + +impl From for SyntaxError { + fn from(error: json_event_parser::SyntaxError) -> Self { + Self { + inner: SyntaxErrorKind::Json(error), + } + } +} diff --git a/lib/sparesults/src/json.rs b/lib/sparesults/src/json.rs index 67e2d494..3dc9be11 100644 --- a/lib/sparesults/src/json.rs +++ b/lib/sparesults/src/json.rs @@ -1,50 +1,50 @@ //! Implementation of [SPARQL Query Results JSON Format](https://www.w3.org/TR/sparql11-results-json/) use crate::error::{ParseError, SyntaxError}; -use json_event_parser::{JsonEvent, JsonReader, JsonWriter}; +use json_event_parser::{FromReadJsonReader, JsonEvent, ToWriteJsonWriter}; use oxrdf::vocab::rdf; use oxrdf::Variable; use oxrdf::*; use std::collections::BTreeMap; -use std::io::{self, BufRead, Write}; +use std::io::{self, Read, Write}; use std::mem::take; /// This limit is set in order to avoid stack overflow error when parsing nested triples due to too many recursive calls. /// The actual limit value is a wet finger compromise between not failing to parse valid files and avoiding to trigger stack overflow errors. const MAX_NUMBER_OF_NESTED_TRIPLES: usize = 128; -pub fn write_boolean_json_result(sink: W, value: bool) -> io::Result { - let mut writer = JsonWriter::from_writer(sink); +pub fn write_boolean_json_result(write: W, value: bool) -> io::Result { + let mut writer = ToWriteJsonWriter::new(write); writer.write_event(JsonEvent::StartObject)?; - writer.write_event(JsonEvent::ObjectKey("head"))?; + writer.write_event(JsonEvent::ObjectKey("head".into()))?; writer.write_event(JsonEvent::StartObject)?; writer.write_event(JsonEvent::EndObject)?; - writer.write_event(JsonEvent::ObjectKey("boolean"))?; + writer.write_event(JsonEvent::ObjectKey("boolean".into()))?; writer.write_event(JsonEvent::Boolean(value))?; writer.write_event(JsonEvent::EndObject)?; - Ok(writer.into_inner()) + writer.finish() } pub struct JsonSolutionsWriter { - writer: JsonWriter, + writer: ToWriteJsonWriter, } impl JsonSolutionsWriter { - pub fn start(sink: W, variables: &[Variable]) -> io::Result { - let mut writer = JsonWriter::from_writer(sink); + pub fn start(write: W, variables: &[Variable]) -> io::Result { + let mut writer = ToWriteJsonWriter::new(write); writer.write_event(JsonEvent::StartObject)?; - writer.write_event(JsonEvent::ObjectKey("head"))?; + writer.write_event(JsonEvent::ObjectKey("head".into()))?; writer.write_event(JsonEvent::StartObject)?; - writer.write_event(JsonEvent::ObjectKey("vars"))?; + writer.write_event(JsonEvent::ObjectKey("vars".into()))?; writer.write_event(JsonEvent::StartArray)?; for variable in variables { - writer.write_event(JsonEvent::String(variable.as_str()))?; + writer.write_event(JsonEvent::String(variable.as_str().into()))?; } writer.write_event(JsonEvent::EndArray)?; writer.write_event(JsonEvent::EndObject)?; - writer.write_event(JsonEvent::ObjectKey("results"))?; + writer.write_event(JsonEvent::ObjectKey("results".into()))?; writer.write_event(JsonEvent::StartObject)?; - writer.write_event(JsonEvent::ObjectKey("bindings"))?; + writer.write_event(JsonEvent::ObjectKey("bindings".into()))?; writer.write_event(JsonEvent::StartArray)?; Ok(Self { writer }) } @@ -56,7 +56,7 @@ impl JsonSolutionsWriter { self.writer.write_event(JsonEvent::StartObject)?; for (variable, value) in solution { self.writer - .write_event(JsonEvent::ObjectKey(variable.as_str()))?; + 
.write_event(JsonEvent::ObjectKey(variable.as_str().into()))?; write_json_term(value, &mut self.writer)?; } self.writer.write_event(JsonEvent::EndObject)?; @@ -67,55 +67,58 @@ impl JsonSolutionsWriter { self.writer.write_event(JsonEvent::EndArray)?; self.writer.write_event(JsonEvent::EndObject)?; self.writer.write_event(JsonEvent::EndObject)?; - Ok(self.writer.into_inner()) + self.writer.finish() } } -fn write_json_term(term: TermRef<'_>, writer: &mut JsonWriter) -> io::Result<()> { +fn write_json_term( + term: TermRef<'_>, + writer: &mut ToWriteJsonWriter, +) -> io::Result<()> { match term { TermRef::NamedNode(uri) => { writer.write_event(JsonEvent::StartObject)?; - writer.write_event(JsonEvent::ObjectKey("type"))?; - writer.write_event(JsonEvent::String("uri"))?; - writer.write_event(JsonEvent::ObjectKey("value"))?; - writer.write_event(JsonEvent::String(uri.as_str()))?; + writer.write_event(JsonEvent::ObjectKey("type".into()))?; + writer.write_event(JsonEvent::String("uri".into()))?; + writer.write_event(JsonEvent::ObjectKey("value".into()))?; + writer.write_event(JsonEvent::String(uri.as_str().into()))?; writer.write_event(JsonEvent::EndObject)?; } TermRef::BlankNode(bnode) => { writer.write_event(JsonEvent::StartObject)?; - writer.write_event(JsonEvent::ObjectKey("type"))?; - writer.write_event(JsonEvent::String("bnode"))?; - writer.write_event(JsonEvent::ObjectKey("value"))?; - writer.write_event(JsonEvent::String(bnode.as_str()))?; + writer.write_event(JsonEvent::ObjectKey("type".into()))?; + writer.write_event(JsonEvent::String("bnode".into()))?; + writer.write_event(JsonEvent::ObjectKey("value".into()))?; + writer.write_event(JsonEvent::String(bnode.as_str().into()))?; writer.write_event(JsonEvent::EndObject)?; } TermRef::Literal(literal) => { writer.write_event(JsonEvent::StartObject)?; - writer.write_event(JsonEvent::ObjectKey("type"))?; - writer.write_event(JsonEvent::String("literal"))?; - writer.write_event(JsonEvent::ObjectKey("value"))?; - writer.write_event(JsonEvent::String(literal.value()))?; + writer.write_event(JsonEvent::ObjectKey("type".into()))?; + writer.write_event(JsonEvent::String("literal".into()))?; + writer.write_event(JsonEvent::ObjectKey("value".into()))?; + writer.write_event(JsonEvent::String(literal.value().into()))?; if let Some(language) = literal.language() { - writer.write_event(JsonEvent::ObjectKey("xml:lang"))?; - writer.write_event(JsonEvent::String(language))?; + writer.write_event(JsonEvent::ObjectKey("xml:lang".into()))?; + writer.write_event(JsonEvent::String(language.into()))?; } else if !literal.is_plain() { - writer.write_event(JsonEvent::ObjectKey("datatype"))?; - writer.write_event(JsonEvent::String(literal.datatype().as_str()))?; + writer.write_event(JsonEvent::ObjectKey("datatype".into()))?; + writer.write_event(JsonEvent::String(literal.datatype().as_str().into()))?; } writer.write_event(JsonEvent::EndObject)?; } #[cfg(feature = "rdf-star")] TermRef::Triple(triple) => { writer.write_event(JsonEvent::StartObject)?; - writer.write_event(JsonEvent::ObjectKey("type"))?; - writer.write_event(JsonEvent::String("triple"))?; - writer.write_event(JsonEvent::ObjectKey("value"))?; + writer.write_event(JsonEvent::ObjectKey("type".into()))?; + writer.write_event(JsonEvent::String("triple".into()))?; + writer.write_event(JsonEvent::ObjectKey("value".into()))?; writer.write_event(JsonEvent::StartObject)?; - writer.write_event(JsonEvent::ObjectKey("subject"))?; + writer.write_event(JsonEvent::ObjectKey("subject".into()))?; 
write_json_term(triple.subject.as_ref().into(), writer)?; - writer.write_event(JsonEvent::ObjectKey("predicate"))?; + writer.write_event(JsonEvent::ObjectKey("predicate".into()))?; write_json_term(triple.predicate.as_ref().into(), writer)?; - writer.write_event(JsonEvent::ObjectKey("object"))?; + writer.write_event(JsonEvent::ObjectKey("object".into()))?; write_json_term(triple.object.as_ref(), writer)?; writer.write_event(JsonEvent::EndObject)?; writer.write_event(JsonEvent::EndObject)?; @@ -124,7 +127,7 @@ fn write_json_term(term: TermRef<'_>, writer: &mut JsonWriter) -> io Ok(()) } -pub enum JsonQueryResultsReader { +pub enum JsonQueryResultsReader { Solutions { variables: Vec, solutions: JsonSolutionsReader, @@ -132,24 +135,23 @@ pub enum JsonQueryResultsReader { Boolean(bool), } -impl JsonQueryResultsReader { - pub fn read(source: R) -> Result { - let mut reader = JsonReader::from_reader(source); - let mut buffer = Vec::default(); +impl JsonQueryResultsReader { + pub fn read(read: R) -> Result { + let mut reader = FromReadJsonReader::new(read); let mut variables = None; let mut buffered_bindings: Option> = None; let mut output_iter = None; - if reader.read_event(&mut buffer)? != JsonEvent::StartObject { + if reader.read_next_event()? != JsonEvent::StartObject { return Err(SyntaxError::msg("SPARQL JSON results should be an object").into()); } loop { - let event = reader.read_event(&mut buffer)?; + let event = reader.read_next_event()?; match event { - JsonEvent::ObjectKey(key) => match key { + JsonEvent::ObjectKey(key) => match key.as_ref() { "head" => { - let extracted_variables = read_head(&mut reader, &mut buffer)?; + let extracted_variables = read_head(&mut reader)?; if let Some(buffered_bindings) = buffered_bindings.take() { let mut mapping = BTreeMap::default(); for (i, var) in extracted_variables.iter().enumerate() { @@ -169,13 +171,13 @@ impl JsonQueryResultsReader { } } "results" => { - if reader.read_event(&mut buffer)? != JsonEvent::StartObject { + if reader.read_next_event()? != JsonEvent::StartObject { return Err(SyntaxError::msg("'results' should be an object").into()); } loop { - match reader.read_event(&mut buffer)? { - JsonEvent::ObjectKey("bindings") => break, // Found - JsonEvent::ObjectKey(_) => ignore_value(&mut reader, &mut buffer)?, + match reader.read_next_event()? { + JsonEvent::ObjectKey(k) if k == "bindings" => break, // Found + JsonEvent::ObjectKey(_) => ignore_value(&mut reader)?, _ => { return Err(SyntaxError::msg( "'results' should contain a 'bindings' key", @@ -184,7 +186,7 @@ impl JsonQueryResultsReader { } } } - if reader.read_event(&mut buffer)? != JsonEvent::StartArray { + if reader.read_next_event()? != JsonEvent::StartArray { return Err(SyntaxError::msg("'bindings' should be an object").into()); } if let Some(variables) = variables { @@ -195,7 +197,7 @@ impl JsonQueryResultsReader { return Ok(Self::Solutions { variables, solutions: JsonSolutionsReader { - kind: JsonSolutionsReaderKind::Streaming { reader, buffer }, + kind: JsonSolutionsReaderKind::Streaming { reader }, mapping, }, }); @@ -205,7 +207,7 @@ impl JsonQueryResultsReader { let mut variables = Vec::new(); let mut values = Vec::new(); loop { - match reader.read_event(&mut buffer)? { + match reader.read_next_event()? 
{ JsonEvent::StartObject => (), JsonEvent::EndObject => { bindings.push((take(&mut variables), take(&mut values))); @@ -215,8 +217,8 @@ impl JsonQueryResultsReader { break; } JsonEvent::ObjectKey(key) => { - variables.push(key.to_owned()); - values.push(read_value(&mut reader, &mut buffer, 0)?); + variables.push(key.into_owned()); + values.push(read_value(&mut reader, 0)?); } _ => { return Err( @@ -227,7 +229,7 @@ impl JsonQueryResultsReader { } } "boolean" => { - return if let JsonEvent::Boolean(v) = reader.read_event(&mut buffer)? { + return if let JsonEvent::Boolean(v) = reader.read_next_event()? { Ok(Self::Boolean(v)) } else { Err(SyntaxError::msg("Unexpected boolean value").into()) @@ -257,38 +259,37 @@ impl JsonQueryResultsReader { } } -pub struct JsonSolutionsReader { +pub struct JsonSolutionsReader { mapping: BTreeMap, kind: JsonSolutionsReaderKind, } -enum JsonSolutionsReaderKind { +enum JsonSolutionsReaderKind { Streaming { - reader: JsonReader, - buffer: Vec, + reader: FromReadJsonReader, }, Buffered { bindings: std::vec::IntoIter<(Vec, Vec)>, }, } -impl JsonSolutionsReader { +impl JsonSolutionsReader { pub fn read_next(&mut self) -> Result>>, ParseError> { match &mut self.kind { - JsonSolutionsReaderKind::Streaming { reader, buffer } => { + JsonSolutionsReaderKind::Streaming { reader } => { let mut new_bindings = vec![None; self.mapping.len()]; loop { - match reader.read_event(buffer)? { + match reader.read_next_event()? { JsonEvent::StartObject => (), JsonEvent::EndObject => return Ok(Some(new_bindings)), JsonEvent::EndArray | JsonEvent::Eof => return Ok(None), JsonEvent::ObjectKey(key) => { - let k = *self.mapping.get(key).ok_or_else(|| { + let k = *self.mapping.get(key.as_ref()).ok_or_else(|| { SyntaxError::msg(format!( "The variable {key} has not been defined in the header" )) })?; - new_bindings[k] = Some(read_value(reader, buffer, 0)?) + new_bindings[k] = Some(read_value(reader, 0)?) } _ => return Err(SyntaxError::msg("Invalid result serialization").into()), } @@ -314,9 +315,8 @@ impl JsonSolutionsReader { } } -fn read_value( - reader: &mut JsonReader, - buffer: &mut Vec, +fn read_value( + reader: &mut FromReadJsonReader, number_of_recursive_calls: usize, ) -> Result { enum Type { @@ -351,28 +351,29 @@ fn read_value( let mut predicate = None; #[cfg(feature = "rdf-star")] let mut object = None; - if reader.read_event(buffer)? != JsonEvent::StartObject { + if reader.read_next_event()? != JsonEvent::StartObject { return Err(SyntaxError::msg("Term serializations should be an object").into()); } loop { - match reader.read_event(buffer)? { - JsonEvent::ObjectKey(key) => match key { + #[allow(unsafe_code)] + // SAFETY: Borrow checker workaround https://github.com/rust-lang/rust/issues/70255 + let next_event = unsafe { + let r: *mut FromReadJsonReader = reader; + &mut *r + } + .read_next_event()?; + match next_event { + JsonEvent::ObjectKey(key) => match key.as_ref() { "type" => state = Some(State::Type), "value" => state = Some(State::Value), "xml:lang" => state = Some(State::Lang), "datatype" => state = Some(State::Datatype), #[cfg(feature = "rdf-star")] - "subject" => { - subject = Some(read_value(reader, buffer, number_of_recursive_calls + 1)?) - } + "subject" => subject = Some(read_value(reader, number_of_recursive_calls + 1)?), #[cfg(feature = "rdf-star")] - "predicate" => { - predicate = Some(read_value(reader, buffer, number_of_recursive_calls + 1)?) 
- } + "predicate" => predicate = Some(read_value(reader, number_of_recursive_calls + 1)?), #[cfg(feature = "rdf-star")] - "object" => { - object = Some(read_value(reader, buffer, number_of_recursive_calls + 1)?) - } + "object" => object = Some(read_value(reader, number_of_recursive_calls + 1)?), _ => { return Err(SyntaxError::msg(format!( "Unexpected key in term serialization: '{key}'" @@ -389,7 +390,7 @@ fn read_value( } JsonEvent::String(s) => match state { Some(State::Type) => { - match s { + match s.as_ref() { "uri" => t = Some(Type::Uri), "bnode" => t = Some(Type::BNode), "literal" | "typed-literal" => t = Some(Type::Literal), @@ -404,11 +405,11 @@ fn read_value( state = None; } Some(State::Value) => { - value = Some(s.to_owned()); + value = Some(s.into_owned()); state = None; } Some(State::Lang) => { - lang = Some(s.to_owned()); + lang = Some(s.into_owned()); state = None; } Some(State::Datatype) => { @@ -458,7 +459,7 @@ fn read_value( )).into()) } } - Literal::new_language_tagged_literal(value, &lang).map_err(|e| { + Literal::new_language_tagged_literal(value, &*lang).map_err(|e| { SyntaxError::msg(format!("Invalid xml:lang value '{lang}': {e}")) })? } @@ -511,25 +512,22 @@ fn read_value( } } -fn read_head( - reader: &mut JsonReader, - buffer: &mut Vec, -) -> Result, ParseError> { - if reader.read_event(buffer)? != JsonEvent::StartObject { +fn read_head(reader: &mut FromReadJsonReader) -> Result, ParseError> { + if reader.read_next_event()? != JsonEvent::StartObject { return Err(SyntaxError::msg("head should be an object").into()); } let mut variables = Vec::new(); loop { - match reader.read_event(buffer)? { - JsonEvent::ObjectKey(key) => match key { + match reader.read_next_event()? { + JsonEvent::ObjectKey(key) => match key.as_ref() { "vars" => { - if reader.read_event(buffer)? != JsonEvent::StartArray { + if reader.read_next_event()? != JsonEvent::StartArray { return Err(SyntaxError::msg("Variable list should be an array").into()); } loop { - match reader.read_event(buffer)? { + match reader.read_next_event()? { JsonEvent::String(s) => { - let new_var = Variable::new(s).map_err(|e| { + let new_var = Variable::new(s.as_ref()).map_err(|e| { SyntaxError::msg(format!( "Invalid variable declaration '{s}': {e}" )) @@ -552,11 +550,11 @@ fn read_head( } } "link" => { - if reader.read_event(buffer)? != JsonEvent::StartArray { + if reader.read_next_event()? != JsonEvent::StartArray { return Err(SyntaxError::msg("Variable list should be an array").into()); } loop { - match reader.read_event(buffer)? { + match reader.read_next_event()? { JsonEvent::String(_) => (), JsonEvent::EndArray => break, _ => { @@ -565,7 +563,7 @@ fn read_head( } } } - _ => ignore_value(reader, buffer)?, + _ => ignore_value(reader)?, }, JsonEvent::EndObject => return Ok(variables), _ => return Err(SyntaxError::msg("Invalid head serialization").into()), @@ -573,13 +571,10 @@ fn read_head( } } -fn ignore_value( - reader: &mut JsonReader, - buffer: &mut Vec, -) -> Result<(), ParseError> { +fn ignore_value(reader: &mut FromReadJsonReader) -> Result<(), ParseError> { let mut nesting = 0; loop { - match reader.read_event(buffer)? { + match reader.read_next_event()? 
{ JsonEvent::Boolean(_) | JsonEvent::Null | JsonEvent::Number(_) diff --git a/lib/src/sparql/eval.rs b/lib/src/sparql/eval.rs index ed9feba7..e21ee891 100644 --- a/lib/src/sparql/eval.rs +++ b/lib/src/sparql/eval.rs @@ -8,7 +8,7 @@ use crate::sparql::service::ServiceHandler; use crate::storage::numeric_encoder::*; use crate::storage::small_string::SmallString; use digest::Digest; -use json_event_parser::{JsonEvent, JsonWriter}; +use json_event_parser::{JsonEvent, ToWriteJsonWriter}; use md5::Md5; use oxilangtag::LanguageTag; use oxiri::Iri; @@ -5676,21 +5676,21 @@ pub struct EvalNodeWithStats { impl EvalNodeWithStats { pub fn json_node( &self, - writer: &mut JsonWriter, + writer: &mut ToWriteJsonWriter, with_stats: bool, ) -> io::Result<()> { writer.write_event(JsonEvent::StartObject)?; - writer.write_event(JsonEvent::ObjectKey("name"))?; - writer.write_event(JsonEvent::String(&self.label))?; + writer.write_event(JsonEvent::ObjectKey("name".into()))?; + writer.write_event(JsonEvent::String((&self.label).into()))?; if with_stats { - writer.write_event(JsonEvent::ObjectKey("number of results"))?; - writer.write_event(JsonEvent::Number(&self.exec_count.get().to_string()))?; + writer.write_event(JsonEvent::ObjectKey("number of results".into()))?; + writer.write_event(JsonEvent::Number(self.exec_count.get().to_string().into()))?; if let Some(duration) = self.exec_duration.get() { - writer.write_event(JsonEvent::ObjectKey("duration in seconds"))?; - writer.write_event(JsonEvent::Number(&duration.as_seconds().to_string()))?; + writer.write_event(JsonEvent::ObjectKey("duration in seconds".into()))?; + writer.write_event(JsonEvent::Number(duration.as_seconds().to_string().into()))?; } } - writer.write_event(JsonEvent::ObjectKey("children"))?; + writer.write_event(JsonEvent::ObjectKey("children".into()))?; writer.write_event(JsonEvent::StartArray)?; for child in &self.children { child.json_node(writer, with_stats)?; diff --git a/lib/src/sparql/mod.rs b/lib/src/sparql/mod.rs index 82944f95..1972a77e 100644 --- a/lib/src/sparql/mod.rs +++ b/lib/src/sparql/mod.rs @@ -22,7 +22,7 @@ pub use crate::sparql::service::ServiceHandler; use crate::sparql::service::{EmptyServiceHandler, ErrorConversionServiceHandler}; pub(crate) use crate::sparql::update::evaluate_update; use crate::storage::StorageReader; -use json_event_parser::{JsonEvent, JsonWriter}; +use json_event_parser::{JsonEvent, ToWriteJsonWriter}; pub use oxrdf::{Variable, VariableNameParseError}; use oxsdatatypes::{DayTimeDuration, Float}; pub use spargebra::ParseError; @@ -279,22 +279,22 @@ pub struct QueryExplanation { impl QueryExplanation { /// Writes the explanation as JSON. 
- pub fn write_in_json(&self, output: impl io::Write) -> io::Result<()> { - let mut writer = JsonWriter::from_writer(output); + pub fn write_in_json(&self, write: impl io::Write) -> io::Result<()> { + let mut writer = ToWriteJsonWriter::new(write); writer.write_event(JsonEvent::StartObject)?; if let Some(parsing_duration) = self.parsing_duration { - writer.write_event(JsonEvent::ObjectKey("parsing duration in seconds"))?; + writer.write_event(JsonEvent::ObjectKey("parsing duration in seconds".into()))?; writer.write_event(JsonEvent::Number( - &parsing_duration.as_seconds().to_string(), + parsing_duration.as_seconds().to_string().into(), ))?; } if let Some(planning_duration) = self.planning_duration { - writer.write_event(JsonEvent::ObjectKey("planning duration in seconds"))?; + writer.write_event(JsonEvent::ObjectKey("planning duration in seconds".into()))?; writer.write_event(JsonEvent::Number( - &planning_duration.as_seconds().to_string(), + planning_duration.as_seconds().to_string().into(), ))?; } - writer.write_event(JsonEvent::ObjectKey("plan"))?; + writer.write_event(JsonEvent::ObjectKey("plan".into()))?; self.inner.json_node(&mut writer, self.with_stats)?; writer.write_event(JsonEvent::EndObject) } diff --git a/lints/test_debian_compatibility.py b/lints/test_debian_compatibility.py index 10f6596a..d409bb9d 100644 --- a/lints/test_debian_compatibility.py +++ b/lints/test_debian_compatibility.py @@ -5,7 +5,7 @@ from urllib.request import urlopen TARGET_DEBIAN_VERSIONS = ["sid"] IGNORE_PACKAGES = {"oxigraph-js", "oxigraph-testsuite", "pyoxigraph", "sparql-smith"} -ALLOWED_MISSING_PACKAGES = {"escargot", "oxhttp", "quick-xml"} +ALLOWED_MISSING_PACKAGES = {"escargot", "json-event-parser", "oxhttp", "quick-xml"} base_path = Path(__file__).parent.parent
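Note on the writer side of the migration: `JsonWriter::from_writer`/`into_inner` are replaced by `ToWriteJsonWriter::new`/`finish`, and `JsonEvent` string payloads now appear to be `Cow<'_, str>`, which is why every key and string in the patch gains an `.into()`. A minimal sketch of the new usage, mirroring `write_boolean_json_result` above (the helper name `boolean_result_example` is illustrative, not part of the patch):

```rust
use json_event_parser::{JsonEvent, ToWriteJsonWriter};
use std::io;

// Emits {"head":{},"boolean":true} into an in-memory buffer.
fn boolean_result_example() -> io::Result<Vec<u8>> {
    let mut writer = ToWriteJsonWriter::new(Vec::new());
    writer.write_event(JsonEvent::StartObject)?;
    writer.write_event(JsonEvent::ObjectKey("head".into()))?; // keys take Cow-style payloads in 0.2
    writer.write_event(JsonEvent::StartObject)?;
    writer.write_event(JsonEvent::EndObject)?;
    writer.write_event(JsonEvent::ObjectKey("boolean".into()))?;
    writer.write_event(JsonEvent::Boolean(true))?;
    writer.write_event(JsonEvent::EndObject)?;
    writer.finish() // replaces into_inner(); returns the underlying writer
}
```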
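Note on the reader side: `FromReadJsonReader` replaces `JsonReader` and keeps its own internal buffer, which is why the explicit `Vec<u8>` buffer and the `BufRead` bound disappear and `read_next_event()` takes no arguments. Event payloads are borrowed-or-owned strings, matched with `as_ref()` or taken with `into_owned()`. A minimal sketch under those assumptions (the helper `collect_keys` is hypothetical, not part of the patch):

```rust
use json_event_parser::{FromReadJsonReader, JsonEvent, ParseError};

// Collects every object key of a JSON document as an owned String.
fn collect_keys(json: &[u8]) -> Result<Vec<String>, ParseError> {
    let mut reader = FromReadJsonReader::new(json); // &[u8] implements Read
    let mut keys = Vec::new();
    loop {
        match reader.read_next_event()? {
            JsonEvent::ObjectKey(key) => keys.push(key.into_owned()), // Cow -> String
            JsonEvent::Eof => return Ok(keys),
            _ => (), // values, arrays and nested objects are skipped here
        }
    }
}
```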
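Note on error handling: the new `json_event_parser::ParseError` splits into `Syntax` and `Io` variants, which the `From` impls added in `lib/sparesults/src/error.rs` unwrap into the crate's own error types. The same mapping in isolation, as a sketch (the function `to_io_error` is illustrative only):

```rust
use std::io;

// I/O failures pass through unchanged; syntax errors become InvalidData,
// keeping the parser's error as the source, as in the patched From impls.
fn to_io_error(error: json_event_parser::ParseError) -> io::Error {
    match error {
        json_event_parser::ParseError::Io(error) => error,
        json_event_parser::ParseError::Syntax(error) => {
            io::Error::new(io::ErrorKind::InvalidData, error)
        }
    }
}
```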