oxttl: Exposes the base IRI

Issue #730
pull/732/head
Authored by Tpt 11 months ago, committed by Thomas Tanon
parent c2df0b829d
commit c2040a30fd
Files changed:

1. .github/workflows/artifacts.yml (2 changed lines)
2. .github/workflows/tests.yml (8 changed lines)
3. lib/oxttl/src/n3.rs (84 changed lines)
4. lib/oxttl/src/terse.rs (2 changed lines)
5. lib/oxttl/src/toolkit/parser.rs (9 changed lines)
6. lib/oxttl/src/trig.rs (84 changed lines)
7. lib/oxttl/src/turtle.rs (84 changed lines)
8. lib/src/sparql/eval.rs (1 changed line)
9. lib/src/storage/small_string.rs (2 changed lines)

.github/workflows/artifacts.yml
@@ -24,7 +24,7 @@ jobs:
with:
target: aarch64-unknown-linux-gnu
- run: |
-sudo apt-get install -y g++-aarch64-linux-gnu
+sudo apt-get update && sudo apt-get install -y g++-aarch64-linux-gnu
mkdir .cargo
echo -e "[target.aarch64-unknown-linux-gnu]\nlinker = \"aarch64-linux-gnu-gcc\"" >> .cargo/config.toml
- run: cargo build --release --no-default-features --features rustls-native

.github/workflows/tests.yml
@@ -140,7 +140,7 @@ jobs:
- uses: ./.github/actions/setup-rust
with:
target: i686-unknown-linux-gnu
-- run: sudo apt-get install -y g++-multilib
+- run: sudo apt-get update && sudo apt-get install -y g++-multilib
- run: cargo test --target i686-unknown-linux-gnu --no-default-features --features http-client-rustls-native
working-directory: ./lib
@@ -176,7 +176,7 @@ jobs:
- uses: ./.github/actions/setup-rust
with:
version: nightly
-- run: sudo apt-get install -y llvm
+- run: sudo apt-get update && sudo apt-get install -y llvm
- run: cargo test --tests --target x86_64-unknown-linux-gnu --workspace --exclude pyoxigraph --exclude oxigraph-testsuite --exclude oxigraph-cli
env:
RUSTFLAGS: -Z sanitizer=address
@@ -354,7 +354,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
-- run: sudo apt-get install -y clang-format
+- run: sudo apt-get update && sudo apt-get install -y clang-format
- run: clang-format --Werror --dry-run oxrocksdb-sys/api/*
fuzz_changes:
@@ -425,7 +425,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
-- run: sudo apt-get install -y shellcheck
+- run: sudo apt-get update && sudo apt-get install -y shellcheck
- run: git grep -l '^#\( *shellcheck \|!\(/bin/\|/usr/bin/env \)\(sh\|bash\|dash\|ksh\)\)' | xargs shellcheck
spec_links:

lib/oxttl/src/n3.rs
@@ -418,6 +418,33 @@ impl<R: Read> FromReadN3Reader<R> {
pub fn prefixes(&self) -> &HashMap<String, Iri<String>> {
&self.inner.parser.context.prefixes
}
/// The base IRI considered at the current step of the parsing.
///
/// ```
/// use oxttl::N3Parser;
///
/// let file = b"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ;
/// schema:name \"Foo\" .";
///
/// let mut reader = N3Parser::new().parse_read(file.as_ref());
/// assert!(reader.base_iri().is_none()); // No base at the beginning because none has been given to the parser.
///
/// reader.next().unwrap()?; // We read the first triple
/// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI.
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ```
pub fn base_iri(&self) -> Option<&str> {
self.inner
.parser
.context
.lexer_options
.base_iri
.as_ref()
.map(Iri::as_str)
}
}
impl<R: Read> Iterator for FromReadN3Reader<R> {
@@ -498,6 +525,36 @@ impl<R: AsyncRead + Unpin> FromTokioAsyncReadN3Reader<R> {
pub fn prefixes(&self) -> &HashMap<String, Iri<String>> {
&self.inner.parser.context.prefixes
}
/// The base IRI considered at the current step of the parsing.
///
/// ```
/// use oxttl::N3Parser;
///
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> Result<(), oxttl::ParseError> {
/// let file = b"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ;
/// schema:name \"Foo\" .";
///
/// let mut reader = N3Parser::new().parse_tokio_async_read(file.as_ref());
/// assert!(reader.base_iri().is_none()); // No base IRI at the beginning
///
/// reader.next().await.unwrap()?; // We read the first triple
/// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI
/// # Ok(())
/// # }
/// ```
pub fn base_iri(&self) -> Option<&str> {
self.inner
.parser
.context
.lexer_options
.base_iri
.as_ref()
.map(Iri::as_str)
}
}
/// Parses a N3 file by using a low-level API. Can be built using [`N3Parser::parse`].
@@ -592,6 +649,33 @@ impl LowLevelN3Reader {
pub fn prefixes(&self) -> &HashMap<String, Iri<String>> {
&self.parser.context.prefixes
}
/// The base IRI considered at the current step of the parsing.
///
/// ```
/// use oxttl::N3Parser;
///
/// let file = b"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ;
/// schema:name \"Foo\" .";
///
/// let mut reader = N3Parser::new().parse();
/// reader.extend_from_slice(file);
/// assert!(reader.base_iri().is_none()); // No base IRI at the beginning
///
/// reader.read_next().unwrap()?; // We read the first triple
/// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ```
pub fn base_iri(&self) -> Option<&str> {
self.parser
.context
.lexer_options
.base_iri
.as_ref()
.map(Iri::as_str)
}
}
#[derive(Clone)]

lib/oxttl/src/terse.rs
@@ -22,7 +22,7 @@ pub struct TriGRecognizer {
#[allow(clippy::partial_pub_fields)]
pub struct TriGRecognizerContext {
-lexer_options: N3LexerOptions,
+pub lexer_options: N3LexerOptions,
pub with_graph_name: bool,
#[cfg(feature = "rdf-star")]
pub with_quoted_triples: bool,

lib/oxttl/src/toolkit/parser.rs
@@ -110,10 +110,11 @@ impl<RR: RuleRecognizer> Parser<RR> {
}
}
if self.lexer.is_end() {
-let Some(state) = self.state.take() else {
-return None;
-};
-state.recognize_end(&mut self.context, &mut self.results, &mut self.errors)
+self.state.take()?.recognize_end(
+&mut self.context,
+&mut self.results,
+&mut self.errors,
+)
} else {
return None;
}
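As a side note on the refactor just above: assuming, as the surrounding code suggests, that the enclosing method returns an Option, the `?` operator on `self.state.take()` is equivalent to the `let Some(state) = ... else { return None };` it replaces. A standalone sketch of that equivalence, with made-up names rather than the actual parser types:

```rust
// Two equivalent early-return styles for a function returning Option<_>
// (hypothetical `state` and return types, not the oxttl parser itself).

fn end_with_let_else(state: Option<String>) -> Option<usize> {
    let Some(state) = state else {
        return None;
    };
    Some(state.len())
}

fn end_with_question_mark(state: Option<String>) -> Option<usize> {
    // `?` on an Option returns None from the enclosing function,
    // so the explicit `let ... else { return None }` block disappears.
    Some(state?.len())
}

fn main() {
    assert_eq!(end_with_let_else(None), None);
    assert_eq!(end_with_question_mark(Some("abc".into())), Some(3));
}
```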

lib/oxttl/src/trig.rs
@@ -268,6 +268,33 @@ impl<R: Read> FromReadTriGReader<R> {
pub fn prefixes(&self) -> &HashMap<String, Iri<String>> {
&self.inner.parser.context.prefixes
}
/// The base IRI considered at the current step of the parsing.
///
/// ```
/// use oxttl::TriGParser;
///
/// let file = b"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ;
/// schema:name \"Foo\" .";
///
/// let mut reader = TriGParser::new().parse_read(file.as_ref());
/// assert!(reader.base_iri().is_none()); // No base at the beginning because none has been given to the parser.
///
/// reader.next().unwrap()?; // We read the first triple
/// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI.
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ```
pub fn base_iri(&self) -> Option<&str> {
self.inner
.parser
.context
.lexer_options
.base_iri
.as_ref()
.map(Iri::as_str)
}
}
impl<R: Read> Iterator for FromReadTriGReader<R> {
@@ -347,6 +374,36 @@ impl<R: AsyncRead + Unpin> FromTokioAsyncReadTriGReader<R> {
pub fn prefixes(&self) -> &HashMap<String, Iri<String>> {
&self.inner.parser.context.prefixes
}
/// The base IRI considered at the current step of the parsing.
///
/// ```
/// use oxttl::TriGParser;
///
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> Result<(), oxttl::ParseError> {
/// let file = b"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ;
/// schema:name \"Foo\" .";
///
/// let mut reader = TriGParser::new().parse_tokio_async_read(file.as_ref());
/// assert!(reader.base_iri().is_none()); // No base IRI at the beginning
///
/// reader.next().await.unwrap()?; // We read the first triple
/// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI
/// # Ok(())
/// # }
/// ```
pub fn base_iri(&self) -> Option<&str> {
self.inner
.parser
.context
.lexer_options
.base_iri
.as_ref()
.map(Iri::as_str)
}
}
/// Parses a TriG file by using a low-level API. Can be built using [`TriGParser::parse`].
@@ -440,6 +497,33 @@ impl LowLevelTriGReader {
pub fn prefixes(&self) -> &HashMap<String, Iri<String>> {
&self.parser.context.prefixes
}
/// The base IRI considered at the current step of the parsing.
///
/// ```
/// use oxttl::TriGParser;
///
/// let file = b"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ;
/// schema:name \"Foo\" .";
///
/// let mut reader = TriGParser::new().parse();
/// reader.extend_from_slice(file);
/// assert!(reader.base_iri().is_none()); // No base IRI at the beginning
///
/// reader.read_next().unwrap()?; // We read the first triple
/// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ```
pub fn base_iri(&self) -> Option<&str> {
self.parser
.context
.lexer_options
.base_iri
.as_ref()
.map(Iri::as_str)
}
}
/// A [TriG](https://www.w3.org/TR/trig/) serializer.

lib/oxttl/src/turtle.rs
@@ -270,6 +270,33 @@ impl<R: Read> FromReadTurtleReader<R> {
pub fn prefixes(&self) -> &HashMap<String, Iri<String>> {
&self.inner.parser.context.prefixes
}
/// The base IRI considered at the current step of the parsing.
///
/// ```
/// use oxttl::TurtleParser;
///
/// let file = b"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ;
/// schema:name \"Foo\" .";
///
/// let mut reader = TurtleParser::new().parse_read(file.as_ref());
/// assert!(reader.base_iri().is_none()); // No base at the beginning because none has been given to the parser.
///
/// reader.next().unwrap()?; // We read the first triple
/// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI.
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ```
pub fn base_iri(&self) -> Option<&str> {
self.inner
.parser
.context
.lexer_options
.base_iri
.as_ref()
.map(Iri::as_str)
}
}
impl<R: Read> Iterator for FromReadTurtleReader<R> {
@@ -349,6 +376,36 @@ impl<R: AsyncRead + Unpin> FromTokioAsyncReadTurtleReader<R> {
pub fn prefixes(&self) -> &HashMap<String, Iri<String>> {
&self.inner.parser.context.prefixes
}
/// The base IRI considered at the current step of the parsing.
///
/// ```
/// use oxttl::TurtleParser;
///
/// # #[tokio::main(flavor = "current_thread")]
/// # async fn main() -> Result<(), oxttl::ParseError> {
/// let file = b"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ;
/// schema:name \"Foo\" .";
///
/// let mut reader = TurtleParser::new().parse_tokio_async_read(file.as_ref());
/// assert!(reader.base_iri().is_none()); // No base IRI at the beginning
///
/// reader.next().await.unwrap()?; // We read the first triple
/// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI
/// # Ok(())
/// # }
/// ```
pub fn base_iri(&self) -> Option<&str> {
self.inner
.parser
.context
.lexer_options
.base_iri
.as_ref()
.map(Iri::as_str)
}
}
/// Parses a Turtle file by using a low-level API. Can be built using [`TurtleParser::parse`].
@@ -442,6 +499,33 @@ impl LowLevelTurtleReader {
pub fn prefixes(&self) -> &HashMap<String, Iri<String>> {
&self.parser.context.prefixes
}
/// The base IRI considered at the current step of the parsing.
///
/// ```
/// use oxttl::TurtleParser;
///
/// let file = b"@base <http://example.com/> .
/// @prefix schema: <http://schema.org/> .
/// <foo> a schema:Person ;
/// schema:name \"Foo\" .";
///
/// let mut reader = TurtleParser::new().parse();
/// reader.extend_from_slice(file);
/// assert!(reader.base_iri().is_none()); // No base IRI at the beginning
///
/// reader.read_next().unwrap()?; // We read the first triple
/// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI
/// # Result::<_,Box<dyn std::error::Error>>::Ok(())
/// ```
pub fn base_iri(&self) -> Option<&str> {
self.parser
.context
.lexer_options
.base_iri
.as_ref()
.map(Iri::as_str)
}
}
/// A [Turtle](https://www.w3.org/TR/turtle/) serializer.

lib/src/sparql/eval.rs
@@ -1076,6 +1076,7 @@ impl SimpleEvaluator {
inner: spargebra::Query::Select {
dataset: None,
pattern: graph_pattern.clone(),
+#[allow(clippy::useless_asref)]
base_iri: self.base_iri.as_ref().map(|iri| iri.as_ref().clone()),
},
dataset: QueryDataset::new(),

lib/src/storage/small_string.rs
@@ -101,7 +101,7 @@ impl fmt::Display for SmallString {
impl PartialEq for SmallString {
#[inline]
fn eq(&self, other: &Self) -> bool {
-self.as_str().eq(&**other)
+self.as_str() == other.as_str()
}
}
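For context on that last change: both forms compare the same underlying string data (the old one goes through SmallString's Deref to str via `&**other`), but `self.as_str() == other.as_str()` is symmetric and avoids the double dereference. A minimal sketch with a hypothetical inline-string type standing in for SmallString, not the actual oxigraph definition:

```rust
use std::ops::Deref;

// Hypothetical inline-string type standing in for SmallString.
// Assumes the input fits in the 15-byte buffer.
struct Small {
    len: u8,
    buf: [u8; 15],
}

impl Small {
    fn new(s: &str) -> Self {
        let mut buf = [0; 15];
        buf[..s.len()].copy_from_slice(s.as_bytes());
        Self {
            len: s.len() as u8,
            buf,
        }
    }

    fn as_str(&self) -> &str {
        std::str::from_utf8(&self.buf[..self.len as usize]).unwrap()
    }
}

impl Deref for Small {
    type Target = str;

    fn deref(&self) -> &str {
        self.as_str()
    }
}

impl PartialEq for Small {
    fn eq(&self, other: &Self) -> bool {
        // Same result as `self.as_str().eq(&**other)`, which compares
        // through the Deref impl, but symmetric and easier to read.
        self.as_str() == other.as_str()
    }
}

fn main() {
    assert!(Small::new("foo") == Small::new("foo"));
    assert!(Small::new("foo") != Small::new("bar"));
}
```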
