diff --git a/.github/workflows/artifacts.yml b/.github/workflows/artifacts.yml
index d4fe4fb0..05579293 100644
--- a/.github/workflows/artifacts.yml
+++ b/.github/workflows/artifacts.yml
@@ -24,7 +24,7 @@ jobs:
         with:
           target: aarch64-unknown-linux-gnu
       - run: |
-          sudo apt-get install -y g++-aarch64-linux-gnu
+          sudo apt-get update && sudo apt-get install -y g++-aarch64-linux-gnu
           mkdir .cargo
           echo -e "[target.aarch64-unknown-linux-gnu]\nlinker = \"aarch64-linux-gnu-gcc\"" >> .cargo/config.toml
       - run: cargo build --release --no-default-features --features rustls-native
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index fe8fa119..689bb227 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -140,7 +140,7 @@ jobs:
       - uses: ./.github/actions/setup-rust
         with:
           target: i686-unknown-linux-gnu
-      - run: sudo apt-get install -y g++-multilib
+      - run: sudo apt-get update && sudo apt-get install -y g++-multilib
       - run: cargo test --target i686-unknown-linux-gnu --no-default-features --features http-client-rustls-native
         working-directory: ./lib
@@ -176,7 +176,7 @@ jobs:
       - uses: ./.github/actions/setup-rust
         with:
           version: nightly
-      - run: sudo apt-get install -y llvm
+      - run: sudo apt-get update && sudo apt-get install -y llvm
       - run: cargo test --tests --target x86_64-unknown-linux-gnu --workspace --exclude pyoxigraph --exclude oxigraph-testsuite --exclude oxigraph-cli
         env:
           RUSTFLAGS: -Z sanitizer=address
@@ -354,7 +354,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - uses: actions/checkout@v3
-      - run: sudo apt-get install -y clang-format
+      - run: sudo apt-get update && sudo apt-get install -y clang-format
       - run: clang-format --Werror --dry-run oxrocksdb-sys/api/*
 
   fuzz_changes:
@@ -425,7 +425,7 @@ jobs:
     runs-on: ubuntu-latest
    steps:
       - uses: actions/checkout@v3
-      - run: sudo apt-get install -y shellcheck
+      - run: sudo apt-get update && sudo apt-get install -y shellcheck
       - run: git grep -l '^#\( *shellcheck \|!\(/bin/\|/usr/bin/env \)\(sh\|bash\|dash\|ksh\)\)' | xargs shellcheck
 
   spec_links:
diff --git a/lib/oxttl/src/n3.rs b/lib/oxttl/src/n3.rs
index 72db1611..0642d416 100644
--- a/lib/oxttl/src/n3.rs
+++ b/lib/oxttl/src/n3.rs
@@ -418,6 +418,33 @@ impl<R: Read> FromReadN3Reader<R> {
     pub fn prefixes(&self) -> &HashMap<String, Iri<String>> {
         &self.inner.parser.context.prefixes
     }
+
+    /// The base IRI considered at the current step of the parsing.
+    ///
+    /// ```
+    /// use oxttl::N3Parser;
+    ///
+    /// let file = b"@base <http://example.com/> .
+    /// @prefix schema: <http://schema.org/> .
+    /// <foo> a schema:Person ;
+    ///     schema:name \"Foo\" .";
+    ///
+    /// let mut reader = N3Parser::new().parse_read(file.as_ref());
+    /// assert!(reader.base_iri().is_none()); // No base at the beginning because none has been given to the parser.
+    ///
+    /// reader.next().unwrap()?; // We read the first triple
+    /// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI.
+    /// # Result::<_,Box<dyn std::error::Error>>::Ok(())
+    /// ```
+    pub fn base_iri(&self) -> Option<&str> {
+        self.inner
+            .parser
+            .context
+            .lexer_options
+            .base_iri
+            .as_ref()
+            .map(Iri::as_str)
+    }
 }
 
 impl<R: Read> Iterator for FromReadN3Reader<R> {
@@ -498,6 +525,36 @@ impl<R: AsyncRead + Unpin> FromTokioAsyncReadN3Reader<R> {
     pub fn prefixes(&self) -> &HashMap<String, Iri<String>> {
         &self.inner.parser.context.prefixes
     }
+
+    /// The base IRI considered at the current step of the parsing.
+    ///
+    /// ```
+    /// use oxttl::N3Parser;
+    ///
+    /// # #[tokio::main(flavor = "current_thread")]
+    /// # async fn main() -> Result<(), oxttl::ParseError> {
+    /// let file = b"@base <http://example.com/> .
+    /// @prefix schema: <http://schema.org/> .
+    /// <foo> a schema:Person ;
+    ///     schema:name \"Foo\" .";
+    ///
+    /// let mut reader = N3Parser::new().parse_tokio_async_read(file.as_ref());
+    /// assert!(reader.base_iri().is_none()); // No base IRI at the beginning
+    ///
+    /// reader.next().await.unwrap()?; // We read the first triple
+    /// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI
+    /// # Ok(())
+    /// # }
+    /// ```
+    pub fn base_iri(&self) -> Option<&str> {
+        self.inner
+            .parser
+            .context
+            .lexer_options
+            .base_iri
+            .as_ref()
+            .map(Iri::as_str)
+    }
 }
 
 /// Parses a N3 file by using a low-level API. Can be built using [`N3Parser::parse`].
@@ -592,6 +649,33 @@ impl LowLevelN3Reader {
     pub fn prefixes(&self) -> &HashMap<String, Iri<String>> {
         &self.parser.context.prefixes
     }
+
+    /// The base IRI considered at the current step of the parsing.
+    ///
+    /// ```
+    /// use oxttl::N3Parser;
+    ///
+    /// let file = b"@base <http://example.com/> .
+    /// @prefix schema: <http://schema.org/> .
+    /// <foo> a schema:Person ;
+    ///     schema:name \"Foo\" .";
+    ///
+    /// let mut reader = N3Parser::new().parse();
+    /// reader.extend_from_slice(file);
+    /// assert!(reader.base_iri().is_none()); // No base IRI at the beginning
+    ///
+    /// reader.read_next().unwrap()?; // We read the first triple
+    /// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI
+    /// # Result::<_,Box<dyn std::error::Error>>::Ok(())
+    /// ```
+    pub fn base_iri(&self) -> Option<&str> {
+        self.parser
+            .context
+            .lexer_options
+            .base_iri
+            .as_ref()
+            .map(Iri::as_str)
+    }
 }
 
 #[derive(Clone)]
diff --git a/lib/oxttl/src/terse.rs b/lib/oxttl/src/terse.rs
index 6c83fb71..bc092c1f 100644
--- a/lib/oxttl/src/terse.rs
+++ b/lib/oxttl/src/terse.rs
@@ -22,7 +22,7 @@ pub struct TriGRecognizer {
 
 #[allow(clippy::partial_pub_fields)]
 pub struct TriGRecognizerContext {
-    lexer_options: N3LexerOptions,
+    pub lexer_options: N3LexerOptions,
     pub with_graph_name: bool,
     #[cfg(feature = "rdf-star")]
     pub with_quoted_triples: bool,
diff --git a/lib/oxttl/src/toolkit/parser.rs b/lib/oxttl/src/toolkit/parser.rs
index 7af93752..6314640d 100644
--- a/lib/oxttl/src/toolkit/parser.rs
+++ b/lib/oxttl/src/toolkit/parser.rs
@@ -110,10 +110,11 @@ impl<RR: RuleRecognizer> Parser<RR> {
             }
         }
         if self.lexer.is_end() {
-            let Some(state) = self.state.take() else {
-                return None;
-            };
-            state.recognize_end(&mut self.context, &mut self.results, &mut self.errors)
+            self.state.take()?.recognize_end(
+                &mut self.context,
+                &mut self.results,
+                &mut self.errors,
+            )
         } else {
             return None;
         }
diff --git a/lib/oxttl/src/trig.rs b/lib/oxttl/src/trig.rs
index e97cdce3..70d3edb6 100644
--- a/lib/oxttl/src/trig.rs
+++ b/lib/oxttl/src/trig.rs
@@ -268,6 +268,33 @@ impl<R: Read> FromReadTriGReader<R> {
     pub fn prefixes(&self) -> &HashMap<String, Iri<String>> {
         &self.inner.parser.context.prefixes
     }
+
+    /// The base IRI considered at the current step of the parsing.
+    ///
+    /// ```
+    /// use oxttl::TriGParser;
+    ///
+    /// let file = b"@base <http://example.com/> .
+    /// @prefix schema: <http://schema.org/> .
+    /// <foo> a schema:Person ;
+    ///     schema:name \"Foo\" .";
+    ///
+    /// let mut reader = TriGParser::new().parse_read(file.as_ref());
+    /// assert!(reader.base_iri().is_none()); // No base at the beginning because none has been given to the parser.
+    ///
+    /// reader.next().unwrap()?; // We read the first triple
+    /// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI.
+    /// # Result::<_,Box<dyn std::error::Error>>::Ok(())
+    /// ```
+    pub fn base_iri(&self) -> Option<&str> {
+        self.inner
+            .parser
+            .context
+            .lexer_options
+            .base_iri
+            .as_ref()
+            .map(Iri::as_str)
+    }
 }
 
 impl<R: Read> Iterator for FromReadTriGReader<R> {
@@ -347,6 +374,36 @@ impl<R: AsyncRead + Unpin> FromTokioAsyncReadTriGReader<R> {
     pub fn prefixes(&self) -> &HashMap<String, Iri<String>> {
         &self.inner.parser.context.prefixes
     }
+
+    /// The base IRI considered at the current step of the parsing.
+    ///
+    /// ```
+    /// use oxttl::TriGParser;
+    ///
+    /// # #[tokio::main(flavor = "current_thread")]
+    /// # async fn main() -> Result<(), oxttl::ParseError> {
+    /// let file = b"@base <http://example.com/> .
+    /// @prefix schema: <http://schema.org/> .
+    /// <foo> a schema:Person ;
+    ///     schema:name \"Foo\" .";
+    ///
+    /// let mut reader = TriGParser::new().parse_tokio_async_read(file.as_ref());
+    /// assert!(reader.base_iri().is_none()); // No base IRI at the beginning
+    ///
+    /// reader.next().await.unwrap()?; // We read the first triple
+    /// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI
+    /// # Ok(())
+    /// # }
+    /// ```
+    pub fn base_iri(&self) -> Option<&str> {
+        self.inner
+            .parser
+            .context
+            .lexer_options
+            .base_iri
+            .as_ref()
+            .map(Iri::as_str)
+    }
 }
 
 /// Parses a TriG file by using a low-level API. Can be built using [`TriGParser::parse`].
@@ -440,6 +497,33 @@ impl LowLevelTriGReader {
     pub fn prefixes(&self) -> &HashMap<String, Iri<String>> {
         &self.parser.context.prefixes
     }
+
+    /// The base IRI considered at the current step of the parsing.
+    ///
+    /// ```
+    /// use oxttl::TriGParser;
+    ///
+    /// let file = b"@base <http://example.com/> .
+    /// @prefix schema: <http://schema.org/> .
+    /// <foo> a schema:Person ;
+    ///     schema:name \"Foo\" .";
+    ///
+    /// let mut reader = TriGParser::new().parse();
+    /// reader.extend_from_slice(file);
+    /// assert!(reader.base_iri().is_none()); // No base IRI at the beginning
+    ///
+    /// reader.read_next().unwrap()?; // We read the first triple
+    /// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI
+    /// # Result::<_,Box<dyn std::error::Error>>::Ok(())
+    /// ```
+    pub fn base_iri(&self) -> Option<&str> {
+        self.parser
+            .context
+            .lexer_options
+            .base_iri
+            .as_ref()
+            .map(Iri::as_str)
+    }
 }
 
 /// A [TriG](https://www.w3.org/TR/trig/) serializer.
diff --git a/lib/oxttl/src/turtle.rs b/lib/oxttl/src/turtle.rs
index ca0eedb1..0e225611 100644
--- a/lib/oxttl/src/turtle.rs
+++ b/lib/oxttl/src/turtle.rs
@@ -270,6 +270,33 @@ impl<R: Read> FromReadTurtleReader<R> {
     pub fn prefixes(&self) -> &HashMap<String, Iri<String>> {
         &self.inner.parser.context.prefixes
     }
+
+    /// The base IRI considered at the current step of the parsing.
+    ///
+    /// ```
+    /// use oxttl::TurtleParser;
+    ///
+    /// let file = b"@base <http://example.com/> .
+    /// @prefix schema: <http://schema.org/> .
+    /// <foo> a schema:Person ;
+    ///     schema:name \"Foo\" .";
+    ///
+    /// let mut reader = TurtleParser::new().parse_read(file.as_ref());
+    /// assert!(reader.base_iri().is_none()); // No base at the beginning because none has been given to the parser.
+    ///
+    /// reader.next().unwrap()?; // We read the first triple
+    /// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI.
+    /// # Result::<_,Box<dyn std::error::Error>>::Ok(())
+    /// ```
+    pub fn base_iri(&self) -> Option<&str> {
+        self.inner
+            .parser
+            .context
+            .lexer_options
+            .base_iri
+            .as_ref()
+            .map(Iri::as_str)
+    }
 }
 
 impl<R: Read> Iterator for FromReadTurtleReader<R> {
@@ -349,6 +376,36 @@ impl<R: AsyncRead + Unpin> FromTokioAsyncReadTurtleReader<R> {
     pub fn prefixes(&self) -> &HashMap<String, Iri<String>> {
         &self.inner.parser.context.prefixes
     }
+
+    /// The base IRI considered at the current step of the parsing.
+    ///
+    /// ```
+    /// use oxttl::TurtleParser;
+    ///
+    /// # #[tokio::main(flavor = "current_thread")]
+    /// # async fn main() -> Result<(), oxttl::ParseError> {
+    /// let file = b"@base <http://example.com/> .
+    /// @prefix schema: <http://schema.org/> .
+    /// <foo> a schema:Person ;
+    ///     schema:name \"Foo\" .";
+    ///
+    /// let mut reader = TurtleParser::new().parse_tokio_async_read(file.as_ref());
+    /// assert!(reader.base_iri().is_none()); // No base IRI at the beginning
+    ///
+    /// reader.next().await.unwrap()?; // We read the first triple
+    /// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI
+    /// # Ok(())
+    /// # }
+    /// ```
+    pub fn base_iri(&self) -> Option<&str> {
+        self.inner
+            .parser
+            .context
+            .lexer_options
+            .base_iri
+            .as_ref()
+            .map(Iri::as_str)
+    }
 }
 
 /// Parses a Turtle file by using a low-level API. Can be built using [`TurtleParser::parse`].
@@ -442,6 +499,33 @@ impl LowLevelTurtleReader {
     pub fn prefixes(&self) -> &HashMap<String, Iri<String>> {
         &self.parser.context.prefixes
     }
+
+    /// The base IRI considered at the current step of the parsing.
+    ///
+    /// ```
+    /// use oxttl::TurtleParser;
+    ///
+    /// let file = b"@base <http://example.com/> .
+    /// @prefix schema: <http://schema.org/> .
+    /// <foo> a schema:Person ;
+    ///     schema:name \"Foo\" .";
+    ///
+    /// let mut reader = TurtleParser::new().parse();
+    /// reader.extend_from_slice(file);
+    /// assert!(reader.base_iri().is_none()); // No base IRI at the beginning
+    ///
+    /// reader.read_next().unwrap()?; // We read the first triple
+    /// assert_eq!(reader.base_iri(), Some("http://example.com/")); // There is now a base IRI
+    /// # Result::<_,Box<dyn std::error::Error>>::Ok(())
+    /// ```
+    pub fn base_iri(&self) -> Option<&str> {
+        self.parser
+            .context
+            .lexer_options
+            .base_iri
+            .as_ref()
+            .map(Iri::as_str)
+    }
 }
 
 /// A [Turtle](https://www.w3.org/TR/turtle/) serializer.
diff --git a/lib/src/sparql/eval.rs b/lib/src/sparql/eval.rs
index 9f317d11..9ae02da1 100644
--- a/lib/src/sparql/eval.rs
+++ b/lib/src/sparql/eval.rs
@@ -1076,6 +1076,7 @@ impl SimpleEvaluator {
                 inner: spargebra::Query::Select {
                     dataset: None,
                     pattern: graph_pattern.clone(),
+                    #[allow(clippy::useless_asref)]
                     base_iri: self.base_iri.as_ref().map(|iri| iri.as_ref().clone()),
                 },
                 dataset: QueryDataset::new(),
diff --git a/lib/src/storage/small_string.rs b/lib/src/storage/small_string.rs
index d5d18987..be836c4d 100644
--- a/lib/src/storage/small_string.rs
+++ b/lib/src/storage/small_string.rs
@@ -101,7 +101,7 @@ impl fmt::Display for SmallString {
 impl PartialEq for SmallString {
     #[inline]
     fn eq(&self, other: &Self) -> bool {
-        self.as_str().eq(&**other)
+        self.as_str() == other.as_str()
     }
 }
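Usage note: the new `base_iri()` getter follows the same pattern as the existing `prefixes()` getter, so the state it reports grows as the document is consumed. The sketch below (not part of the diff) shows how the two getters might be combined while streaming a Turtle document; the sample document and the `main` wrapper are illustrative assumptions, only `TurtleParser`, `parse_read`, `prefixes()` and the `base_iri()` method added here come from the crate.

```rust
use oxttl::TurtleParser;

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Sample document: the base IRI and the prefix only become visible to
    // base_iri()/prefixes() once the corresponding directives have been parsed.
    let file = b"@base <http://example.com/> .
@prefix schema: <http://schema.org/> .
<foo> a schema:Person ;
    schema:name \"Foo\" .";

    let mut reader = TurtleParser::new().parse_read(file.as_ref());
    assert!(reader.base_iri().is_none()); // Nothing has been parsed yet.

    while let Some(triple) = reader.next() {
        let triple = triple?;
        // After the first triple, @base and @prefix have already been read.
        println!(
            "{triple} (base: {:?}, {} prefixes known)",
            reader.base_iri(),
            reader.prefixes().len()
        );
    }
    Ok(())
}
```

The same pattern applies to the TriG and N3 readers as well as to their `LowLevel*` and Tokio variants, which expose the identical `base_iri()` signature in this diff.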