Merge remote-tracking branch 'origin/main' into next

pull/706/head
Tpt 11 months ago
commit 8104f9e1de
  1. 35
      .github/workflows/artifacts.yml
  2. 33
      CHANGELOG.md
  3. 2
      README.md
  4. 1
      js/README.md
  5. 2
      js/package.json
  6. 2
      lib/spargebra/src/parser.rs
  7. 8
      lib/src/sparql/eval.rs
  8. 3
      lib/src/storage/backend/rocksdb.rs
  9. 12
      lib/tests/store.rs
  10. 2
      oxrocksdb-sys/rocksdb
  11. 1
      testsuite/oxigraph-tests/sparql/ask_from.rq
  12. 11
      testsuite/oxigraph-tests/sparql/manifest.ttl
  13. 3
      testsuite/oxigraph-tests/sparql/small_iri_str.rq
  14. 6
      testsuite/oxigraph-tests/sparql/small_iri_str.srx

@ -108,6 +108,36 @@ jobs:
files: oxigraph_${{ github.event.release.tag_name }}_x86_64_windows_msvc.exe files: oxigraph_${{ github.event.release.tag_name }}_x86_64_windows_msvc.exe
if: github.event_name == 'release' if: github.event_name == 'release'
python_sdist:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
with:
submodules: true
- run: rustup update
- uses: Swatinem/rust-cache@v2
- uses: actions/setup-python@v4
with:
python-version: "3.12"
cache: pip
cache-dependency-path: '**/requirements.dev.txt'
- run: pip install -r python/requirements.dev.txt
- run: maturin build -m python/Cargo.toml
- run: pip install --no-index --find-links=target/wheels/ pyoxigraph
- run: rm -r target/wheels
- run: python generate_stubs.py pyoxigraph pyoxigraph.pyi --ruff
working-directory: ./python
- run: maturin sdist -m python/Cargo.toml
- uses: actions/upload-artifact@v3
with:
name: pyoxigraph_source
path: target/wheels/*.tar.gz
- run: pip install twine && twine upload target/wheels/*
env:
TWINE_USERNAME: __token__
TWINE_PASSWORD: ${{ secrets.PYPI_PASSWORD }}
if: github.event_name == 'release'
wheel_linux: wheel_linux:
runs-on: ubuntu-latest runs-on: ubuntu-latest
strategy: strategy:
@ -229,10 +259,7 @@ jobs:
- run: rm -r target/wheels - run: rm -r target/wheels
- run: python generate_stubs.py pyoxigraph pyoxigraph.pyi --ruff - run: python generate_stubs.py pyoxigraph pyoxigraph.pyi --ruff
working-directory: ./python working-directory: ./python
- run: maturin build --release --features abi3 - run: maturin build --release -m python/Cargo.toml --features abi3
working-directory: ./python
- run: maturin sdist
working-directory: ./python
- uses: actions/upload-artifact@v3 - uses: actions/upload-artifact@v3
with: with:
name: pyoxigraph_wheel_x86_64_windows name: pyoxigraph_wheel_x86_64_windows

@ -1,3 +1,36 @@
## [0.3.22] - 2023-11-29
### Changed
- Allows to compile with more recent `bindgen` and `cc`
- Fixes compatibility with `spin_no_std` feature of `lazy_static`
## [0.3.21] - 2023-11-29
### Changed
- Bulk loader: do not fail when loading empty files.
- Python: fixes source distribution.
- Upgrades RocksDB to 7.8.1.
## [0.3.20] - 2023-10-23
### Changed
- SPARQL: fixes `STR` evaluation on small IRI (less than 16 bytes) that was broken.
- SPARQL update: fixes `WHERE` clause evaluation that was generating too many solutions in some cases.
- Upgrades RocksDB to 8.7.1.
## [0.3.19] - 2023-08-18
### Added
- Python: allows to give `pathlib.Path` argument when a path is expected.
- Cargo.toml: add a documentation field to link to docs.rs documentation.
### Changed
- Upgrades RocksDB to 8.3.2.
## [0.3.18] - 2023-06-13 ## [0.3.18] - 2023-06-13
### Changed ### Changed

@ -72,7 +72,7 @@ Unless you explicitly state otherwise, any contribution intentionally submitted
## Sponsors ## Sponsors
* [RelationLabs](https://relationlabs.ai/) that is building [Relation-Graph](https://github.com/relationlabs/Relation-Graph), a SPARQL database module for the [Substrate blockchain platform](https://substrate.io/) based on Oxigraph. * [RelationLabs](https://relationlabs.ai/) that is building [Relation-Graph](https://github.com/relationlabs/Relation-Graph), a SPARQL database module for the [Substrate blockchain platform](https://substrate.io/) based on Oxigraph.
* [Field 33](https://field33.com) that is building [an ontology management plateform](https://plow.pm/). * [Field 33](https://field33.com) that is building [an ontology management platform](https://plow.pm/).
* [Magnus Bakken](https://github.com/magbak) who is building [chrontext](https://github.com/magbak/chrontext), providing a SPARQL query endpoint on top of joint RDF and time series databases. * [Magnus Bakken](https://github.com/magbak) who is building [chrontext](https://github.com/magbak/chrontext), providing a SPARQL query endpoint on top of joint RDF and time series databases.
* [ACE IoT Solutions](https://aceiotsolutions.com/), a building IOT platform. * [ACE IoT Solutions](https://aceiotsolutions.com/), a building IOT platform.
* [Albin Larsson](https://byabbe.se/) who is building [GovDirectory](https://www.govdirectory.org/), a directory of public agencies based on Wikidata. * [Albin Larsson](https://byabbe.se/) who is building [GovDirectory](https://www.govdirectory.org/), a directory of public agencies based on Wikidata.

@ -246,6 +246,7 @@ store.dump("text/turtle", oxigraph.namedNode("http://example.com/graph"));
* RDF/JS datamodel functions (`namedNode`...) are now available at the root of the `oxigraph` package. You now need to call `oxigraph.namedNode` instead of `store.dataFactory.namedNode`. * RDF/JS datamodel functions (`namedNode`...) are now available at the root of the `oxigraph` package. You now need to call `oxigraph.namedNode` instead of `store.dataFactory.namedNode`.
* [RDF-star](https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html) is now implemented. `Quad` is now a valid value for the `Quad` `subject` and `object` properties.
## How to contribute ## How to contribute
The Oxigraph bindings are written in Rust using [the Rust WASM toolkit](https://rustwasm.github.io/docs.html). The Oxigraph bindings are written in Rust using [the Rust WASM toolkit](https://rustwasm.github.io/docs.html).

@ -10,7 +10,7 @@
"scripts": { "scripts": {
"fmt": "biome format . --write && biome check . --apply-unsafe && biome format . --write", "fmt": "biome format . --write && biome check . --apply-unsafe && biome format . --write",
"test": "biome ci . && wasm-pack build --debug --target nodejs && mocha", "test": "biome ci . && wasm-pack build --debug --target nodejs && mocha",
"build": "rm -rf pkg && wasm-pack build --release --target web --out-name web && mv pkg pkg-web && wasm-pack build --release --target nodejs --out-name node && mv pkg pkg-node && node build_package.js && rm -r pkg-web && rm -r pkg-node", "build": "rm -rf pkg && wasm-pack build --release --target web --out-name web --out-dir pkg-web && wasm-pack build --release --target nodejs --out-name node --out-dir pkg-node && node build_package.js && rm -r pkg-web && rm -r pkg-node",
"release": "npm run build && npm publish ./pkg", "release": "npm run build && npm publish ./pkg",
"pack": "npm run build && npm pack ./pkg" "pack": "npm run build && npm pack ./pkg"
}, },

@ -909,7 +909,7 @@ parser! {
} }
rule DescribeQuery_item() -> NamedNodePattern = i:VarOrIri() _ { i } rule DescribeQuery_item() -> NamedNodePattern = i:VarOrIri() _ { i }
rule AskQuery() -> Query = i("ASK") _ d:DatasetClauses() w:WhereClause() _ g:GroupClause()? _ h:HavingClause()? _ o:OrderClause()? _ l:LimitOffsetClauses()? _ v:ValuesClause() {? rule AskQuery() -> Query = i("ASK") _ d:DatasetClauses() _ w:WhereClause() _ g:GroupClause()? _ h:HavingClause()? _ o:OrderClause()? _ l:LimitOffsetClauses()? _ v:ValuesClause() {?
Ok(Query::Ask { Ok(Query::Ask {
dataset: d, dataset: d,
pattern: build_select(Selection::no_op(), w, g, h, o, l, v, state)?, pattern: build_select(Selection::no_op(), w, g, h, o, l, v, state)?,

@ -2887,7 +2887,13 @@ fn to_bool(term: &EncodedTerm) -> Option<bool> {
fn to_string_id(dataset: &DatasetView, term: &EncodedTerm) -> Option<SmallStringOrId> { fn to_string_id(dataset: &DatasetView, term: &EncodedTerm) -> Option<SmallStringOrId> {
match term { match term {
EncodedTerm::NamedNode { iri_id } => Some((*iri_id).into()), EncodedTerm::NamedNode { iri_id } => Some(
if let Ok(value) = SmallString::try_from(dataset.get_str(iri_id).ok()??.as_str()) {
value.into()
} else {
SmallStringOrId::Big(*iri_id)
},
),
EncodedTerm::DefaultGraph EncodedTerm::DefaultGraph
| EncodedTerm::NumericalBlankNode { .. } | EncodedTerm::NumericalBlankNode { .. }
| EncodedTerm::SmallBlankNode { .. } | EncodedTerm::SmallBlankNode { .. }

@ -803,6 +803,9 @@ impl Db {
&self, &self,
ssts_for_cf: &[(&ColumnFamily, PathBuf)], ssts_for_cf: &[(&ColumnFamily, PathBuf)],
) -> Result<(), StorageError> { ) -> Result<(), StorageError> {
if ssts_for_cf.is_empty() {
return Ok(()); // Rocksdb does not support empty lists
}
if let DbKind::ReadWrite(db) = &self.inner { if let DbKind::ReadWrite(db) = &self.inner {
let mut paths_by_cf = HashMap::<_, Vec<_>>::new(); let mut paths_by_cf = HashMap::<_, Vec<_>>::new();
for (cf, path) in ssts_for_cf { for (cf, path) in ssts_for_cf {

@ -14,6 +14,8 @@ use std::error::Error;
use std::fs::{create_dir_all, remove_dir_all, File}; use std::fs::{create_dir_all, remove_dir_all, File};
#[cfg(not(target_family = "wasm"))] #[cfg(not(target_family = "wasm"))]
use std::io::Write; use std::io::Write;
#[cfg(not(target_family = "wasm"))]
use std::iter::empty;
#[cfg(target_os = "linux")] #[cfg(target_os = "linux")]
use std::iter::once; use std::iter::once;
#[cfg(not(target_family = "wasm"))] #[cfg(not(target_family = "wasm"))]
@ -151,6 +153,16 @@ fn test_bulk_load_graph_lenient() -> Result<(), Box<dyn Error>> {
Ok(()) Ok(())
} }
#[test]
#[cfg(not(target_family = "wasm"))]
fn test_bulk_load_empty() -> Result<(), Box<dyn Error>> {
let store = Store::new()?;
store.bulk_loader().load_quads(empty::<Quad>())?;
assert!(store.is_empty()?);
store.validate()?;
Ok(())
}
#[test] #[test]
fn test_load_dataset() -> Result<(), Box<dyn Error>> { fn test_load_dataset() -> Result<(), Box<dyn Error>> {
let store = Store::new()?; let store = Store::new()?;

@ -1 +1 @@
Subproject commit 8a494fc2f48c23b948c22ebd23ec92e569dc4802 Subproject commit d9f9b9a759821252261151c3ca371947734da720

@ -0,0 +1 @@
ASK FROM <scheme://a.graph> WHERE { }

@ -16,6 +16,7 @@
:long_unicode_escape_with_multibytes_char_update :long_unicode_escape_with_multibytes_char_update
:describe :describe
:describe_where :describe_where
:ask_with_from
:group_concat_with_null :group_concat_with_null
:single_not_exists :single_not_exists
:property_list_path :property_list_path
@ -31,6 +32,7 @@
:one_or_more_shared :one_or_more_shared
:one_or_more_star :one_or_more_star
:in_empty_error :in_empty_error
:small_iri_str
) . ) .
:small_unicode_escape_with_multibytes_char rdf:type mf:NegativeSyntaxTest ; :small_unicode_escape_with_multibytes_char rdf:type mf:NegativeSyntaxTest ;
@ -63,6 +65,10 @@
qt:data <describe_input.ttl> ] ; qt:data <describe_input.ttl> ] ;
mf:result <describe_output.ttl> . mf:result <describe_output.ttl> .
:ask_with_from rdf:type mf:PositiveSyntaxTest ;
mf:name "ASK with FROM" ;
mf:action <ask_from.rq> .
:group_concat_with_null rdf:type mf:QueryEvaluationTest ; :group_concat_with_null rdf:type mf:QueryEvaluationTest ;
mf:name "GROUP_CONCAT with NULL" ; mf:name "GROUP_CONCAT with NULL" ;
rdfs:comment "GROUP_CONCAT should ignore null values" ; rdfs:comment "GROUP_CONCAT should ignore null values" ;
@ -146,3 +152,8 @@
mf:name "IN should propagate errors on the left side, even on the empty input" ; mf:name "IN should propagate errors on the left side, even on the empty input" ;
mf:action [ qt:query <in_empty_error.rq> ] ; mf:action [ qt:query <in_empty_error.rq> ] ;
mf:result <in_empty_error.srx> . mf:result <in_empty_error.srx> .
:small_iri_str rdf:type mf:QueryEvaluationTest ;
mf:name "Small IRI strings should be properly equal to their value" ;
mf:action [ qt:query <small_iri_str.rq> ] ;
mf:result <small_iri_str.srx> .

@ -0,0 +1,3 @@
ASK {
FILTER(STR(<ex:a>) = "ex:a")
}

@ -0,0 +1,6 @@
<?xml version="1.0"?>
<sparql xmlns="http://www.w3.org/2005/sparql-results#">
<head>
</head>
<boolean>true</boolean>
</sparql>
Loading…
Cancel
Save