Compare commits
4 Commits
main
...
nextgraph2
Author | SHA1 | Date |
---|---|---|
Niko PLP | 86a8100121 | 9 months ago |
Niko PLP | e963387b02 | 9 months ago |
Niko PLP | 7b8901718e | 9 months ago |
Niko PLP | b3ec66e21b | 9 months ago |
@ -1,14 +1,14 @@ |
||||
//! A storage backend
|
||||
//! RocksDB is available, if not in memory
|
||||
|
||||
#[cfg(target_family = "wasm")] |
||||
pub use fallback::{ColumnFamily, ColumnFamilyDefinition, Db, Iter, Reader, Transaction}; |
||||
#[cfg(not(target_family = "wasm"))] |
||||
pub use rocksdb::{ |
||||
pub use self::oxi_rocksdb::{ |
||||
ColumnFamily, ColumnFamilyDefinition, Db, Iter, Reader, SstFileWriter, Transaction, |
||||
}; |
||||
#[cfg(target_family = "wasm")] |
||||
pub use fallback::{ColumnFamily, ColumnFamilyDefinition, Db, Iter, Reader, Transaction}; |
||||
|
||||
#[cfg(target_family = "wasm")] |
||||
mod fallback; |
||||
#[cfg(not(target_family = "wasm"))] |
||||
mod rocksdb; |
||||
mod oxi_rocksdb; |
||||
|
@ -1 +0,0 @@ |
||||
Subproject commit d9f9b9a759821252261151c3ca371947734da720 |
@ -1 +0,0 @@ |
||||
../Cargo.lock |
@ -1,23 +0,0 @@ |
||||
[package] |
||||
name = "pyoxigraph" |
||||
version = "0.3.22" |
||||
authors = ["Tpt"] |
||||
license = "MIT OR Apache-2.0" |
||||
readme = "README.md" |
||||
keywords = ["RDF", "SPARQL", "graph-database", "database"] |
||||
repository = "https://github.com/oxigraph/oxigraph/tree/main/python" |
||||
homepage = "https://pyoxigraph.readthedocs.io/" |
||||
description = "Python bindings of Oxigraph, a SPARQL database and RDF toolkit" |
||||
edition = "2021" |
||||
|
||||
[lib] |
||||
crate-type = ["cdylib"] |
||||
name = "pyoxigraph" |
||||
doctest = false |
||||
|
||||
[features] |
||||
abi3 = ["pyo3/abi3-py37"] |
||||
|
||||
[dependencies] |
||||
oxigraph = { version = "0.3.22", path="../lib", features = ["http_client"] } |
||||
pyo3 = { version = "0.19", features = ["extension-module"] } |
@ -1,80 +0,0 @@ |
||||
# Pyoxigraph (Oxigraph for Python) |
||||
|
||||
[![PyPI](https://img.shields.io/pypi/v/pyoxigraph)](https://pypi.org/project/pyoxigraph/) |
||||
[![Conda](https://img.shields.io/conda/vn/conda-forge/pyoxigraph)](https://anaconda.org/conda-forge/pyoxigraph) |
||||
![PyPI - Implementation](https://img.shields.io/pypi/implementation/pyoxigraph) |
||||
![PyPI - Python Version](https://img.shields.io/pypi/pyversions/pyoxigraph) |
||||
[![actions status](https://github.com/oxigraph/oxigraph/workflows/build/badge.svg)](https://github.com/oxigraph/oxigraph/actions) |
||||
[![Gitter](https://badges.gitter.im/oxigraph/community.svg)](https://gitter.im/oxigraph/community?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge) |
||||
|
||||
Pyoxigraph is a graph database library implementing the [SPARQL](https://www.w3.org/TR/sparql11-overview/) standard. |
||||
It is a Python library written on top of [Oxigraph](https://crates.io/crates/oxigraph). |
||||
|
||||
Pyoxigraph offers two stores with [SPARQL 1.1](https://www.w3.org/TR/sparql11-overview/) capabilities. |
||||
One of the store is in-memory, and the other one is disk based. |
||||
|
||||
It also provides a set of utility functions for reading, writing and processing RDF files in |
||||
[Turtle](https://www.w3.org/TR/turtle/), |
||||
[TriG](https://www.w3.org/TR/trig/), |
||||
[N-Triples](https://www.w3.org/TR/n-triples/), |
||||
[N-Quads](https://www.w3.org/TR/n-quads/) and |
||||
[RDF/XML](https://www.w3.org/TR/rdf-syntax-grammar/). |
||||
|
||||
Pyoxigraph is distributed [on Pypi](https://pypi.org/project/pyoxigraph/) and [on conda-forge](https://anaconda.org/conda-forge/pyoxigraph). |
||||
Run `pip install pyoxigraph` to install it. |
||||
|
||||
There exists also a small library providing [rdflib](https://rdflib.readthedocs.io) stores using pyoxigraph: [oxrdflib](https://github.com/oxigraph/oxrdflib). |
||||
|
||||
Pyoxigraph documentation is [available on the Oxigraph website](https://pyoxigraph.readthedocs.io/). |
||||
|
||||
## Build the development version |
||||
|
||||
To build and install the development version of pyoxigraph you need to clone this git repository including submodules (`git clone --recursive https://github.com/oxigraph/oxigraph.git`) |
||||
and to run `pip install .` in the `python` directory (the one this README is in). |
||||
|
||||
## Help |
||||
|
||||
Feel free to use [GitHub discussions](https://github.com/oxigraph/oxigraph/discussions) or [the Gitter chat](https://gitter.im/oxigraph/community) to ask questions or talk about Oxigraph. |
||||
[Bug reports](https://github.com/oxigraph/oxigraph/issues) are also very welcome. |
||||
|
||||
If you need advanced support or are willing to pay to get some extra features, feel free to reach out to [Tpt](https://github.com/Tpt). |
||||
|
||||
## How to contribute |
||||
|
||||
Pyoxigraph is written in Rust using [PyO3](https://github.com/PyO3/pyo3). |
||||
|
||||
Pyoxigraph is built using [Maturin](https://github.com/PyO3/maturin). |
||||
Maturin could be installed using the `pip install 'maturin>=0.9,<0.10'`. |
||||
To install a development version of Oxigraph just run `maturin develop` in this README directory. |
||||
|
||||
### Tests |
||||
|
||||
The Python bindings tests are written in Python. |
||||
To run them use `python -m unittest` in the `tests` directory. |
||||
|
||||
### Docs |
||||
|
||||
The Sphinx documentation can be generated and viewed in the browser using the following command: |
||||
|
||||
``` |
||||
sphinx-autobuild docs docs/_build/html |
||||
``` |
||||
|
||||
Note that you will need to have [sphinx-autobuild](https://pypi.org/project/sphinx-autobuild/) installed. |
||||
|
||||
Alternatively, you can use `sphinx-build` with Python's `http.server` to achieve the same thing. |
||||
|
||||
## License |
||||
|
||||
This project is licensed under either of |
||||
|
||||
- Apache License, Version 2.0, ([LICENSE-APACHE](../LICENSE-APACHE) or |
||||
http://www.apache.org/licenses/LICENSE-2.0) |
||||
- MIT license ([LICENSE-MIT](../LICENSE-MIT) or |
||||
http://opensource.org/licenses/MIT) |
||||
|
||||
at your option. |
||||
|
||||
### Contribution |
||||
|
||||
Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in Oxigraph by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions. |
@ -1,38 +0,0 @@ |
||||
import datetime |
||||
import sys |
||||
from pathlib import Path |
||||
|
||||
import pyoxigraph |
||||
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent.absolute())) |
||||
|
||||
# -- Project information ----------------------------------------------------- |
||||
|
||||
project = "pyoxigraph" |
||||
copyright = f"{datetime.date.today().year}, Oxigraph contributors" |
||||
author = pyoxigraph.__author__ |
||||
version = pyoxigraph.__version__ |
||||
release = pyoxigraph.__version__ |
||||
|
||||
# -- General configuration --------------------------------------------------- |
||||
|
||||
extensions = ["sphinx.ext.autodoc", "sphinx.ext.doctest", "sphinx.ext.intersphinx"] |
||||
|
||||
exclude_patterns = ["build", "Thumbs.db", ".DS_Store"] |
||||
|
||||
# -- Options for HTML output ------------------------------------------------- |
||||
|
||||
html_theme = "furo" |
||||
html_static_path = [] |
||||
html_logo = "../../logo.svg" |
||||
html_favicon = "../../logo.svg" |
||||
html_theme_options = {"body_max_width": None} |
||||
html_baseurl = "https://pyoxigraph.readthedocs.io/en/stable/" |
||||
|
||||
# -- Options for doctests ------------------------------------------------- |
||||
|
||||
doctest_global_setup = "from pyoxigraph import *\nimport io" |
||||
|
||||
# -- Options for intersphinx ------------------------------------------------- |
||||
|
||||
intersphinx_mapping = {"python": ("https://docs.python.org/3", None)} |
@ -1,79 +0,0 @@ |
||||
pyoxigraph |release| |
||||
==================== |
||||
|
||||
.. image:: https://img.shields.io/pypi/v/pyoxigraph |
||||
:alt: PyPI |
||||
:target: https://pypi.org/project/pyoxigraph/ |
||||
.. image:: https://img.shields.io/conda/vn/conda-forge/pyoxigraph |
||||
:alt: conda-forge |
||||
:target: https://anaconda.org/conda-forge/pyoxigraph |
||||
.. image:: https://img.shields.io/pypi/implementation/pyoxigraph |
||||
:alt: PyPI - Implementation |
||||
.. image:: https://img.shields.io/pypi/pyversions/pyoxigraph |
||||
:alt: PyPI - Python Version |
||||
.. image:: https://img.shields.io/pypi/l/pyoxigraph |
||||
:alt: PyPI - License |
||||
|
||||
|
||||
Pyoxigraph is a Python graph database library implementing the `SPARQL <https://www.w3.org/TR/sparql11-overview/>`_ standard. |
||||
|
||||
It is built on top of `Oxigraph <https://crates.io/crates/oxigraph>`_ using `PyO3 <https://pyo3.rs/>`_. |
||||
|
||||
It also provides a set of utility functions for reading, writing, and processing RDF files in |
||||
`Turtle <https://www.w3.org/TR/turtle/>`_, |
||||
`TriG <https://www.w3.org/TR/trig/>`_, |
||||
`N-Triples <https://www.w3.org/TR/n-triples/>`_, |
||||
`N-Quads <https://www.w3.org/TR/n-quads/>`_ and |
||||
`RDF/XML <https://www.w3.org/TR/rdf-syntax-grammar/>`_. |
||||
|
||||
Pyoxigraph is distributed `on Pypi <https://pypi.org/project/pyoxigraph/>`_ and `on conda-forge <https://anaconda.org/conda-forge/pyoxigraph>`_. |
||||
|
||||
There is also a small library providing a `rdflib <https://rdflib.readthedocs.io>`_ store using pyoxigraph: `oxrdflib <https://github.com/oxigraph/oxrdflib>`_. |
||||
|
||||
Oxigraph and pyoxigraph source code are on `GitHub <https://github.com/oxigraph/oxigraph/tree/main/python>`_. |
||||
|
||||
|
||||
Installation |
||||
"""""""""""" |
||||
|
||||
Pyoxigraph is distributed on `Pypi <https://pypi.org/project/pyoxigraph/>`_. |
||||
|
||||
To install it, run the usual ``pip install pyoxigraph`` |
||||
|
||||
|
||||
Example |
||||
""""""" |
||||
|
||||
Insert the triple ``<http://example/> <http://schema.org/name> "example"`` and print the name of ``<http://example/>`` in SPARQL: |
||||
|
||||
:: |
||||
|
||||
from pyoxigraph import * |
||||
|
||||
store = Store() |
||||
ex = NamedNode('http://example/') |
||||
schema_name = NamedNode('http://schema.org/name') |
||||
store.add(Quad(ex, schema_name, Literal('example'))) |
||||
for binding in store.query('SELECT ?name WHERE { <http://example/> <http://schema.org/name> ?name }'): |
||||
print(binding['name'].value) |
||||
|
||||
|
||||
Table of contents |
||||
""""""""""""""""" |
||||
|
||||
.. toctree:: |
||||
|
||||
model |
||||
io |
||||
store |
||||
sparql |
||||
migration |
||||
|
||||
|
||||
Help |
||||
"""" |
||||
|
||||
Feel free to use `GitHub discussions <https://github.com/oxigraph/oxigraph/discussions>`_ or `the Gitter chat <https://gitter.im/oxigraph/community>`_ to ask questions or talk about Oxigraph. |
||||
`Bug reports <https://github.com/oxigraph/oxigraph/issues>`_ are also very welcome. |
||||
|
||||
If you need advanced support or are willing to pay to get some extra features, feel free to reach out to `Tpt <https://github.com/Tpt>`_. |
@ -1,14 +0,0 @@ |
||||
RDF Parsing and Serialization |
||||
============================= |
||||
|
||||
Oxigraph provides functions to parse and serialize RDF files: |
||||
|
||||
|
||||
Parsing |
||||
""""""" |
||||
.. autofunction:: pyoxigraph.parse |
||||
|
||||
|
||||
Serialization |
||||
""""""""""""" |
||||
.. autofunction:: pyoxigraph.serialize |
@ -1,34 +0,0 @@ |
||||
Migration Guide |
||||
=============== |
||||
|
||||
From 0.2 to 0.3 |
||||
""""""""""""""" |
||||
|
||||
* Python 3.6 and ``manylinux2010`` (`PEP 571 <https://www.python.org/dev/peps/pep-0571/>`_) support have been removed. The new minimal versions are Python 3.7 and ``manylinux2014`` (`PEP 599 <https://www.python.org/dev/peps/pep-0599/>`_). |
||||
* The on-disk storage system has been rebuilt on top of `RocksDB <http://rocksdb.org/>`_. |
||||
It is now implemented by the :py:class:`.Store` class that keeps the same API as the late :py:class:`.SledStore` class. |
||||
|
||||
To migrate you have to dump the store content using pyoxigraph **0.2** and the following code: |
||||
|
||||
.. code-block:: python |
||||
|
||||
from pyoxigraph import SledStore |
||||
store = SledStore('MY_STORAGE_PATH') |
||||
with open('temp_file.nq', 'wb') as fp: |
||||
store.dump(fp, "application/n-quads") |
||||
|
||||
And then upgrade to pyoxigraph **0.3** and run: |
||||
|
||||
.. code-block:: python |
||||
|
||||
from pyoxigraph import Store |
||||
store = Store('MY_NEW_STORAGE_PATH') |
||||
with open('temp_file.nq', 'rb') as fp: |
||||
store.bulk_load(fp, "application/n-quads") |
||||
|
||||
* The in-memory storage class :py:class:`.MemoryStore` has been merged into the :py:class:`.Store` class that provides the exact same API as the late :py:class:`.MemoryStore`. |
||||
On platforms other than Linux, a temporary directory is created when opening the :py:class:`.Store` and automatically removed when it is garbage collected. No data is written in this directory. |
||||
* :py:class:`.Store` operations are now transactional using the "repeatable read" isolation level: |
||||
the store only exposes changes that have been "committed" (i.e. no partial writes) |
||||
and the exposed state does not change for the complete duration of a read operation (e.g. a SPARQL query) or a read/write operation (e.g. a SPARQL update). |
||||
* `RDF-star <https://w3c.github.io/rdf-star/cg-spec/2021-12-17.html>`_ is now supported (including serialization formats and SPARQL-star). :py:class:`.Triple` can now be used in :py:attr:`.Triple.object`, :py:attr:`.Triple.object`, :py:attr:`.Quad.subject` and :py:attr:`.Quad.object`. |
@ -1,37 +0,0 @@ |
||||
RDF Model |
||||
========= |
||||
|
||||
Oxigraph provides python classes to represents basic RDF concepts: |
||||
|
||||
|
||||
`IRIs <https://www.w3.org/TR/rdf11-concepts/#dfn-iri>`_ |
||||
""""""""""""""""""""""""""""""""""""""""""""""""""""""" |
||||
.. autoclass:: pyoxigraph.NamedNode |
||||
:members: |
||||
|
||||
|
||||
`Blank Nodes <https://www.w3.org/TR/rdf11-concepts/#dfn-blank-node>`_ |
||||
""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" |
||||
.. autoclass:: pyoxigraph.BlankNode |
||||
:members: |
||||
|
||||
|
||||
`Literals <https://www.w3.org/TR/rdf11-concepts/#dfn-literal>`_ |
||||
""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" |
||||
.. autoclass:: pyoxigraph.Literal |
||||
:members: |
||||
|
||||
|
||||
`Triples <https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-triple>`_ |
||||
""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" |
||||
.. autoclass:: pyoxigraph.Triple |
||||
:members: |
||||
|
||||
|
||||
Quads (`triples <https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-triple>`_ in a `RDF dataset <https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-dataset>`_) |
||||
""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""" |
||||
.. autoclass:: pyoxigraph.Quad |
||||
:members: |
||||
|
||||
.. autoclass:: pyoxigraph.DefaultGraph |
||||
:members: |
@ -1,23 +0,0 @@ |
||||
SPARQL utility objects |
||||
============================= |
||||
|
||||
Oxigraph provides also some utilities related to SPARQL queries: |
||||
|
||||
|
||||
Variable |
||||
"""""""" |
||||
.. autoclass:: pyoxigraph.Variable |
||||
:members: |
||||
|
||||
|
||||
``SELECT`` solutions |
||||
"""""""""""""""""""" |
||||
.. autoclass:: pyoxigraph.QuerySolutions |
||||
:members: |
||||
.. autoclass:: pyoxigraph.QuerySolution |
||||
:members: |
||||
|
||||
``CONSTRUCT`` results |
||||
""""""""""""""""""""" |
||||
.. autoclass:: pyoxigraph.QueryTriples |
||||
:members: |
@ -1,5 +0,0 @@ |
||||
RDF Store |
||||
========= |
||||
|
||||
.. autoclass:: pyoxigraph.Store |
||||
:members: |
@ -1,506 +0,0 @@ |
||||
import argparse |
||||
import ast |
||||
import importlib |
||||
import inspect |
||||
import logging |
||||
import re |
||||
import subprocess |
||||
from typing import Any, Dict, List, Mapping, Optional, Set, Tuple, Union |
||||
|
||||
|
||||
def _path_to_type(*elements: str) -> ast.AST: |
||||
base: ast.AST = ast.Name(id=elements[0], ctx=AST_LOAD) |
||||
for e in elements[1:]: |
||||
base = ast.Attribute(value=base, attr=e, ctx=AST_LOAD) |
||||
return base |
||||
|
||||
|
||||
AST_LOAD = ast.Load() |
||||
AST_ELLIPSIS = ast.Ellipsis() |
||||
AST_STORE = ast.Store() |
||||
AST_TYPING_ANY = _path_to_type("typing", "Any") |
||||
GENERICS = { |
||||
"iterable": _path_to_type("typing", "Iterable"), |
||||
"iterator": _path_to_type("typing", "Iterator"), |
||||
"list": _path_to_type("typing", "List"), |
||||
"io": _path_to_type("typing", "IO"), |
||||
} |
||||
OBJECT_MEMBERS = dict(inspect.getmembers(object)) |
||||
|
||||
|
||||
BUILTINS: Dict[str, Union[None, Tuple[List[ast.AST], ast.AST]]] = { |
||||
"__annotations__": None, |
||||
"__bool__": ([], _path_to_type("bool")), |
||||
"__bytes__": ([], _path_to_type("bytes")), |
||||
"__class__": None, |
||||
"__contains__": ([AST_TYPING_ANY], _path_to_type("bool")), |
||||
"__del__": None, |
||||
"__delattr__": ([_path_to_type("str")], _path_to_type("None")), |
||||
"__delitem__": ([AST_TYPING_ANY], AST_TYPING_ANY), |
||||
"__dict__": None, |
||||
"__dir__": None, |
||||
"__doc__": None, |
||||
"__eq__": ([AST_TYPING_ANY], _path_to_type("bool")), |
||||
"__format__": ([_path_to_type("str")], _path_to_type("str")), |
||||
"__ge__": ([AST_TYPING_ANY], _path_to_type("bool")), |
||||
"__getattribute__": ([_path_to_type("str")], AST_TYPING_ANY), |
||||
"__getitem__": ([AST_TYPING_ANY], AST_TYPING_ANY), |
||||
"__gt__": ([AST_TYPING_ANY], _path_to_type("bool")), |
||||
"__hash__": ([], _path_to_type("int")), |
||||
"__init__": ([], _path_to_type("None")), |
||||
"__init_subclass__": None, |
||||
"__iter__": ([], AST_TYPING_ANY), |
||||
"__le__": ([AST_TYPING_ANY], _path_to_type("bool")), |
||||
"__len__": ([], _path_to_type("int")), |
||||
"__lt__": ([AST_TYPING_ANY], _path_to_type("bool")), |
||||
"__module__": None, |
||||
"__ne__": ([AST_TYPING_ANY], _path_to_type("bool")), |
||||
"__new__": None, |
||||
"__next__": ([], AST_TYPING_ANY), |
||||
"__reduce__": None, |
||||
"__reduce_ex__": None, |
||||
"__repr__": ([], _path_to_type("str")), |
||||
"__setattr__": ([_path_to_type("str"), AST_TYPING_ANY], _path_to_type("None")), |
||||
"__setitem__": ([AST_TYPING_ANY, AST_TYPING_ANY], AST_TYPING_ANY), |
||||
"__sizeof__": None, |
||||
"__str__": ([], _path_to_type("str")), |
||||
"__subclasshook__": None, |
||||
} |
||||
|
||||
|
||||
def module_stubs(module: Any) -> ast.Module: |
||||
types_to_import = {"typing"} |
||||
classes = [] |
||||
functions = [] |
||||
for member_name, member_value in inspect.getmembers(module): |
||||
element_path = [module.__name__, member_name] |
||||
if member_name.startswith("__"): |
||||
pass |
||||
elif inspect.isclass(member_value): |
||||
classes.append( |
||||
class_stubs(member_name, member_value, element_path, types_to_import) |
||||
) |
||||
elif inspect.isbuiltin(member_value): |
||||
functions.append( |
||||
function_stub( |
||||
member_name, |
||||
member_value, |
||||
element_path, |
||||
types_to_import, |
||||
in_class=False, |
||||
) |
||||
) |
||||
else: |
||||
logging.warning(f"Unsupported root construction {member_name}") |
||||
return ast.Module( |
||||
body=[ast.Import(names=[ast.alias(name=t)]) for t in sorted(types_to_import)] |
||||
+ classes |
||||
+ functions, |
||||
type_ignores=[], |
||||
) |
||||
|
||||
|
||||
def class_stubs( |
||||
cls_name: str, cls_def: Any, element_path: List[str], types_to_import: Set[str] |
||||
) -> ast.ClassDef: |
||||
attributes: List[ast.AST] = [] |
||||
methods: List[ast.AST] = [] |
||||
magic_methods: List[ast.AST] = [] |
||||
constants: List[ast.AST] = [] |
||||
for member_name, member_value in inspect.getmembers(cls_def): |
||||
current_element_path = [*element_path, member_name] |
||||
if member_name == "__init__": |
||||
try: |
||||
inspect.signature(cls_def) # we check it actually exists |
||||
methods = [ |
||||
function_stub( |
||||
member_name, |
||||
cls_def, |
||||
current_element_path, |
||||
types_to_import, |
||||
in_class=True, |
||||
), |
||||
*methods, |
||||
] |
||||
except ValueError as e: |
||||
if "no signature found" not in str(e): |
||||
raise ValueError( |
||||
f"Error while parsing signature of {cls_name}.__init_" |
||||
) from e |
||||
elif ( |
||||
member_value == OBJECT_MEMBERS.get(member_name) |
||||
or BUILTINS.get(member_name, ()) is None |
||||
): |
||||
pass |
||||
elif inspect.isdatadescriptor(member_value): |
||||
attributes.extend( |
||||
data_descriptor_stub( |
||||
member_name, member_value, current_element_path, types_to_import |
||||
) |
||||
) |
||||
elif inspect.isroutine(member_value): |
||||
(magic_methods if member_name.startswith("__") else methods).append( |
||||
function_stub( |
||||
member_name, |
||||
member_value, |
||||
current_element_path, |
||||
types_to_import, |
||||
in_class=True, |
||||
) |
||||
) |
||||
elif member_name == "__match_args__": |
||||
constants.append( |
||||
ast.AnnAssign( |
||||
target=ast.Name(id=member_name, ctx=AST_STORE), |
||||
annotation=ast.Subscript( |
||||
value=_path_to_type("typing", "Tuple"), |
||||
slice=ast.Tuple( |
||||
elts=[_path_to_type("str"), ast.Ellipsis()], ctx=AST_LOAD |
||||
), |
||||
ctx=AST_LOAD, |
||||
), |
||||
value=ast.Constant(member_value), |
||||
simple=1, |
||||
) |
||||
) |
||||
else: |
||||
logging.warning( |
||||
f"Unsupported member {member_name} of class {'.'.join(element_path)}" |
||||
) |
||||
|
||||
doc = inspect.getdoc(cls_def) |
||||
doc_comment = build_doc_comment(doc) if doc else None |
||||
return ast.ClassDef( |
||||
cls_name, |
||||
bases=[], |
||||
keywords=[], |
||||
body=( |
||||
([doc_comment] if doc_comment else []) |
||||
+ attributes |
||||
+ methods |
||||
+ magic_methods |
||||
+ constants |
||||
) |
||||
or [AST_ELLIPSIS], |
||||
decorator_list=[_path_to_type("typing", "final")], |
||||
) |
||||
|
||||
|
||||
def data_descriptor_stub( |
||||
data_desc_name: str, |
||||
data_desc_def: Any, |
||||
element_path: List[str], |
||||
types_to_import: Set[str], |
||||
) -> Union[Tuple[ast.AnnAssign, ast.Expr], Tuple[ast.AnnAssign]]: |
||||
annotation = None |
||||
doc_comment = None |
||||
|
||||
doc = inspect.getdoc(data_desc_def) |
||||
if doc is not None: |
||||
annotation = returns_stub(data_desc_name, doc, element_path, types_to_import) |
||||
m = re.findall(r"^ *:return: *(.*) *$", doc, re.MULTILINE) |
||||
if len(m) == 1: |
||||
doc_comment = m[0] |
||||
elif len(m) > 1: |
||||
raise ValueError( |
||||
f"Multiple return annotations found with :return: in {'.'.join(element_path)} documentation" |
||||
) |
||||
|
||||
assign = ast.AnnAssign( |
||||
target=ast.Name(id=data_desc_name, ctx=AST_STORE), |
||||
annotation=annotation or AST_TYPING_ANY, |
||||
simple=1, |
||||
) |
||||
doc_comment = build_doc_comment(doc_comment) if doc_comment else None |
||||
return (assign, doc_comment) if doc_comment else (assign,) |
||||
|
||||
|
||||
def function_stub( |
||||
fn_name: str, |
||||
fn_def: Any, |
||||
element_path: List[str], |
||||
types_to_import: Set[str], |
||||
*, |
||||
in_class: bool, |
||||
) -> ast.FunctionDef: |
||||
body: List[ast.AST] = [] |
||||
doc = inspect.getdoc(fn_def) |
||||
if doc is not None: |
||||
doc_comment = build_doc_comment(doc) |
||||
if doc_comment is not None: |
||||
body.append(doc_comment) |
||||
|
||||
decorator_list = [] |
||||
if in_class and hasattr(fn_def, "__self__"): |
||||
decorator_list.append(ast.Name("staticmethod")) |
||||
|
||||
return ast.FunctionDef( |
||||
fn_name, |
||||
arguments_stub(fn_name, fn_def, doc or "", element_path, types_to_import), |
||||
body or [AST_ELLIPSIS], |
||||
decorator_list=decorator_list, |
||||
returns=returns_stub(fn_name, doc, element_path, types_to_import) |
||||
if doc |
||||
else None, |
||||
lineno=0, |
||||
) |
||||
|
||||
|
||||
def arguments_stub( |
||||
callable_name: str, |
||||
callable_def: Any, |
||||
doc: str, |
||||
element_path: List[str], |
||||
types_to_import: Set[str], |
||||
) -> ast.arguments: |
||||
real_parameters: Mapping[str, inspect.Parameter] = inspect.signature( |
||||
callable_def |
||||
).parameters |
||||
if callable_name == "__init__": |
||||
real_parameters = { |
||||
"self": inspect.Parameter("self", inspect.Parameter.POSITIONAL_ONLY), |
||||
**real_parameters, |
||||
} |
||||
|
||||
parsed_param_types = {} |
||||
optional_params = set() |
||||
|
||||
# Types for magic functions types |
||||
builtin = BUILTINS.get(callable_name) |
||||
if isinstance(builtin, tuple): |
||||
param_names = list(real_parameters.keys()) |
||||
if param_names and param_names[0] == "self": |
||||
del param_names[0] |
||||
for name, t in zip(param_names, builtin[0]): |
||||
parsed_param_types[name] = t |
||||
|
||||
# Types from comment |
||||
for match in re.findall(r"^ *:type *([a-z_]+): ([^\n]*) *$", doc, re.MULTILINE): |
||||
if match[0] not in real_parameters: |
||||
raise ValueError( |
||||
f"The parameter {match[0]} of {'.'.join(element_path)} " |
||||
"is defined in the documentation but not in the function signature" |
||||
) |
||||
type = match[1] |
||||
if type.endswith(", optional"): |
||||
optional_params.add(match[0]) |
||||
type = type[:-10] |
||||
parsed_param_types[match[0]] = convert_type_from_doc( |
||||
type, element_path, types_to_import |
||||
) |
||||
|
||||
# we parse the parameters |
||||
posonlyargs = [] |
||||
args = [] |
||||
vararg = None |
||||
kwonlyargs = [] |
||||
kw_defaults = [] |
||||
kwarg = None |
||||
defaults = [] |
||||
for param in real_parameters.values(): |
||||
if param.name != "self" and param.name not in parsed_param_types: |
||||
raise ValueError( |
||||
f"The parameter {param.name} of {'.'.join(element_path)} " |
||||
"has no type definition in the function documentation" |
||||
) |
||||
param_ast = ast.arg( |
||||
arg=param.name, annotation=parsed_param_types.get(param.name) |
||||
) |
||||
|
||||
default_ast = None |
||||
if param.default != param.empty: |
||||
default_ast = ast.Constant(param.default) |
||||
if param.name not in optional_params: |
||||
raise ValueError( |
||||
f"Parameter {param.name} of {'.'.join(element_path)} " |
||||
"is optional according to the type but not flagged as such in the doc" |
||||
) |
||||
elif param.name in optional_params: |
||||
raise ValueError( |
||||
f"Parameter {param.name} of {'.'.join(element_path)} " |
||||
"is optional according to the documentation but has no default value" |
||||
) |
||||
|
||||
if param.kind == param.POSITIONAL_ONLY: |
||||
posonlyargs.append(param_ast) |
||||
defaults.append(default_ast) |
||||
elif param.kind == param.POSITIONAL_OR_KEYWORD: |
||||
args.append(param_ast) |
||||
defaults.append(default_ast) |
||||
elif param.kind == param.VAR_POSITIONAL: |
||||
vararg = param_ast |
||||
elif param.kind == param.KEYWORD_ONLY: |
||||
kwonlyargs.append(param_ast) |
||||
kw_defaults.append(default_ast) |
||||
elif param.kind == param.VAR_KEYWORD: |
||||
kwarg = param_ast |
||||
|
||||
return ast.arguments( |
||||
posonlyargs=posonlyargs, |
||||
args=args, |
||||
vararg=vararg, |
||||
kwonlyargs=kwonlyargs, |
||||
kw_defaults=kw_defaults, |
||||
defaults=defaults, |
||||
kwarg=kwarg, |
||||
) |
||||
|
||||
|
||||
def returns_stub( |
||||
callable_name: str, doc: str, element_path: List[str], types_to_import: Set[str] |
||||
) -> Optional[ast.AST]: |
||||
m = re.findall(r"^ *:rtype: *([^\n]*) *$", doc, re.MULTILINE) |
||||
if len(m) == 0: |
||||
builtin = BUILTINS.get(callable_name) |
||||
if isinstance(builtin, tuple) and builtin[1] is not None: |
||||
return builtin[1] |
||||
raise ValueError( |
||||
f"The return type of {'.'.join(element_path)} " |
||||
"has no type definition using :rtype: in the function documentation" |
||||
) |
||||
if len(m) > 1: |
||||
raise ValueError( |
||||
f"Multiple return type annotations found with :rtype: for {'.'.join(element_path)}" |
||||
) |
||||
return convert_type_from_doc(m[0], element_path, types_to_import) |
||||
|
||||
|
||||
def convert_type_from_doc( |
||||
type_str: str, element_path: List[str], types_to_import: Set[str] |
||||
) -> ast.AST: |
||||
type_str = type_str.strip() |
||||
return parse_type_to_ast(type_str, element_path, types_to_import) |
||||
|
||||
|
||||
def parse_type_to_ast( |
||||
type_str: str, element_path: List[str], types_to_import: Set[str] |
||||
) -> ast.AST: |
||||
# let's tokenize |
||||
tokens = [] |
||||
current_token = "" |
||||
for c in type_str: |
||||
if "a" <= c <= "z" or "A" <= c <= "Z" or c == ".": |
||||
current_token += c |
||||
else: |
||||
if current_token: |
||||
tokens.append(current_token) |
||||
current_token = "" |
||||
if c != " ": |
||||
tokens.append(c) |
||||
if current_token: |
||||
tokens.append(current_token) |
||||
|
||||
# let's first parse nested parenthesis |
||||
stack: List[List[Any]] = [[]] |
||||
for token in tokens: |
||||
if token == "(": |
||||
children: List[str] = [] |
||||
stack[-1].append(children) |
||||
stack.append(children) |
||||
elif token == ")": |
||||
stack.pop() |
||||
else: |
||||
stack[-1].append(token) |
||||
|
||||
# then it's easy |
||||
def parse_sequence(sequence: List[Any]) -> ast.AST: |
||||
# we split based on "or" |
||||
or_groups: List[List[str]] = [[]] |
||||
for e in sequence: |
||||
if e == "or": |
||||
or_groups.append([]) |
||||
else: |
||||
or_groups[-1].append(e) |
||||
if any(not g for g in or_groups): |
||||
raise ValueError( |
||||
f"Not able to parse type '{type_str}' used by {'.'.join(element_path)}" |
||||
) |
||||
|
||||
new_elements: List[ast.AST] = [] |
||||
for group in or_groups: |
||||
if len(group) == 1 and isinstance(group[0], str): |
||||
parts = group[0].split(".") |
||||
if any(not p for p in parts): |
||||
raise ValueError( |
||||
f"Not able to parse type '{type_str}' used by {'.'.join(element_path)}" |
||||
) |
||||
if len(parts) > 1: |
||||
types_to_import.add(parts[0]) |
||||
new_elements.append(_path_to_type(*parts)) |
||||
elif ( |
||||
len(group) == 2 |
||||
and isinstance(group[0], str) |
||||
and isinstance(group[1], list) |
||||
): |
||||
if group[0] not in GENERICS: |
||||
raise ValueError( |
||||
f"Constructor {group[0]} is not supported in type '{type_str}' used by {'.'.join(element_path)}" |
||||
) |
||||
new_elements.append( |
||||
ast.Subscript( |
||||
value=GENERICS[group[0]], |
||||
slice=parse_sequence(group[1]), |
||||
ctx=AST_LOAD, |
||||
) |
||||
) |
||||
else: |
||||
raise ValueError( |
||||
f"Not able to parse type '{type_str}' used by {'.'.join(element_path)}" |
||||
) |
||||
return ( |
||||
ast.Subscript( |
||||
value=_path_to_type("typing", "Union"), |
||||
slice=ast.Tuple(elts=new_elements, ctx=AST_LOAD), |
||||
ctx=AST_LOAD, |
||||
) |
||||
if len(new_elements) > 1 |
||||
else new_elements[0] |
||||
) |
||||
|
||||
return parse_sequence(stack[0]) |
||||
|
||||
|
||||
def build_doc_comment(doc: str) -> Optional[ast.Expr]: |
||||
lines = [line.strip() for line in doc.split("\n")] |
||||
clean_lines = [] |
||||
for line in lines: |
||||
if line.startswith((":type", ":rtype")): |
||||
continue |
||||
clean_lines.append(line) |
||||
text = "\n".join(clean_lines).strip() |
||||
return ast.Expr(value=ast.Constant(text)) if text else None |
||||
|
||||
|
||||
def format_with_black(code: str) -> str: |
||||
result = subprocess.run( |
||||
["python", "-m", "black", "-t", "py37", "--pyi", "-"], |
||||
input=code.encode(), |
||||
capture_output=True, |
||||
) |
||||
result.check_returncode() |
||||
return result.stdout.decode() |
||||
|
||||
|
||||
if __name__ == "__main__": |
||||
parser = argparse.ArgumentParser( |
||||
description="Extract Python type stub from a python module." |
||||
) |
||||
parser.add_argument( |
||||
"module_name", help="Name of the Python module for which generate stubs" |
||||
) |
||||
parser.add_argument( |
||||
"out", |
||||
help="Name of the Python stub file to write to", |
||||
type=argparse.FileType("wt"), |
||||
) |
||||
parser.add_argument( |
||||
"--black", help="Formats the generated stubs using Black", action="store_true" |
||||
) |
||||
args = parser.parse_args() |
||||
stub_content = ast.unparse(module_stubs(importlib.import_module(args.module_name))) |
||||
stub_content = stub_content.replace( |
||||
", /", "" |
||||
) # TODO: remove when targeting Python 3.8+ |
||||
if args.black: |
||||
stub_content = format_with_black(stub_content) |
||||
args.out.write(stub_content) |
@ -1,2 +0,0 @@ |
||||
pyoxigraph.pyoxigraph |
||||
pyoxigraph.DefaultGraph.__init__ |
@ -1,56 +0,0 @@ |
||||
[build-system] |
||||
requires = ["maturin~=1.0"] |
||||
build-backend = "maturin" |
||||
|
||||
[project] |
||||
# Most of the metadata are in Cargo.toml and injected by maturin |
||||
name = "pyoxigraph" |
||||
classifiers = [ |
||||
"Development Status :: 3 - Alpha", |
||||
"Intended Audience :: Developers", |
||||
"License :: OSI Approved :: Apache Software License", |
||||
"License :: OSI Approved :: MIT License", |
||||
"Programming Language :: Python :: 3 :: Only", |
||||
"Programming Language :: Python :: 3.7", |
||||
"Programming Language :: Python :: 3.8", |
||||
"Programming Language :: Python :: 3.9", |
||||
"Programming Language :: Python :: 3.10", |
||||
"Programming Language :: Python :: 3.11", |
||||
"Programming Language :: Rust", |
||||
"Topic :: Database :: Database Engines/Servers", |
||||
"Topic :: Software Development :: Libraries :: Python Modules", |
||||
] |
||||
requires-python = ">=3.7" |
||||
|
||||
[project.urls] |
||||
Changelog = "https://github.com/oxigraph/oxigraph/blob/main/CHANGELOG.md" |
||||
Documentation = "https://pyoxigraph.readthedocs.io/" |
||||
Homepage = "https://pyoxigraph.readthedocs.io/" |
||||
Source = "https://github.com/oxigraph/oxigraph/tree/main/python" |
||||
Tracker = "https://github.com/oxigraph/oxigraph/issues" |
||||
|
||||
[tool.ruff] |
||||
line-length = 120 |
||||
select = [ |
||||
"ARG", |
||||
"B", |
||||
"C40", |
||||
"E", |
||||
"F", |
||||
"FBT", |
||||
"I", |
||||
"ICN", |
||||
"ISC", |
||||
"N", |
||||
"PIE", |
||||
"PTH", |
||||
"RET", |
||||
"RUF", |
||||
"SIM", |
||||
"T10", |
||||
"TCH", |
||||
"TID", |
||||
"UP", |
||||
"W", |
||||
"YTT" |
||||
] |
@ -1,6 +0,0 @@ |
||||
black~=23.1 |
||||
furo |
||||
maturin~=1.0 |
||||
mypy~=1.0 |
||||
ruff~=0.0.255 |
||||
sphinx~=5.3 |
@ -1,372 +0,0 @@ |
||||
#![allow(clippy::needless_option_as_deref)] |
||||
|
||||
use crate::model::{PyQuad, PyTriple}; |
||||
use oxigraph::io::read::{ParseError, QuadReader, TripleReader}; |
||||
use oxigraph::io::{ |
||||
DatasetFormat, DatasetParser, DatasetSerializer, GraphFormat, GraphParser, GraphSerializer, |
||||
}; |
||||
use pyo3::exceptions::{PyIOError, PySyntaxError, PyValueError}; |
||||
use pyo3::prelude::*; |
||||
use pyo3::types::PyBytes; |
||||
use pyo3::{intern, wrap_pyfunction}; |
||||
use std::cmp::max; |
||||
use std::error::Error; |
||||
use std::fs::File; |
||||
use std::io::{self, BufRead, BufReader, BufWriter, Cursor, Read, Write}; |
||||
use std::path::{Path, PathBuf}; |
||||
|
||||
pub fn add_to_module(module: &PyModule) -> PyResult<()> { |
||||
module.add_wrapped(wrap_pyfunction!(parse))?; |
||||
module.add_wrapped(wrap_pyfunction!(serialize)) |
||||
} |
||||
|
||||
/// Parses RDF graph and dataset serialization formats.
|
||||
///
|
||||
/// It currently supports the following formats:
|
||||
///
|
||||
/// * `N-Triples <https://www.w3.org/TR/n-triples/>`_ (``application/n-triples``)
|
||||
/// * `N-Quads <https://www.w3.org/TR/n-quads/>`_ (``application/n-quads``)
|
||||
/// * `Turtle <https://www.w3.org/TR/turtle/>`_ (``text/turtle``)
|
||||
/// * `TriG <https://www.w3.org/TR/trig/>`_ (``application/trig``)
|
||||
/// * `RDF/XML <https://www.w3.org/TR/rdf-syntax-grammar/>`_ (``application/rdf+xml``)
|
||||
///
|
||||
/// It supports also some MIME type aliases.
|
||||
/// For example, ``application/turtle`` could also be used for `Turtle <https://www.w3.org/TR/turtle/>`_
|
||||
/// and ``application/xml`` for `RDF/XML <https://www.w3.org/TR/rdf-syntax-grammar/>`_.
|
||||
///
|
||||
/// :param input: The binary I/O object or file path to read from. For example, it could be a file path as a string or a file reader opened in binary mode with ``open('my_file.ttl', 'rb')``.
|
||||
/// :type input: io(bytes) or io(str) or str or pathlib.Path
|
||||
/// :param mime_type: the MIME type of the RDF serialization.
|
||||
/// :type mime_type: str
|
||||
/// :param base_iri: the base IRI used to resolve the relative IRIs in the file or :py:const:`None` if relative IRI resolution should not be done.
|
||||
/// :type base_iri: str or None, optional
|
||||
/// :return: an iterator of RDF triples or quads depending on the format.
|
||||
/// :rtype: iterator(Triple) or iterator(Quad)
|
||||
/// :raises ValueError: if the MIME type is not supported.
|
||||
/// :raises SyntaxError: if the provided data is invalid.
|
||||
///
|
||||
/// >>> input = io.BytesIO(b'<foo> <p> "1" .')
|
||||
/// >>> list(parse(input, "text/turtle", base_iri="http://example.com/"))
|
||||
/// [<Triple subject=<NamedNode value=http://example.com/foo> predicate=<NamedNode value=http://example.com/p> object=<Literal value=1 datatype=<NamedNode value=http://www.w3.org/2001/XMLSchema#string>>>]
|
||||
#[pyfunction] |
||||
#[pyo3(signature = (input, mime_type, *, base_iri = None))] |
||||
pub fn parse( |
||||
input: PyObject, |
||||
mime_type: &str, |
||||
base_iri: Option<&str>, |
||||
py: Python<'_>, |
||||
) -> PyResult<PyObject> { |
||||
let input = if let Ok(path) = input.extract::<PathBuf>(py) { |
||||
PyReadable::from_file(&path, py).map_err(map_io_err)? |
||||
} else { |
||||
PyReadable::from_data(input, py) |
||||
}; |
||||
if let Some(graph_format) = GraphFormat::from_media_type(mime_type) { |
||||
let mut parser = GraphParser::from_format(graph_format); |
||||
if let Some(base_iri) = base_iri { |
||||
parser = parser |
||||
.with_base_iri(base_iri) |
||||
.map_err(|e| PyValueError::new_err(e.to_string()))?; |
||||
} |
||||
Ok(PyTripleReader { |
||||
inner: py.allow_threads(|| parser.read_triples(input).map_err(map_parse_error))?, |
||||
} |
||||
.into_py(py)) |
||||
} else if let Some(dataset_format) = DatasetFormat::from_media_type(mime_type) { |
||||
let mut parser = DatasetParser::from_format(dataset_format); |
||||
if let Some(base_iri) = base_iri { |
||||
parser = parser |
||||
.with_base_iri(base_iri) |
||||
.map_err(|e| PyValueError::new_err(e.to_string()))?; |
||||
} |
||||
Ok(PyQuadReader { |
||||
inner: py.allow_threads(|| parser.read_quads(input).map_err(map_parse_error))?, |
||||
} |
||||
.into_py(py)) |
||||
} else { |
||||
Err(PyValueError::new_err(format!( |
||||
"Not supported MIME type: {mime_type}" |
||||
))) |
||||
} |
||||
} |
||||
|
||||
/// Serializes an RDF graph or dataset.
|
||||
///
|
||||
/// It currently supports the following formats:
|
||||
///
|
||||
/// * `N-Triples <https://www.w3.org/TR/n-triples/>`_ (``application/n-triples``)
|
||||
/// * `N-Quads <https://www.w3.org/TR/n-quads/>`_ (``application/n-quads``)
|
||||
/// * `Turtle <https://www.w3.org/TR/turtle/>`_ (``text/turtle``)
|
||||
/// * `TriG <https://www.w3.org/TR/trig/>`_ (``application/trig``)
|
||||
/// * `RDF/XML <https://www.w3.org/TR/rdf-syntax-grammar/>`_ (``application/rdf+xml``)
|
||||
///
|
||||
/// It supports also some MIME type aliases.
|
||||
/// For example, ``application/turtle`` could also be used for `Turtle <https://www.w3.org/TR/turtle/>`_
|
||||
/// and ``application/xml`` for `RDF/XML <https://www.w3.org/TR/rdf-syntax-grammar/>`_.
|
||||
///
|
||||
/// :param input: the RDF triples and quads to serialize.
|
||||
/// :type input: iterable(Triple) or iterable(Quad)
|
||||
/// :param output: The binary I/O object or file path to write to. For example, it could be a file path as a string or a file writer opened in binary mode with ``open('my_file.ttl', 'wb')``.
|
||||
/// :type output: io(bytes) or str or pathlib.Path
|
||||
/// :param mime_type: the MIME type of the RDF serialization.
|
||||
/// :type mime_type: str
|
||||
/// :rtype: None
|
||||
/// :raises ValueError: if the MIME type is not supported.
|
||||
/// :raises TypeError: if a triple is given during a quad format serialization or reverse.
|
||||
///
|
||||
/// >>> output = io.BytesIO()
|
||||
/// >>> serialize([Triple(NamedNode('http://example.com'), NamedNode('http://example.com/p'), Literal('1'))], output, "text/turtle")
|
||||
/// >>> output.getvalue()
|
||||
/// b'<http://example.com> <http://example.com/p> "1" .\n'
|
||||
#[pyfunction] |
||||
pub fn serialize(input: &PyAny, output: PyObject, mime_type: &str, py: Python<'_>) -> PyResult<()> { |
||||
let output = if let Ok(path) = output.extract::<PathBuf>(py) { |
||||
PyWritable::from_file(&path, py).map_err(map_io_err)? |
||||
} else { |
||||
PyWritable::from_data(output) |
||||
}; |
||||
if let Some(graph_format) = GraphFormat::from_media_type(mime_type) { |
||||
let mut writer = GraphSerializer::from_format(graph_format) |
||||
.triple_writer(output) |
||||
.map_err(map_io_err)?; |
||||
for i in input.iter()? { |
||||
writer |
||||
.write(&*i?.extract::<PyRef<PyTriple>>()?) |
||||
.map_err(map_io_err)?; |
||||
} |
||||
writer.finish().map_err(map_io_err)?; |
||||
Ok(()) |
||||
} else if let Some(dataset_format) = DatasetFormat::from_media_type(mime_type) { |
||||
let mut writer = DatasetSerializer::from_format(dataset_format) |
||||
.quad_writer(output) |
||||
.map_err(map_io_err)?; |
||||
for i in input.iter()? { |
||||
writer |
||||
.write(&*i?.extract::<PyRef<PyQuad>>()?) |
||||
.map_err(map_io_err)?; |
||||
} |
||||
writer.finish().map_err(map_io_err)?; |
||||
Ok(()) |
||||
} else { |
||||
Err(PyValueError::new_err(format!( |
||||
"Not supported MIME type: {mime_type}" |
||||
))) |
||||
} |
||||
} |
||||
|
||||
#[pyclass(name = "TripleReader", module = "pyoxigraph")] |
||||
pub struct PyTripleReader { |
||||
inner: TripleReader<PyReadable>, |
||||
} |
||||
|
||||
#[pymethods] |
||||
impl PyTripleReader { |
||||
fn __iter__(slf: PyRef<'_, Self>) -> PyRef<Self> { |
||||
slf |
||||
} |
||||
|
||||
fn __next__(&mut self, py: Python<'_>) -> PyResult<Option<PyTriple>> { |
||||
py.allow_threads(|| { |
||||
self.inner |
||||
.next() |
||||
.map(|q| Ok(q.map_err(map_parse_error)?.into())) |
||||
.transpose() |
||||
}) |
||||
} |
||||
} |
||||
|
||||
#[pyclass(name = "QuadReader", module = "pyoxigraph")] |
||||
pub struct PyQuadReader { |
||||
inner: QuadReader<PyReadable>, |
||||
} |
||||
|
||||
#[pymethods] |
||||
impl PyQuadReader { |
||||
fn __iter__(slf: PyRef<'_, Self>) -> PyRef<Self> { |
||||
slf |
||||
} |
||||
|
||||
fn __next__(&mut self, py: Python<'_>) -> PyResult<Option<PyQuad>> { |
||||
py.allow_threads(|| { |
||||
self.inner |
||||
.next() |
||||
.map(|q| Ok(q.map_err(map_parse_error)?.into())) |
||||
.transpose() |
||||
}) |
||||
} |
||||
} |
||||
|
||||
pub enum PyReadable { |
||||
Bytes(Cursor<Vec<u8>>), |
||||
Io(BufReader<PyIo>), |
||||
File(BufReader<File>), |
||||
} |
||||
|
||||
impl PyReadable { |
||||
pub fn from_file(file: &Path, py: Python<'_>) -> io::Result<Self> { |
||||
Ok(Self::File(BufReader::new( |
||||
py.allow_threads(|| File::open(file))?, |
||||
))) |
||||
} |
||||
|
||||
pub fn from_data(data: PyObject, py: Python<'_>) -> Self { |
||||
if let Ok(bytes) = data.extract::<Vec<u8>>(py) { |
||||
Self::Bytes(Cursor::new(bytes)) |
||||
} else if let Ok(string) = data.extract::<String>(py) { |
||||
Self::Bytes(Cursor::new(string.into_bytes())) |
||||
} else { |
||||
Self::Io(BufReader::new(PyIo(data))) |
||||
} |
||||
} |
||||
} |
||||
|
||||
impl Read for PyReadable { |
||||
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { |
||||
match self { |
||||
Self::Bytes(bytes) => bytes.read(buf), |
||||
Self::Io(io) => io.read(buf), |
||||
Self::File(file) => file.read(buf), |
||||
} |
||||
} |
||||
} |
||||
|
||||
impl BufRead for PyReadable { |
||||
fn fill_buf(&mut self) -> io::Result<&[u8]> { |
||||
match self { |
||||
Self::Bytes(bytes) => bytes.fill_buf(), |
||||
Self::Io(io) => io.fill_buf(), |
||||
Self::File(file) => file.fill_buf(), |
||||
} |
||||
} |
||||
|
||||
fn consume(&mut self, amt: usize) { |
||||
match self { |
||||
Self::Bytes(bytes) => bytes.consume(amt), |
||||
Self::Io(io) => io.consume(amt), |
||||
Self::File(file) => file.consume(amt), |
||||
} |
||||
} |
||||
} |
||||
|
||||
pub enum PyWritable { |
||||
Io(BufWriter<PyIo>), |
||||
File(BufWriter<File>), |
||||
} |
||||
|
||||
impl PyWritable { |
||||
pub fn from_file(file: &Path, py: Python<'_>) -> io::Result<Self> { |
||||
Ok(Self::File(BufWriter::new( |
||||
py.allow_threads(|| File::create(file))?, |
||||
))) |
||||
} |
||||
|
||||
pub fn from_data(data: PyObject) -> Self { |
||||
Self::Io(BufWriter::new(PyIo(data))) |
||||
} |
||||
} |
||||
|
||||
impl Write for PyWritable { |
||||
fn write(&mut self, buf: &[u8]) -> io::Result<usize> { |
||||
match self { |
||||
Self::Io(io) => io.write(buf), |
||||
Self::File(file) => file.write(buf), |
||||
} |
||||
} |
||||
|
||||
fn flush(&mut self) -> io::Result<()> { |
||||
match self { |
||||
Self::Io(io) => io.flush(), |
||||
Self::File(file) => file.flush(), |
||||
} |
||||
} |
||||
} |
||||
|
||||
pub struct PyIo(PyObject); |
||||
|
||||
impl Read for PyIo { |
||||
fn read(&mut self, buf: &mut [u8]) -> io::Result<usize> { |
||||
Python::with_gil(|py| { |
||||
if buf.is_empty() { |
||||
return Ok(0); |
||||
} |
||||
let to_read = max(1, buf.len() / 4); // We divide by 4 because TextIO works with number of characters and not with number of bytes
|
||||
let read = self |
||||
.0 |
||||
.as_ref(py) |
||||
.call_method1(intern!(py, "read"), (to_read,)) |
||||
.map_err(to_io_err)?; |
||||
let bytes = read |
||||
.extract::<&[u8]>() |
||||
.or_else(|e| read.extract::<&str>().map(str::as_bytes).map_err(|_| e)) |
||||
.map_err(to_io_err)?; |
||||
buf[..bytes.len()].copy_from_slice(bytes); |
||||
Ok(bytes.len()) |
||||
}) |
||||
} |
||||
} |
||||
|
||||
impl Write for PyIo { |
||||
fn write(&mut self, buf: &[u8]) -> io::Result<usize> { |
||||
Python::with_gil(|py| { |
||||
self.0 |
||||
.as_ref(py) |
||||
.call_method1(intern!(py, "write"), (PyBytes::new(py, buf),)) |
||||
.map_err(to_io_err)? |
||||
.extract::<usize>() |
||||
.map_err(to_io_err) |
||||
}) |
||||
} |
||||
|
||||
fn flush(&mut self) -> io::Result<()> { |
||||
Python::with_gil(|py| { |
||||
self.0.as_ref(py).call_method0(intern!(py, "flush"))?; |
||||
Ok(()) |
||||
}) |
||||
} |
||||
} |
||||
|
||||
fn to_io_err(error: impl Into<PyErr>) -> io::Error { |
||||
io::Error::new(io::ErrorKind::Other, error.into()) |
||||
} |
||||
|
||||
pub fn map_io_err(error: io::Error) -> PyErr { |
||||
if error |
||||
.get_ref() |
||||
.map_or(false, <(dyn Error + Send + Sync + 'static)>::is::<PyErr>) |
||||
{ |
||||
*error.into_inner().unwrap().downcast().unwrap() |
||||
} else { |
||||
PyIOError::new_err(error.to_string()) |
||||
} |
||||
} |
||||
|
||||
pub fn map_parse_error(error: ParseError) -> PyErr { |
||||
match error { |
||||
ParseError::Syntax(error) => PySyntaxError::new_err(error.to_string()), |
||||
ParseError::Io(error) => map_io_err(error), |
||||
} |
||||
} |
||||
|
||||
/// Release the GIL
|
||||
/// There should not be ANY use of pyo3 code inside of this method!!!
|
||||
///
|
||||
/// Code from pyo3: https://github.com/PyO3/pyo3/blob/a67180c8a42a0bc0fdc45b651b62c0644130cf47/src/python.rs#L366
|
||||
#[allow(unsafe_code)] |
||||
pub fn allow_threads_unsafe<T>(f: impl FnOnce() -> T) -> T { |
||||
struct RestoreGuard { |
||||
tstate: *mut pyo3::ffi::PyThreadState, |
||||
} |
||||
|
||||
impl Drop for RestoreGuard { |
||||
fn drop(&mut self) { |
||||
unsafe { |
||||
pyo3::ffi::PyEval_RestoreThread(self.tstate); |
||||
} |
||||
} |
||||
} |
||||
|
||||
let _guard = RestoreGuard { |
||||
tstate: unsafe { pyo3::ffi::PyEval_SaveThread() }, |
||||
}; |
||||
f() |
||||
} |
@ -1,30 +0,0 @@ |
||||
mod io; |
||||
mod model; |
||||
mod sparql; |
||||
mod store; |
||||
|
||||
use crate::model::*; |
||||
use crate::sparql::*; |
||||
use crate::store::*; |
||||
use pyo3::prelude::*; |
||||
|
||||
/// Oxigraph Python bindings
|
||||
#[pymodule] |
||||
fn pyoxigraph(_py: Python<'_>, module: &PyModule) -> PyResult<()> { |
||||
module.add("__package__", "pyoxigraph")?; |
||||
module.add("__version__", env!("CARGO_PKG_VERSION"))?; |
||||
module.add("__author__", env!("CARGO_PKG_AUTHORS").replace(':', "\n"))?; |
||||
|
||||
module.add_class::<PyNamedNode>()?; |
||||
module.add_class::<PyBlankNode>()?; |
||||
module.add_class::<PyLiteral>()?; |
||||
module.add_class::<PyDefaultGraph>()?; |
||||
module.add_class::<PyTriple>()?; |
||||
module.add_class::<PyQuad>()?; |
||||
module.add_class::<PyStore>()?; |
||||
module.add_class::<PyVariable>()?; |
||||
module.add_class::<PyQuerySolutions>()?; |
||||
module.add_class::<PyQuerySolution>()?; |
||||
module.add_class::<PyQueryTriples>()?; |
||||
io::add_to_module(module) |
||||
} |
File diff suppressed because it is too large
Load Diff
@ -1,241 +0,0 @@ |
||||
use crate::io::{allow_threads_unsafe, map_io_err, map_parse_error}; |
||||
use crate::map_storage_error; |
||||
use crate::model::*; |
||||
use oxigraph::model::Term; |
||||
use oxigraph::sparql::*; |
||||
use pyo3::basic::CompareOp; |
||||
use pyo3::exceptions::{ |
||||
PyNotImplementedError, PyRuntimeError, PySyntaxError, PyTypeError, PyValueError, |
||||
}; |
||||
|
||||
use pyo3::prelude::*; |
||||
use std::vec::IntoIter; |
||||
|
||||
pub fn parse_query( |
||||
query: &str, |
||||
base_iri: Option<&str>, |
||||
use_default_graph_as_union: bool, |
||||
default_graph: Option<&PyAny>, |
||||
named_graphs: Option<&PyAny>, |
||||
) -> PyResult<Query> { |
||||
let mut query = allow_threads_unsafe(|| Query::parse(query, base_iri)) |
||||
.map_err(|e| map_evaluation_error(e.into()))?; |
||||
|
||||
if use_default_graph_as_union && default_graph.is_some() { |
||||
return Err(PyValueError::new_err( |
||||
"The query() method use_default_graph_as_union and default_graph arguments should not be set at the same time", |
||||
)); |
||||
} |
||||
|
||||
if use_default_graph_as_union { |
||||
query.dataset_mut().set_default_graph_as_union(); |
||||
} |
||||
|
||||
if let Some(default_graph) = default_graph { |
||||
if let Ok(default_graphs) = default_graph.iter() { |
||||
query.dataset_mut().set_default_graph( |
||||
default_graphs |
||||
.map(|graph| Ok(graph?.extract::<PyGraphName>()?.into())) |
||||
.collect::<PyResult<_>>()?, |
||||
) |
||||
} else if let Ok(default_graph) = default_graph.extract::<PyGraphName>() { |
||||
query |
||||
.dataset_mut() |
||||
.set_default_graph(vec![default_graph.into()]); |
||||
} else { |
||||
return Err(PyValueError::new_err( |
||||
format!("The query() method default_graph argument should be a NamedNode, a BlankNode, the DefaultGraph or a not empty list of them. {} found", default_graph.get_type() |
||||
))); |
||||
} |
||||
} |
||||
|
||||
if let Some(named_graphs) = named_graphs { |
||||
query.dataset_mut().set_available_named_graphs( |
||||
named_graphs |
||||
.iter()? |
||||
.map(|graph| Ok(graph?.extract::<PyNamedOrBlankNode>()?.into())) |
||||
.collect::<PyResult<_>>()?, |
||||
) |
||||
} |
||||
|
||||
Ok(query) |
||||
} |
||||
|
||||
pub fn query_results_to_python(py: Python<'_>, results: QueryResults) -> PyObject { |
||||
match results { |
||||
QueryResults::Solutions(inner) => PyQuerySolutions { inner }.into_py(py), |
||||
QueryResults::Graph(inner) => PyQueryTriples { inner }.into_py(py), |
||||
QueryResults::Boolean(b) => b.into_py(py), |
||||
} |
||||
} |
||||
|
||||
/// Tuple associating variables and terms that are the result of a SPARQL ``SELECT`` query.
|
||||
///
|
||||
/// It is the equivalent of a row in SQL.
|
||||
///
|
||||
/// It could be indexes by variable name (:py:class:`Variable` or :py:class:`str`) or position in the tuple (:py:class:`int`).
|
||||
/// Unpacking also works.
|
||||
///
|
||||
/// >>> store = Store()
|
||||
/// >>> store.add(Quad(NamedNode('http://example.com'), NamedNode('http://example.com/p'), Literal('1')))
|
||||
/// >>> solution = next(store.query('SELECT ?s ?p ?o WHERE { ?s ?p ?o }'))
|
||||
/// >>> solution[Variable('s')]
|
||||
/// <NamedNode value=http://example.com>
|
||||
/// >>> solution['s']
|
||||
/// <NamedNode value=http://example.com>
|
||||
/// >>> solution[0]
|
||||
/// <NamedNode value=http://example.com>
|
||||
/// >>> s, p, o = solution
|
||||
/// >>> s
|
||||
/// <NamedNode value=http://example.com>
|
||||
#[pyclass(frozen, unsendable, name = "QuerySolution", module = "pyoxigraph")] |
||||
pub struct PyQuerySolution { |
||||
inner: QuerySolution, |
||||
} |
||||
|
||||
#[pymethods] |
||||
impl PyQuerySolution { |
||||
fn __repr__(&self) -> String { |
||||
let mut buffer = String::new(); |
||||
buffer.push_str("<QuerySolution"); |
||||
for (k, v) in self.inner.iter() { |
||||
buffer.push(' '); |
||||
buffer.push_str(k.as_str()); |
||||
buffer.push('='); |
||||
term_repr(v.as_ref(), &mut buffer) |
||||
} |
||||
buffer.push('>'); |
||||
buffer |
||||
} |
||||
|
||||
fn __richcmp__(&self, other: &Self, op: CompareOp) -> PyResult<bool> { |
||||
match op { |
||||
CompareOp::Eq => Ok(self.inner == other.inner), |
||||
CompareOp::Ne => Ok(self.inner != other.inner), |
||||
_ => Err(PyNotImplementedError::new_err( |
||||
"Ordering is not implemented", |
||||
)), |
||||
} |
||||
} |
||||
|
||||
fn __len__(&self) -> usize { |
||||
self.inner.len() |
||||
} |
||||
|
||||
fn __getitem__(&self, input: &PyAny) -> PyResult<Option<PyTerm>> { |
||||
if let Ok(key) = usize::extract(input) { |
||||
Ok(self.inner.get(key).map(|term| PyTerm::from(term.clone()))) |
||||
} else if let Ok(key) = <&str>::extract(input) { |
||||
Ok(self.inner.get(key).map(|term| PyTerm::from(term.clone()))) |
||||
} else if let Ok(key) = input.extract::<PyRef<PyVariable>>() { |
||||
Ok(self |
||||
.inner |
||||
.get(<&Variable>::from(&*key)) |
||||
.map(|term| PyTerm::from(term.clone()))) |
||||
} else { |
||||
Err(PyTypeError::new_err(format!( |
||||
"{} is not an integer of a string", |
||||
input.get_type().name()?, |
||||
))) |
||||
} |
||||
} |
||||
|
||||
#[allow(clippy::unnecessary_to_owned)] |
||||
fn __iter__(&self) -> SolutionValueIter { |
||||
SolutionValueIter { |
||||
inner: self.inner.values().to_vec().into_iter(), |
||||
} |
||||
} |
||||
} |
||||
|
||||
#[pyclass(module = "pyoxigraph")] |
||||
pub struct SolutionValueIter { |
||||
inner: IntoIter<Option<Term>>, |
||||
} |
||||
|
||||
#[pymethods] |
||||
impl SolutionValueIter { |
||||
fn __iter__(slf: PyRef<'_, Self>) -> PyRef<Self> { |
||||
slf |
||||
} |
||||
|
||||
fn __next__(&mut self) -> Option<Option<PyTerm>> { |
||||
self.inner.next().map(|v| v.map(PyTerm::from)) |
||||
} |
||||
} |
||||
|
||||
/// An iterator of :py:class:`QuerySolution` returned by a SPARQL ``SELECT`` query
|
||||
///
|
||||
/// >>> store = Store()
|
||||
/// >>> store.add(Quad(NamedNode('http://example.com'), NamedNode('http://example.com/p'), Literal('1')))
|
||||
/// >>> list(store.query('SELECT ?s WHERE { ?s ?p ?o }'))
|
||||
/// [<QuerySolution s=<NamedNode value=http://example.com>>]
|
||||
#[pyclass(unsendable, name = "QuerySolutions", module = "pyoxigraph")] |
||||
pub struct PyQuerySolutions { |
||||
inner: QuerySolutionIter, |
||||
} |
||||
|
||||
#[pymethods] |
||||
impl PyQuerySolutions { |
||||
/// :return: the ordered list of all variables that could appear in the query results
|
||||
/// :rtype: list(Variable)
|
||||
///
|
||||
/// >>> store = Store()
|
||||
/// >>> store.query('SELECT ?s WHERE { ?s ?p ?o }').variables
|
||||
/// [<Variable value=s>]
|
||||
#[getter] |
||||
fn variables(&self) -> Vec<PyVariable> { |
||||
self.inner |
||||
.variables() |
||||
.iter() |
||||
.map(|v| v.clone().into()) |
||||
.collect() |
||||
} |
||||
|
||||
fn __iter__(slf: PyRef<'_, Self>) -> PyRef<Self> { |
||||
slf |
||||
} |
||||
|
||||
fn __next__(&mut self) -> PyResult<Option<PyQuerySolution>> { |
||||
Ok(allow_threads_unsafe(|| self.inner.next()) |
||||
.transpose() |
||||
.map_err(map_evaluation_error)? |
||||
.map(move |inner| PyQuerySolution { inner })) |
||||
} |
||||
} |
||||
|
||||
/// An iterator of :py:class:`Triple` returned by a SPARQL ``CONSTRUCT`` or ``DESCRIBE`` query
|
||||
///
|
||||
/// >>> store = Store()
|
||||
/// >>> store.add(Quad(NamedNode('http://example.com'), NamedNode('http://example.com/p'), Literal('1')))
|
||||
/// >>> list(store.query('CONSTRUCT WHERE { ?s ?p ?o }'))
|
||||
/// [<Triple subject=<NamedNode value=http://example.com> predicate=<NamedNode value=http://example.com/p> object=<Literal value=1 datatype=<NamedNode value=http://www.w3.org/2001/XMLSchema#string>>>]
|
||||
#[pyclass(unsendable, name = "QueryTriples", module = "pyoxigraph")] |
||||
pub struct PyQueryTriples { |
||||
inner: QueryTripleIter, |
||||
} |
||||
|
||||
#[pymethods] |
||||
impl PyQueryTriples { |
||||
fn __iter__(slf: PyRef<'_, Self>) -> PyRef<Self> { |
||||
slf |
||||
} |
||||
|
||||
fn __next__(&mut self) -> PyResult<Option<PyTriple>> { |
||||
Ok(allow_threads_unsafe(|| self.inner.next()) |
||||
.transpose() |
||||
.map_err(map_evaluation_error)? |
||||
.map(Into::into)) |
||||
} |
||||
} |
||||
|
||||
pub fn map_evaluation_error(error: EvaluationError) -> PyErr { |
||||
match error { |
||||
EvaluationError::Parsing(error) => PySyntaxError::new_err(error.to_string()), |
||||
EvaluationError::Storage(error) => map_storage_error(error), |
||||
EvaluationError::Io(error) => map_io_err(error), |
||||
EvaluationError::GraphParsing(error) => map_parse_error(error), |
||||
EvaluationError::Query(error) => PyValueError::new_err(error.to_string()), |
||||
_ => PyRuntimeError::new_err(error.to_string()), |
||||
} |
||||
} |
@ -1,907 +0,0 @@ |
||||
#![allow(clippy::needless_option_as_deref)] |
||||
|
||||
use crate::io::{allow_threads_unsafe, map_io_err, map_parse_error, PyReadable, PyWritable}; |
||||
use crate::model::*; |
||||
use crate::sparql::*; |
||||
use oxigraph::io::{DatasetFormat, GraphFormat}; |
||||
use oxigraph::model::{GraphName, GraphNameRef}; |
||||
use oxigraph::sparql::Update; |
||||
use oxigraph::store::{self, LoaderError, SerializerError, StorageError, Store}; |
||||
use pyo3::exceptions::{PyIOError, PyRuntimeError, PyValueError}; |
||||
use pyo3::prelude::*; |
||||
use std::path::PathBuf; |
||||
|
||||
/// RDF store.
|
||||
///
|
||||
/// It encodes a `RDF dataset <https://www.w3.org/TR/rdf11-concepts/#dfn-rdf-dataset>`_ and allows to query it using SPARQL.
|
||||
/// It is based on the `RocksDB <https://rocksdb.org/>`_ key-value database.
|
||||
///
|
||||
/// This store ensures the "repeatable read" isolation level: the store only exposes changes that have
|
||||
/// been "committed" (i.e. no partial writes) and the exposed state does not change for the complete duration
|
||||
/// of a read operation (e.g. a SPARQL query) or a read/write operation (e.g. a SPARQL update).
|
||||
///
|
||||
/// The :py:class:`Store` constructor opens a read-write instance.
|
||||
/// To open a static read-only instance use :py:func:`Store.read_only`
|
||||
/// and to open a read-only instance that tracks a read-write instance use :py:func:`Store.secondary`.
|
||||
///
|
||||
/// :param path: the path of the directory in which the store should read and write its data. If the directory does not exist, it is created.
|
||||
/// If no directory is provided a temporary one is created and removed when the Python garbage collector removes the store.
|
||||
/// In this case, the store data are kept in memory and never written on disk.
|
||||
/// :type path: str or pathlib.Path or None, optional
|
||||
/// :raises IOError: if the target directory contains invalid data or could not be accessed.
|
||||
///
|
||||
/// The :py:func:`str` function provides a serialization of the store in NQuads:
|
||||
///
|
||||
/// >>> store = Store()
|
||||
/// >>> store.add(Quad(NamedNode('http://example.com'), NamedNode('http://example.com/p'), Literal('1'), NamedNode('http://example.com/g')))
|
||||
/// >>> str(store)
|
||||
/// '<http://example.com> <http://example.com/p> "1" <http://example.com/g> .\n'
|
||||
#[pyclass(frozen, name = "Store", module = "pyoxigraph")] |
||||
#[derive(Clone)] |
||||
pub struct PyStore { |
||||
inner: Store, |
||||
} |
||||
|
||||
#[pymethods] |
||||
impl PyStore { |
||||
#[new] |
||||
#[pyo3(signature = (path = None))] |
||||
fn new(path: Option<PathBuf>, py: Python<'_>) -> PyResult<Self> { |
||||
py.allow_threads(|| { |
||||
Ok(Self { |
||||
inner: if let Some(path) = path { |
||||
Store::open(path) |
||||
} else { |
||||
Store::new() |
||||
} |
||||
.map_err(map_storage_error)?, |
||||
}) |
||||
}) |
||||
} |
||||
|
||||
/// Opens a read-only store from disk.
|
||||
///
|
||||
/// Opening as read-only while having an other process writing the database is undefined behavior.
|
||||
/// :py:func:`Store.secondary` should be used in this case.
|
||||
///
|
||||
/// :param path: path to the primary read-write instance data.
|
||||
/// :type path: str
|
||||
/// :return: the opened store.
|
||||
/// :rtype: Store
|
||||
/// :raises IOError: if the target directory contains invalid data or could not be accessed.
|
||||
#[staticmethod] |
||||
fn read_only(path: &str, py: Python<'_>) -> PyResult<Self> { |
||||
py.allow_threads(|| { |
||||
Ok(Self { |
||||
inner: Store::open_read_only(path).map_err(map_storage_error)?, |
||||
}) |
||||
}) |
||||
} |
||||
|
||||
/// Opens a read-only clone of a running read-write store.
|
||||
///
|
||||
/// Changes done while this process is running will be replicated after a possible lag.
|
||||
///
|
||||
/// It should only be used if a primary instance opened with :py:func:`Store` is running at the same time.
|
||||
///
|
||||
/// If you want to simple read-only store use :py:func:`Store.read_only`.
|
||||
///
|
||||
/// :param primary_path: path to the primary read-write instance data.
|
||||
/// :type primary_path: str
|
||||
/// :param secondary_path: path to an other directory for the secondary instance cache. If not given a temporary directory will be used.
|
||||
/// :type secondary_path: str or None, optional
|
||||
/// :return: the opened store.
|
||||
/// :rtype: Store
|
||||
/// :raises IOError: if the target directories contain invalid data or could not be accessed.
|
||||
#[staticmethod] |
||||
#[pyo3(signature = (primary_path, secondary_path = None))] |
||||
fn secondary( |
||||
primary_path: &str, |
||||
secondary_path: Option<&str>, |
||||
py: Python<'_>, |
||||
) -> PyResult<Self> { |
||||
py.allow_threads(|| { |
||||
Ok(Self { |
||||
inner: if let Some(secondary_path) = secondary_path { |
||||
Store::open_persistent_secondary(primary_path, secondary_path) |
||||
} else { |
||||
Store::open_secondary(primary_path) |
||||
} |
||||
.map_err(map_storage_error)?, |
||||
}) |
||||
}) |
||||
} |
||||
|
||||
/// Adds a quad to the store.
|
||||
///
|
||||
/// :param quad: the quad to add.
|
||||
/// :type quad: Quad
|
||||
/// :rtype: None
|
||||
/// :raises IOError: if an I/O error happens during the quad insertion.
|
||||
///
|
||||
/// >>> store = Store()
|
||||
/// >>> store.add(Quad(NamedNode('http://example.com'), NamedNode('http://example.com/p'), Literal('1'), NamedNode('http://example.com/g')))
|
||||
/// >>> list(store)
|
||||
/// [<Quad subject=<NamedNode value=http://example.com> predicate=<NamedNode value=http://example.com/p> object=<Literal value=1 datatype=<NamedNode value=http://www.w3.org/2001/XMLSchema#string>> graph_name=<NamedNode value=http://example.com/g>>]
|
||||
fn add(&self, quad: &PyQuad, py: Python<'_>) -> PyResult<()> { |
||||
py.allow_threads(|| { |
||||
self.inner.insert(quad).map_err(map_storage_error)?; |
||||
Ok(()) |
||||
}) |
||||
} |
||||
|
||||
/// Adds atomically a set of quads to this store.
|
||||
///
|
||||
/// Insertion is done in a transactional manner: either the full operation succeeds or nothing is written to the database.
|
||||
/// The :py:func:`bulk_extend` method is also available for much faster loading of a large number of quads but without transactional guarantees.
|
||||
///
|
||||
/// :param quads: the quads to add.
|
||||
/// :type quads: iterable(Quad)
|
||||
/// :rtype: None
|
||||
/// :raises IOError: if an I/O error happens during the quad insertion.
|
||||
///
|
||||
/// >>> store = Store()
|
||||
/// >>> store.extend([Quad(NamedNode('http://example.com'), NamedNode('http://example.com/p'), Literal('1'), NamedNode('http://example.com/g'))])
|
||||
/// >>> list(store)
|
||||
/// [<Quad subject=<NamedNode value=http://example.com> predicate=<NamedNode value=http://example.com/p> object=<Literal value=1 datatype=<NamedNode value=http://www.w3.org/2001/XMLSchema#string>> graph_name=<NamedNode value=http://example.com/g>>]
|
||||
fn extend(&self, quads: &PyAny, py: Python<'_>) -> PyResult<()> { |
||||
let quads = quads |
||||
.iter()? |
||||
.map(|q| q?.extract()) |
||||
.collect::<PyResult<Vec<PyQuad>>>()?; |
||||
py.allow_threads(|| { |
||||
self.inner.extend(quads).map_err(map_storage_error)?; |
||||
Ok(()) |
||||
}) |
||||
} |
||||
|
||||
/// Adds a set of quads to this store.
|
||||
///
|
||||
/// This function is designed to be as fast as possible **without** transactional guarantees.
|
||||
/// Only a part of the data might be written to the store.
|
||||
///
|
||||
/// :param quads: the quads to add.
|
||||
/// :type quads: iterable(Quad)
|
||||
/// :rtype: None
|
||||
/// :raises IOError: if an I/O error happens during the quad insertion.
|
||||
///
|
||||
/// >>> store = Store()
|
||||
/// >>> store.bulk_extend([Quad(NamedNode('http://example.com'), NamedNode('http://example.com/p'), Literal('1'), NamedNode('http://example.com/g'))])
|
||||
/// >>> list(store)
|
||||
/// [<Quad subject=<NamedNode value=http://example.com> predicate=<NamedNode value=http://example.com/p> object=<Literal value=1 datatype=<NamedNode value=http://www.w3.org/2001/XMLSchema#string>> graph_name=<NamedNode value=http://example.com/g>>]
|
||||
fn bulk_extend(&self, quads: &PyAny) -> PyResult<()> { |
||||
self.inner |
||||
.bulk_loader() |
||||
.load_ok_quads::<PyErr, PythonOrStorageError>( |
||||
quads.iter()?.map(|q| q?.extract::<PyQuad>()), |
||||
)?; |
||||
Ok(()) |
||||
} |
||||
|
||||
/// Removes a quad from the store.
|
||||
///
|
||||
/// :param quad: the quad to remove.
|
||||
/// :type quad: Quad
|
||||
/// :rtype: None
|
||||
/// :raises IOError: if an I/O error happens during the quad removal.
|
||||
///
|
||||
/// >>> store = Store()
|
||||
/// >>> quad = Quad(NamedNode('http://example.com'), NamedNode('http://example.com/p'), Literal('1'), NamedNode('http://example.com/g'))
|
||||
/// >>> store.add(quad)
|
||||
/// >>> store.remove(quad)
|
||||
/// >>> list(store)
|
||||
/// []
|
||||
fn remove(&self, quad: &PyQuad, py: Python<'_>) -> PyResult<()> { |
||||
py.allow_threads(|| { |
||||
self.inner.remove(quad).map_err(map_storage_error)?; |
||||
Ok(()) |
||||
}) |
||||
} |
||||
|
||||
/// Looks for the quads matching a given pattern.
|
||||
///
|
||||
/// :param subject: the quad subject or :py:const:`None` to match everything.
|
||||
/// :type subject: NamedNode or BlankNode or Triple or None
|
||||
/// :param predicate: the quad predicate or :py:const:`None` to match everything.
|
||||
/// :type predicate: NamedNode or None
|
||||
/// :param object: the quad object or :py:const:`None` to match everything.
|
||||
/// :type object: NamedNode or BlankNode or Literal or Triple or None
|
||||
/// :param graph_name: the quad graph name. To match only the default graph, use :py:class:`DefaultGraph`. To match everything use :py:const:`None`.
|
||||
/// :type graph_name: NamedNode or BlankNode or DefaultGraph or None, optional
|
||||
/// :return: an iterator of the quads matching the pattern.
|
||||
/// :rtype: iterator(Quad)
|
||||
/// :raises IOError: if an I/O error happens during the quads lookup.
|
||||
///
|
||||
/// >>> store = Store()
|
||||
/// >>> store.add(Quad(NamedNode('http://example.com'), NamedNode('http://example.com/p'), Literal('1'), NamedNode('http://example.com/g')))
|
||||
/// >>> list(store.quads_for_pattern(NamedNode('http://example.com'), None, None, None))
|
||||
/// [<Quad subject=<NamedNode value=http://example.com> predicate=<NamedNode value=http://example.com/p> object=<Literal value=1 datatype=<NamedNode value=http://www.w3.org/2001/XMLSchema#string>> graph_name=<NamedNode value=http://example.com/g>>]
|
||||
#[pyo3(signature = (subject, predicate, object, graph_name = None))] |
||||
fn quads_for_pattern( |
||||
&self, |
||||
subject: &PyAny, |
||||
predicate: &PyAny, |
||||
object: &PyAny, |
||||
graph_name: Option<&PyAny>, |
||||
) -> PyResult<QuadIter> { |
||||
let (subject, predicate, object, graph_name) = |
||||
extract_quads_pattern(subject, predicate, object, graph_name)?; |
||||
Ok(QuadIter { |
||||
inner: self.inner.quads_for_pattern( |
||||
subject.as_ref().map(Into::into), |
||||
predicate.as_ref().map(Into::into), |
||||
object.as_ref().map(Into::into), |
||||
graph_name.as_ref().map(Into::into), |
||||
), |
||||
}) |
||||
} |
||||
|
||||
/// Executes a `SPARQL 1.1 query <https://www.w3.org/TR/sparql11-query/>`_.
///
/// :param query: the query to execute.
/// :type query: str
/// :param base_iri: the base IRI used to resolve the relative IRIs in the SPARQL query or :py:const:`None` if relative IRI resolution should not be done.
/// :type base_iri: str or None, optional
/// :param use_default_graph_as_union: if the SPARQL query should look for triples in all the dataset graphs by default (i.e. without `GRAPH` operations). Disabled by default.
/// :type use_default_graph_as_union: bool, optional
/// :param default_graph: list of the graphs that should be used as the query default graph. By default, the store default graph is used.
/// :type default_graph: NamedNode or BlankNode or DefaultGraph or list(NamedNode or BlankNode or DefaultGraph) or None, optional
/// :param named_graphs: list of the named graphs that could be used in SPARQL `GRAPH` clause. By default, all the store named graphs are available.
/// :type named_graphs: list(NamedNode or BlankNode) or None, optional
/// :return: a :py:class:`bool` for ``ASK`` queries, an iterator of :py:class:`Triple` for ``CONSTRUCT`` and ``DESCRIBE`` queries and an iterator of :py:class:`QuerySolution` for ``SELECT`` queries.
/// :rtype: QuerySolutions or QueryTriples or bool
/// :raises SyntaxError: if the provided query is invalid.
/// :raises IOError: if an I/O error happens while reading the store.
///
/// ``SELECT`` query:
///
/// >>> store = Store()
/// >>> store.add(Quad(NamedNode('http://example.com'), NamedNode('http://example.com/p'), Literal('1')))
/// >>> [solution['s'] for solution in store.query('SELECT ?s WHERE { ?s ?p ?o }')]
/// [<NamedNode value=http://example.com>]
///
/// ``CONSTRUCT`` query:
///
/// >>> store = Store()
/// >>> store.add(Quad(NamedNode('http://example.com'), NamedNode('http://example.com/p'), Literal('1')))
/// >>> list(store.query('CONSTRUCT WHERE { ?s ?p ?o }'))
/// [<Triple subject=<NamedNode value=http://example.com> predicate=<NamedNode value=http://example.com/p> object=<Literal value=1 datatype=<NamedNode value=http://www.w3.org/2001/XMLSchema#string>>>]
///
/// ``ASK`` query:
///
/// >>> store = Store()
/// >>> store.add(Quad(NamedNode('http://example.com'), NamedNode('http://example.com/p'), Literal('1')))
/// >>> store.query('ASK { ?s ?p ?o }')
/// True
#[pyo3(signature = (query, *, base_iri = None, use_default_graph_as_union = false, default_graph = None, named_graphs = None))]
fn query(
    &self,
    query: &str,
    base_iri: Option<&str>,
    use_default_graph_as_union: bool,
    default_graph: Option<&PyAny>,
    named_graphs: Option<&PyAny>,
    py: Python<'_>,
) -> PyResult<PyObject> {
    // Parsing happens with the GIL held since it reads Python graph arguments.
    let query = parse_query(
        query,
        base_iri,
        use_default_graph_as_union,
        default_graph,
        named_graphs,
    )?;
    // NOTE(review): `allow_threads_unsafe` presumably releases the GIL without
    // pyo3's usual Send checks — confirm the closure touches no Python objects.
    let results =
        allow_threads_unsafe(|| self.inner.query(query)).map_err(map_evaluation_error)?;
    Ok(query_results_to_python(py, results))
}
||||
|
||||
/// Executes a `SPARQL 1.1 update <https://www.w3.org/TR/sparql11-update/>`_.
|
||||
///
|
||||
/// Updates are applied in a transactional manner: either the full operation succeeds or nothing is written to the database.
|
||||
///
|
||||
/// :param update: the update to execute.
|
||||
/// :type update: str
|
||||
/// :param base_iri: the base IRI used to resolve the relative IRIs in the SPARQL update or :py:const:`None` if relative IRI resolution should not be done.
|
||||
/// :type base_iri: str or None, optional
|
||||
/// :rtype: None
|
||||
/// :raises SyntaxError: if the provided update is invalid.
|
||||
/// :raises IOError: if an I/O error happens while reading the store.
|
||||
///
|
||||
/// ``INSERT DATA`` update:
|
||||
///
|
||||
/// >>> store = Store()
|
||||
/// >>> store.update('INSERT DATA { <http://example.com> <http://example.com/p> "1" }')
|
||||
/// >>> list(store)
|
||||
/// [<Quad subject=<NamedNode value=http://example.com> predicate=<NamedNode value=http://example.com/p> object=<Literal value=1 datatype=<NamedNode value=http://www.w3.org/2001/XMLSchema#string>> graph_name=<DefaultGraph>>]
|
||||
///
|
||||
/// ``DELETE DATA`` update:
|
||||
///
|
||||
/// >>> store = Store()
|
||||
/// >>> store.add(Quad(NamedNode('http://example.com'), NamedNode('http://example.com/p'), Literal('1')))
|
||||
/// >>> store.update('DELETE DATA { <http://example.com> <http://example.com/p> "1" }')
|
||||
/// >>> list(store)
|
||||
/// []
|
||||
///
|
||||
/// ``DELETE`` update:
|
||||
///
|
||||
/// >>> store = Store()
|
||||
/// >>> store.add(Quad(NamedNode('http://example.com'), NamedNode('http://example.com/p'), Literal('1')))
|
||||
/// >>> store.update('DELETE WHERE { <http://example.com> ?p ?o }')
|
||||
/// >>> list(store)
|
||||
/// []
|
||||
#[pyo3(signature = (update, *, base_iri = None))] |
||||
fn update(&self, update: &str, base_iri: Option<&str>, py: Python<'_>) -> PyResult<()> { |
||||
py.allow_threads(|| { |
||||
let update = |
||||
Update::parse(update, base_iri).map_err(|e| map_evaluation_error(e.into()))?; |
||||
self.inner.update(update).map_err(map_evaluation_error) |
||||
}) |
||||
} |
||||
|
||||
/// Loads an RDF serialization into the store.
///
/// Loads are applied in a transactional manner: either the full operation succeeds or nothing is written to the database.
/// The :py:func:`bulk_load` method is also available for much faster loading of big files but without transactional guarantees.
///
/// Beware, the full file is loaded into memory.
///
/// It currently supports the following formats:
///
/// * `N-Triples <https://www.w3.org/TR/n-triples/>`_ (``application/n-triples``)
/// * `N-Quads <https://www.w3.org/TR/n-quads/>`_ (``application/n-quads``)
/// * `Turtle <https://www.w3.org/TR/turtle/>`_ (``text/turtle``)
/// * `TriG <https://www.w3.org/TR/trig/>`_ (``application/trig``)
/// * `RDF/XML <https://www.w3.org/TR/rdf-syntax-grammar/>`_ (``application/rdf+xml``)
///
/// It supports also some MIME type aliases.
/// For example, ``application/turtle`` could also be used for `Turtle <https://www.w3.org/TR/turtle/>`_
/// and ``application/xml`` for `RDF/XML <https://www.w3.org/TR/rdf-syntax-grammar/>`_.
///
/// :param input: The binary I/O object or file path to read from. For example, it could be a file path as a string or a file reader opened in binary mode with ``open('my_file.ttl', 'rb')``.
/// :type input: io(bytes) or io(str) or str or pathlib.Path
/// :param mime_type: the MIME type of the RDF serialization.
/// :type mime_type: str
/// :param base_iri: the base IRI used to resolve the relative IRIs in the file or :py:const:`None` if relative IRI resolution should not be done.
/// :type base_iri: str or None, optional
/// :param to_graph: if it is a file composed of triples, the graph in which the triples should be stored. By default, the default graph is used.
/// :type to_graph: NamedNode or BlankNode or DefaultGraph or None, optional
/// :rtype: None
/// :raises ValueError: if the MIME type is not supported or the `to_graph` parameter is given with a quad file.
/// :raises SyntaxError: if the provided data is invalid.
/// :raises IOError: if an I/O error happens during a quad insertion.
///
/// >>> store = Store()
/// >>> store.load(io.BytesIO(b'<foo> <p> "1" .'), "text/turtle", base_iri="http://example.com/", to_graph=NamedNode("http://example.com/g"))
/// >>> list(store)
/// [<Quad subject=<NamedNode value=http://example.com/foo> predicate=<NamedNode value=http://example.com/p> object=<Literal value=1 datatype=<NamedNode value=http://www.w3.org/2001/XMLSchema#string>> graph_name=<NamedNode value=http://example.com/g>>]
#[pyo3(signature = (input, mime_type, *, base_iri = None, to_graph = None))]
fn load(
    &self,
    input: PyObject,
    mime_type: &str,
    base_iri: Option<&str>,
    to_graph: Option<&PyAny>,
    py: Python<'_>,
) -> PyResult<()> {
    // Resolve the optional target graph while the GIL is still held.
    let to_graph_name = if let Some(graph_name) = to_graph {
        Some(GraphName::from(&PyGraphNameRef::try_from(graph_name)?))
    } else {
        None
    };
    // A value convertible to a filesystem path is opened as a file;
    // anything else is wrapped as a Python binary I/O object.
    let input = if let Ok(path) = input.extract::<PathBuf>(py) {
        PyReadable::from_file(&path, py).map_err(map_io_err)?
    } else {
        PyReadable::from_data(input, py)
    };
    py.allow_threads(|| {
        // Triple-based formats are tried first; quad-based (dataset) formats
        // carry their own graph names, so `to_graph` is rejected for them.
        if let Some(graph_format) = GraphFormat::from_media_type(mime_type) {
            self.inner
                .load_graph(
                    input,
                    graph_format,
                    to_graph_name.as_ref().unwrap_or(&GraphName::DefaultGraph),
                    base_iri,
                )
                .map_err(map_loader_error)
        } else if let Some(dataset_format) = DatasetFormat::from_media_type(mime_type) {
            if to_graph_name.is_some() {
                return Err(PyValueError::new_err(
                    "The target graph name parameter is not available for dataset formats",
                ));
            }
            self.inner
                .load_dataset(input, dataset_format, base_iri)
                .map_err(map_loader_error)
        } else {
            Err(PyValueError::new_err(format!(
                "Not supported MIME type: {mime_type}"
            )))
        }
    })
}
||||
|
||||
/// Loads an RDF serialization into the store.
|
||||
///
|
||||
/// This function is designed to be as fast as possible on big files **without** transactional guarantees.
|
||||
/// If the file is invalid only a piece of it might be written to the store.
|
||||
///
|
||||
/// The :py:func:`load` method is also available for loads with transactional guarantees.
|
||||
///
|
||||
/// It currently supports the following formats:
|
||||
///
|
||||
/// * `N-Triples <https://www.w3.org/TR/n-triples/>`_ (``application/n-triples``)
|
||||
/// * `N-Quads <https://www.w3.org/TR/n-quads/>`_ (``application/n-quads``)
|
||||
/// * `Turtle <https://www.w3.org/TR/turtle/>`_ (``text/turtle``)
|
||||
/// * `TriG <https://www.w3.org/TR/trig/>`_ (``application/trig``)
|
||||
/// * `RDF/XML <https://www.w3.org/TR/rdf-syntax-grammar/>`_ (``application/rdf+xml``)
|
||||
///
|
||||
/// It supports also some MIME type aliases.
|
||||
/// For example, ``application/turtle`` could also be used for `Turtle <https://www.w3.org/TR/turtle/>`_
|
||||
/// and ``application/xml`` for `RDF/XML <https://www.w3.org/TR/rdf-syntax-grammar/>`_.
|
||||
///
|
||||
/// :param input: The binary I/O object or file path to read from. For example, it could be a file path as a string or a file reader opened in binary mode with ``open('my_file.ttl', 'rb')``.
|
||||
/// :type input: io(bytes) or io(str) or str or pathlib.Path
|
||||
/// :param mime_type: the MIME type of the RDF serialization.
|
||||
/// :type mime_type: str
|
||||
/// :param base_iri: the base IRI used to resolve the relative IRIs in the file or :py:const:`None` if relative IRI resolution should not be done.
|
||||
/// :type base_iri: str or None, optional
|
||||
/// :param to_graph: if it is a file composed of triples, the graph in which the triples should be stored. By default, the default graph is used.
|
||||
/// :type to_graph: NamedNode or BlankNode or DefaultGraph or None, optional
|
||||
/// :rtype: None
|
||||
/// :raises ValueError: if the MIME type is not supported or the `to_graph` parameter is given with a quad file.
|
||||
/// :raises SyntaxError: if the provided data is invalid.
|
||||
/// :raises IOError: if an I/O error happens during a quad insertion.
|
||||
///
|
||||
/// >>> store = Store()
|
||||
/// >>> store.bulk_load(io.BytesIO(b'<foo> <p> "1" .'), "text/turtle", base_iri="http://example.com/", to_graph=NamedNode("http://example.com/g"))
|
||||
/// >>> list(store)
|
||||
/// [<Quad subject=<NamedNode value=http://example.com/foo> predicate=<NamedNode value=http://example.com/p> object=<Literal value=1 datatype=<NamedNode value=http://www.w3.org/2001/XMLSchema#string>> graph_name=<NamedNode value=http://example.com/g>>]
|
||||
#[pyo3(signature = (input, mime_type, *, base_iri = None, to_graph = None))] |
||||
fn bulk_load( |
||||
&self, |
||||
input: PyObject, |
||||
mime_type: &str, |
||||
base_iri: Option<&str>, |
||||
to_graph: Option<&PyAny>, |
||||
py: Python<'_>, |
||||
) -> PyResult<()> { |
||||
let to_graph_name = if let Some(graph_name) = to_graph { |
||||
Some(GraphName::from(&PyGraphNameRef::try_from(graph_name)?)) |
||||
} else { |
||||
None |
||||
}; |
||||
let input = if let Ok(path) = input.extract::<PathBuf>(py) { |
||||
PyReadable::from_file(&path, py).map_err(map_io_err)? |
||||
} else { |
||||
PyReadable::from_data(input, py) |
||||
}; |
||||
py.allow_threads(|| { |
||||
if let Some(graph_format) = GraphFormat::from_media_type(mime_type) { |
||||
self.inner |
||||
.bulk_loader() |
||||
.load_graph( |
||||
input, |
||||
graph_format, |
||||
&to_graph_name.unwrap_or(GraphName::DefaultGraph), |
||||
base_iri, |
||||
) |
||||
.map_err(map_loader_error) |
||||
} else if let Some(dataset_format) = DatasetFormat::from_media_type(mime_type) { |
||||
if to_graph_name.is_some() { |
||||
return Err(PyValueError::new_err( |
||||
"The target graph name parameter is not available for dataset formats", |
||||
)); |
||||
} |
||||
self.inner |
||||
.bulk_loader() |
||||
.load_dataset(input, dataset_format, base_iri) |
||||
.map_err(map_loader_error) |
||||
} else { |
||||
Err(PyValueError::new_err(format!( |
||||
"Not supported MIME type: {mime_type}" |
||||
))) |
||||
} |
||||
}) |
||||
} |
||||
|
||||
/// Dumps the store quads or triples into a file.
|
||||
///
|
||||
/// It currently supports the following formats:
|
||||
///
|
||||
/// * `N-Triples <https://www.w3.org/TR/n-triples/>`_ (``application/n-triples``)
|
||||
/// * `N-Quads <https://www.w3.org/TR/n-quads/>`_ (``application/n-quads``)
|
||||
/// * `Turtle <https://www.w3.org/TR/turtle/>`_ (``text/turtle``)
|
||||
/// * `TriG <https://www.w3.org/TR/trig/>`_ (``application/trig``)
|
||||
/// * `RDF/XML <https://www.w3.org/TR/rdf-syntax-grammar/>`_ (``application/rdf+xml``)
|
||||
///
|
||||
/// It supports also some MIME type aliases.
|
||||
/// For example, ``application/turtle`` could also be used for `Turtle <https://www.w3.org/TR/turtle/>`_
|
||||
/// and ``application/xml`` for `RDF/XML <https://www.w3.org/TR/rdf-syntax-grammar/>`_.
|
||||
///
|
||||
/// :param output: The binary I/O object or file path to write to. For example, it could be a file path as a string or a file writer opened in binary mode with ``open('my_file.ttl', 'wb')``.
|
||||
/// :type output: io(bytes) or str or pathlib.Path
|
||||
/// :param mime_type: the MIME type of the RDF serialization.
|
||||
/// :type mime_type: str
|
||||
/// :param from_graph: if a triple based format is requested, the store graph from which dump the triples. By default, the default graph is used.
|
||||
/// :type from_graph: NamedNode or BlankNode or DefaultGraph or None, optional
|
||||
/// :rtype: None
|
||||
/// :raises ValueError: if the MIME type is not supported or the `from_graph` parameter is given with a quad syntax.
|
||||
/// :raises IOError: if an I/O error happens during a quad lookup
|
||||
///
|
||||
/// >>> store = Store()
|
||||
/// >>> store.add(Quad(NamedNode('http://example.com'), NamedNode('http://example.com/p'), Literal('1'), NamedNode('http://example.com/g')))
|
||||
/// >>> output = io.BytesIO()
|
||||
/// >>> store.dump(output, "text/turtle", from_graph=NamedNode("http://example.com/g"))
|
||||
/// >>> output.getvalue()
|
||||
/// b'<http://example.com> <http://example.com/p> "1" .\n'
|
||||
#[pyo3(signature = (output, mime_type, *, from_graph = None))] |
||||
fn dump( |
||||
&self, |
||||
output: PyObject, |
||||
mime_type: &str, |
||||
from_graph: Option<&PyAny>, |
||||
py: Python<'_>, |
||||
) -> PyResult<()> { |
||||
let output = if let Ok(path) = output.extract::<PathBuf>(py) { |
||||
PyWritable::from_file(&path, py).map_err(map_io_err)? |
||||
} else { |
||||
PyWritable::from_data(output) |
||||
}; |
||||
let from_graph_name = if let Some(graph_name) = from_graph { |
||||
Some(GraphName::from(&PyGraphNameRef::try_from(graph_name)?)) |
||||
} else { |
||||
None |
||||
}; |
||||
py.allow_threads(|| { |
||||
if let Some(graph_format) = GraphFormat::from_media_type(mime_type) { |
||||
self.inner |
||||
.dump_graph( |
||||
output, |
||||
graph_format, |
||||
&from_graph_name.unwrap_or(GraphName::DefaultGraph), |
||||
) |
||||
.map_err(map_serializer_error) |
||||
} else if let Some(dataset_format) = DatasetFormat::from_media_type(mime_type) { |
||||
if from_graph_name.is_some() { |
||||
return Err(PyValueError::new_err( |
||||
"The target graph name parameter is not available for dataset formats", |
||||
)); |
||||
} |
||||
self.inner |
||||
.dump_dataset(output, dataset_format) |
||||
.map_err(map_serializer_error) |
||||
} else { |
||||
Err(PyValueError::new_err(format!( |
||||
"Not supported MIME type: {mime_type}" |
||||
))) |
||||
} |
||||
}) |
||||
} |
||||
|
||||
/// Returns an iterator over all the store named graphs.
|
||||
///
|
||||
/// :return: an iterator of the store graph names.
|
||||
/// :rtype: iterator(NamedNode or BlankNode)
|
||||
/// :raises IOError: if an I/O error happens during the named graphs lookup.
|
||||
///
|
||||
/// >>> store = Store()
|
||||
/// >>> store.add(Quad(NamedNode('http://example.com'), NamedNode('http://example.com/p'), Literal('1'), NamedNode('http://example.com/g')))
|
||||
/// >>> list(store.named_graphs())
|
||||
/// [<NamedNode value=http://example.com/g>]
|
||||
fn named_graphs(&self) -> GraphNameIter { |
||||
GraphNameIter { |
||||
inner: self.inner.named_graphs(), |
||||
} |
||||
} |
||||
|
||||
/// Returns if the store contains the given named graph.
|
||||
///
|
||||
/// :param graph_name: the name of the named graph.
|
||||
/// :type graph_name: NamedNode or BlankNode or DefaultGraph
|
||||
/// :rtype: bool
|
||||
/// :raises IOError: if an I/O error happens during the named graph lookup.
|
||||
///
|
||||
/// >>> store = Store()
|
||||
/// >>> store.add_graph(NamedNode('http://example.com/g'))
|
||||
/// >>> store.contains_named_graph(NamedNode('http://example.com/g'))
|
||||
/// True
|
||||
fn contains_named_graph(&self, graph_name: &PyAny) -> PyResult<bool> { |
||||
let graph_name = GraphName::from(&PyGraphNameRef::try_from(graph_name)?); |
||||
match graph_name { |
||||
GraphName::DefaultGraph => Ok(true), |
||||
GraphName::NamedNode(graph_name) => self.inner.contains_named_graph(&graph_name), |
||||
GraphName::BlankNode(graph_name) => self.inner.contains_named_graph(&graph_name), |
||||
} |
||||
.map_err(map_storage_error) |
||||
} |
||||
|
||||
/// Adds a named graph to the store.
|
||||
///
|
||||
/// :param graph_name: the name of the name graph to add.
|
||||
/// :type graph_name: NamedNode or BlankNode or DefaultGraph
|
||||
/// :rtype: None
|
||||
/// :raises IOError: if an I/O error happens during the named graph insertion.
|
||||
///
|
||||
/// >>> store = Store()
|
||||
/// >>> store.add_graph(NamedNode('http://example.com/g'))
|
||||
/// >>> list(store.named_graphs())
|
||||
/// [<NamedNode value=http://example.com/g>]
|
||||
fn add_graph(&self, graph_name: &PyAny, py: Python<'_>) -> PyResult<()> { |
||||
let graph_name = GraphName::from(&PyGraphNameRef::try_from(graph_name)?); |
||||
py.allow_threads(|| { |
||||
match graph_name { |
||||
GraphName::DefaultGraph => Ok(()), |
||||
GraphName::NamedNode(graph_name) => { |
||||
self.inner.insert_named_graph(&graph_name).map(|_| ()) |
||||
} |
||||
GraphName::BlankNode(graph_name) => { |
||||
self.inner.insert_named_graph(&graph_name).map(|_| ()) |
||||
} |
||||
} |
||||
.map_err(map_storage_error) |
||||
}) |
||||
} |
||||
|
||||
/// Clears a graph from the store without removing it.
|
||||
///
|
||||
/// :param graph_name: the name of the name graph to clear.
|
||||
/// :type graph_name: NamedNode or BlankNode or DefaultGraph
|
||||
/// :rtype: None
|
||||
/// :raises IOError: if an I/O error happens during the operation.
|
||||
///
|
||||
/// >>> store = Store()
|
||||
/// >>> store.add(Quad(NamedNode('http://example.com'), NamedNode('http://example.com/p'), Literal('1'), NamedNode('http://example.com/g')))
|
||||
/// >>> store.clear_graph(NamedNode('http://example.com/g'))
|
||||
/// >>> list(store)
|
||||
/// []
|
||||
/// >>> list(store.named_graphs())
|
||||
/// [<NamedNode value=http://example.com/g>]
|
||||
fn clear_graph(&self, graph_name: &PyAny, py: Python<'_>) -> PyResult<()> { |
||||
let graph_name = GraphName::from(&PyGraphNameRef::try_from(graph_name)?); |
||||
py.allow_threads(|| { |
||||
self.inner |
||||
.clear_graph(&graph_name) |
||||
.map_err(map_storage_error) |
||||
}) |
||||
} |
||||
|
||||
/// Removes a graph from the store.
|
||||
///
|
||||
/// The default graph will not be removed but just cleared.
|
||||
///
|
||||
/// :param graph_name: the name of the name graph to remove.
|
||||
/// :type graph_name: NamedNode or BlankNode or DefaultGraph
|
||||
/// :rtype: None
|
||||
/// :raises IOError: if an I/O error happens during the named graph removal.
|
||||
///
|
||||
/// >>> store = Store()
|
||||
/// >>> store.add(Quad(NamedNode('http://example.com'), NamedNode('http://example.com/p'), Literal('1'), NamedNode('http://example.com/g')))
|
||||
/// >>> store.remove_graph(NamedNode('http://example.com/g'))
|
||||
/// >>> list(store.named_graphs())
|
||||
/// []
|
||||
fn remove_graph(&self, graph_name: &PyAny, py: Python<'_>) -> PyResult<()> { |
||||
let graph_name = GraphName::from(&PyGraphNameRef::try_from(graph_name)?); |
||||
py.allow_threads(|| { |
||||
match graph_name { |
||||
GraphName::DefaultGraph => self.inner.clear_graph(GraphNameRef::DefaultGraph), |
||||
GraphName::NamedNode(graph_name) => { |
||||
self.inner.remove_named_graph(&graph_name).map(|_| ()) |
||||
} |
||||
GraphName::BlankNode(graph_name) => { |
||||
self.inner.remove_named_graph(&graph_name).map(|_| ()) |
||||
} |
||||
} |
||||
.map_err(map_storage_error) |
||||
}) |
||||
} |
||||
|
||||
/// Clears the store by removing all its contents.
|
||||
///
|
||||
/// :rtype: None
|
||||
/// :raises IOError: if an I/O error happens during the operation.
|
||||
///
|
||||
/// >>> store = Store()
|
||||
/// >>> store.add(Quad(NamedNode('http://example.com'), NamedNode('http://example.com/p'), Literal('1'), NamedNode('http://example.com/g')))
|
||||
/// >>> store.clear()
|
||||
/// >>> list(store)
|
||||
/// []
|
||||
/// >>> list(store.named_graphs())
|
||||
/// []
|
||||
fn clear(&self, py: Python<'_>) -> PyResult<()> { |
||||
py.allow_threads(|| self.inner.clear().map_err(map_storage_error)) |
||||
} |
||||
|
||||
/// Flushes all buffers and ensures that all writes are saved on disk.
|
||||
///
|
||||
/// Flushes are automatically done using background threads but might lag a little bit.
|
||||
///
|
||||
/// :rtype: None
|
||||
/// :raises IOError: if an I/O error happens during the flush.
|
||||
fn flush(&self, py: Python<'_>) -> PyResult<()> { |
||||
py.allow_threads(|| self.inner.flush().map_err(map_storage_error)) |
||||
} |
||||
|
||||
/// Optimizes the database for future workload.
|
||||
///
|
||||
/// Useful to call after a batch upload or another similar operation.
|
||||
///
|
||||
/// :rtype: None
|
||||
/// :raises IOError: if an I/O error happens during the optimization.
|
||||
fn optimize(&self, py: Python<'_>) -> PyResult<()> { |
||||
py.allow_threads(|| self.inner.optimize().map_err(map_storage_error)) |
||||
} |
||||
|
||||
/// Creates database backup into the `target_directory`.
///
/// After its creation, the backup is usable using :py:class:`Store` constructor
/// like a regular pyoxigraph database and operates independently from the original database.
///
/// Warning: Backups are only possible for on-disk databases created by providing a path to :py:class:`Store` constructor.
/// Temporary in-memory databases created without path are not compatible with the backup system.
///
/// Warning: An error is raised if the ``target_directory`` already exists.
///
/// If the target directory is in the same file system as the current database,
/// the database content will not be fully copied
/// but hard links will be used to point to the original database immutable snapshots.
/// This allows cheap regular backups.
///
/// If you want to move your data to another RDF storage system, you should have a look at the :py:func:`dump_dataset` function instead.
///
/// :param target_directory: the directory name to save the database to.
/// :type target_directory: str
/// :rtype: None
/// :raises IOError: if an I/O error happens during the backup.
fn backup(&self, target_directory: &str, py: Python<'_>) -> PyResult<()> {
    // The backup is filesystem work: release the GIL while it runs.
    py.allow_threads(|| {
        self.inner
            .backup(target_directory)
            .map_err(map_storage_error)
    })
}
||||
|
||||
/// Python `str()`: serializes the whole store via the Rust `Display` impl.
fn __str__(&self, py: Python<'_>) -> String {
    // Serializing every quad can take a while, so release the GIL meanwhile.
    py.allow_threads(|| self.inner.to_string())
}
||||
|
||||
fn __bool__(&self) -> PyResult<bool> { |
||||
Ok(!self.inner.is_empty().map_err(map_storage_error)?) |
||||
} |
||||
|
||||
/// Python `len()`: the number of quads in the store.
fn __len__(&self) -> PyResult<usize> {
    self.inner.len().map_err(map_storage_error)
}
||||
|
||||
/// Python `in` operator: whether the given quad is present in the store.
fn __contains__(&self, quad: &PyQuad) -> PyResult<bool> {
    self.inner.contains(quad).map_err(map_storage_error)
}
||||
|
||||
fn __iter__(&self) -> QuadIter { |
||||
QuadIter { |
||||
inner: self.inner.iter(), |
||||
} |
||||
} |
||||
} |
||||
|
||||
/// Python-visible lazy iterator over the quads of a :py:class:`Store`.
/// `unsendable`: pyo3 restricts the object to the thread that created it,
/// since the wrapped store iterator cannot be shared across threads.
#[pyclass(unsendable, module = "pyoxigraph")]
pub struct QuadIter {
    // Underlying Rust iterator; each item is a fallible quad lookup.
    inner: store::QuadIter,
}
||||
|
||||
#[pymethods] |
||||
impl QuadIter { |
||||
fn __iter__(slf: PyRef<'_, Self>) -> PyRef<Self> { |
||||
slf |
||||
} |
||||
|
||||
fn __next__(&mut self) -> PyResult<Option<PyQuad>> { |
||||
self.inner |
||||
.next() |
||||
.map(|q| Ok(q.map_err(map_storage_error)?.into())) |
||||
.transpose() |
||||
} |
||||
} |
||||
|
||||
/// Python-visible lazy iterator over the named graphs of a :py:class:`Store`.
/// `unsendable`: pyo3 restricts the object to the thread that created it,
/// since the wrapped store iterator cannot be shared across threads.
#[pyclass(unsendable, module = "pyoxigraph")]
pub struct GraphNameIter {
    // Underlying Rust iterator; each item is a fallible graph-name lookup.
    inner: store::GraphNameIter,
}
||||
|
||||
#[pymethods] |
||||
impl GraphNameIter { |
||||
fn __iter__(slf: PyRef<'_, Self>) -> PyRef<Self> { |
||||
slf |
||||
} |
||||
|
||||
fn __next__(&mut self) -> PyResult<Option<PyNamedOrBlankNode>> { |
||||
self.inner |
||||
.next() |
||||
.map(|q| Ok(q.map_err(map_storage_error)?.into())) |
||||
.transpose() |
||||
} |
||||
} |
||||
|
||||
pub fn extract_quads_pattern<'a>( |
||||
subject: &'a PyAny, |
||||
predicate: &'a PyAny, |
||||
object: &'a PyAny, |
||||
graph_name: Option<&'a PyAny>, |
||||
) -> PyResult<( |
||||
Option<PySubjectRef<'a>>, |
||||
Option<PyNamedNodeRef<'a>>, |
||||
Option<PyTermRef<'a>>, |
||||
Option<PyGraphNameRef<'a>>, |
||||
)> { |
||||
Ok(( |
||||
if subject.is_none() { |
||||
None |
||||
} else { |
||||
Some(TryFrom::try_from(subject)?) |
||||
}, |
||||
if predicate.is_none() { |
||||
None |
||||
} else { |
||||
Some(TryFrom::try_from(predicate)?) |
||||
}, |
||||
if object.is_none() { |
||||
None |
||||
} else { |
||||
Some(TryFrom::try_from(object)?) |
||||
}, |
||||
if let Some(graph_name) = graph_name { |
||||
if graph_name.is_none() { |
||||
None |
||||
} else { |
||||
Some(TryFrom::try_from(graph_name)?) |
||||
} |
||||
} else { |
||||
None |
||||
}, |
||||
)) |
||||
} |
||||
|
||||
pub fn map_storage_error(error: StorageError) -> PyErr { |
||||
match error { |
||||
StorageError::Io(error) => PyIOError::new_err(error.to_string()), |
||||
_ => PyRuntimeError::new_err(error.to_string()), |
||||
} |
||||
} |
||||
|
||||
pub fn map_loader_error(error: LoaderError) -> PyErr { |
||||
match error { |
||||
LoaderError::Storage(error) => map_storage_error(error), |
||||
LoaderError::Parsing(error) => map_parse_error(error), |
||||
} |
||||
} |
||||
|
||||
pub fn map_serializer_error(error: SerializerError) -> PyErr { |
||||
match error { |
||||
SerializerError::Storage(error) => map_storage_error(error), |
||||
SerializerError::Io(error) => PyIOError::new_err(error.to_string()), |
||||
} |
||||
} |
||||
|
||||
enum PythonOrStorageError { |
||||
Python(PyErr), |
||||
Storage(StorageError), |
||||
} |
||||
|
||||
impl From<PyErr> for PythonOrStorageError { |
||||
fn from(error: PyErr) -> Self { |
||||
Self::Python(error) |
||||
} |
||||
} |
||||
|
||||
impl From<StorageError> for PythonOrStorageError { |
||||
fn from(error: StorageError) -> Self { |
||||
Self::Storage(error) |
||||
} |
||||
} |
||||
impl From<PythonOrStorageError> for PyErr { |
||||
fn from(error: PythonOrStorageError) -> Self { |
||||
match error { |
||||
PythonOrStorageError::Python(error) => error, |
||||
PythonOrStorageError::Storage(error) => map_storage_error(error), |
||||
} |
||||
} |
||||
} |
@ -1,114 +0,0 @@ |
||||
import unittest |
||||
from io import BytesIO, StringIO, UnsupportedOperation |
||||
from tempfile import NamedTemporaryFile, TemporaryFile |
||||
|
||||
from pyoxigraph import Literal, NamedNode, Quad, Triple, parse, serialize |
||||
|
||||
EXAMPLE_TRIPLE = Triple( |
||||
NamedNode("http://example.com/foo"), |
||||
NamedNode("http://example.com/p"), |
||||
Literal("éù"), |
||||
) |
||||
EXAMPLE_QUAD = Quad( |
||||
NamedNode("http://example.com/foo"), |
||||
NamedNode("http://example.com/p"), |
||||
Literal("1"), |
||||
NamedNode("http://example.com/g"), |
||||
) |
||||
|
||||
|
||||
class TestParse(unittest.TestCase): |
||||
def test_parse_file(self) -> None: |
||||
with NamedTemporaryFile() as fp: |
||||
fp.write('<foo> <p> "éù" .'.encode()) |
||||
fp.flush() |
||||
self.assertEqual( |
||||
list(parse(fp.name, "text/turtle", base_iri="http://example.com/")), |
||||
[EXAMPLE_TRIPLE], |
||||
) |
||||
|
||||
def test_parse_not_existing_file(self) -> None: |
||||
with self.assertRaises(IOError) as _: |
||||
parse("/tmp/not-existing-oxigraph-file.ttl", "text/turtle") |
||||
|
||||
def test_parse_str_io(self) -> None: |
||||
self.assertEqual( |
||||
list( |
||||
parse( |
||||
StringIO('<foo> <p> "éù" .'), |
||||
"text/turtle", |
||||
base_iri="http://example.com/", |
||||
) |
||||
), |
||||
[EXAMPLE_TRIPLE], |
||||
) |
||||
|
||||
def test_parse_long_str_io(self) -> None: |
||||
self.assertEqual( |
||||
list( |
||||
parse( |
||||
StringIO('<foo> <p> "éù" .\n' * 1024), |
||||
"text/turtle", |
||||
base_iri="http://example.com/", |
||||
) |
||||
), |
||||
[EXAMPLE_TRIPLE] * 1024, |
||||
) |
||||
|
||||
def test_parse_bytes_io(self) -> None: |
||||
self.assertEqual( |
||||
list( |
||||
parse( |
||||
BytesIO('<foo> <p> "éù" .'.encode()), |
||||
"text/turtle", |
||||
base_iri="http://example.com/", |
||||
) |
||||
), |
||||
[EXAMPLE_TRIPLE], |
||||
) |
||||
|
||||
def test_parse_io_error(self) -> None: |
||||
with self.assertRaises(UnsupportedOperation) as _, TemporaryFile("wb") as fp: |
||||
list(parse(fp, mime_type="application/n-triples")) |
||||
|
||||
def test_parse_quad(self) -> None: |
||||
self.assertEqual( |
||||
list( |
||||
parse( |
||||
StringIO('<g> { <foo> <p> "1" }'), |
||||
"application/trig", |
||||
base_iri="http://example.com/", |
||||
) |
||||
), |
||||
[EXAMPLE_QUAD], |
||||
) |
||||
|
||||
|
||||
class TestSerialize(unittest.TestCase): |
||||
def test_serialize_to_bytes_io(self) -> None: |
||||
output = BytesIO() |
||||
serialize([EXAMPLE_TRIPLE], output, "text/turtle") |
||||
self.assertEqual( |
||||
output.getvalue().decode(), |
||||
'<http://example.com/foo> <http://example.com/p> "éù" .\n', |
||||
) |
||||
|
||||
def test_serialize_to_file(self) -> None: |
||||
with NamedTemporaryFile() as fp: |
||||
serialize([EXAMPLE_TRIPLE], fp.name, "text/turtle") |
||||
self.assertEqual( |
||||
fp.read().decode(), |
||||
'<http://example.com/foo> <http://example.com/p> "éù" .\n', |
||||
) |
||||
|
||||
def test_serialize_io_error(self) -> None: |
||||
with self.assertRaises(UnsupportedOperation) as _, TemporaryFile("rb") as fp: |
||||
serialize([EXAMPLE_TRIPLE], fp, "text/turtle") |
||||
|
||||
def test_serialize_quad(self) -> None: |
||||
output = BytesIO() |
||||
serialize([EXAMPLE_QUAD], output, "application/trig") |
||||
self.assertEqual( |
||||
output.getvalue(), |
||||
b'<http://example.com/g> { <http://example.com/foo> <http://example.com/p> "1" }\n', |
||||
) |
@ -1,380 +0,0 @@ |
||||
import copy |
||||
import pickle |
||||
import sys |
||||
import unittest |
||||
|
||||
from pyoxigraph import ( |
||||
BlankNode, |
||||
DefaultGraph, |
||||
Literal, |
||||
NamedNode, |
||||
Quad, |
||||
Triple, |
||||
Variable, |
||||
) |
||||
|
||||
XSD_STRING = NamedNode("http://www.w3.org/2001/XMLSchema#string") |
||||
XSD_INTEGER = NamedNode("http://www.w3.org/2001/XMLSchema#integer") |
||||
RDF_LANG_STRING = NamedNode("http://www.w3.org/1999/02/22-rdf-syntax-ns#langString") |
||||
|
||||
|
||||
def match_works(test: unittest.TestCase, matched_value: str, constraint: str) -> None: |
||||
"""Hack for Python < 3.10 compatibility""" |
||||
if sys.version_info < (3, 10): |
||||
return test.skipTest("match has been introduced by Python 3.10") |
||||
found = True |
||||
exec( |
||||
f""" |
||||
match {matched_value}: |
||||
case {constraint}: |
||||
found = True |
||||
""" |
||||
) |
||||
test.assertTrue(found) |
||||
return None |
||||
|
||||
|
||||
class TestNamedNode(unittest.TestCase): |
||||
def test_constructor(self) -> None: |
||||
self.assertEqual(NamedNode("http://foo").value, "http://foo") |
||||
|
||||
def test_string(self) -> None: |
||||
self.assertEqual(str(NamedNode("http://foo")), "<http://foo>") |
||||
|
||||
def test_equal(self) -> None: |
||||
self.assertEqual(NamedNode("http://foo"), NamedNode("http://foo")) |
||||
self.assertNotEqual(NamedNode("http://foo"), NamedNode("http://bar")) |
||||
|
||||
def test_pickle(self) -> None: |
||||
node = NamedNode("http://foo") |
||||
self.assertEqual(pickle.loads(pickle.dumps(node)), node) |
||||
self.assertEqual(copy.copy(node), node) |
||||
self.assertEqual(copy.deepcopy(node), node) |
||||
|
||||
def test_basic_match(self) -> None: |
||||
match_works(self, 'NamedNode("http://foo")', 'NamedNode("http://foo")') |
||||
|
||||
def test_wildcard_match(self) -> None: |
||||
match_works(self, 'NamedNode("http://foo")', "NamedNode(x)") |
||||
|
||||
|
||||
class TestBlankNode(unittest.TestCase): |
||||
def test_constructor(self) -> None: |
||||
self.assertEqual(BlankNode("foo").value, "foo") |
||||
self.assertNotEqual(BlankNode(), BlankNode()) |
||||
|
||||
def test_string(self) -> None: |
||||
self.assertEqual(str(BlankNode("foo")), "_:foo") |
||||
|
||||
def test_equal(self) -> None: |
||||
self.assertEqual(BlankNode("foo"), BlankNode("foo")) |
||||
self.assertNotEqual(BlankNode("foo"), BlankNode("bar")) |
||||
self.assertNotEqual(BlankNode("foo"), NamedNode("http://foo")) |
||||
self.assertNotEqual(NamedNode("http://foo"), BlankNode("foo")) |
||||
|
||||
def test_pickle(self) -> None: |
||||
node = BlankNode("foo") |
||||
self.assertEqual(pickle.loads(pickle.dumps(node)), node) |
||||
self.assertEqual(copy.copy(node), node) |
||||
self.assertEqual(copy.deepcopy(node), node) |
||||
|
||||
auto = BlankNode() |
||||
self.assertEqual(pickle.loads(pickle.dumps(auto)), auto) |
||||
self.assertEqual(copy.copy(auto), auto) |
||||
self.assertEqual(copy.deepcopy(auto), auto) |
||||
|
||||
def test_basic_match(self) -> None: |
||||
match_works(self, 'BlankNode("foo")', 'BlankNode("foo")') |
||||
|
||||
def test_wildcard_match(self) -> None: |
||||
match_works(self, 'BlankNode("foo")', "BlankNode(x)") |
||||
|
||||
|
||||
class TestLiteral(unittest.TestCase): |
||||
def test_constructor(self) -> None: |
||||
self.assertEqual(Literal("foo").value, "foo") |
||||
self.assertEqual(Literal("foo").datatype, XSD_STRING) |
||||
|
||||
self.assertEqual(Literal("foo", language="en").value, "foo") |
||||
self.assertEqual(Literal("foo", language="en").language, "en") |
||||
self.assertEqual(Literal("foo", language="en").datatype, RDF_LANG_STRING) |
||||
|
||||
self.assertEqual(Literal("foo", datatype=XSD_INTEGER).value, "foo") |
||||
self.assertEqual(Literal("foo", datatype=XSD_INTEGER).datatype, XSD_INTEGER) |
||||
|
||||
def test_string(self) -> None: |
||||
self.assertEqual(str(Literal("foo")), '"foo"') |
||||
self.assertEqual(str(Literal("foo", language="en")), '"foo"@en') |
||||
self.assertEqual( |
||||
str(Literal("foo", datatype=XSD_INTEGER)), |
||||
'"foo"^^<http://www.w3.org/2001/XMLSchema#integer>', |
||||
) |
||||
|
||||
def test_equals(self) -> None: |
||||
self.assertEqual(Literal("foo", datatype=XSD_STRING), Literal("foo")) |
||||
self.assertEqual( |
||||
Literal("foo", language="en", datatype=RDF_LANG_STRING), |
||||
Literal("foo", language="en"), |
||||
) |
||||
self.assertNotEqual(NamedNode("http://foo"), Literal("foo")) |
||||
self.assertNotEqual(Literal("foo"), NamedNode("http://foo")) |
||||
self.assertNotEqual(BlankNode("foo"), Literal("foo")) |
||||
self.assertNotEqual(Literal("foo"), BlankNode("foo")) |
||||
|
||||
def test_pickle(self) -> None: |
||||
simple = Literal("foo") |
||||
self.assertEqual(pickle.loads(pickle.dumps(simple)), simple) |
||||
self.assertEqual(copy.copy(simple), simple) |
||||
self.assertEqual(copy.deepcopy(simple), simple) |
||||
|
||||
lang_tagged = Literal("foo", language="en") |
||||
self.assertEqual(pickle.loads(pickle.dumps(lang_tagged)), lang_tagged) |
||||
self.assertEqual(copy.copy(lang_tagged), lang_tagged) |
||||
self.assertEqual(copy.deepcopy(lang_tagged), lang_tagged) |
||||
|
||||
number = Literal("1", datatype=XSD_INTEGER) |
||||
self.assertEqual(pickle.loads(pickle.dumps(number)), number) |
||||
self.assertEqual(copy.copy(number), number) |
||||
self.assertEqual(copy.deepcopy(number), number) |
||||
|
||||
def test_basic_match(self) -> None: |
||||
match_works( |
||||
self, 'Literal("foo", language="en")', 'Literal("foo", language="en")' |
||||
) |
||||
match_works( |
||||
self, |
||||
'Literal("1", datatype=XSD_INTEGER)', |
||||
'Literal("1", datatype=NamedNode("http://www.w3.org/2001/XMLSchema#integer"))', |
||||
) |
||||
|
||||
def test_wildcard_match(self) -> None: |
||||
match_works(self, 'Literal("foo", language="en")', "Literal(v, language=l)") |
||||
match_works( |
||||
self, 'Literal("1", datatype=XSD_INTEGER)', "Literal(v, datatype=d)" |
||||
) |
||||
|
||||
|
||||
class TestTriple(unittest.TestCase): |
||||
def test_constructor(self) -> None: |
||||
t = Triple( |
||||
NamedNode("http://example.com/s"), |
||||
NamedNode("http://example.com/p"), |
||||
NamedNode("http://example.com/o"), |
||||
) |
||||
self.assertEqual(t.subject, NamedNode("http://example.com/s")) |
||||
self.assertEqual(t.predicate, NamedNode("http://example.com/p")) |
||||
self.assertEqual(t.object, NamedNode("http://example.com/o")) |
||||
|
||||
def test_rdf_star_constructor(self) -> None: |
||||
t = Triple( |
||||
Triple( |
||||
NamedNode("http://example.com/ss"), |
||||
NamedNode("http://example.com/sp"), |
||||
NamedNode("http://example.com/so"), |
||||
), |
||||
NamedNode("http://example.com/p"), |
||||
Triple( |
||||
NamedNode("http://example.com/os"), |
||||
NamedNode("http://example.com/op"), |
||||
NamedNode("http://example.com/oo"), |
||||
), |
||||
) |
||||
self.assertEqual( |
||||
t.subject, |
||||
Triple( |
||||
NamedNode("http://example.com/ss"), |
||||
NamedNode("http://example.com/sp"), |
||||
NamedNode("http://example.com/so"), |
||||
), |
||||
) |
||||
self.assertEqual(t.predicate, NamedNode("http://example.com/p")) |
||||
self.assertEqual( |
||||
t.object, |
||||
Triple( |
||||
NamedNode("http://example.com/os"), |
||||
NamedNode("http://example.com/op"), |
||||
NamedNode("http://example.com/oo"), |
||||
), |
||||
) |
||||
|
||||
def test_mapping(self) -> None: |
||||
t = Triple( |
||||
NamedNode("http://example.com/s"), |
||||
NamedNode("http://example.com/p"), |
||||
NamedNode("http://example.com/o"), |
||||
) |
||||
self.assertEqual(t[0], NamedNode("http://example.com/s")) |
||||
self.assertEqual(t[1], NamedNode("http://example.com/p")) |
||||
self.assertEqual(t[2], NamedNode("http://example.com/o")) |
||||
|
||||
def test_destruct(self) -> None: |
||||
(s, p, o) = Triple( |
||||
NamedNode("http://example.com/s"), |
||||
NamedNode("http://example.com/p"), |
||||
NamedNode("http://example.com/o"), |
||||
) |
||||
self.assertEqual(s, NamedNode("http://example.com/s")) |
||||
self.assertEqual(p, NamedNode("http://example.com/p")) |
||||
self.assertEqual(o, NamedNode("http://example.com/o")) |
||||
|
||||
def test_string(self) -> None: |
||||
self.assertEqual( |
||||
str( |
||||
Triple( |
||||
NamedNode("http://example.com/s"), |
||||
NamedNode("http://example.com/p"), |
||||
NamedNode("http://example.com/o"), |
||||
) |
||||
), |
||||
"<http://example.com/s> <http://example.com/p> <http://example.com/o>", |
||||
) |
||||
|
||||
def test_pickle(self) -> None: |
||||
triple = Triple( |
||||
NamedNode("http://example.com/s"), |
||||
NamedNode("http://example.com/p"), |
||||
NamedNode("http://example.com/o"), |
||||
) |
||||
self.assertEqual(pickle.loads(pickle.dumps(triple)), triple) |
||||
self.assertEqual(copy.copy(triple), triple) |
||||
self.assertEqual(copy.deepcopy(triple), triple) |
||||
|
||||
def test_match(self) -> None: |
||||
match_works( |
||||
self, |
||||
'Triple(NamedNode("http://example.com/s"), NamedNode("http://example.com/p"), ' |
||||
'NamedNode("http://example.com/o"))', |
||||
'Triple(NamedNode("http://example.com/s"), NamedNode(p), o)', |
||||
) |
||||
|
||||
|
||||
class TestDefaultGraph(unittest.TestCase): |
||||
def test_equal(self) -> None: |
||||
self.assertEqual(DefaultGraph(), DefaultGraph()) |
||||
self.assertNotEqual(DefaultGraph(), NamedNode("http://bar")) |
||||
|
||||
def test_pickle(self) -> None: |
||||
self.assertEqual(pickle.loads(pickle.dumps(DefaultGraph())), DefaultGraph()) |
||||
self.assertEqual(copy.copy(DefaultGraph()), DefaultGraph()) |
||||
self.assertEqual(copy.deepcopy(DefaultGraph()), DefaultGraph()) |
||||
|
||||
def test_match(self) -> None: |
||||
match_works(self, "DefaultGraph()", "DefaultGraph()") |
||||
|
||||
|
||||
class TestQuad(unittest.TestCase): |
||||
def test_constructor(self) -> None: |
||||
t = Quad( |
||||
NamedNode("http://example.com/s"), |
||||
NamedNode("http://example.com/p"), |
||||
NamedNode("http://example.com/o"), |
||||
NamedNode("http://example.com/g"), |
||||
) |
||||
self.assertEqual(t.subject, NamedNode("http://example.com/s")) |
||||
self.assertEqual(t.predicate, NamedNode("http://example.com/p")) |
||||
self.assertEqual(t.object, NamedNode("http://example.com/o")) |
||||
self.assertEqual(t.graph_name, NamedNode("http://example.com/g")) |
||||
self.assertEqual( |
||||
t.triple, |
||||
Triple( |
||||
NamedNode("http://example.com/s"), |
||||
NamedNode("http://example.com/p"), |
||||
NamedNode("http://example.com/o"), |
||||
), |
||||
) |
||||
self.assertEqual( |
||||
Quad( |
||||
NamedNode("http://example.com/s"), |
||||
NamedNode("http://example.com/p"), |
||||
NamedNode("http://example.com/o"), |
||||
), |
||||
Quad( |
||||
NamedNode("http://example.com/s"), |
||||
NamedNode("http://example.com/p"), |
||||
NamedNode("http://example.com/o"), |
||||
DefaultGraph(), |
||||
), |
||||
) |
||||
|
||||
def test_mapping(self) -> None: |
||||
t = Quad( |
||||
NamedNode("http://example.com/s"), |
||||
NamedNode("http://example.com/p"), |
||||
NamedNode("http://example.com/o"), |
||||
NamedNode("http://example.com/g"), |
||||
) |
||||
self.assertEqual(t[0], NamedNode("http://example.com/s")) |
||||
self.assertEqual(t[1], NamedNode("http://example.com/p")) |
||||
self.assertEqual(t[2], NamedNode("http://example.com/o")) |
||||
self.assertEqual(t[3], NamedNode("http://example.com/g")) |
||||
|
||||
def test_destruct(self) -> None: |
||||
(s, p, o, g) = Quad( |
||||
NamedNode("http://example.com/s"), |
||||
NamedNode("http://example.com/p"), |
||||
NamedNode("http://example.com/o"), |
||||
NamedNode("http://example.com/g"), |
||||
) |
||||
self.assertEqual(s, NamedNode("http://example.com/s")) |
||||
self.assertEqual(p, NamedNode("http://example.com/p")) |
||||
self.assertEqual(o, NamedNode("http://example.com/o")) |
||||
self.assertEqual(g, NamedNode("http://example.com/g")) |
||||
|
||||
def test_string(self) -> None: |
||||
self.assertEqual( |
||||
str( |
||||
Triple( |
||||
NamedNode("http://example.com/s"), |
||||
NamedNode("http://example.com/p"), |
||||
NamedNode("http://example.com/o"), |
||||
) |
||||
), |
||||
"<http://example.com/s> <http://example.com/p> <http://example.com/o>", |
||||
) |
||||
|
||||
def test_pickle(self) -> None: |
||||
quad = Quad( |
||||
NamedNode("http://example.com/s"), |
||||
NamedNode("http://example.com/p"), |
||||
NamedNode("http://example.com/o"), |
||||
NamedNode("http://example.com/g"), |
||||
) |
||||
self.assertEqual(pickle.loads(pickle.dumps(quad)), quad) |
||||
self.assertEqual(copy.copy(quad), quad) |
||||
self.assertEqual(copy.deepcopy(quad), quad) |
||||
|
||||
def test_match(self) -> None: |
||||
match_works( |
||||
self, |
||||
'Quad(NamedNode("http://example.com/s"), NamedNode("http://example.com/p"), ' |
||||
'NamedNode("http://example.com/o"), NamedNode("http://example.com/g"))', |
||||
'Quad(NamedNode("http://example.com/s"), NamedNode(p), o, NamedNode("http://example.com/g"))', |
||||
) |
||||
|
||||
|
||||
class TestVariable(unittest.TestCase): |
||||
def test_constructor(self) -> None: |
||||
self.assertEqual(Variable("foo").value, "foo") |
||||
|
||||
def test_string(self) -> None: |
||||
self.assertEqual(str(Variable("foo")), "?foo") |
||||
|
||||
def test_equal(self) -> None: |
||||
self.assertEqual(Variable("foo"), Variable("foo")) |
||||
self.assertNotEqual(Variable("foo"), Variable("bar")) |
||||
|
||||
def test_pickle(self) -> None: |
||||
v = Variable("foo") |
||||
self.assertEqual(pickle.loads(pickle.dumps(v)), v) |
||||
self.assertEqual(copy.copy(v), v) |
||||
self.assertEqual(copy.deepcopy(v), v) |
||||
|
||||
def test_basic_match(self) -> None: |
||||
match_works(self, 'Variable("foo")', 'Variable("foo")') |
||||
|
||||
def test_wildcard_match(self) -> None: |
||||
match_works(self, 'Variable("foo")', "Variable(x)") |
||||
|
||||
|
||||
if __name__ == "__main__": |
||||
unittest.main() |
@ -1,375 +0,0 @@ |
||||
import unittest |
||||
from io import BytesIO, UnsupportedOperation |
||||
from pathlib import Path |
||||
from tempfile import NamedTemporaryFile, TemporaryDirectory, TemporaryFile |
||||
from typing import Any |
||||
|
||||
from pyoxigraph import ( |
||||
BlankNode, |
||||
DefaultGraph, |
||||
NamedNode, |
||||
Quad, |
||||
QuerySolution, |
||||
QuerySolutions, |
||||
QueryTriples, |
||||
Store, |
||||
Triple, |
||||
Variable, |
||||
) |
||||
|
||||
foo = NamedNode("http://foo") |
||||
bar = NamedNode("http://bar") |
||||
baz = NamedNode("http://baz") |
||||
triple = Triple(foo, foo, foo) |
||||
graph = NamedNode("http://graph") |
||||
|
||||
|
||||
class TestStore(unittest.TestCase): |
||||
def test_add(self) -> None: |
||||
store = Store() |
||||
store.add(Quad(foo, bar, baz)) |
||||
store.add(Quad(foo, bar, baz, DefaultGraph())) |
||||
store.add(Quad(foo, bar, baz, graph)) |
||||
store.add(Quad(triple, bar, baz)) |
||||
store.add(Quad(foo, bar, triple)) |
||||
self.assertEqual(len(store), 4) |
||||
|
||||
def test_extend(self) -> None: |
||||
store = Store() |
||||
store.extend( |
||||
( |
||||
Quad(foo, bar, baz), |
||||
Quad(foo, bar, baz, graph), |
||||
Quad(foo, bar, baz, DefaultGraph()), |
||||
) |
||||
) |
||||
self.assertEqual(len(store), 2) |
||||
|
||||
def test_bulk_extend(self) -> None: |
||||
store = Store() |
||||
store.bulk_extend( |
||||
( |
||||
Quad(foo, bar, baz), |
||||
Quad(foo, bar, baz, graph), |
||||
Quad(foo, bar, baz, DefaultGraph()), |
||||
) |
||||
) |
||||
self.assertEqual(len(store), 2) |
||||
|
||||
def test_remove(self) -> None: |
||||
store = Store() |
||||
store.add(Quad(foo, bar, baz)) |
||||
store.add(Quad(foo, bar, baz, DefaultGraph())) |
||||
store.add(Quad(foo, bar, baz, graph)) |
||||
store.remove(Quad(foo, bar, baz)) |
||||
self.assertEqual(len(store), 1) |
||||
|
||||
def test_len(self) -> None: |
||||
store = Store() |
||||
store.add(Quad(foo, bar, baz)) |
||||
store.add(Quad(foo, bar, baz, graph)) |
||||
self.assertEqual(len(store), 2) |
||||
|
||||
def test_in(self) -> None: |
||||
store = Store() |
||||
store.add(Quad(foo, bar, baz)) |
||||
store.add(Quad(foo, bar, baz, DefaultGraph())) |
||||
store.add(Quad(foo, bar, baz, graph)) |
||||
self.assertIn(Quad(foo, bar, baz), store) |
||||
self.assertIn(Quad(foo, bar, baz, DefaultGraph()), store) |
||||
self.assertIn(Quad(foo, bar, baz, graph), store) |
||||
self.assertNotIn(Quad(foo, bar, baz, foo), store) |
||||
|
||||
def test_iter(self) -> None: |
||||
store = Store() |
||||
store.add(Quad(foo, bar, baz, DefaultGraph())) |
||||
store.add(Quad(foo, bar, baz, graph)) |
||||
self.assertEqual( |
||||
set(store), |
||||
{Quad(foo, bar, baz, DefaultGraph()), Quad(foo, bar, baz, graph)}, |
||||
) |
||||
|
||||
def test_quads_for_pattern(self) -> None: |
||||
store = Store() |
||||
store.add(Quad(foo, bar, baz, DefaultGraph())) |
||||
store.add(Quad(foo, bar, baz, graph)) |
||||
self.assertEqual( |
||||
set(store.quads_for_pattern(None, None, None)), |
||||
{Quad(foo, bar, baz, DefaultGraph()), Quad(foo, bar, baz, graph)}, |
||||
) |
||||
self.assertEqual( |
||||
set(store.quads_for_pattern(foo, None, None)), |
||||
{Quad(foo, bar, baz, DefaultGraph()), Quad(foo, bar, baz, graph)}, |
||||
) |
||||
self.assertEqual( |
||||
set(store.quads_for_pattern(None, None, None, graph)), |
||||
{Quad(foo, bar, baz, graph)}, |
||||
) |
||||
self.assertEqual( |
||||
set(store.quads_for_pattern(foo, None, None, DefaultGraph())), |
||||
{Quad(foo, bar, baz, DefaultGraph())}, |
||||
) |
||||
|
||||
def test_ask_query(self) -> None: |
||||
store = Store() |
||||
store.add(Quad(foo, foo, foo)) |
||||
self.assertTrue(store.query("ASK { ?s ?s ?s }")) |
||||
self.assertFalse(store.query("ASK { FILTER(false) }")) |
||||
|
||||
def test_construct_query(self) -> None: |
||||
store = Store() |
||||
store.add(Quad(foo, bar, baz)) |
||||
results: Any = store.query("CONSTRUCT { ?s ?p ?o } WHERE { ?s ?p ?o }") |
||||
self.assertIsInstance(results, QueryTriples) |
||||
self.assertEqual( |
||||
set(results), |
||||
{Triple(foo, bar, baz)}, |
||||
) |
||||
|
||||
def test_select_query(self) -> None: |
||||
store = Store() |
||||
store.add(Quad(foo, bar, baz)) |
||||
solutions: Any = store.query("SELECT ?s ?o WHERE { ?s ?p ?o }") |
||||
self.assertIsInstance(solutions, QuerySolutions) |
||||
self.assertEqual(solutions.variables, [Variable("s"), Variable("o")]) |
||||
solution = next(solutions) |
||||
self.assertIsInstance(solution, QuerySolution) |
||||
self.assertEqual(solution[0], foo) |
||||
self.assertEqual(solution[1], baz) |
||||
self.assertEqual(solution["s"], foo) |
||||
self.assertEqual(solution["o"], baz) |
||||
self.assertEqual(solution[Variable("s")], foo) |
||||
self.assertEqual(solution[Variable("o")], baz) |
||||
s, o = solution |
||||
self.assertEqual(s, foo) |
||||
self.assertEqual(o, baz) |
||||
|
||||
def test_select_query_union_default_graph(self) -> None: |
||||
store = Store() |
||||
store.add(Quad(foo, bar, baz, graph)) |
||||
results: Any = store.query("SELECT ?s WHERE { ?s ?p ?o }") |
||||
self.assertEqual(len(list(results)), 0) |
||||
results = store.query( |
||||
"SELECT ?s WHERE { ?s ?p ?o }", use_default_graph_as_union=True |
||||
) |
||||
self.assertEqual(len(list(results)), 1) |
||||
results = store.query( |
||||
"SELECT ?s WHERE { ?s ?p ?o }", |
||||
use_default_graph_as_union=True, |
||||
named_graphs=[graph], |
||||
) |
||||
self.assertEqual(len(list(results)), 1) |
||||
|
||||
def test_select_query_with_default_graph(self) -> None: |
||||
store = Store() |
||||
graph_bnode = BlankNode("g") |
||||
store.add(Quad(foo, bar, baz, graph)) |
||||
store.add(Quad(foo, bar, foo)) |
||||
store.add(Quad(foo, bar, bar, graph_bnode)) |
||||
results: Any = store.query("SELECT ?s WHERE { ?s ?p ?o }") |
||||
self.assertEqual(len(list(results)), 1) |
||||
results = store.query("SELECT ?s WHERE { ?s ?p ?o }", default_graph=graph) |
||||
self.assertEqual(len(list(results)), 1) |
||||
results = store.query( |
||||
"SELECT ?s WHERE { ?s ?p ?o }", |
||||
default_graph=[DefaultGraph(), graph, graph_bnode], |
||||
) |
||||
self.assertEqual(len(list(results)), 3) |
||||
|
||||
def test_select_query_with_named_graph(self) -> None: |
||||
store = Store() |
||||
graph_bnode = BlankNode("g") |
||||
store.add(Quad(foo, bar, baz, graph)) |
||||
store.add(Quad(foo, bar, foo)) |
||||
store.add(Quad(foo, bar, bar, graph_bnode)) |
||||
store.add(Quad(foo, bar, bar, foo)) |
||||
results: Any = store.query( |
||||
"SELECT ?s WHERE { GRAPH ?g { ?s ?p ?o } }", |
||||
named_graphs=[graph, graph_bnode], |
||||
) |
||||
self.assertEqual(len(list(results)), 2) |
||||
|
||||
def test_update_insert_data(self) -> None: |
||||
store = Store() |
||||
store.update("INSERT DATA { <http://foo> <http://foo> <http://foo> }") |
||||
self.assertEqual(len(store), 1) |
||||
|
||||
def test_update_delete_data(self) -> None: |
||||
store = Store() |
||||
store.add(Quad(foo, foo, foo)) |
||||
store.update("DELETE DATA { <http://foo> <http://foo> <http://foo> }") |
||||
self.assertEqual(len(store), 0) |
||||
|
||||
def test_update_delete_where(self) -> None: |
||||
store = Store() |
||||
store.add(Quad(foo, foo, foo)) |
||||
store.update("DELETE WHERE { ?v ?v ?v }") |
||||
self.assertEqual(len(store), 0) |
||||
|
||||
def test_update_load(self) -> None: |
||||
store = Store() |
||||
store.update("LOAD <https://www.w3.org/1999/02/22-rdf-syntax-ns>") |
||||
self.assertGreater(len(store), 100) |
||||
|
||||
def test_update_star(self) -> None: |
||||
store = Store() |
||||
store.update( |
||||
"PREFIX : <http://www.example.org/> INSERT DATA { :alice :claims << :bob :age 23 >> }" |
||||
) |
||||
results: Any = store.query( |
||||
"PREFIX : <http://www.example.org/> SELECT ?p ?a WHERE { ?p :claims << :bob :age ?a >> }" |
||||
) |
||||
self.assertEqual(len(list(results)), 1) |
||||
|
||||
def test_load_ntriples_to_default_graph(self) -> None: |
||||
store = Store() |
||||
store.load( |
||||
BytesIO(b"<http://foo> <http://bar> <http://baz> ."), |
||||
mime_type="application/n-triples", |
||||
) |
||||
self.assertEqual(set(store), {Quad(foo, bar, baz, DefaultGraph())}) |
||||
|
||||
def test_load_ntriples_to_named_graph(self) -> None: |
||||
store = Store() |
||||
store.load( |
||||
BytesIO(b"<http://foo> <http://bar> <http://baz> ."), |
||||
mime_type="application/n-triples", |
||||
to_graph=graph, |
||||
) |
||||
self.assertEqual(set(store), {Quad(foo, bar, baz, graph)}) |
||||
|
||||
def test_load_turtle_with_base_iri(self) -> None: |
||||
store = Store() |
||||
store.load( |
||||
BytesIO(b"<http://foo> <http://bar> <> ."), |
||||
mime_type="text/turtle", |
||||
base_iri="http://baz", |
||||
) |
||||
self.assertEqual(set(store), {Quad(foo, bar, baz, DefaultGraph())}) |
||||
|
||||
def test_load_nquads(self) -> None: |
||||
store = Store() |
||||
store.load( |
||||
BytesIO(b"<http://foo> <http://bar> <http://baz> <http://graph>."), |
||||
mime_type="application/n-quads", |
||||
) |
||||
self.assertEqual(set(store), {Quad(foo, bar, baz, graph)}) |
||||
|
||||
def test_load_trig_with_base_iri(self) -> None: |
||||
store = Store() |
||||
store.load( |
||||
BytesIO(b"<http://graph> { <http://foo> <http://bar> <> . }"), |
||||
mime_type="application/trig", |
||||
base_iri="http://baz", |
||||
) |
||||
self.assertEqual(set(store), {Quad(foo, bar, baz, graph)}) |
||||
|
||||
def test_load_file(self) -> None: |
||||
with NamedTemporaryFile(delete=False) as fp: |
||||
file_name = Path(fp.name) |
||||
fp.write(b"<http://foo> <http://bar> <http://baz> <http://graph>.") |
||||
store = Store() |
||||
store.load(file_name, mime_type="application/n-quads") |
||||
file_name.unlink() |
||||
self.assertEqual(set(store), {Quad(foo, bar, baz, graph)}) |
||||
|
||||
def test_load_with_io_error(self) -> None: |
||||
with self.assertRaises(UnsupportedOperation) as _, TemporaryFile("wb") as fp: |
||||
Store().load(fp, mime_type="application/n-triples") |
||||
|
||||
def test_dump_ntriples(self) -> None: |
||||
store = Store() |
||||
store.add(Quad(foo, bar, baz, graph)) |
||||
output = BytesIO() |
||||
store.dump(output, "application/n-triples", from_graph=graph) |
||||
self.assertEqual( |
||||
output.getvalue(), |
||||
b"<http://foo> <http://bar> <http://baz> .\n", |
||||
) |
||||
|
||||
def test_dump_nquads(self) -> None: |
||||
store = Store() |
||||
store.add(Quad(foo, bar, baz, graph)) |
||||
output = BytesIO() |
||||
store.dump(output, "application/n-quads") |
||||
self.assertEqual( |
||||
output.getvalue(), |
||||
b"<http://foo> <http://bar> <http://baz> <http://graph> .\n", |
||||
) |
||||
|
||||
def test_dump_trig(self) -> None: |
||||
store = Store() |
||||
store.add(Quad(foo, bar, baz, graph)) |
||||
store.add(Quad(foo, bar, baz)) |
||||
output = BytesIO() |
||||
store.dump(output, "application/trig") |
||||
self.assertEqual( |
||||
output.getvalue(), |
||||
b"<http://foo> <http://bar> <http://baz> .\n<http://graph> { <http://foo> <http://bar> <http://baz> }\n", |
||||
) |
||||
|
||||
def test_dump_file(self) -> None: |
||||
with NamedTemporaryFile(delete=False) as fp: |
||||
file_name = Path(fp.name) |
||||
store = Store() |
||||
store.add(Quad(foo, bar, baz, graph)) |
||||
store.dump(file_name, "application/n-quads") |
||||
self.assertEqual( |
||||
file_name.read_text(), |
||||
"<http://foo> <http://bar> <http://baz> <http://graph> .\n", |
||||
) |
||||
|
||||
def test_dump_with_io_error(self) -> None: |
||||
with self.assertRaises(OSError) as _, TemporaryFile("rb") as fp: |
||||
Store().dump(fp, mime_type="application/rdf+xml") |
||||
|
||||
def test_write_in_read(self) -> None: |
||||
store = Store() |
||||
store.add(Quad(foo, bar, bar)) |
||||
store.add(Quad(foo, bar, baz)) |
||||
for triple in store: |
||||
store.add(Quad(triple.object, triple.predicate, triple.subject)) |
||||
self.assertEqual(len(store), 4) |
||||
|
||||
def test_add_graph(self) -> None: |
||||
store = Store() |
||||
store.add_graph(graph) |
||||
self.assertEqual(list(store.named_graphs()), [graph]) |
||||
|
||||
def test_remove_graph(self) -> None: |
||||
store = Store() |
||||
store.add(Quad(foo, bar, baz, graph)) |
||||
store.add_graph(NamedNode("http://graph2")) |
||||
store.remove_graph(graph) |
||||
store.remove_graph(NamedNode("http://graph2")) |
||||
self.assertEqual(list(store.named_graphs()), []) |
||||
self.assertEqual(list(store), []) |
||||
|
||||
def test_read_only(self) -> None: |
||||
quad = Quad(foo, bar, baz, graph) |
||||
with TemporaryDirectory() as dir: |
||||
store = Store(dir) |
||||
store.add(quad) |
||||
del store |
||||
store = Store.read_only(dir) |
||||
self.assertEqual(list(store), [quad]) |
||||
|
||||
def test_secondary(self) -> None: |
||||
quad = Quad(foo, bar, baz, graph) |
||||
with TemporaryDirectory() as dir: |
||||
store = Store(dir) |
||||
store.add(quad) |
||||
store.flush() |
||||
|
||||
secondary_store = Store.secondary(dir) |
||||
self.assertEqual(list(secondary_store), [quad]) |
||||
|
||||
store.remove(quad) |
||||
store.flush() |
||||
self.assertEqual(list(secondary_store), []) |
||||
del secondary_store |
||||
del store |
||||
|
||||
|
||||
if __name__ == "__main__": |
||||
unittest.main() |
Loading…
Reference in new issue