@@ -1,16 +1,16 @@
 #![allow(clippy::needless_option_as_deref)]
 
-use crate::io::{map_parse_error, PyFileLike};
+use crate::io::{allow_threads_unsafe, map_parse_error, PyFileLike};
 use crate::model::*;
 use crate::sparql::*;
 use oxigraph::io::{DatasetFormat, GraphFormat};
-use oxigraph::model::GraphNameRef;
+use oxigraph::model::{GraphName, GraphNameRef};
 use oxigraph::sparql::Update;
 use oxigraph::store::{self, LoaderError, SerializerError, StorageError, Store};
 use pyo3::exceptions::{PyIOError, PyRuntimeError, PyValueError};
 use pyo3::prelude::*;
 use pyo3::{Py, PyRef};
-use std::io::BufReader;
+use std::io::{BufReader, BufWriter};
 
 /// RDF store.
 ///
@@ -43,14 +43,16 @@ pub struct PyStore {
 #[pymethods]
 impl PyStore {
     #[new]
-    fn new(path: Option<&str>) -> PyResult<Self> {
-        Ok(Self {
-            inner: if let Some(path) = path {
-                Store::open(path)
-            } else {
-                Store::new()
-            }
-            .map_err(map_storage_error)?,
-        })
+    fn new(path: Option<&str>, py: Python<'_>) -> PyResult<Self> {
+        py.allow_threads(|| {
+            Ok(Self {
+                inner: if let Some(path) = path {
+                    Store::open(path)
+                } else {
+                    Store::new()
+                }
+                .map_err(map_storage_error)?,
+            })
+        })
     }
 
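The `py.allow_threads(|| …)` wrapper introduced in this hunk, and applied to the other store methods below, is PyO3's standard way to release the GIL while blocking Rust work runs. A minimal, self-contained sketch of the same pattern, with a made-up `blocking_work` function and `demo` module that are not part of this patch:

use pyo3::prelude::*;
use pyo3::wrap_pyfunction;
use std::{thread, time::Duration};

/// Releases the GIL while a blocking, pure-Rust task runs, so other Python
/// threads can keep executing in the meantime. Both the closure and its
/// return value must be `Send`.
#[pyfunction]
fn blocking_work(py: Python<'_>, millis: u64) -> u64 {
    py.allow_threads(|| {
        thread::sleep(Duration::from_millis(millis));
        millis
    })
}

#[pymodule]
fn demo(_py: Python<'_>, m: &PyModule) -> PyResult<()> {
    m.add_function(wrap_pyfunction!(blocking_work, m)?)?;
    Ok(())
}

Everything the closure needs is moved in before the GIL is released; the `Send` bound on the closure and its result is why the `query` hunk further down uses `allow_threads_unsafe` instead.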
@@ -65,9 +67,11 @@ impl PyStore {
     /// >>> list(store)
     /// [<Quad subject=<NamedNode value=http://example.com> predicate=<NamedNode value=http://example.com/p> object=<Literal value=1 datatype=<NamedNode value=http://www.w3.org/2001/XMLSchema#string>> graph_name=<NamedNode value=http://example.com/g>>]
     #[pyo3(text_signature = "($self, quad)")]
-    fn add(&self, quad: &PyQuad) -> PyResult<()> {
-        self.inner.insert(quad).map_err(map_storage_error)?;
-        Ok(())
+    fn add(&self, quad: &PyQuad, py: Python<'_>) -> PyResult<()> {
+        py.allow_threads(|| {
+            self.inner.insert(quad).map_err(map_storage_error)?;
+            Ok(())
+        })
     }
 
     /// Removes a quad from the store.
@@ -83,9 +87,11 @@ impl PyStore {
     /// >>> list(store)
     /// []
     #[pyo3(text_signature = "($self, quad)")]
-    fn remove(&self, quad: &PyQuad) -> PyResult<()> {
-        self.inner.remove(quad).map_err(map_storage_error)?;
-        Ok(())
+    fn remove(&self, quad: &PyQuad, py: Python<'_>) -> PyResult<()> {
+        py.allow_threads(|| {
+            self.inner.remove(quad).map_err(map_storage_error)?;
+            Ok(())
+        })
     }
 
     /// Looks for the quads matching a given pattern.
@@ -190,7 +196,8 @@ impl PyStore {
             default_graph,
             named_graphs,
         )?;
-        let results = self.inner.query(query).map_err(map_evaluation_error)?;
+        let results =
+            allow_threads_unsafe(|| self.inner.query(query)).map_err(map_evaluation_error)?;
         query_results_to_python(py, results)
     }
 
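`allow_threads_unsafe` comes from `crate::io` and is not defined in this diff; it is used here instead of `py.allow_threads`, presumably because the returned `QueryResults` does not satisfy the `Send` bounds that `Python::allow_threads` requires. A plausible sketch of such a helper, assuming it releases the GIL through PyO3's raw FFI (the `RestoreGuard` drop guard and its exact shape are illustrative, not taken from the patch):

use pyo3::ffi;

/// Hypothetical sketch of the helper imported above: run `f` with the GIL
/// released and re-acquire it afterwards, even if `f` panics. Unlike
/// `Python::allow_threads`, no `Send` bound is enforced, so the caller must
/// guarantee that `f` never touches Python objects.
pub fn allow_threads_unsafe<T>(f: impl FnOnce() -> T) -> T {
    struct RestoreGuard {
        tstate: *mut ffi::PyThreadState,
    }

    impl Drop for RestoreGuard {
        fn drop(&mut self) {
            // Re-acquire the GIL when the guard goes out of scope.
            unsafe { ffi::PyEval_RestoreThread(self.tstate) };
        }
    }

    // Release the GIL and remember the thread state so it can be restored.
    let _guard = RestoreGuard {
        tstate: unsafe { ffi::PyEval_SaveThread() },
    };
    f()
}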
@@ -229,9 +236,12 @@ impl PyStore {
     /// []
     #[pyo3(text_signature = "($self, update, *, base_iri)")]
     #[args(update, "*", base_iri = "None")]
-    fn update(&self, update: &str, base_iri: Option<&str>) -> PyResult<()> {
-        let update = Update::parse(update, base_iri).map_err(|e| map_evaluation_error(e.into()))?;
-        self.inner.update(update).map_err(map_evaluation_error)
+    fn update(&self, update: &str, base_iri: Option<&str>, py: Python<'_>) -> PyResult<()> {
+        py.allow_threads(|| {
+            let update =
+                Update::parse(update, base_iri).map_err(|e| map_evaluation_error(e.into()))?;
+            self.inner.update(update).map_err(map_evaluation_error)
+        })
     }
 
     /// Loads an RDF serialization into the store.
@@ -277,37 +287,40 @@ impl PyStore {
         mime_type: &str,
         base_iri: Option<&str>,
         to_graph: Option<&PyAny>,
+        py: Python<'_>,
     ) -> PyResult<()> {
         let to_graph_name = if let Some(graph_name) = to_graph {
-            Some(PyGraphNameRef::try_from(graph_name)?)
+            Some(GraphName::from(&PyGraphNameRef::try_from(graph_name)?))
         } else {
             None
         };
-        let input = BufReader::new(PyFileLike::new(input));
-        if let Some(graph_format) = GraphFormat::from_media_type(mime_type) {
-            self.inner
-                .load_graph(
-                    input,
-                    graph_format,
-                    &to_graph_name.unwrap_or(PyGraphNameRef::DefaultGraph),
-                    base_iri,
-                )
-                .map_err(map_loader_error)
-        } else if let Some(dataset_format) = DatasetFormat::from_media_type(mime_type) {
-            if to_graph_name.is_some() {
-                return Err(PyValueError::new_err(
-                    "The target graph name parameter is not available for dataset formats",
-                ));
-            }
-            self.inner
-                .load_dataset(input, dataset_format, base_iri)
-                .map_err(map_loader_error)
-        } else {
-            Err(PyValueError::new_err(format!(
-                "Not supported MIME type: {}",
-                mime_type
-            )))
-        }
+        py.allow_threads(|| {
+            let input = BufReader::new(PyFileLike::new(input));
+            if let Some(graph_format) = GraphFormat::from_media_type(mime_type) {
+                self.inner
+                    .load_graph(
+                        input,
+                        graph_format,
+                        to_graph_name.as_ref().unwrap_or(&GraphName::DefaultGraph),
+                        base_iri,
+                    )
+                    .map_err(map_loader_error)
+            } else if let Some(dataset_format) = DatasetFormat::from_media_type(mime_type) {
+                if to_graph_name.is_some() {
+                    return Err(PyValueError::new_err(
+                        "The target graph name parameter is not available for dataset formats",
+                    ));
+                }
+                self.inner
+                    .load_dataset(input, dataset_format, base_iri)
+                    .map_err(map_loader_error)
+            } else {
+                Err(PyValueError::new_err(format!(
+                    "Not supported MIME type: {}",
+                    mime_type
+                )))
+            }
+        })
     }
 
     /// Loads an RDF serialization into the store.
@@ -353,37 +366,40 @@ impl PyStore {
         mime_type: &str,
         base_iri: Option<&str>,
         to_graph: Option<&PyAny>,
+        py: Python<'_>,
     ) -> PyResult<()> {
         let to_graph_name = if let Some(graph_name) = to_graph {
-            Some(PyGraphNameRef::try_from(graph_name)?)
+            Some(GraphName::from(&PyGraphNameRef::try_from(graph_name)?))
         } else {
             None
         };
-        let input = BufReader::new(PyFileLike::new(input));
-        if let Some(graph_format) = GraphFormat::from_media_type(mime_type) {
-            self.inner
-                .bulk_load_graph(
-                    input,
-                    graph_format,
-                    &to_graph_name.unwrap_or(PyGraphNameRef::DefaultGraph),
-                    base_iri,
-                )
-                .map_err(map_loader_error)
-        } else if let Some(dataset_format) = DatasetFormat::from_media_type(mime_type) {
-            if to_graph_name.is_some() {
-                return Err(PyValueError::new_err(
-                    "The target graph name parameter is not available for dataset formats",
-                ));
-            }
-            self.inner
-                .bulk_load_dataset(input, dataset_format, base_iri)
-                .map_err(map_loader_error)
-        } else {
-            Err(PyValueError::new_err(format!(
-                "Not supported MIME type: {}",
-                mime_type
-            )))
-        }
+        py.allow_threads(|| {
+            let input = BufReader::new(PyFileLike::new(input));
+            if let Some(graph_format) = GraphFormat::from_media_type(mime_type) {
+                self.inner
+                    .bulk_load_graph(
+                        input,
+                        graph_format,
+                        &to_graph_name.unwrap_or(GraphName::DefaultGraph),
+                        base_iri,
+                    )
+                    .map_err(map_loader_error)
+            } else if let Some(dataset_format) = DatasetFormat::from_media_type(mime_type) {
+                if to_graph_name.is_some() {
+                    return Err(PyValueError::new_err(
+                        "The target graph name parameter is not available for dataset formats",
+                    ));
+                }
+                self.inner
+                    .bulk_load_dataset(input, dataset_format, base_iri)
+                    .map_err(map_loader_error)
+            } else {
+                Err(PyValueError::new_err(format!(
+                    "Not supported MIME type: {}",
+                    mime_type
+                )))
+            }
+        })
     }
 
     /// Dumps the store quads or triples into a file.
@@ -417,36 +433,44 @@ impl PyStore {
     /// b'<http://example.com> <http://example.com/p> "1" .\n'
     #[pyo3(text_signature = "($self, output, /, mime_type, *, from_graph = None)")]
     #[args(output, mime_type, "*", from_graph = "None")]
-    fn dump(&self, output: PyObject, mime_type: &str, from_graph: Option<&PyAny>) -> PyResult<()> {
+    fn dump(
+        &self,
+        output: PyObject,
+        mime_type: &str,
+        from_graph: Option<&PyAny>,
+        py: Python<'_>,
+    ) -> PyResult<()> {
         let from_graph_name = if let Some(graph_name) = from_graph {
-            Some(PyGraphNameRef::try_from(graph_name)?)
+            Some(GraphName::from(&PyGraphNameRef::try_from(graph_name)?))
         } else {
             None
         };
-        let output = PyFileLike::new(output);
-        if let Some(graph_format) = GraphFormat::from_media_type(mime_type) {
-            self.inner
-                .dump_graph(
-                    output,
-                    graph_format,
-                    &from_graph_name.unwrap_or(PyGraphNameRef::DefaultGraph),
-                )
-                .map_err(map_serializer_error)
-        } else if let Some(dataset_format) = DatasetFormat::from_media_type(mime_type) {
-            if from_graph_name.is_some() {
-                return Err(PyValueError::new_err(
-                    "The target graph name parameter is not available for dataset formats",
-                ));
-            }
-            self.inner
-                .dump_dataset(output, dataset_format)
-                .map_err(map_serializer_error)
-        } else {
-            Err(PyValueError::new_err(format!(
-                "Not supported MIME type: {}",
-                mime_type
-            )))
-        }
+        py.allow_threads(|| {
+            let output = BufWriter::new(PyFileLike::new(output));
+            if let Some(graph_format) = GraphFormat::from_media_type(mime_type) {
+                self.inner
+                    .dump_graph(
+                        output,
+                        graph_format,
+                        &from_graph_name.unwrap_or(GraphName::DefaultGraph),
+                    )
+                    .map_err(map_serializer_error)
+            } else if let Some(dataset_format) = DatasetFormat::from_media_type(mime_type) {
+                if from_graph_name.is_some() {
+                    return Err(PyValueError::new_err(
+                        "The target graph name parameter is not available for dataset formats",
+                    ));
+                }
+                self.inner
+                    .dump_dataset(output, dataset_format)
+                    .map_err(map_serializer_error)
+            } else {
+                Err(PyValueError::new_err(format!(
+                    "Not supported MIME type: {}",
+                    mime_type
+                )))
+            }
+        })
     }
 
     /// Returns an iterator over all the store named graphs.
@@ -477,19 +501,20 @@ impl PyStore {
     /// >>> list(store.named_graphs())
     /// [<NamedNode value=http://example.com/g>]
     #[pyo3(text_signature = "($self, graph_name)")]
-    fn add_graph(&self, graph_name: &PyAny) -> PyResult<()> {
-        match PyGraphNameRef::try_from(graph_name)? {
-            PyGraphNameRef::DefaultGraph => Ok(()),
-            PyGraphNameRef::NamedNode(graph_name) => self
-                .inner
-                .insert_named_graph(&PyNamedOrBlankNodeRef::NamedNode(graph_name))
-                .map(|_| ()),
-            PyGraphNameRef::BlankNode(graph_name) => self
-                .inner
-                .insert_named_graph(&PyNamedOrBlankNodeRef::BlankNode(graph_name))
-                .map(|_| ()),
-        }
-        .map_err(map_storage_error)
+    fn add_graph(&self, graph_name: &PyAny, py: Python<'_>) -> PyResult<()> {
+        let graph_name = GraphName::from(&PyGraphNameRef::try_from(graph_name)?);
+        py.allow_threads(|| {
+            match graph_name {
+                GraphName::DefaultGraph => Ok(()),
+                GraphName::NamedNode(graph_name) => {
+                    self.inner.insert_named_graph(&graph_name).map(|_| ())
+                }
+                GraphName::BlankNode(graph_name) => {
+                    self.inner.insert_named_graph(&graph_name).map(|_| ())
+                }
+            }
+            .map_err(map_storage_error)
+        })
     }
 
     /// Removes a graph from the store.
@@ -506,20 +531,21 @@ impl PyStore {
     /// >>> list(store)
     /// []
     #[pyo3(text_signature = "($self, graph_name)")]
-    fn remove_graph(&self, graph_name: &PyAny) -> PyResult<()> {
-        match PyGraphNameRef::try_from(graph_name)? {
-            PyGraphNameRef::DefaultGraph => self.inner.clear_graph(GraphNameRef::DefaultGraph),
-            PyGraphNameRef::NamedNode(graph_name) => self
-                .inner
-                .remove_named_graph(&PyNamedOrBlankNodeRef::NamedNode(graph_name))
-                .map(|_| ()),
-            PyGraphNameRef::BlankNode(graph_name) => self
-                .inner
-                .remove_named_graph(&PyNamedOrBlankNodeRef::BlankNode(graph_name))
-                .map(|_| ()),
-        }
-        .map_err(map_storage_error)?;
-        Ok(())
+    fn remove_graph(&self, graph_name: &PyAny, py: Python<'_>) -> PyResult<()> {
+        let graph_name = GraphName::from(&PyGraphNameRef::try_from(graph_name)?);
+        py.allow_threads(|| {
+            match graph_name {
+                GraphName::DefaultGraph => self.inner.clear_graph(GraphNameRef::DefaultGraph),
+                GraphName::NamedNode(graph_name) => {
+                    self.inner.remove_named_graph(&graph_name).map(|_| ())
+                }
+                GraphName::BlankNode(graph_name) => {
+                    self.inner.remove_named_graph(&graph_name).map(|_| ())
+                }
+            }
+            .map_err(map_storage_error)?;
+            Ok(())
+        })
     }
 
     /// Creates database backup into the `target_directory`.
@@ -543,14 +569,16 @@ impl PyStore {
     /// :type target_directory: str
     /// :raises IOError: if an I/O error happens during the backup.
     #[pyo3(text_signature = "($self, target_directory)")]
-    fn backup(&self, target_directory: &str) -> PyResult<()> {
-        self.inner
-            .backup(target_directory)
-            .map_err(map_storage_error)
+    fn backup(&self, target_directory: &str, py: Python<'_>) -> PyResult<()> {
+        py.allow_threads(|| {
+            self.inner
+                .backup(target_directory)
+                .map_err(map_storage_error)
+        })
     }
 
-    fn __str__(&self) -> String {
-        self.inner.to_string()
+    fn __str__(&self, py: Python<'_>) -> String {
+        py.allow_threads(|| self.inner.to_string())
     }
 
     fn __bool__(&self) -> PyResult<bool> {