Uses Ruff instead of Black

pull/664/head
Tpt 1 year ago
parent ab5f5c1c60
commit 8a7c6cf2c1
  1. 4
      .github/workflows/artifacts.yml
  2. 2
      .github/workflows/manylinux_build.sh
  3. 2
      .github/workflows/musllinux_build.sh
  4. 8
      .github/workflows/tests.yml
  5. 119
      python/generate_stubs.py
  6. 1
      python/pyproject.toml
  7. 3
      python/requirements.dev.txt
  8. 8
      python/tests/test_io.py
  9. 8
      python/tests/test_model.py
  10. 8
      python/tests/test_store.py

@@ -187,7 +187,7 @@ jobs:
         working-directory: ./python
       - run: pip install --no-index --find-links=target/wheels/ pyoxigraph
       - run: rm -r target/wheels
-      - run: python generate_stubs.py pyoxigraph pyoxigraph.pyi --black
+      - run: python generate_stubs.py pyoxigraph pyoxigraph.pyi --ruff
         working-directory: ./python
       - run: maturin build --release --target universal2-apple-darwin --features abi3
         working-directory: ./python
@@ -226,7 +226,7 @@ jobs:
         working-directory: ./python
       - run: pip install --no-index --find-links=target/wheels/ pyoxigraph
       - run: rm -r target/wheels
-      - run: python generate_stubs.py pyoxigraph pyoxigraph.pyi --black
+      - run: python generate_stubs.py pyoxigraph pyoxigraph.pyi --ruff
         working-directory: ./python
       - run: maturin build --release --features abi3
         working-directory: ./python

@@ -12,7 +12,7 @@ python3.12 -m venv venv
 source venv/bin/activate
 pip install -r requirements.dev.txt
 maturin develop --release
-python generate_stubs.py pyoxigraph pyoxigraph.pyi --black
+python generate_stubs.py pyoxigraph pyoxigraph.pyi --ruff
 maturin build --release --no-default-features --features abi3 --features rustls --compatibility manylinux2014
 if [ %for_each_version% ]; then
   for VERSION in 8 9 10 11 12; do

@@ -10,7 +10,7 @@ python3.12 -m venv venv
 source venv/bin/activate
 pip install -r requirements.dev.txt
 maturin develop --release
-python generate_stubs.py pyoxigraph pyoxigraph.pyi --black
+python generate_stubs.py pyoxigraph pyoxigraph.pyi --ruff
 maturin build --release --no-default-features --features abi3 --features rustls --compatibility musllinux_1_2
 if [ %for_each_version% ]; then
   for VERSION in 8 9 10 11 12; do

@@ -227,8 +227,6 @@ jobs:
           cache: pip
           cache-dependency-path: '**/requirements.dev.txt'
       - run: pip install -r python/requirements.dev.txt
-      - run: python -m black --check --diff --color .
-        working-directory: ./python
       - run: maturin build -m python/Cargo.toml
       - run: pip install --no-index --find-links=target/wheels/ pyoxigraph
       - run: rm -r target/wheels
@@ -238,13 +236,15 @@ jobs:
         working-directory: ./python/docs
       - run: sphinx-build -M html . build
         working-directory: ./python/docs
-      - run: python generate_stubs.py pyoxigraph pyoxigraph.pyi --black
+      - run: python generate_stubs.py pyoxigraph pyoxigraph.pyi --ruff
         working-directory: ./python
       - run: python -m mypy.stubtest pyoxigraph --allowlist=mypy_allowlist.txt
         working-directory: ./python
       - run: python -m mypy generate_stubs.py tests --strict
         working-directory: ./python
-      - run: python -m ruff check .
+      - run: python -m ruff format --check .
+        working-directory: ./python
+      - run: python -m ruff check --output-format=github .
         working-directory: ./python
   python_msv:

@@ -77,9 +77,7 @@ def module_stubs(module: Any) -> ast.Module:
         if member_name.startswith("__"):
             pass
         elif inspect.isclass(member_value):
-            classes.append(
-                class_stubs(member_name, member_value, element_path, types_to_import)
-            )
+            classes.append(class_stubs(member_name, member_value, element_path, types_to_import))
         elif inspect.isbuiltin(member_value):
             functions.append(
                 function_stub(
@ -93,16 +91,12 @@ def module_stubs(module: Any) -> ast.Module:
else: else:
logging.warning(f"Unsupported root construction {member_name}") logging.warning(f"Unsupported root construction {member_name}")
return ast.Module( return ast.Module(
body=[ast.Import(names=[ast.alias(name=t)]) for t in sorted(types_to_import)] body=[ast.Import(names=[ast.alias(name=t)]) for t in sorted(types_to_import)] + classes + functions,
+ classes
+ functions,
type_ignores=[], type_ignores=[],
) )
def class_stubs( def class_stubs(cls_name: str, cls_def: Any, element_path: List[str], types_to_import: Set[str]) -> ast.ClassDef:
cls_name: str, cls_def: Any, element_path: List[str], types_to_import: Set[str]
) -> ast.ClassDef:
attributes: List[ast.AST] = [] attributes: List[ast.AST] = []
methods: List[ast.AST] = [] methods: List[ast.AST] = []
magic_methods: List[ast.AST] = [] magic_methods: List[ast.AST] = []
@@ -124,20 +118,11 @@ def class_stubs(
             ]
         except ValueError as e:
             if "no signature found" not in str(e):
-                raise ValueError(
-                    f"Error while parsing signature of {cls_name}.__init_"
-                ) from e
-        elif (
-            member_value == OBJECT_MEMBERS.get(member_name)
-            or BUILTINS.get(member_name, ()) is None
-        ):
+                raise ValueError(f"Error while parsing signature of {cls_name}.__init_") from e
+        elif member_value == OBJECT_MEMBERS.get(member_name) or BUILTINS.get(member_name, ()) is None:
             pass
         elif inspect.isdatadescriptor(member_value):
-            attributes.extend(
-                data_descriptor_stub(
-                    member_name, member_value, current_element_path, types_to_import
-                )
-            )
+            attributes.extend(data_descriptor_stub(member_name, member_value, current_element_path, types_to_import))
         elif inspect.isroutine(member_value):
             (magic_methods if member_name.startswith("__") else methods).append(
                 function_stub(
@@ -154,9 +139,7 @@ def class_stubs(
                     target=ast.Name(id=member_name, ctx=AST_STORE),
                     annotation=ast.Subscript(
                         value=_path_to_type("typing", "Tuple"),
-                        slice=ast.Tuple(
-                            elts=[_path_to_type("str"), ast.Ellipsis()], ctx=AST_LOAD
-                        ),
+                        slice=ast.Tuple(elts=[_path_to_type("str"), ast.Ellipsis()], ctx=AST_LOAD),
                         ctx=AST_LOAD,
                     ),
                     value=ast.Constant(member_value),
@@ -164,9 +147,7 @@ def class_stubs(
                 )
             )
         else:
-            logging.warning(
-                f"Unsupported member {member_name} of class {'.'.join(element_path)}"
-            )
+            logging.warning(f"Unsupported member {member_name} of class {'.'.join(element_path)}")

     doc = inspect.getdoc(cls_def)
     doc_comment = build_doc_comment(doc) if doc else None
@@ -174,13 +155,7 @@ def class_stubs(
         cls_name,
         bases=[],
         keywords=[],
-        body=(
-            ([doc_comment] if doc_comment else [])
-            + attributes
-            + methods
-            + magic_methods
-            + constants
-        )
+        body=(([doc_comment] if doc_comment else []) + attributes + methods + magic_methods + constants)
         or [AST_ELLIPSIS],
         decorator_list=[_path_to_type("typing", "final")],
     )
@@ -239,9 +214,7 @@ def function_stub(
         arguments_stub(fn_name, fn_def, doc or "", element_path, types_to_import),
         body or [AST_ELLIPSIS],
         decorator_list=decorator_list,
-        returns=returns_stub(fn_name, doc, element_path, types_to_import)
-        if doc
-        else None,
+        returns=returns_stub(fn_name, doc, element_path, types_to_import) if doc else None,
         lineno=0,
     )
@@ -253,9 +226,7 @@ def arguments_stub(
     element_path: List[str],
     types_to_import: Set[str],
 ) -> ast.arguments:
-    real_parameters: Mapping[str, inspect.Parameter] = inspect.signature(
-        callable_def
-    ).parameters
+    real_parameters: Mapping[str, inspect.Parameter] = inspect.signature(callable_def).parameters
     if callable_name == "__init__":
         real_parameters = {
             "self": inspect.Parameter("self", inspect.Parameter.POSITIONAL_ONLY),
@@ -285,9 +256,7 @@ def arguments_stub(
         if type.endswith(", optional"):
             optional_params.add(match[0])
             type = type[:-10]
-        parsed_param_types[match[0]] = convert_type_from_doc(
-            type, element_path, types_to_import
-        )
+        parsed_param_types[match[0]] = convert_type_from_doc(type, element_path, types_to_import)

     # we parse the parameters
     posonlyargs = []
@@ -303,9 +272,7 @@ def arguments_stub(
                 f"The parameter {param.name} of {'.'.join(element_path)} "
                 "has no type definition in the function documentation"
             )
-        param_ast = ast.arg(
-            arg=param.name, annotation=parsed_param_types.get(param.name)
-        )
+        param_ast = ast.arg(arg=param.name, annotation=parsed_param_types.get(param.name))

         default_ast = None
         if param.default != param.empty:
@@ -346,9 +313,7 @@ def arguments_stub(
     )


-def returns_stub(
-    callable_name: str, doc: str, element_path: List[str], types_to_import: Set[str]
-) -> Optional[ast.AST]:
+def returns_stub(callable_name: str, doc: str, element_path: List[str], types_to_import: Set[str]) -> Optional[ast.AST]:
     m = re.findall(r"^ *:rtype: *([^\n]*) *$", doc, re.MULTILINE)
     if len(m) == 0:
         builtin = BUILTINS.get(callable_name)
@@ -359,22 +324,16 @@ def returns_stub(
             "has no type definition using :rtype: in the function documentation"
         )
     if len(m) > 1:
-        raise ValueError(
-            f"Multiple return type annotations found with :rtype: for {'.'.join(element_path)}"
-        )
+        raise ValueError(f"Multiple return type annotations found with :rtype: for {'.'.join(element_path)}")
     return convert_type_from_doc(m[0], element_path, types_to_import)


-def convert_type_from_doc(
-    type_str: str, element_path: List[str], types_to_import: Set[str]
-) -> ast.AST:
+def convert_type_from_doc(type_str: str, element_path: List[str], types_to_import: Set[str]) -> ast.AST:
     type_str = type_str.strip()
     return parse_type_to_ast(type_str, element_path, types_to_import)


-def parse_type_to_ast(
-    type_str: str, element_path: List[str], types_to_import: Set[str]
-) -> ast.AST:
+def parse_type_to_ast(type_str: str, element_path: List[str], types_to_import: Set[str]) -> ast.AST:
     # let's tokenize
     tokens = []
     current_token = ""
@@ -412,26 +371,18 @@ def parse_type_to_ast(
             else:
                 or_groups[-1].append(e)
         if any(not g for g in or_groups):
-            raise ValueError(
-                f"Not able to parse type '{type_str}' used by {'.'.join(element_path)}"
-            )
+            raise ValueError(f"Not able to parse type '{type_str}' used by {'.'.join(element_path)}")

         new_elements: List[ast.AST] = []
         for group in or_groups:
             if len(group) == 1 and isinstance(group[0], str):
                 parts = group[0].split(".")
                 if any(not p for p in parts):
-                    raise ValueError(
-                        f"Not able to parse type '{type_str}' used by {'.'.join(element_path)}"
-                    )
+                    raise ValueError(f"Not able to parse type '{type_str}' used by {'.'.join(element_path)}")
                 if len(parts) > 1:
                     types_to_import.add(parts[0])
                 new_elements.append(_path_to_type(*parts))
-            elif (
-                len(group) == 2
-                and isinstance(group[0], str)
-                and isinstance(group[1], list)
-            ):
+            elif len(group) == 2 and isinstance(group[0], str) and isinstance(group[1], list):
                 if group[0] not in GENERICS:
                     raise ValueError(
                         f"Constructor {group[0]} is not supported in type '{type_str}' used by {'.'.join(element_path)}"
@@ -444,9 +395,7 @@ def parse_type_to_ast(
                 )
             )
         else:
-            raise ValueError(
-                f"Not able to parse type '{type_str}' used by {'.'.join(element_path)}"
-            )
+            raise ValueError(f"Not able to parse type '{type_str}' used by {'.'.join(element_path)}")
     return (
         ast.Subscript(
             value=_path_to_type("typing", "Union"),
@@ -471,33 +420,21 @@ def build_doc_comment(doc: str) -> Optional[ast.Expr]:
     return ast.Expr(value=ast.Constant(text)) if text else None


-def format_with_black(code: str) -> str:
-    result = subprocess.run(
-        ["python", "-m", "black", "-t", "py38", "--pyi", "-"],
-        input=code.encode(),
-        capture_output=True,
-    )
-    result.check_returncode()
-    return result.stdout.decode()
+def format_with_ruff(file: str) -> None:
+    subprocess.check_call(["python", "-m", "ruff", "format", file])


 if __name__ == "__main__":
-    parser = argparse.ArgumentParser(
-        description="Extract Python type stub from a python module."
-    )
-    parser.add_argument(
-        "module_name", help="Name of the Python module for which generate stubs"
-    )
+    parser = argparse.ArgumentParser(description="Extract Python type stub from a python module.")
+    parser.add_argument("module_name", help="Name of the Python module for which generate stubs")
     parser.add_argument(
         "out",
         help="Name of the Python stub file to write to",
         type=argparse.FileType("wt"),
     )
-    parser.add_argument(
-        "--black", help="Formats the generated stubs using Black", action="store_true"
-    )
+    parser.add_argument("--ruff", help="Formats the generated stubs using Ruff", action="store_true")
     args = parser.parse_args()
     stub_content = ast.unparse(module_stubs(importlib.import_module(args.module_name)))
-    if args.black:
-        stub_content = format_with_black(stub_content)
     args.out.write(stub_content)
+    if args.ruff:
+        format_with_ruff(args.out.name)

@@ -39,7 +39,6 @@ select = [
     "FBT",
     "I",
     "ICN",
-    "ISC",
     "N",
     "PIE",
     "PTH",

@@ -1,6 +1,5 @@
-black~=23.1
 furo
 maturin~=1.0
 mypy~=1.0
-ruff~=0.0.292
+ruff~=0.1.0
 sphinx~=7.0

@@ -176,9 +176,7 @@ class TestSerialize(unittest.TestCase):
 class TestParseQuerySolutions(unittest.TestCase):
     def test_parse_file(self) -> None:
         with NamedTemporaryFile(suffix=".tsv") as fp:
-            fp.write(
-                b'?s\t?p\t?o\n<http://example.com/s>\t<http://example.com/s>\t"1"\n'
-            )
+            fp.write(b'?s\t?p\t?o\n<http://example.com/s>\t<http://example.com/s>\t"1"\n')
             fp.flush()
             r = parse_query_results(fp.name)
             self.assertIsInstance(r, QuerySolutions)
@@ -188,9 +186,7 @@ class TestParseQuerySolutions(unittest.TestCase):
     def test_parse_not_existing_file(self) -> None:
         with self.assertRaises(IOError) as _:
-            parse_query_results(
-                "/tmp/not-existing-oxigraph-file.ttl", "application/json"
-            )
+            parse_query_results("/tmp/not-existing-oxigraph-file.ttl", "application/json")

     def test_parse_str_io(self) -> None:
         result = parse_query_results(StringIO("true"), "tsv")

@@ -138,9 +138,7 @@ class TestLiteral(unittest.TestCase):
         self.assertEqual(copy.deepcopy(number), number)

     def test_basic_match(self) -> None:
-        match_works(
-            self, 'Literal("foo", language="en")', 'Literal("foo", language="en")'
-        )
+        match_works(self, 'Literal("foo", language="en")', 'Literal("foo", language="en")')
         match_works(
             self,
             'Literal("1", datatype=XSD_INTEGER)',
@@ -149,9 +147,7 @@ class TestLiteral(unittest.TestCase):
     def test_wildcard_match(self) -> None:
         match_works(self, 'Literal("foo", language="en")', "Literal(v, language=l)")
-        match_works(
-            self, 'Literal("1", datatype=XSD_INTEGER)', "Literal(v, datatype=d)"
-        )
+        match_works(self, 'Literal("1", datatype=XSD_INTEGER)', "Literal(v, datatype=d)")


 class TestTriple(unittest.TestCase):

@@ -149,9 +149,7 @@ class TestStore(unittest.TestCase):
         store.add(Quad(foo, bar, baz, graph))
         results: Any = store.query("SELECT ?s WHERE { ?s ?p ?o }")
         self.assertEqual(len(list(results)), 0)
-        results = store.query(
-            "SELECT ?s WHERE { ?s ?p ?o }", use_default_graph_as_union=True
-        )
+        results = store.query("SELECT ?s WHERE { ?s ?p ?o }", use_default_graph_as_union=True)
         self.assertEqual(len(list(results)), 1)
         results = store.query(
             "SELECT ?s WHERE { ?s ?p ?o }",
@@ -246,9 +244,7 @@ class TestStore(unittest.TestCase):
     def test_update_star(self) -> None:
         store = Store()
-        store.update(
-            "PREFIX : <http://www.example.org/> INSERT DATA { :alice :claims << :bob :age 23 >> }"
-        )
+        store.update("PREFIX : <http://www.example.org/> INSERT DATA { :alice :claims << :bob :age 23 >> }")
         results: Any = store.query(
             "PREFIX : <http://www.example.org/> SELECT ?p ?a WHERE { ?p :claims << :bob :age ?a >> }"
         )

Loading…
Cancel
Save