diff --git a/.github/workflows/artifacts.yml b/.github/workflows/artifacts.yml
index 62123141..5e170600 100644
--- a/.github/workflows/artifacts.yml
+++ b/.github/workflows/artifacts.yml
@@ -187,7 +187,7 @@ jobs:
         working-directory: ./python
       - run: pip install --no-index --find-links=target/wheels/ pyoxigraph
       - run: rm -r target/wheels
-      - run: python generate_stubs.py pyoxigraph pyoxigraph.pyi --black
+      - run: python generate_stubs.py pyoxigraph pyoxigraph.pyi --ruff
         working-directory: ./python
       - run: maturin build --release --target universal2-apple-darwin --features abi3
         working-directory: ./python
@@ -226,7 +226,7 @@ jobs:
         working-directory: ./python
       - run: pip install --no-index --find-links=target/wheels/ pyoxigraph
       - run: rm -r target/wheels
-      - run: python generate_stubs.py pyoxigraph pyoxigraph.pyi --black
+      - run: python generate_stubs.py pyoxigraph pyoxigraph.pyi --ruff
         working-directory: ./python
       - run: maturin build --release --features abi3
         working-directory: ./python
diff --git a/.github/workflows/manylinux_build.sh b/.github/workflows/manylinux_build.sh
index ccefe8b7..fe1d7a0f 100644
--- a/.github/workflows/manylinux_build.sh
+++ b/.github/workflows/manylinux_build.sh
@@ -12,7 +12,7 @@ python3.12 -m venv venv
 source venv/bin/activate
 pip install -r requirements.dev.txt
 maturin develop --release
-python generate_stubs.py pyoxigraph pyoxigraph.pyi --black
+python generate_stubs.py pyoxigraph pyoxigraph.pyi --ruff
 maturin build --release --no-default-features --features abi3 --features rustls --compatibility manylinux2014
 if [ %for_each_version% ]; then
   for VERSION in 8 9 10 11 12; do
diff --git a/.github/workflows/musllinux_build.sh b/.github/workflows/musllinux_build.sh
index e85faa3f..6a519d20 100644
--- a/.github/workflows/musllinux_build.sh
+++ b/.github/workflows/musllinux_build.sh
@@ -10,7 +10,7 @@ python3.12 -m venv venv
 source venv/bin/activate
 pip install -r requirements.dev.txt
 maturin develop --release
-python generate_stubs.py pyoxigraph pyoxigraph.pyi --black
+python generate_stubs.py pyoxigraph pyoxigraph.pyi --ruff
 maturin build --release --no-default-features --features abi3 --features rustls --compatibility musllinux_1_2
 if [ %for_each_version% ]; then
   for VERSION in 8 9 10 11 12; do
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 2c0dcfbb..d0e45d20 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -227,8 +227,6 @@ jobs:
         cache: pip
         cache-dependency-path: '**/requirements.dev.txt'
       - run: pip install -r python/requirements.dev.txt
-      - run: python -m black --check --diff --color .
-        working-directory: ./python
       - run: maturin build -m python/Cargo.toml
       - run: pip install --no-index --find-links=target/wheels/ pyoxigraph
       - run: rm -r target/wheels
@@ -238,13 +236,15 @@ jobs:
         working-directory: ./python/docs
       - run: sphinx-build -M html . build
         working-directory: ./python/docs
-      - run: python generate_stubs.py pyoxigraph pyoxigraph.pyi --black
+      - run: python generate_stubs.py pyoxigraph pyoxigraph.pyi --ruff
         working-directory: ./python
       - run: python -m mypy.stubtest pyoxigraph --allowlist=mypy_allowlist.txt
         working-directory: ./python
       - run: python -m mypy generate_stubs.py tests --strict
         working-directory: ./python
-      - run: python -m ruff check .
+      - run: python -m ruff format --check .
+        working-directory: ./python
+      - run: python -m ruff check --output-format=github .
         working-directory: ./python

   python_msv:
diff --git a/python/generate_stubs.py b/python/generate_stubs.py
index f343fd53..5498f2db 100644
--- a/python/generate_stubs.py
+++ b/python/generate_stubs.py
@@ -77,9 +77,7 @@ def module_stubs(module: Any) -> ast.Module:
         if member_name.startswith("__"):
             pass
         elif inspect.isclass(member_value):
-            classes.append(
-                class_stubs(member_name, member_value, element_path, types_to_import)
-            )
+            classes.append(class_stubs(member_name, member_value, element_path, types_to_import))
         elif inspect.isbuiltin(member_value):
             functions.append(
                 function_stub(
@@ -93,16 +91,12 @@
         else:
             logging.warning(f"Unsupported root construction {member_name}")
     return ast.Module(
-        body=[ast.Import(names=[ast.alias(name=t)]) for t in sorted(types_to_import)]
-        + classes
-        + functions,
+        body=[ast.Import(names=[ast.alias(name=t)]) for t in sorted(types_to_import)] + classes + functions,
         type_ignores=[],
     )


-def class_stubs(
-    cls_name: str, cls_def: Any, element_path: List[str], types_to_import: Set[str]
-) -> ast.ClassDef:
+def class_stubs(cls_name: str, cls_def: Any, element_path: List[str], types_to_import: Set[str]) -> ast.ClassDef:
     attributes: List[ast.AST] = []
     methods: List[ast.AST] = []
     magic_methods: List[ast.AST] = []
@@ -124,20 +118,11 @@ def class_stubs(
                 ]
             except ValueError as e:
                 if "no signature found" not in str(e):
-                    raise ValueError(
-                        f"Error while parsing signature of {cls_name}.__init_"
-                    ) from e
-        elif (
-            member_value == OBJECT_MEMBERS.get(member_name)
-            or BUILTINS.get(member_name, ()) is None
-        ):
+                    raise ValueError(f"Error while parsing signature of {cls_name}.__init_") from e
+        elif member_value == OBJECT_MEMBERS.get(member_name) or BUILTINS.get(member_name, ()) is None:
             pass
         elif inspect.isdatadescriptor(member_value):
-            attributes.extend(
-                data_descriptor_stub(
-                    member_name, member_value, current_element_path, types_to_import
-                )
-            )
+            attributes.extend(data_descriptor_stub(member_name, member_value, current_element_path, types_to_import))
         elif inspect.isroutine(member_value):
             (magic_methods if member_name.startswith("__") else methods).append(
                 function_stub(
@@ -154,9 +139,7 @@
                     target=ast.Name(id=member_name, ctx=AST_STORE),
                     annotation=ast.Subscript(
                         value=_path_to_type("typing", "Tuple"),
-                        slice=ast.Tuple(
-                            elts=[_path_to_type("str"), ast.Ellipsis()], ctx=AST_LOAD
-                        ),
+                        slice=ast.Tuple(elts=[_path_to_type("str"), ast.Ellipsis()], ctx=AST_LOAD),
                         ctx=AST_LOAD,
                     ),
                     value=ast.Constant(member_value),
@@ -164,9 +147,7 @@
                 )
             )
         else:
-            logging.warning(
-                f"Unsupported member {member_name} of class {'.'.join(element_path)}"
-            )
+            logging.warning(f"Unsupported member {member_name} of class {'.'.join(element_path)}")

     doc = inspect.getdoc(cls_def)
     doc_comment = build_doc_comment(doc) if doc else None
@@ -174,13 +155,7 @@
         cls_name,
         bases=[],
         keywords=[],
-        body=(
-            ([doc_comment] if doc_comment else [])
-            + attributes
-            + methods
-            + magic_methods
-            + constants
-        )
+        body=(([doc_comment] if doc_comment else []) + attributes + methods + magic_methods + constants)
         or [AST_ELLIPSIS],
         decorator_list=[_path_to_type("typing", "final")],
     )
@@ -239,9 +214,7 @@
         arguments_stub(fn_name, fn_def, doc or "", element_path, types_to_import),
         body or [AST_ELLIPSIS],
         decorator_list=decorator_list,
-        returns=returns_stub(fn_name, doc, element_path, types_to_import)
-        if doc
-        else None,
+        returns=returns_stub(fn_name, doc, element_path, types_to_import) if doc else None,
         lineno=0,
     )

@@ -253,9 +226,7 @@ def arguments_stub(
     element_path: List[str],
     types_to_import: Set[str],
 ) -> ast.arguments:
-    real_parameters: Mapping[str, inspect.Parameter] = inspect.signature(
-        callable_def
-    ).parameters
+    real_parameters: Mapping[str, inspect.Parameter] = inspect.signature(callable_def).parameters
     if callable_name == "__init__":
         real_parameters = {
             "self": inspect.Parameter("self", inspect.Parameter.POSITIONAL_ONLY),
@@ -285,9 +256,7 @@
         if type.endswith(", optional"):
             optional_params.add(match[0])
             type = type[:-10]
-        parsed_param_types[match[0]] = convert_type_from_doc(
-            type, element_path, types_to_import
-        )
+        parsed_param_types[match[0]] = convert_type_from_doc(type, element_path, types_to_import)

     # we parse the parameters
     posonlyargs = []
@@ -303,9 +272,7 @@
                 f"The parameter {param.name} of {'.'.join(element_path)} "
                 "has no type definition in the function documentation"
             )
-        param_ast = ast.arg(
-            arg=param.name, annotation=parsed_param_types.get(param.name)
-        )
+        param_ast = ast.arg(arg=param.name, annotation=parsed_param_types.get(param.name))

         default_ast = None
         if param.default != param.empty:
@@ -346,9 +313,7 @@
     )


-def returns_stub(
-    callable_name: str, doc: str, element_path: List[str], types_to_import: Set[str]
-) -> Optional[ast.AST]:
+def returns_stub(callable_name: str, doc: str, element_path: List[str], types_to_import: Set[str]) -> Optional[ast.AST]:
     m = re.findall(r"^ *:rtype: *([^\n]*) *$", doc, re.MULTILINE)
     if len(m) == 0:
         builtin = BUILTINS.get(callable_name)
@@ -359,22 +324,16 @@
             "has no type definition using :rtype: in the function documentation"
         )
     if len(m) > 1:
-        raise ValueError(
-            f"Multiple return type annotations found with :rtype: for {'.'.join(element_path)}"
-        )
+        raise ValueError(f"Multiple return type annotations found with :rtype: for {'.'.join(element_path)}")
     return convert_type_from_doc(m[0], element_path, types_to_import)


-def convert_type_from_doc(
-    type_str: str, element_path: List[str], types_to_import: Set[str]
-) -> ast.AST:
+def convert_type_from_doc(type_str: str, element_path: List[str], types_to_import: Set[str]) -> ast.AST:
     type_str = type_str.strip()
     return parse_type_to_ast(type_str, element_path, types_to_import)


-def parse_type_to_ast(
-    type_str: str, element_path: List[str], types_to_import: Set[str]
-) -> ast.AST:
+def parse_type_to_ast(type_str: str, element_path: List[str], types_to_import: Set[str]) -> ast.AST:
     # let's tokenize
     tokens = []
     current_token = ""
@@ -412,26 +371,18 @@
         else:
             or_groups[-1].append(e)
     if any(not g for g in or_groups):
-        raise ValueError(
-            f"Not able to parse type '{type_str}' used by {'.'.join(element_path)}"
-        )
+        raise ValueError(f"Not able to parse type '{type_str}' used by {'.'.join(element_path)}")

     new_elements: List[ast.AST] = []
     for group in or_groups:
         if len(group) == 1 and isinstance(group[0], str):
             parts = group[0].split(".")
             if any(not p for p in parts):
-                raise ValueError(
-                    f"Not able to parse type '{type_str}' used by {'.'.join(element_path)}"
-                )
+                raise ValueError(f"Not able to parse type '{type_str}' used by {'.'.join(element_path)}")
             if len(parts) > 1:
                 types_to_import.add(parts[0])
             new_elements.append(_path_to_type(*parts))
-        elif (
-            len(group) == 2
-            and isinstance(group[0], str)
-            and isinstance(group[1], list)
-        ):
+        elif len(group) == 2 and isinstance(group[0], str) and isinstance(group[1], list):
             if group[0] not in GENERICS:
                 raise ValueError(
f"Constructor {group[0]} is not supported in type '{type_str}' used by {'.'.join(element_path)}" @@ -444,9 +395,7 @@ def parse_type_to_ast( ) ) else: - raise ValueError( - f"Not able to parse type '{type_str}' used by {'.'.join(element_path)}" - ) + raise ValueError(f"Not able to parse type '{type_str}' used by {'.'.join(element_path)}") return ( ast.Subscript( value=_path_to_type("typing", "Union"), @@ -471,33 +420,21 @@ def build_doc_comment(doc: str) -> Optional[ast.Expr]: return ast.Expr(value=ast.Constant(text)) if text else None -def format_with_black(code: str) -> str: - result = subprocess.run( - ["python", "-m", "black", "-t", "py38", "--pyi", "-"], - input=code.encode(), - capture_output=True, - ) - result.check_returncode() - return result.stdout.decode() +def format_with_ruff(file: str) -> None: + subprocess.check_call(["python", "-m", "ruff", "format", file]) if __name__ == "__main__": - parser = argparse.ArgumentParser( - description="Extract Python type stub from a python module." - ) - parser.add_argument( - "module_name", help="Name of the Python module for which generate stubs" - ) + parser = argparse.ArgumentParser(description="Extract Python type stub from a python module.") + parser.add_argument("module_name", help="Name of the Python module for which generate stubs") parser.add_argument( "out", help="Name of the Python stub file to write to", type=argparse.FileType("wt"), ) - parser.add_argument( - "--black", help="Formats the generated stubs using Black", action="store_true" - ) + parser.add_argument("--ruff", help="Formats the generated stubs using Ruff", action="store_true") args = parser.parse_args() stub_content = ast.unparse(module_stubs(importlib.import_module(args.module_name))) - if args.black: - stub_content = format_with_black(stub_content) args.out.write(stub_content) + if args.ruff: + format_with_ruff(args.out.name) diff --git a/python/pyproject.toml b/python/pyproject.toml index dbdc8cd1..441baffb 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -39,7 +39,6 @@ select = [ "FBT", "I", "ICN", - "ISC", "N", "PIE", "PTH", diff --git a/python/requirements.dev.txt b/python/requirements.dev.txt index 8595505c..67eb7d2b 100644 --- a/python/requirements.dev.txt +++ b/python/requirements.dev.txt @@ -1,6 +1,5 @@ -black~=23.1 furo maturin~=1.0 mypy~=1.0 -ruff~=0.0.292 +ruff~=0.1.0 sphinx~=7.0 diff --git a/python/tests/test_io.py b/python/tests/test_io.py index c761148f..65223049 100644 --- a/python/tests/test_io.py +++ b/python/tests/test_io.py @@ -176,9 +176,7 @@ class TestSerialize(unittest.TestCase): class TestParseQuerySolutions(unittest.TestCase): def test_parse_file(self) -> None: with NamedTemporaryFile(suffix=".tsv") as fp: - fp.write( - b'?s\t?p\t?o\n\t\t"1"\n' - ) + fp.write(b'?s\t?p\t?o\n\t\t"1"\n') fp.flush() r = parse_query_results(fp.name) self.assertIsInstance(r, QuerySolutions) @@ -188,9 +186,7 @@ class TestParseQuerySolutions(unittest.TestCase): def test_parse_not_existing_file(self) -> None: with self.assertRaises(IOError) as _: - parse_query_results( - "/tmp/not-existing-oxigraph-file.ttl", "application/json" - ) + parse_query_results("/tmp/not-existing-oxigraph-file.ttl", "application/json") def test_parse_str_io(self) -> None: result = parse_query_results(StringIO("true"), "tsv") diff --git a/python/tests/test_model.py b/python/tests/test_model.py index 2931d06d..6bed69fd 100644 --- a/python/tests/test_model.py +++ b/python/tests/test_model.py @@ -138,9 +138,7 @@ class TestLiteral(unittest.TestCase): 
         self.assertEqual(copy.deepcopy(number), number)

     def test_basic_match(self) -> None:
-        match_works(
-            self, 'Literal("foo", language="en")', 'Literal("foo", language="en")'
-        )
+        match_works(self, 'Literal("foo", language="en")', 'Literal("foo", language="en")')
         match_works(
             self,
             'Literal("1", datatype=XSD_INTEGER)',
@@ -149,9 +147,7 @@ class TestLiteral(unittest.TestCase):

     def test_wildcard_match(self) -> None:
         match_works(self, 'Literal("foo", language="en")', "Literal(v, language=l)")
-        match_works(
-            self, 'Literal("1", datatype=XSD_INTEGER)', "Literal(v, datatype=d)"
-        )
+        match_works(self, 'Literal("1", datatype=XSD_INTEGER)', "Literal(v, datatype=d)")


 class TestTriple(unittest.TestCase):
diff --git a/python/tests/test_store.py b/python/tests/test_store.py
index 84107d84..b9fc1be8 100644
--- a/python/tests/test_store.py
+++ b/python/tests/test_store.py
@@ -149,9 +149,7 @@ class TestStore(unittest.TestCase):
         store.add(Quad(foo, bar, baz, graph))
         results: Any = store.query("SELECT ?s WHERE { ?s ?p ?o }")
         self.assertEqual(len(list(results)), 0)
-        results = store.query(
-            "SELECT ?s WHERE { ?s ?p ?o }", use_default_graph_as_union=True
-        )
+        results = store.query("SELECT ?s WHERE { ?s ?p ?o }", use_default_graph_as_union=True)
         self.assertEqual(len(list(results)), 1)
         results = store.query(
             "SELECT ?s WHERE { ?s ?p ?o }",
@@ -246,9 +244,7 @@ class TestStore(unittest.TestCase):

     def test_update_star(self) -> None:
         store = Store()
-        store.update(
-            "PREFIX : INSERT DATA { :alice :claims << :bob :age 23 >> }"
-        )
+        store.update("PREFIX : INSERT DATA { :alice :claims << :bob :age 23 >> }")
         results: Any = store.query(
             "PREFIX : SELECT ?p ?a WHERE { ?p :claims << :bob :age ?a >> }"
         )