Skip to content

Commit 450e031

Browse files
authored
Upgrade tooling (#126)
## Changes

- Align our tooling with our other products
- Drop support for Python 3.8 and 3.9. Our default runtime is Jammy, which comes with 3.10
- Upgraded type hints for a more modern code base
- Fix compatibility with Python 3.11+
- The previous flake8 configuration was not properly enforced for some reason. Migrating to ruff made me fix a bunch of violations (which means adding low-value docstrings, to be honest)
1 parent 8bd96c0 commit 450e031

21 files changed

+528
-494
lines changed

.github/workflows/ci-checks.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -12,8 +12,8 @@ jobs:
1212
fail-fast: true
1313
matrix:
1414
python-version:
15-
- "3.9"
1615
- "3.10"
16+
- "3.12"
1717
steps:
1818
- id: checkout
1919
name: Checkout repo

.github/workflows/ci-tests.yaml

+1-1
Original file line numberDiff line numberDiff line change
@@ -17,8 +17,8 @@ jobs:
1717
fail-fast: true
1818
matrix:
1919
python-version:
20-
- "3.9"
2120
- "3.10"
21+
- "3.12"
2222
needs:
2323
- checks
2424
steps:

poetry.lock

+150-206
Some generated files are not rendered by default. Learn more about customizing how changed files appear on GitHub.

pyproject.toml

+63-60
Original file line numberDiff line numberDiff line change
@@ -1,61 +1,19 @@
1-
[tool.pytest.ini_options]
2-
addopts = "--doctest-modules --cov=./spark8t"
3-
4-
[tool.flake8]
5-
per-file-ignores = [
6-
'__init__.py:F401',
7-
'tests/*: D',
8-
'tests/test_utils.py: D, F601'
9-
]
10-
ignore = [
11-
# Ignored by black
12-
'E203', 'E266', 'E501', 'W503',
13-
# Ignored to conform to PEP257
14-
'D203', 'D212', 'D213', 'D214', 'D215', 'D404', 'D405', 'D406', 'D407', 'D408', 'D409', 'D410', 'D411',
15-
'D413', 'D415', 'D416', 'D417',
16-
# Ignored to work with Sphinx
17-
'RST303', 'RST304', 'RST307'
18-
]
19-
# line length is intentionally set to 80 here because black uses Bugbear
20-
# See https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html#line-length for more details
21-
max-line-length = "120"
22-
max-complexity = "18"
23-
select = ['B','C','D','E','F','W','T4','B9','RST','DAR']
24-
docstring_style = "sphinx"
25-
26-
[tool.isort]
27-
py_version = 3
28-
profile = 'black'
29-
known_first_party = ['spark8t','tests']
30-
skip_gitignore = true
31-
32-
[tool.mypy]
33-
follow_imports = "silent"
34-
35-
[[tool.mypy.overrides]]
36-
module = [
37-
"parameterized",
38-
"envyaml",
39-
"pytest"
40-
]
41-
ignore_missing_imports = true
42-
431
[tool.poetry]
442
name = "spark8t"
453
version = "0.0.11"
464
description = "This project provides some utilities function and CLI commands to run Spark on K8s."
475
authors = [
48-
"Canonical Data Platform <data-platform@lists.launchpad.net>"
6+
"Canonical Data Platform <data-platform@lists.launchpad.net>",
497
]
508
license = "Apache-2.0"
519
readme = "README.md"
52-
packages = []
10+
requires-poetry = ">=2.0.0"
5311

5412
[tool.poetry.requires-plugins]
5513
poetry-plugin-export = ">=1.0"
5614

5715
[tool.poetry.dependencies]
58-
python = ">3.8,<4.0"
16+
python = ">=3.10,<4.0"
5917
lightkube = ">=0.11"
6018
jinja2 = ">=3.1.2"
6119
envyaml = ">=1.10.211231"
@@ -64,46 +22,91 @@ envyaml = ">=1.10.211231"
6422
optional = true
6523

6624
[tool.poetry.group.fmt.dependencies]
67-
black = ">=21.12b0"
68-
isort = ">=5.10"
69-
lightkube = ">=0.11"
25+
ruff = "^0.8.6"
7026

7127
[tool.poetry.group.lint]
7228
optional = true
7329

7430
[tool.poetry.group.lint.dependencies]
31+
ruff = "^0.8.6"
7532
codespell = "^2.1.0"
76-
flake8 = ">=4.0.1"
77-
Flake8-pyproject = ">=1.1.0"
78-
mypy = ">=0.910"
33+
mypy = "^1.0.0"
7934
pytest-mypy = ">=0.10.3"
80-
lightkube = ">=0.11"
35+
types-pyyaml = "^6.0.12.20241230"
36+
types-pygments = "^2.19.0.20250107"
37+
types-colorama = "^0.4.15.20240311"
38+
types-pyopenssl = "^24.1.0.20240722"
8139

8240
[tool.poetry.group.unit]
8341
optional = true
8442

8543
[tool.poetry.group.unit.dependencies]
86-
tox = ">3.21.4"
87-
mypy = ">=0.910"
8844
pytest-cov = ">=3.0"
8945
pytest = ">=6.2"
9046
pytest-mock = ">=3.10"
91-
lightkube = ">=0.11"
9247
pyOpenSSL = ">=23.1.1"
48+
tox = "^4.23.2"
9349

9450
[tool.poetry.group.integration]
9551
optional = true
9652

9753
[tool.poetry.group.integration.dependencies]
98-
tox = ">3.21.4"
99-
mypy = ">=0.910"
10054
pytest-cov = ">=3.0"
10155
pytest = ">=6.2"
102-
pytest-mock = ">=3.10"
103-
lightkube = ">=0.11"
104-
parameterized = ">=0.9.0"
56+
tox = "^4.23.2"
10557

10658
[build-system]
10759
requires = ["poetry-core"]
10860
build-backend = "poetry.core.masonry.api"
10961

62+
[tool.pytest.ini_options]
63+
addopts = "--doctest-modules --cov=./spark8t"
64+
65+
[tool.ruff]
66+
extend-exclude = ["__pycache__", "*.egg_info"]
67+
target-version = "py38"
68+
src = ["spark8t", "tests"]
69+
70+
[tool.ruff.lint]
71+
select = ['B', 'C', 'D', 'E', 'F', 'W', 'B9']
72+
ignore = ["E501", "D107"]
73+
extend-ignore = [
74+
# Ignored by black
75+
'E203',
76+
'E266',
77+
'E501',
78+
# Ignored to conform to PEP257
79+
'D203',
80+
'D212',
81+
'D213',
82+
'D214',
83+
'D215',
84+
'D404',
85+
'D405',
86+
'D406',
87+
'D407',
88+
'D408',
89+
'D409',
90+
'D410',
91+
'D411',
92+
'D413',
93+
'D415',
94+
'D416',
95+
'D417',
96+
]
97+
per-file-ignores = { "__init__.py" = ["F401"], "tests/*" = ["D"], "tests/test_utils.py" = ["F601"] }
98+
mccabe.max-complexity = 18
99+
100+
[tool.ruff.lint.isort]
101+
known-first-party = ["spark8t", "tests"]
102+
103+
[tool.mypy]
104+
follow_imports = "silent"
105+
106+
[[tool.mypy.overrides]]
107+
module = [
108+
"parameterized",
109+
"envyaml",
110+
"pytest",
111+
]
112+
ignore_missing_imports = true

requirements.txt

+15-15
Original file line numberDiff line numberDiff line change
@@ -1,37 +1,37 @@
1-
anyio==4.5.2 ; python_full_version > "3.8.0" and python_version < "4.0" \
1+
anyio==4.5.2 ; python_version >= "3.10" and python_version < "4.0" \
22
--hash=sha256:23009af4ed04ce05991845451e11ef02fc7c5ed29179ac9a420e5ad0ac7ddc5b \
33
--hash=sha256:c011ee36bc1e8ba40e5a81cb9df91925c218fe9b778554e0b56a21e1b5d4716f
4-
certifi==2024.12.14 ; python_full_version > "3.8.0" and python_version < "4.0" \
4+
certifi==2024.12.14 ; python_version >= "3.10" and python_version < "4.0" \
55
--hash=sha256:1275f7a45be9464efc1173084eaa30f866fe2e47d389406136d332ed4967ec56 \
66
--hash=sha256:b650d30f370c2b724812bee08008be0c4163b163ddaec3f2546c1caf65f191db
7-
envyaml==1.10.211231 ; python_full_version > "3.8.0" and python_version < "4.0" \
7+
envyaml==1.10.211231 ; python_version >= "3.10" and python_version < "4.0" \
88
--hash=sha256:88f8a076159e3c317d3450a5f404132b6ac91aecee4934ea72eac65f911f1244 \
99
--hash=sha256:8d7a7a6be12587cc5da32a587067506b47b849f4643981099ad148015a72de52
10-
exceptiongroup==1.2.2 ; python_full_version > "3.8.0" and python_version < "3.11" \
10+
exceptiongroup==1.2.2 ; python_version >= "3.10" and python_version < "3.11" \
1111
--hash=sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b \
1212
--hash=sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc
13-
h11==0.14.0 ; python_full_version > "3.8.0" and python_version < "4.0" \
13+
h11==0.14.0 ; python_version >= "3.10" and python_version < "4.0" \
1414
--hash=sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d \
1515
--hash=sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761
16-
httpcore==1.0.7 ; python_full_version > "3.8.0" and python_version < "4.0" \
16+
httpcore==1.0.7 ; python_version >= "3.10" and python_version < "4.0" \
1717
--hash=sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c \
1818
--hash=sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd
19-
httpx==0.28.1 ; python_full_version > "3.8.0" and python_version < "4.0" \
19+
httpx==0.28.1 ; python_version >= "3.10" and python_version < "4.0" \
2020
--hash=sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc \
2121
--hash=sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad
22-
idna==3.10 ; python_full_version > "3.8.0" and python_version < "4.0" \
22+
idna==3.10 ; python_version >= "3.10" and python_version < "4.0" \
2323
--hash=sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9 \
2424
--hash=sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3
25-
jinja2==3.1.5 ; python_full_version > "3.8.0" and python_version < "4.0" \
25+
jinja2==3.1.5 ; python_version >= "3.10" and python_version < "4.0" \
2626
--hash=sha256:8fefff8dc3034e27bb80d67c671eb8a9bc424c0ef4c0826edbff304cceff43bb \
2727
--hash=sha256:aba0f4dc9ed8013c424088f68a5c226f7d6097ed89b246d7749c2ec4175c6adb
28-
lightkube-models==1.32.0.8 ; python_full_version > "3.8.0" and python_version < "4.0" \
28+
lightkube-models==1.32.0.8 ; python_version >= "3.10" and python_version < "4.0" \
2929
--hash=sha256:73786dac63085521f4c88aa69d86bfdc76a67da997c1770e5bdcef8482e4b2a0 \
3030
--hash=sha256:97f6c2ab554a23a69554dd56ffbd94173fb416af6490c3a21b1e0b8e13a2bafe
31-
lightkube==0.17.1 ; python_full_version > "3.8.0" and python_version < "4.0" \
31+
lightkube==0.17.1 ; python_version >= "3.10" and python_version < "4.0" \
3232
--hash=sha256:3d046c2c46191b3745471763710ef4ed2df4259a7405f798b577df2ae390358a \
3333
--hash=sha256:e0d6b71476a4fa7cbda7080da1f0943e43c7e747212db9f2ec7d87415bf8d23e
34-
markupsafe==2.1.5 ; python_full_version > "3.8.0" and python_version < "4.0" \
34+
markupsafe==2.1.5 ; python_version >= "3.10" and python_version < "4.0" \
3535
--hash=sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf \
3636
--hash=sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff \
3737
--hash=sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f \
@@ -92,7 +92,7 @@ markupsafe==2.1.5 ; python_full_version > "3.8.0" and python_version < "4.0" \
9292
--hash=sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab \
9393
--hash=sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd \
9494
--hash=sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68
95-
pyyaml==6.0.2 ; python_full_version > "3.8.0" and python_version < "4.0" \
95+
pyyaml==6.0.2 ; python_version >= "3.10" and python_version < "4.0" \
9696
--hash=sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff \
9797
--hash=sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48 \
9898
--hash=sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086 \
@@ -146,9 +146,9 @@ pyyaml==6.0.2 ; python_full_version > "3.8.0" and python_version < "4.0" \
146146
--hash=sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba \
147147
--hash=sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12 \
148148
--hash=sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4
149-
sniffio==1.3.1 ; python_full_version > "3.8.0" and python_version < "4.0" \
149+
sniffio==1.3.1 ; python_version >= "3.10" and python_version < "4.0" \
150150
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
151151
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
152-
typing-extensions==4.12.2 ; python_full_version > "3.8.0" and python_version < "3.11" \
152+
typing-extensions==4.12.2 ; python_version >= "3.10" and python_version < "3.11" \
153153
--hash=sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d \
154154
--hash=sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8

spark8t/__init__.py

+1
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
"""This project provides some utilities function and CLI commands to run Spark on K8s."""

spark8t/cli/__init__.py

+2
Original file line numberDiff line numberDiff line change
@@ -1,3 +1,5 @@
1+
"""CLI interface for spark8t."""
2+
13
import os
24

35
from spark8t.domain import Defaults

spark8t/cli/params.py

+15-11
Original file line numberDiff line numberDiff line change
@@ -1,18 +1,20 @@
1+
"""Parameters module."""
2+
13
import logging
24
from argparse import ArgumentParser, Namespace
3-
from typing import Callable, List, Optional
5+
from typing import Callable
46

57
from spark8t.cli import defaults
68
from spark8t.services import AbstractKubeInterface, KubeInterface, LightKube
79
from spark8t.utils import DEFAULT_LOGGING_FILE, config_from_file, environ
810

911

1012
def parse_arguments_with(
11-
parsers: List[Callable[[ArgumentParser], ArgumentParser]],
12-
base_parser: Optional[ArgumentParser] = None,
13+
parsers: list[Callable[[ArgumentParser], ArgumentParser]],
14+
base_parser: ArgumentParser | None = None,
1315
):
1416
"""
15-
Specify a chain of parsers to help parse the list of arguments to main
17+
Specify a chain of parsers to help parse the list of arguments to main.
1618
1719
:param parsers: List of parsers to be applied.
1820
:param namespace: Namespace to be used for parsing.
@@ -26,7 +28,7 @@ def parse_arguments_with(
2628

2729
def add_logging_arguments(parser: ArgumentParser) -> ArgumentParser:
2830
"""
29-
Add logging argument parsing to the existing parser context
31+
Add logging argument parsing to the existing parser context.
3032
3133
:param parser: Input parser to decorate with parsing support for logging args.
3234
"""
@@ -46,7 +48,7 @@ def add_logging_arguments(parser: ArgumentParser) -> ArgumentParser:
4648

4749
def add_ignore_integration_hub(parser: ArgumentParser) -> ArgumentParser:
4850
"""
49-
Add option to exclude the configuration provided by the Spark Integration Hub
51+
Add option to exclude the configuration provided by the Spark Integration Hub.
5052
5153
:param parser: Input parser to decorate with parsing support for logging args.
5254
"""
@@ -61,7 +63,7 @@ def add_ignore_integration_hub(parser: ArgumentParser) -> ArgumentParser:
6163

6264
def spark_user_parser(parser: ArgumentParser) -> ArgumentParser:
6365
"""
64-
Add Spark user related argument parsing to the existing parser context
66+
Add Spark user related argument parsing to the existing parser context.
6567
6668
:param parser: Input parser to decorate with parsing support for Spark params.
6769
"""
@@ -82,7 +84,7 @@ def spark_user_parser(parser: ArgumentParser) -> ArgumentParser:
8284

8385
def k8s_parser(parser: ArgumentParser) -> ArgumentParser:
8486
"""
85-
Add K8s related argument parsing to the existing parser context
87+
Add K8s related argument parsing to the existing parser context.
8688
8789
:param parser: Input parser to decorate with parsing support for Spark params.
8890
"""
@@ -107,7 +109,7 @@ def k8s_parser(parser: ArgumentParser) -> ArgumentParser:
107109

108110
def add_config_arguments(parser: ArgumentParser) -> ArgumentParser:
109111
"""
110-
Add arguments to provide extra configurations for the spark properties
112+
Add arguments to provide extra configurations for the spark properties.
111113
112114
:param parser: Input parser to decorate with parsing support for deploy arguments.
113115
"""
@@ -128,7 +130,7 @@ def add_config_arguments(parser: ArgumentParser) -> ArgumentParser:
128130

129131
def add_deploy_arguments(parser: ArgumentParser) -> ArgumentParser:
130132
"""
131-
Add deployment related argument parsing to the existing parser context
133+
Add deployment related argument parsing to the existing parser context.
132134
133135
:param parser: Input parser to decorate with parsing support for deploy arguments.
134136
"""
@@ -143,6 +145,7 @@ def add_deploy_arguments(parser: ArgumentParser) -> ArgumentParser:
143145

144146

145147
def get_kube_interface(args: Namespace) -> AbstractKubeInterface:
148+
"""Get configured kube interface."""
146149
_class = LightKube if args.backend == "lightkube" else KubeInterface
147150

148151
return _class(
@@ -151,8 +154,9 @@ def get_kube_interface(args: Namespace) -> AbstractKubeInterface:
151154

152155

153156
def setup_logging(
154-
log_level: str, config_file: Optional[str], logger_name: Optional[str] = None
157+
log_level: str, config_file: str | None, logger_name: str | None = None
155158
) -> logging.Logger:
159+
"""Set up logging from configuration file."""
156160
with environ(LOG_LEVEL=log_level) as _:
157161
config_from_file(config_file or DEFAULT_LOGGING_FILE)
158162
return logging.getLogger(logger_name) if logger_name else logging.root

spark8t/cli/pyspark.py

+3-2
Original file line numberDiff line numberDiff line change
@@ -1,9 +1,9 @@
11
#!/usr/bin/env python3
2+
"""Pyspark module."""
23

34
import re
45
from argparse import Namespace
56
from logging import Logger
6-
from typing import Optional
77

88
from spark8t.cli.params import (
99
add_config_arguments,
@@ -22,6 +22,7 @@
2222

2323

2424
def main(args: Namespace, logger: Logger):
25+
"""Pyspark main entrypoint."""
2526
kube_interface = get_kube_interface(args)
2627

2728
registry = K8sServiceAccountRegistry(
@@ -30,7 +31,7 @@ def main(args: Namespace, logger: Logger):
3031
else kube_interface
3132
)
3233

33-
service_account: Optional[ServiceAccount] = (
34+
service_account: ServiceAccount | None = (
3435
registry.get_primary()
3536
if args.username is None and args.namespace is None
3637
else registry.get(f"{args.namespace or 'default'}:{args.username or 'spark'}")

0 commit comments

Comments
 (0)