From 83991e1c453edc46db7e2933aaae84c6414e0dce Mon Sep 17 00:00:00 2001 From: Serge Smertin Date: Fri, 15 Mar 2024 18:25:40 +0100 Subject: [PATCH] Make it work with Python 3.8 --- .github/workflows/push.yml | 2 +- Makefile | 1 - README.md | 2 +- pyproject.toml | 12 +++++++----- src/databricks/labs/pylint/airflow.py | 10 +++++----- 5 files changed, 14 insertions(+), 13 deletions(-) diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml index 72b43b3..85ccc36 100644 --- a/.github/workflows/push.yml +++ b/.github/workflows/push.yml @@ -19,7 +19,7 @@ jobs: strategy: fail-fast: false matrix: - pyVersion: [ '3.10', '3.11', '3.12' ] + pyVersion: [ '3.8', '3.9', '3.10', '3.11', '3.12' ] runs-on: ubuntu-latest steps: - name: Checkout diff --git a/Makefile b/Makefile index aa00d24..a045aac 100644 --- a/Makefile +++ b/Makefile @@ -5,7 +5,6 @@ clean: rm -fr **/*.pyc .venv/bin/python: - pip install hatch hatch env create dev: .venv/bin/python diff --git a/README.md b/README.md index 10978b5..c8f598d 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ Databricks Labs PyLint Plugin === -[![python](https://img.shields.io/badge/python-3.10,%203.11,%203.12-green)](https://github.com/databrickslabs/pylint-plugin/actions/workflows/push.yml) +[![python](https://img.shields.io/badge/python-3.8,%203.9,%203.10,%203.11,%203.12-green)](https://github.com/databrickslabs/pylint-plugin/actions/workflows/push.yml) [![codecov](https://codecov.io/github/databrickslabs/pylint-plugin/graph/badge.svg?token=x1JSVddfZa)](https://codecov.io/github/databrickslabs/pylint-plugin) [![lines of code](https://tokei.rs/b1/github/databrickslabs/pylint-plugin)]([https://codecov.io/github/databrickslabs/pylint-plugin](https://github.com/databrickslabs/pylint-plugin)) diff --git a/pyproject.toml b/pyproject.toml index f0c5b94..a837989 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,12 +4,14 @@ dynamic = ["version"] description = 'Plugin for PyLint to support Databricks specific code
patterns and best practices.' readme = "README.md" license-files = { paths = ["LICENSE", "NOTICE"] } -requires-python = ">=3.10" +requires-python = ">=3.8" keywords = ["Databricks"] classifiers = [ "Development Status :: 3 - Alpha", "License :: Other/Proprietary License", "Programming Language :: Python", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", @@ -43,7 +45,7 @@ dependencies = [ "isort>=2.5.0", ] -python="3.10" +python="3.8" # store virtual env as the child of this folder. Helps VSCode (and PyCharm) to run better path = ".venv" @@ -69,13 +71,13 @@ addopts = "--no-header" cache_dir = ".venv/pytest-cache" [tool.black] -target-version = ["py310"] +target-version = ["py38"] line-length = 120 skip-string-normalization = true [tool.ruff] cache-dir = ".venv/ruff-cache" -target-version = "py310" +target-version = "py38" line-length = 120 [tool.ruff.lint.isort] @@ -178,7 +180,7 @@ persistent = true # Minimum Python version to use for version dependent checks. Will default to the # version used to run pylint. -py-version = "3.10" +py-version = "3.8" # Discover python modules and packages in the file system subtree. 
# recursive = diff --git a/src/databricks/labs/pylint/airflow.py b/src/databricks/labs/pylint/airflow.py index cb8728b..24227df 100644 --- a/src/databricks/labs/pylint/airflow.py +++ b/src/databricks/labs/pylint/airflow.py @@ -1,4 +1,4 @@ -from typing import Any +from typing import Any, Dict, List import astroid from pylint.checkers import BaseChecker @@ -35,7 +35,7 @@ def visit_call(self, node: astroid.Call): elif arg == "new_cluster": self._check_new_cluster("ephemeral", value, node) - def _check_new_cluster(self, key: str, new_cluster: dict[str, Any], node: astroid.NodeNG): + def _check_new_cluster(self, key: str, new_cluster: Dict[str, Any], node: astroid.NodeNG): if "data_security_mode" not in new_cluster: self.add_message("missing-data-security-mode", node=node, args=(key,)) if "spark_version" in new_cluster and not self._is_supported(new_cluster["spark_version"]): @@ -54,19 +54,19 @@ def _is_supported(spark_version: str): except ValueError: return False - def _check_tasks(self, tasks: list[dict[str, Any]], node: astroid.NodeNG): + def _check_tasks(self, tasks: List[Dict[str, Any]], node: astroid.NodeNG): for task in tasks: if "new_cluster" not in task: return self._check_new_cluster(task["task_key"], task["new_cluster"], node) - def _check_job_clusters(self, job_clusters: list[dict[str, Any]], node: astroid.NodeNG): + def _check_job_clusters(self, job_clusters: List[Dict[str, Any]], node: astroid.NodeNG): for job_cluster in job_clusters: if "new_cluster" not in job_cluster: return self._check_new_cluster(job_cluster["job_cluster_key"], job_cluster["new_cluster"], node) - def _infer_kwargs(self, keywords: list[astroid.Keyword]): + def _infer_kwargs(self, keywords: List[astroid.Keyword]): kwargs = {} for keyword in keywords: kwargs[keyword.arg] = self._infer_value(keyword.value)