Make it work with Python 3.8
nfx committed Mar 15, 2024
1 parent 0cbaa84 commit 83991e1
Showing 5 changed files with 14 additions and 13 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/push.yml
@@ -19,7 +19,7 @@ jobs:
     strategy:
       fail-fast: false
       matrix:
-        pyVersion: [ '3.10', '3.11', '3.12' ]
+        pyVersion: [ '3.8', '3.9', '3.10', '3.11', '3.12' ]
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
1 change: 0 additions & 1 deletion Makefile
@@ -5,7 +5,6 @@ clean:
 	rm -fr **/*.pyc

 .venv/bin/python:
-	pip install hatch
 	hatch env create

 dev: .venv/bin/python
2 changes: 1 addition & 1 deletion README.md
@@ -2,7 +2,7 @@
 Databricks Labs PyLint Plugin
 ===

-[![python](https://img.shields.io/badge/python-3.10,%203.11,%203.12-green)](https://github.com/databrickslabs/pylint-plugin/actions/workflows/push.yml)
+[![python](https://img.shields.io/badge/python-3.8,%203.9,%203.10,%203.11,%203.12-green)](https://github.com/databrickslabs/pylint-plugin/actions/workflows/push.yml)
 [![codecov](https://codecov.io/github/databrickslabs/pylint-plugin/graph/badge.svg?token=x1JSVddfZa)](https://codecov.io/github/databrickslabs/pylint-plugin) [![lines of code](https://tokei.rs/b1/github/databrickslabs/pylint-plugin)](https://github.com/databrickslabs/pylint-plugin)

12 changes: 7 additions & 5 deletions pyproject.toml
@@ -4,12 +4,14 @@ dynamic = ["version"]
 description = 'Plugin for PyLint to support Databricks specific code patterns and best practices.'
 readme = "README.md"
 license-files = { paths = ["LICENSE", "NOTICE"] }
-requires-python = ">=3.10"
+requires-python = ">=3.8"
 keywords = ["Databricks"]
 classifiers = [
     "Development Status :: 3 - Alpha",
     "License :: Other/Proprietary License",
     "Programming Language :: Python",
+    "Programming Language :: Python :: 3.8",
+    "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
     "Programming Language :: Python :: 3.12",
@@ -43,7 +45,7 @@ dependencies = [
     "isort>=2.5.0",
 ]

-python="3.10"
+python="3.8"

 # store virtual env as the child of this folder. Helps VSCode (and PyCharm) to run better
 path = ".venv"
@@ -69,13 +71,13 @@ addopts = "--no-header"
 cache_dir = ".venv/pytest-cache"

 [tool.black]
-target-version = ["py310"]
+target-version = ["py38"]
 line-length = 120
 skip-string-normalization = true

 [tool.ruff]
 cache-dir = ".venv/ruff-cache"
-target-version = "py310"
+target-version = "py38"
 line-length = 120

 [tool.ruff.lint.isort]
@@ -178,7 +180,7 @@ persistent = true

 # Minimum Python version to use for version dependent checks. Will default to the
 # version used to run pylint.
-py-version = "3.10"
+py-version = "3.8"

 # Discover python modules and packages in the file system subtree.
 # recursive =
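
Note that lowering `requires-python`, the classifiers, and the black/ruff/pylint `target-version`/`py-version` knobs only declares 3.8 support; nothing in the configuration mechanically verifies that the sources avoid 3.9-only syntax. A minimal stdlib-only sketch of such a check, flagging PEP 585 builtin generics like `dict[str, Any]` (the exact construct the next file rewrites) is shown below; the script and its `find_pep585_generics` helper are illustrative and not part of this repository:

    import ast
    import sys

    # Builtin container types that only support subscripting (PEP 585)
    # from Python 3.9 onwards.
    BUILTIN_GENERICS = {"dict", "list", "set", "tuple", "frozenset", "type"}

    def find_pep585_generics(source: str, filename: str = "<string>"):
        """Yield line numbers where a builtin generic such as dict[...] is used."""
        tree = ast.parse(source, filename=filename)
        for node in ast.walk(tree):
            if (
                isinstance(node, ast.Subscript)
                and isinstance(node.value, ast.Name)
                and node.value.id in BUILTIN_GENERICS
            ):
                yield node.lineno

    if __name__ == "__main__":
        # Usage: python check_pep585.py src/databricks/labs/pylint/*.py
        for path in sys.argv[1:]:
            with open(path, encoding="utf-8") as f:
                source = f.read()
            for lineno in find_pep585_generics(source, path):
                print(f"{path}:{lineno}: builtin generic needs Python 3.9+")

This over-reports when a local variable shadows a builtin name, which is acceptable for a one-off audit.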
10 changes: 5 additions & 5 deletions src/databricks/labs/pylint/airflow.py
@@ -1,4 +1,4 @@
-from typing import Any
+from typing import Any, Dict, List

 import astroid
 from pylint.checkers import BaseChecker
@@ -35,7 +35,7 @@ def visit_call(self, node: astroid.Call):
             elif arg == "new_cluster":
                 self._check_new_cluster("ephemeral", value, node)

-    def _check_new_cluster(self, key: str, new_cluster: dict[str, Any], node: astroid.NodeNG):
+    def _check_new_cluster(self, key: str, new_cluster: Dict[str, Any], node: astroid.NodeNG):
         if "data_security_mode" not in new_cluster:
             self.add_message("missing-data-security-mode", node=node, args=(key,))
         if "spark_version" in new_cluster and not self._is_supported(new_cluster["spark_version"]):
@@ -54,19 +54,19 @@ def _is_supported(spark_version: str):
         except ValueError:
             return False

-    def _check_tasks(self, tasks: list[dict[str, Any]], node: astroid.NodeNG):
+    def _check_tasks(self, tasks: List[Dict[str, Any]], node: astroid.NodeNG):
         for task in tasks:
             if "new_cluster" not in task:
                 return
             self._check_new_cluster(task["task_key"], task["new_cluster"], node)

-    def _check_job_clusters(self, job_clusters: list[dict[str, Any]], node: astroid.NodeNG):
+    def _check_job_clusters(self, job_clusters: List[Dict[str, Any]], node: astroid.NodeNG):
         for job_cluster in job_clusters:
             if "new_cluster" not in job_cluster:
                 return
             self._check_new_cluster(job_cluster["job_cluster_key"], job_cluster["new_cluster"], node)

-    def _infer_kwargs(self, keywords: list[astroid.Keyword]):
+    def _infer_kwargs(self, keywords: List[astroid.Keyword]):
         kwargs = {}
         for keyword in keywords:
             kwargs[keyword.arg] = self._infer_value(keyword.value)
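
For context on the substance of this file's changes: PEP 585 generics (`dict[str, Any]`, `list[...]`) are a Python 3.9+ feature, so under 3.8 the annotations must come from the `typing` module. A short standalone sketch of the two 3.8-compatible options (the function and names are illustrative, not the plugin's API):

    from typing import Any, Dict, List

    # Option taken by this commit: typing aliases, valid on Python 3.8+.
    def check_tasks(tasks: List[Dict[str, Any]]) -> None:
        for task in tasks:
            print(task.get("task_key"))

    # Alternative: `from __future__ import annotations` (PEP 563) makes
    # annotations lazy, so `dict[str, Any]` would parse on 3.8 too. It only
    # covers annotations, however; a runtime use such as the module-level
    # alias `Tasks = list[dict]` would still raise TypeError on 3.8.

The typing-alias route keeps runtime behavior identical across versions, which is why it is the safer pick for a library that must import cleanly on every supported interpreter.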
