diff --git a/.codegen.json b/.codegen.json new file mode 100644 index 0000000..ead7bac --- /dev/null +++ b/.codegen.json @@ -0,0 +1,14 @@ +{ + "version": { + "src/databricks/labs/pylint/__about__.py": "__version__ = \"$VERSION\"" + }, + "toolchain": { + "required": ["python3", "hatch"], + "pre_setup": ["hatch env create"], + "prepend_path": ".venv/bin", + "acceptance_path": "tests/integration", + "test": [ + "pytest -n 4 --cov src --cov-report=xml --timeout 30 tests/unit --durations 20" + ] + } +} \ No newline at end of file diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..f174799 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,6 @@ +version: 2 +updates: + - package-ecosystem: "pip" + directory: "/" + schedule: + interval: "daily" \ No newline at end of file diff --git a/.github/workflows/push.yml b/.github/workflows/push.yml new file mode 100644 index 0000000..72b43b3 --- /dev/null +++ b/.github/workflows/push.yml @@ -0,0 +1,53 @@ +name: build + +on: + pull_request: + types: [opened, synchronize] + merge_group: + types: [checks_requested] + push: + # Always run on push to main. The build cache can only be reused + # if it was saved by a run from the repository's default branch. + # The run result will be identical to that from the merge queue + # because the commit is identical, yet we need to perform it to + # seed the build cache. + branches: + - main + +jobs: + ci: + strategy: + fail-fast: false + matrix: + pyVersion: [ '3.10', '3.11', '3.12' ] + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Install Python + uses: actions/setup-python@v4 + with: + cache: 'pip' + cache-dependency-path: '**/pyproject.toml' + python-version: ${{ matrix.pyVersion }} + + - name: Run unit tests + run: | + pip install hatch==1.9.4 + make test + + - name: Publish test coverage + uses: codecov/codecov-action@v1 + + fmt: + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v3 + + - name: Format all files + run: make dev fmt + + - name: Fail on differences + run: git diff --exit-code diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..9841fc4 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,48 @@ +name: Release + +on: + push: + tags: + - 'v*' + +jobs: + publish: + runs-on: ubuntu-latest + environment: release + permissions: + # Used to authenticate to PyPI via OIDC and sign the release's artifacts with sigstore-python. + id-token: write + # Used to attach signing artifacts to the published release. 
+ contents: write + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-python@v4 + with: + cache: 'pip' + cache-dependency-path: '**/pyproject.toml' + python-version: '3.10' + + - name: Build wheels + run: | + pip install hatch==1.9.4 + hatch build + + - name: Draft release + uses: softprops/action-gh-release@v1 + with: + draft: true + files: | + dist/databricks_*.whl + dist/databricks_*.tar.gz + + - uses: pypa/gh-action-pypi-publish@release/v1 + name: Publish package distributions to PyPI + + - name: Sign artifacts with Sigstore + uses: sigstore/gh-action-sigstore-python@v2.1.1 + with: + inputs: | + dist/databricks_*.whl + dist/databricks_*.tar.gz + release-signing-artifacts: true \ No newline at end of file diff --git a/.gitignore b/.gitignore index 68bc17f..bd40e59 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,8 @@ +# macos + +.DS_Store +*.DS_Store + # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] @@ -55,6 +60,8 @@ cover/ *.mo *.pot +*.out + # Django stuff: *.log local_settings.py @@ -82,33 +89,6 @@ target/ profile_default/ ipython_config.py -# pyenv -# For a library or package, you might want to ignore these files since the code is -# intended to run in multiple environments; otherwise, check them in: -# .python-version - -# pipenv -# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. -# However, in case of collaboration, if having platform-specific dependencies or dependencies -# having no cross-platform support, pipenv may install dependencies that don't work, or not -# install all needed dependencies. -#Pipfile.lock - -# poetry -# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. -# This is especially recommended for binary packages to ensure reproducibility, and is more -# commonly ignored for libraries. -# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control -#poetry.lock - -# pdm -# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. -#pdm.lock -# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it -# in version control. -# https://pdm.fming.dev/#use-with-ide -.pdm.toml - # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm __pypackages__/ @@ -120,13 +100,15 @@ celerybeat.pid *.sage.py # Environments -.env +.env.admin .venv +.env.* env/ venv/ ENV/ env.bak/ venv.bak/ +.env # Spyder project settings .spyderproject @@ -157,4 +139,17 @@ cython_debug/ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore # and can be added to the global gitignore or merged into this file. For a more nuclear # option (not recommended) you can uncomment the following to ignore the entire idea folder. 
-#.idea/ +.idea/ + +# ruff +.ruff_cache +/scratch + +# dev files and scratches +dev/cleanup.py + +.databricks +.vscode + +.python-version +.databricks-login.json diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..b7f4aa1 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,5 @@ +# Version changelog + +## 0.0.0 + +Initial pylint plugin commit diff --git a/CODEOWNERS b/CODEOWNERS new file mode 100644 index 0000000..cc93a75 --- /dev/null +++ b/CODEOWNERS @@ -0,0 +1 @@ +* @nfx diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 0000000..90809b5 --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,117 @@ +# Contributing + +## First Principles + +Favoring standard libraries over external dependencies, especially in specific contexts like Databricks, is a best practice in software +development. + +There are several reasons why this approach is encouraged: +- Standard libraries are typically well-vetted, thoroughly tested, and maintained by the official maintainers of the programming language or platform. This ensures a higher level of stability and reliability. +- External dependencies, especially lesser-known or unmaintained ones, can introduce bugs, security vulnerabilities, or compatibility issues that can be challenging to resolve. Adding external dependencies increases the complexity of your codebase. +- Each dependency may have its own set of dependencies, potentially leading to a complex web of dependencies that can be difficult to manage. This complexity can lead to maintenance challenges, increased risk, and longer build times. +- External dependencies can pose security risks. If a library or package has known security vulnerabilities and is widely used, it becomes an attractive target for attackers. Minimizing external dependencies reduces the potential attack surface and makes it easier to keep your code secure. +- Relying on standard libraries enhances code portability. It ensures your code can run on different platforms and environments without being tightly coupled to specific external dependencies. This is particularly important in settings like Databricks, where you may need to run your code on different clusters or setups. +- External dependencies may have their versioning schemes and compatibility issues. When using standard libraries, you have more control over versioning and can avoid conflicts between different dependencies in your project. +- Fewer external dependencies mean faster build and deployment times. Downloading, installing, and managing external packages can slow down these processes, especially in large-scale projects or distributed computing environments like Databricks. +- External dependencies can be abandoned or go unmaintained over time. This can lead to situations where your project relies on outdated or unsupported code. When you depend on standard libraries, you have confidence that the core functionality you rely on will continue to be maintained and improved. + +While minimizing external dependencies is essential, exceptions can be made case-by-case. There are situations where external dependencies are +justified, such as when a well-established and actively maintained library provides significant benefits, like time savings, performance improvements, +or specialized functionality unavailable in standard libraries. + +## Common fixes for `mypy` errors + +See https://mypy.readthedocs.io/en/stable/cheat_sheet_py3.html for more details + +### ..., expression has type "None", variable has type "str" + +* Add `assert ... 
is not None` if it's a body of a method. Example: + +``` +# error: Argument 1 to "delete" of "DashboardWidgetsAPI" has incompatible type "str | None"; expected "str" +self._ws.dashboard_widgets.delete(widget.id) +``` + +after + +``` +assert widget.id is not None +self._ws.dashboard_widgets.delete(widget.id) +``` + +* Add `... | None` if it's in the dataclass. Example: `cloud: str = None` -> `cloud: str | None = None` + +### ..., has incompatible type "Path"; expected "str" + +Add `.as_posix()` to convert a `Path` to a `str` (see the short sketch below) + +### Argument 2 to "get" of "dict" has incompatible type "None"; expected ... + +Add a valid default value for the dictionary return. + +Example: +```python +def viz_type(self) -> str: + return self.viz.get("type", None) +``` + +after: + +Example: +```python +def viz_type(self) -> str: + return self.viz.get("type", "UNKNOWN") +``` + +## Local Setup + +This section provides a step-by-step guide to set up and start working on the project. These steps will help you set up your project environment and dependencies for efficient development. + +To begin, run `make dev` to create the default environment and install development dependencies, assuming you've already cloned the GitHub repo. + +```shell +make dev +``` + +Verify the installation with: +```shell +make test +``` + +Before every commit, apply consistent formatting to the code, as we want our codebase to look consistent: +```shell +make fmt +``` + +Before every commit, run the automated bug detector (`make lint`) and unit tests (`make test`) to ensure that the automated +pull request checks pass before your code is reviewed by others: +```shell +make lint +make test +``` + +## First contribution + +Here are the steps to submit your first contribution: + +1. Fork this repo (if you really want to contribute) +2. `git clone` +3. `git checkout main` (or `gcm` if you're using [ohmyzsh](https://ohmyz.sh/)). +4. `git pull` (or `gl` if you're using [ohmyzsh](https://ohmyz.sh/)). +5. `git checkout -b FEATURENAME` (or `gcb FEATURENAME` if you're using [ohmyzsh](https://ohmyz.sh/)). +6. .. do the work +7. `make fmt` +8. `make lint` +9. .. fix if any +10. `make test` +11. .. fix if any +12. `git commit -a`. Make sure to enter a meaningful commit message title. +13. `git push origin FEATURENAME` +14. Go to the GitHub UI and create a PR. Alternatively, `gh pr create` (if you have [GitHub CLI](https://cli.github.com/) installed). + Use a meaningful pull request title because it'll appear in the release notes. Use `Resolves #NUMBER` in the pull + request description to [automatically link it](https://docs.github.com/en/get-started/writing-on-github/working-with-advanced-formatting/using-keywords-in-issues-and-pull-requests#linking-a-pull-request-to-an-issue) + to an existing issue. +15. Announce the PR for review. + +## Troubleshooting + +If you encounter any package dependency errors after `git pull`, run `make clean`. diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..ec6bdc7 --- /dev/null +++ b/LICENSE @@ -0,0 +1,69 @@ + Databricks License + Copyright (2023) Databricks, Inc. + + Definitions. + + Agreement: The agreement between Databricks, Inc., and you governing + the use of the Databricks Services, as that term is defined in + the Master Cloud Services Agreement (MCSA) located at + www.databricks.com/legal/mcsa. + + Licensed Materials: The source code, object code, data, and/or other + works to which this license applies. + + Scope of Use.
You may not use the Licensed Materials except in + connection with your use of the Databricks Services pursuant to + the Agreement. Your use of the Licensed Materials must comply at all + times with any restrictions applicable to the Databricks Services, + generally, and must be used in accordance with any applicable + documentation. You may view, use, copy, modify, publish, and/or + distribute the Licensed Materials solely for the purposes of using + the Licensed Materials within or connecting to the Databricks Services. + If you do not agree to these terms, you may not view, use, copy, + modify, publish, and/or distribute the Licensed Materials. + + Redistribution. You may redistribute and sublicense the Licensed + Materials so long as all use is in compliance with these terms. + In addition: + + - You must give any other recipients a copy of this License; + - You must cause any modified files to carry prominent notices + stating that you changed the files; + - You must retain, in any derivative works that you distribute, + all copyright, patent, trademark, and attribution notices, + excluding those notices that do not pertain to any part of + the derivative works; and + - If a "NOTICE" text file is provided as part of its + distribution, then any derivative works that you distribute + must include a readable copy of the attribution notices + contained within such NOTICE file, excluding those notices + that do not pertain to any part of the derivative works. + + You may add your own copyright statement to your modifications and may + provide additional license terms and conditions for use, reproduction, + or distribution of your modifications, or for any such derivative works + as a whole, provided your use, reproduction, and distribution of + the Licensed Materials otherwise complies with the conditions stated + in this License. + + Termination. This license terminates automatically upon your breach of + these terms or upon the termination of your Agreement. Additionally, + Databricks may terminate this license at any time on notice. Upon + termination, you must permanently delete the Licensed Materials and + all copies thereof. + + DISCLAIMER; LIMITATION OF LIABILITY. + + THE LICENSED MATERIALS ARE PROVIDED “AS-IS” AND WITH ALL FAULTS. + DATABRICKS, ON BEHALF OF ITSELF AND ITS LICENSORS, SPECIFICALLY + DISCLAIMS ALL WARRANTIES RELATING TO THE LICENSED MATERIALS, EXPRESS + AND IMPLIED, INCLUDING, WITHOUT LIMITATION, IMPLIED WARRANTIES, + CONDITIONS AND OTHER TERMS OF MERCHANTABILITY, SATISFACTORY QUALITY OR + FITNESS FOR A PARTICULAR PURPOSE, AND NON-INFRINGEMENT. DATABRICKS AND + ITS LICENSORS TOTAL AGGREGATE LIABILITY RELATING TO OR ARISING OUT OF + YOUR USE OF OR DATABRICKS’ PROVISIONING OF THE LICENSED MATERIALS SHALL + BE LIMITED TO ONE THOUSAND ($1,000) DOLLARS. IN NO EVENT SHALL + THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR + OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE LICENSED MATERIALS OR + THE USE OR OTHER DEALINGS IN THE LICENSED MATERIALS. 
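As an aside to the `mypy` fixes described in CONTRIBUTING.md above, here is a minimal sketch combining the `... | None` dataclass fix and the `Path` to `str` conversion via `.as_posix()`. The `Config` dataclass and the two functions are hypothetical, used purely for illustration:

```python
from dataclasses import dataclass
from pathlib import Path


@dataclass
class Config:
    # `cloud: str = None` fails mypy; widen the annotation instead of the default
    cloud: str | None = None


def save_config(path: str, config: Config) -> None:
    # this API accepts `str` only, so callers holding a `Path` must convert
    Path(path).write_text(f"cloud={config.cloud}")


def copy_config(target: Path, config: Config) -> None:
    # `.as_posix()` resolves: has incompatible type "Path"; expected "str"
    save_config(target.as_posix(), config)
```

Both patterns keep `make lint` (which runs `mypy` as part of `hatch run verify`) passing without suppressions.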
diff --git a/Makefile b/Makefile new file mode 100644 index 0000000..4b9667f --- /dev/null +++ b/Makefile @@ -0,0 +1,28 @@ +all: clean lint fmt test coverage + +clean: + rm -fr .venv clean htmlcov .mypy_cache .pytest_cache .ruff_cache .coverage coverage.xml + rm -fr **/*.pyc + +.venv/bin/python: + pip install hatch + hatch env create + +dev: .venv/bin/python + @hatch run which python + +lint: + hatch run verify + +fmt: + hatch run fmt + +test: + hatch run test + +integration: + hatch run integration + +coverage: + hatch run coverage && open htmlcov/index.html + diff --git a/NOTICE b/NOTICE new file mode 100644 index 0000000..09c451e --- /dev/null +++ b/NOTICE @@ -0,0 +1,9 @@ +Copyright (2024) Databricks, Inc. + +This Software includes software developed at Databricks (https://www.databricks.com/) and its use is subject to the included LICENSE file. + +This Software contains code from the following open source projects, licensed under the Apache 2.0 license: + +Databricks SDK for Python - https://github.com/databricks/databricks-sdk-py +Copyright 2023 Databricks, Inc. All rights reserved. +License - https://github.com/databricks/databricks-sdk-py/blob/main/LICENSE diff --git a/README.md b/README.md new file mode 100644 index 0000000..331b32e --- /dev/null +++ b/README.md @@ -0,0 +1,34 @@ + +Databricks Labs PyLint Plugin +=== + +[![python](https://img.shields.io/badge/python-3.10,%203.11,%203.12-green)](https://github.com/databrickslabs/pylint-plugin/actions/workflows/push.yml) +[![codecov](https://codecov.io/github/databrickslabs/pylint-plugin/graph/badge.svg?token=x1JSVddfZa)](https://codecov.io/github/databrickslabs/pylint-plugin) [![lines of code](https://tokei.rs/b1/github/databrickslabs/pylint-plugin)](https://github.com/databrickslabs/pylint-plugin) + + +Checks for common mistakes and issues in Python code, specifically in the Databricks environment. + + +* [Databricks Labs PyLint Plugin](#databricks-labs-pylint-plugin) +* [Installation](#installation) +* [Project Support](#project-support) + + +# Installation + +You can install this project via `pip`: + +``` +pip install databricks-labs-pylint +``` + +# Project Support + +Please note that this project is provided for your exploration only and is not +formally supported by Databricks with Service Level Agreements (SLAs). It is +provided AS-IS, and we do not make any guarantees of any kind. Please do not +submit a support ticket relating to any issues arising from the use of this project. + +Any issues discovered through the use of this project should be filed as GitHub +[Issues on this repository](https://github.com/databrickslabs/pylint-plugin/issues). +They will be reviewed as time permits, but no formal SLAs for support exist. diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..ce45d63 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,668 @@ +[project] +name = "databricks-labs-pylint" +dynamic = ["version"] +description = 'Plugin for PyLint to support Databricks-specific code patterns and best practices.'
+readme = "README.md" +license-files = { paths = ["LICENSE", "NOTICE"] } +requires-python = ">=3.10" +keywords = ["Databricks"] +classifiers = [ + "Development Status :: 3 - Alpha", + "License :: Other/Proprietary License", + "Programming Language :: Python", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: Implementation :: CPython", +] +dependencies = ["pylint", "astroid"] + +[project.urls] +Issues = "https://github.com/databrickslabs/pylint/issues" +Source = "https://github.com/databrickslabs/pylint" + +[build-system] +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.hatch.build] +sources = ["src"] +include = ["src"] + +[tool.hatch.version] +path = "src/databricks/labs/pylint/__about__.py" + +[tool.hatch.envs.default] +dependencies = [ + "coverage[toml]>=6.5", + "pytest", + "pylint", + "pytest-xdist", + "pytest-cov>=4.0.0,<5.0.0", + "pytest-mock>=3.0.0,<4.0.0", + "pytest-timeout", + "ruff>=0.0.243", + "isort>=2.5.0", + "mypy", + "types-PyYAML", + "types-requests", +] + +python="3.10" + +# store virtual env as the child of this folder. Helps VSCode (and PyCharm) to run better +path = ".venv" + +[tool.hatch.envs.default.scripts] +test = "pytest -n 2 --cov src --cov-report=xml --timeout 30 tests/unit --durations 20" +coverage = "pytest -n 2 --cov src tests/unit --timeout 30 --cov-report=html --durations 20" +integration = "pytest -n 10 --cov src tests/integration --durations 20" +fmt = ["isort .", + "ruff format", + "ruff . --fix", + "mypy .", + "pylint --output-format=colorized -j 0 src"] +verify = ["black --check .", + "isort . --check-only", + "ruff .", + "mypy .", + "pylint --output-format=colorized -j 0 src"] + +[tool.isort] +profile = "black" + +[tool.pytest.ini_options] +addopts = "--no-header" +cache_dir = ".venv/pytest-cache" + +[tool.black] +target-version = ["py310"] +line-length = 120 +skip-string-normalization = true + +[tool.ruff] +cache-dir = ".venv/ruff-cache" +target-version = "py310" +line-length = 120 + +[tool.ruff.isort] +known-first-party = ["databricks.labs.pylint"] + +[tool.coverage.run] +branch = true +parallel = true + +[tool.coverage.report] +omit = ["*/working-copy/*", 'src/databricks/labs/pylint/__main__.py'] +exclude_lines = [ + "no cov", + "if __name__ == .__main__.:", + "if TYPE_CHECKING:", +] + +[tool.pylint.main] +# PyLint configuration is adapted from Google Python Style Guide with modifications. +# Sources https://google.github.io/styleguide/pylintrc +# License: https://github.com/google/styleguide/blob/gh-pages/LICENSE + +# Analyse import fallback blocks. This can be used to support both Python 2 and 3 +# compatible code, which means that the block might have code that exists only in +# one or another interpreter, leading to false positives when analysed. +# analyse-fallback-blocks = + +# Clear in-memory caches upon conclusion of linting. Useful if running pylint in +# a server-like mode. +# clear-cache-post-run = + +# Always return a 0 (non-error) status code, even if lint errors are found. This +# is primarily useful in continuous integration scripts. +# exit-zero = + +# A comma-separated list of package or module names from where C extensions may +# be loaded. Extensions are loading into the active Python interpreter and may +# run arbitrary code. +# extension-pkg-allow-list = + +# A comma-separated list of package or module names from where C extensions may +# be loaded. 
Extensions are loading into the active Python interpreter and may +# run arbitrary code. (This is an alternative name to extension-pkg-allow-list +# for backward compatibility.) +# extension-pkg-whitelist = + +# Return non-zero exit code if any of these messages/categories are detected, +# even if score is above --fail-under value. Syntax same as enable. Messages +# specified are enabled, while categories only check already-enabled messages. +# fail-on = + +# Specify a score threshold under which the program will exit with error. +fail-under = 10.0 + +# Interpret the stdin as a python script, whose filename needs to be passed as +# the module_or_package argument. +# from-stdin = + +# Add files or directories matching the regular expressions patterns to the +# ignore-list. The regex matches against paths and can be in Posix or Windows +# format. Because '\\' represents the directory delimiter on Windows systems, it +# can't be used as an escape character. +# ignore-paths = + +# Files or directories matching the regular expression patterns are skipped. The +# regex matches against base names, not paths. The default value ignores Emacs +# file locks +ignore-patterns = ["^\\.#"] + +# List of module names for which member attributes should not be checked (useful +# for modules/projects where namespaces are manipulated during runtime and thus +# existing member attributes cannot be deduced by static analysis). It supports +# qualified module names, as well as Unix pattern matching. +# ignored-modules = + +# Python code to execute, usually for sys.path manipulation such as +# pygtk.require(). +# init-hook = + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use, and will cap the count on Windows to +# avoid hangs. +# jobs = + +# Control the amount of potential inferred values when inferring a single object. +# This can help the performance when dealing with large functions or complex, +# nested conditions. +limit-inference-results = 100 + +# List of plugins (as comma separated values of python module names) to load, +# usually to register additional checkers. +load-plugins = ["pylint.extensions.check_elif", "pylint.extensions.bad_builtin", "pylint.extensions.docparams", "pylint.extensions.for_any_all", "pylint.extensions.set_membership", "pylint.extensions.code_style", "pylint.extensions.overlapping_exceptions", "pylint.extensions.typing", "pylint.extensions.redefined_variable_type", "pylint.extensions.comparison_placement", "pylint.extensions.broad_try_clause", "pylint.extensions.dict_init_mutate", "pylint.extensions.consider_refactoring_into_while_condition"] + +# Pickle collected data for later comparisons. +persistent = true + +# Minimum Python version to use for version dependent checks. Will default to the +# version used to run pylint. +py-version = "3.10" + +# Discover python modules and packages in the file system subtree. +# recursive = + +# Add paths to the list of the source roots. Supports globbing patterns. The +# source root is an absolute path or a path relative to the current working +# directory used to determine a package namespace for modules located under the +# source root. +# source-roots = + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. +suggestion-mode = true + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. 
+# unsafe-load-any-extension = + +[tool.pylint.basic] +# Naming style matching correct argument names. +argument-naming-style = "snake_case" + +# Regular expression matching correct argument names. Overrides argument-naming- +# style. If left empty, argument names will be checked with the set naming style. +argument-rgx = "[a-z_][a-z0-9_]{2,30}$" + +# Naming style matching correct attribute names. +attr-naming-style = "snake_case" + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. If left empty, attribute names will be checked with the set naming +# style. +attr-rgx = "[a-z_][a-z0-9_]{2,}$" + +# Bad variable names which should always be refused, separated by a comma. +bad-names = ["foo", "bar", "baz", "toto", "tutu", "tata"] + +# Bad variable names regexes, separated by a comma. If names match any regex, +# they will always be refused +# bad-names-rgxs = + +# Naming style matching correct class attribute names. +class-attribute-naming-style = "any" + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. If left empty, class attribute names will be checked +# with the set naming style. +class-attribute-rgx = "([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$" + +# Naming style matching correct class constant names. +class-const-naming-style = "UPPER_CASE" + +# Regular expression matching correct class constant names. Overrides class- +# const-naming-style. If left empty, class constant names will be checked with +# the set naming style. +# class-const-rgx = + +# Naming style matching correct class names. +class-naming-style = "PascalCase" + +# Regular expression matching correct class names. Overrides class-naming-style. +# If left empty, class names will be checked with the set naming style. +class-rgx = "[A-Z_][a-zA-Z0-9]+$" + +# Naming style matching correct constant names. +const-naming-style = "UPPER_CASE" + +# Regular expression matching correct constant names. Overrides const-naming- +# style. If left empty, constant names will be checked with the set naming style. +const-rgx = "(([A-Z_][A-Z0-9_]*)|(__.*__))$" + +# Minimum line length for functions/classes that require docstrings, shorter ones +# are exempt. +docstring-min-length = -1 + +# Naming style matching correct function names. +function-naming-style = "snake_case" + +# Regular expression matching correct function names. Overrides function-naming- +# style. If left empty, function names will be checked with the set naming style. +function-rgx = "[a-z_][a-z0-9_]{2,30}$" + +# Good variable names which should always be accepted, separated by a comma. +good-names = ["i", "j", "k", "ex", "Run", "_"] + +# Good variable names regexes, separated by a comma. If names match any regex, +# they will always be accepted +# good-names-rgxs = + +# Include a hint for the correct naming format with invalid-name. +# include-naming-hint = + +# Naming style matching correct inline iteration names. +inlinevar-naming-style = "any" + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. If left empty, inline iteration names will be checked +# with the set naming style. +inlinevar-rgx = "[A-Za-z_][A-Za-z0-9_]*$" + +# Naming style matching correct method names. +method-naming-style = "snake_case" + +# Regular expression matching correct method names. Overrides method-naming- +# style. If left empty, method names will be checked with the set naming style. 
+method-rgx = "[a-z_][a-z0-9_]{2,}$" + +# Naming style matching correct module names. +module-naming-style = "snake_case" + +# Regular expression matching correct module names. Overrides module-naming- +# style. If left empty, module names will be checked with the set naming style. +module-rgx = "(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$" + +# Colon-delimited sets of names that determine each other's naming style when the +# name regexes allow several styles. +# name-group = + +# Regular expression which should only match function or class names that do not +# require a docstring. +no-docstring-rgx = "__.*__" + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. These +# decorators are taken in consideration only for invalid-name. +property-classes = ["abc.abstractproperty"] + +# Regular expression matching correct type alias names. If left empty, type alias +# names will be checked with the set naming style. +# typealias-rgx = + +# Regular expression matching correct type variable names. If left empty, type +# variable names will be checked with the set naming style. +# typevar-rgx = + +# Naming style matching correct variable names. +variable-naming-style = "snake_case" + +# Regular expression matching correct variable names. Overrides variable-naming- +# style. If left empty, variable names will be checked with the set naming style. +variable-rgx = "[a-z_][a-z0-9_]{2,30}$" + +[tool.pylint.broad_try_clause] +# Maximum number of statements allowed in a try clause +max-try-statements = 7 + +[tool.pylint.classes] +# Warn about protected attribute access inside special methods +# check-protected-access-in-special-methods = + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods = ["__init__", "__new__", "setUp", "__post_init__"] + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected = ["_asdict", "_fields", "_replace", "_source", "_make"] + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg = ["cls"] + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg = ["mcs"] + +[tool.pylint.deprecated_builtins] +# List of builtins function names that should not be used, separated by a comma +bad-functions = ["map", "input"] + +[tool.pylint.design] +# List of regular expressions of class ancestor names to ignore when counting +# public methods (see R0903) +# exclude-too-few-public-methods = + +# List of qualified class names to ignore when counting class parents (see R0901) +# ignored-parents = + +# Maximum number of arguments for function / method. +max-args = 9 + +# Maximum number of attributes for a class (see R0902). +max-attributes = 11 + +# Maximum number of boolean expressions in an if statement (see R0916). +max-bool-expr = 5 + +# Maximum number of branch for function / method body. +max-branches = 20 + +# Maximum number of locals for function / method body. +max-locals = 19 + +# Maximum number of parents for a class (see R0901). +max-parents = 7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods = 20 + +# Maximum number of return / yield for function / method body. +max-returns = 11 + +# Maximum number of statements in function / method body. +max-statements = 50 + +# Minimum number of public methods for a class (see R0903). 
+min-public-methods = 2 + +[tool.pylint.exceptions] +# Exceptions that will emit a warning when caught. +overgeneral-exceptions = ["builtins.Exception"] + +[tool.pylint.format] +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +# expected-line-ending-format = + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines = "^\\s*(# )??$" + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren = 4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string = " " + +# Maximum number of characters on a single line. +max-line-length = 100 + +# Maximum number of lines in a module. +max-module-lines = 2000 + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +# single-line-class-stmt = + +# Allow the body of an if to be on the same line as the test if there is no else. +# single-line-if-stmt = + +[tool.pylint.imports] +# List of modules that can be imported at any level, not just the top level one. +# allow-any-import-level = + +# Allow explicit reexports by alias from a package __init__. +# allow-reexport-from-package = + +# Allow wildcard imports from modules that define __all__. +# allow-wildcard-with-all = + +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules = ["regsub", "TERMIOS", "Bastion", "rexec"] + +# Output a graph (.gv or any supported image format) of external dependencies to +# the given file (report RP0402 must not be disabled). +# ext-import-graph = + +# Output a graph (.gv or any supported image format) of all (i.e. internal and +# external) dependencies to the given file (report RP0402 must not be disabled). +# import-graph = + +# Output a graph (.gv or any supported image format) of internal dependencies to +# the given file (report RP0402 must not be disabled). +# int-import-graph = + +# Force import order to recognize a module as part of the standard compatibility +# libraries. +# known-standard-library = + +# Force import order to recognize a module as part of a third party library. +known-third-party = ["enchant"] + +# Couples of modules and preferred modules, separated by a comma. +# preferred-modules = + +[tool.pylint.logging] +# The type of string formatting that logging methods do. `old` means using % +# formatting, `new` is for `{}` formatting. +logging-format-style = "old" + +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules = ["logging"] + +[tool.pylint."messages control"] +# Only show warnings with the listed confidence levels. Leave empty to show all. +# Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, UNDEFINED. +confidence = ["HIGH", "CONTROL_FLOW", "INFERENCE", "INFERENCE_FAILURE", "UNDEFINED"] + +# Disable the message, report, category or checker with the given id(s). You can +# either give multiple identifiers separated by comma (,) or put this option +# multiple times (only on the command line, not in the configuration file where +# it should appear only once). You can also use "--disable=all" to disable +# everything first and then re-enable specific checks. For example, if you want +# to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". 
+disable = ["raw-checker-failed", "bad-inline-option", "locally-disabled", "file-ignored", "suppressed-message", "deprecated-pragma", "use-implicit-booleaness-not-comparison-to-string", "use-implicit-booleaness-not-comparison-to-zero", "consider-using-augmented-assign", "prefer-typing-namedtuple", "attribute-defined-outside-init", "invalid-name", "missing-module-docstring", "missing-class-docstring", "missing-function-docstring", "protected-access", "too-few-public-methods", "line-too-long", "too-many-lines", "trailing-whitespace", "missing-final-newline", "trailing-newlines", "bad-indentation", "unnecessary-semicolon", "multiple-statements", "superfluous-parens", "mixed-line-endings", "unexpected-line-ending-format", "fixme", "consider-using-assignment-expr", "logging-fstring-interpolation", "consider-using-any-or-all"] + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where it +# should appear only once). See also the "--disable" option for examples. +enable = ["useless-suppression", "use-symbolic-message-instead"] + +[tool.pylint.method_args] +# List of qualified names (i.e., library.method) which require a timeout +# parameter e.g. 'requests.api.get,requests.api.post' +timeout-methods = ["requests.api.delete", "requests.api.get", "requests.api.head", "requests.api.options", "requests.api.patch", "requests.api.post", "requests.api.put", "requests.api.request"] + +[tool.pylint.miscellaneous] +# List of note tags to take in consideration, separated by a comma. +notes = ["FIXME", "XXX", "TODO"] + +# Regular expression of note tags to take in consideration. +# notes-rgx = + +[tool.pylint.parameter_documentation] +# Whether to accept totally missing parameter documentation in the docstring of a +# function that has parameters. +accept-no-param-doc = true + +# Whether to accept totally missing raises documentation in the docstring of a +# function that raises an exception. +accept-no-raise-doc = true + +# Whether to accept totally missing return documentation in the docstring of a +# function that returns a statement. +accept-no-return-doc = true + +# Whether to accept totally missing yields documentation in the docstring of a +# generator. +accept-no-yields-doc = true + +# If the docstring type cannot be guessed the specified docstring type will be +# used. +default-docstring-type = "default" + +[tool.pylint.refactoring] +# Maximum number of nested blocks for function / method body +max-nested-blocks = 5 + +# Complete name of functions that never returns. When checking for inconsistent- +# return-statements if a never returning function is called then it will be +# considered as an explicit return statement and no message will be printed. +never-returning-functions = ["sys.exit", "argparse.parse_error"] + +[tool.pylint.reports] +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'fatal', 'error', 'warning', 'refactor', +# 'convention', and 'info' which contain the number of messages in each category, +# as well as 'statement' which is the total number of statements analyzed. This +# score is used by the global evaluation report (RP0004). +evaluation = "max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10))" + +# Template used to display messages. 
This is a python new-style format string +# used to format the message information. See doc for all details. +# msg-template = + +# Set the output format. Available formats are: text, parseable, colorized, json2 +# (improved json format), json (old json format) and msvs (visual studio). You +# can also give a reporter class, e.g. mypackage.mymodule.MyReporterClass. +# output-format = + +# Tells whether to display a full report or only the messages. +# reports = + +# Activate the evaluation score. +score = true + +[tool.pylint.similarities] +# Comments are removed from the similarity computation +ignore-comments = true + +# Docstrings are removed from the similarity computation +ignore-docstrings = true + +# Imports are removed from the similarity computation +ignore-imports = true + +# Signatures are removed from the similarity computation +ignore-signatures = true + +# Minimum lines number of a similarity. +min-similarity-lines = 6 + +[tool.pylint.spelling] +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions = 2 + +# Spelling dictionary name. No available dictionaries : You need to install both +# the python package and the system dependency for enchant to work. +# spelling-dict = + +# List of comma separated words that should be considered directives if they +# appear at the beginning of a comment and should not be checked. +spelling-ignore-comment-directives = "fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy:,pragma:,# noinspection" + +# List of comma separated words that should not be checked. +# spelling-ignore-words = + +# A path to a file that contains the private dictionary; one word per line. +spelling-private-dict-file = ".pyenchant_pylint_custom_dict.txt" + +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +# spelling-store-unknown-words = + +[tool.pylint.typecheck] +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators = ["contextlib.contextmanager"] + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members = "REQUEST,acl_users,aq_parent,argparse.Namespace" + +# Tells whether missing members accessed in mixin class should be ignored. A +# class is considered mixin if its name matches the mixin-class-rgx option. +# Tells whether to warn about missing members when the owner of the attribute is +# inferred to be None. +ignore-none = true + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference can +# return multiple potential results while evaluating a Python object, but some +# branches might not be evaluated, which results in partial inference. In that +# case, it might be useful to still emit no-member and other checks for the rest +# of the inferred objects. +ignore-on-opaque-inference = true + +# List of symbolic message names to ignore for Mixin members. +ignored-checks-for-mixins = ["no-member", "not-async-context-manager", "not-context-manager", "attribute-defined-outside-init"] + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. 
+ignored-classes = ["SQLObject", "optparse.Values", "thread._local", "_thread._local"] + +# Show a hint with possible names when a member name was not found. The aspect of +# finding the hint is based on edit distance. +missing-member-hint = true + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. +missing-member-hint-distance = 1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices = 1 + +# Regex pattern to define which classes are considered mixins. +mixin-class-rgx = ".*MixIn" + +# List of decorators that change the signature of a decorated function. +# signature-mutators = + +[tool.pylint.variables] +# List of additional names supposed to be defined in builtins. Remember that you +# should avoid defining new builtins when possible. +# additional-builtins = + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables = true + +# List of names allowed to shadow builtins +# allowed-redefined-builtins = + +# List of strings which can identify a callback function by name. A callback name +# must start or end with one of those strings. +callbacks = ["cb_", "_cb"] + +# A regular expression matching the name of dummy variables (i.e. expected to not +# be used). +dummy-variables-rgx = "_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_" + +# Argument names that match this expression will be ignored. +ignored-argument-names = "_.*|^ignored_|^unused_" + +# Tells whether we should check for unused import in __init__ files. +# init-import = + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules = ["six.moves", "past.builtins", "future.builtins", "builtins", "io"] diff --git a/src/databricks/__init__.py b/src/databricks/__init__.py new file mode 100644 index 0000000..90c4472 --- /dev/null +++ b/src/databricks/__init__.py @@ -0,0 +1,3 @@ +# DO NOT ADD ANYTHING ELSE TO THIS FILE FOR COMPATIBILITY WITH OTHER databricks.* PACKAGES +# SEE https://packaging.python.org/guides/packaging-namespace-packages/#pkgutil-style-namespace-packages +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/src/databricks/labs/__init__.py b/src/databricks/labs/__init__.py new file mode 100644 index 0000000..3ffb9c3 --- /dev/null +++ b/src/databricks/labs/__init__.py @@ -0,0 +1,3 @@ +# DO NOT ADD ANYTHING ELSE TO THIS FILE FOR COMPATIBILITY WITH OTHER databricks.labs.* PACKAGES +# SEE https://packaging.python.org/guides/packaging-namespace-packages/#pkgutil-style-namespace-packages +__path__ = __import__("pkgutil").extend_path(__path__, __name__) diff --git a/src/databricks/labs/pylint/__about__.py b/src/databricks/labs/pylint/__about__.py new file mode 100644 index 0000000..6c8e6b9 --- /dev/null +++ b/src/databricks/labs/pylint/__about__.py @@ -0,0 +1 @@ +__version__ = "0.0.0" diff --git a/src/databricks/labs/pylint/__init__.py b/src/databricks/labs/pylint/__init__.py new file mode 100644 index 0000000..4e42e96 --- /dev/null +++ b/src/databricks/labs/pylint/__init__.py @@ -0,0 +1 @@ +from databricks.labs.pylint.__about__ import __version__ \ No newline at end of file
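The diff above only scaffolds the `databricks.labs.pylint` namespace package and its tooling; no checker module is part of this commit. As a rough illustration of where the project is headed, the sketch below shows the usual shape of a pylint plugin module: a `BaseChecker` subclass with a `msgs` table and a module-level `register()` entry point. The checker name, the message id `C8903`, the symbol `notebook-run-blocks`, and the check itself are hypothetical assumptions, not taken from this diff.

```python
# Hypothetical checker sketch; not part of this commit.
from astroid import nodes
from pylint.checkers import BaseChecker
from pylint.lint import PyLinter


class DbutilsChecker(BaseChecker):
    """Flags notebook-to-notebook calls, which are hard to test and debug."""

    name = "databricks-dbutils"
    msgs = {
        "C8903": (
            "Use of dbutils.notebook.run() is discouraged",
            "notebook-run-blocks",  # symbol usable in `# pylint: disable=...`
            "Running notebooks from notebooks makes code harder to test locally.",
        ),
    }

    def visit_call(self, node: nodes.Call) -> None:
        # Emit the message when a call chain ends in `.notebook.run(...)`.
        if node.func.as_string().endswith(".notebook.run"):
            self.add_message("notebook-run-blocks", node=node)


def register(linter: PyLinter) -> None:
    """Entry point used by pylint's plugin loading mechanism."""
    linter.register_checker(DbutilsChecker(linter))
```

A module written this way would then be enabled with `pylint --load-plugins=databricks.labs.pylint.<module>` or through the `load-plugins` setting in `pyproject.toml`.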