Skip to content

Commit

Permalink
chore: add pylint (#424)
Browse files Browse the repository at this point in the history
* chore: add pylint

Signed-off-by: Henry Schreiner <henryschreineriii@gmail.com>

* ci: add pylint

Signed-off-by: Henry Schreiner <henryschreineriii@gmail.com>
  • Loading branch information
henryiii authored Oct 9, 2022
1 parent 723c161 commit 17e6595
Showing 12 changed files with 134 additions and 54 deletions.
32 changes: 32 additions & 0 deletions .github/matchers/pylint.json
Original file line number Diff line number Diff line change
@@ -0,0 +1,32 @@
{
  "problemMatcher": [
    {
      "owner": "pylint-warning",
      "severity": "warning",
      "pattern": [
        {
          "regexp": "^([^:]+):(\\d+):(\\d+): ([A-DF-Z]\\d+): \\033\\[[\\d;]+m([^\\033]+).*$",
          "file": 1,
          "line": 2,
          "column": 3,
          "code": 4,
          "message": 5
        }
      ]
    },
    {
      "owner": "pylint-error",
      "severity": "error",
      "pattern": [
        {
          "regexp": "^([^:]+):(\\d+):(\\d+): (E\\d+): \\033\\[[\\d;]+m([^\\033]+).*$",
          "file": 1,
          "line": 2,
          "column": 3,
          "code": 4,
          "message": 5
        }
      ]
    }
  ]
}
4 changes: 4 additions & 0 deletions .github/workflows/ci.yml
Original file line number Diff line number Diff line change
@@ -21,6 +21,10 @@ jobs:
- uses: pre-commit/action@v3.0.0
with:
extra_args: --all-files --hook-stage manual
- name: PyLint
run: |
echo "::add-matcher::$GITHUB_WORKSPACE/.github/matchers/pylint.json"
pipx run nox -s pylint
checks:
name: Check ${{ matrix.os }} 🐍 ${{ matrix.python-version }}
13 changes: 12 additions & 1 deletion noxfile.py
Original file line number Diff line number Diff line change
@@ -4,7 +4,7 @@

import nox

nox.options.sessions = ["lint", "tests"]
nox.options.sessions = ["lint", "pylint", "tests"]


@nox.session
@@ -13,6 +13,17 @@ def lint(session: nox.Session) -> None:
session.run("pre-commit", "run", "--all-files", *session.posargs)


@nox.session
def pylint(session: nox.Session) -> None:
    """Run PyLint over the ``src`` tree.

    A pinned pylint is installed together with the package's dev extras so
    that imports resolve during analysis; any extra positional arguments
    given to nox are forwarded to the pylint command line.
    """
    session.install("pylint~=2.15.0")
    session.install("-e", ".[dev]")
    session.run("pylint", "src", *session.posargs)


@nox.session
def tests(session: nox.Session) -> None:
session.install("-e", ".[test]")
31 changes: 31 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
@@ -112,3 +112,34 @@ addopts = [
filterwarnings = [
"error",
]

[tool.pylint]
# Lint against the oldest Python this project supports, so newer-only
# syntax gets flagged.
master.py-version = "3.7"
# Colorized output — note the ANSI-escape-aware problem-matcher regexes in
# .github/matchers/pylint.json depend on this format.
reports.output-format = "colorized"
similarities.ignore-imports = "yes"
# "0" = auto-detect CPU count and run checks in parallel (pylint convention
# — TODO confirm against the pinned pylint version's docs).
master.jobs = "0"
messages_control.enable = [
# Flag disable-comments in the code that no longer suppress anything.
"useless-suppression",
]
# Checks suppressed project-wide; presumably a mix of stylistic choices and
# known false positives — verify before removing any entry.
messages_control.disable = [
"duplicate-code",
"fixme",
"invalid-name",
"invalid-unary-operand-type",
"line-too-long",
"missing-class-docstring",
"missing-function-docstring",
"missing-module-docstring",
"too-few-public-methods",
"too-many-arguments",
"too-many-branches",
"too-many-instance-attributes",
"too-many-lines",
"too-many-locals",
"too-many-public-methods",
"too-many-return-statements",
"too-many-statements",
"unsubscriptable-object",
"unsupported-assignment-operation",
"wrong-import-position",
]
5 changes: 3 additions & 2 deletions src/particle/converters/bimap.py
Original file line number Diff line number Diff line change
@@ -85,7 +85,7 @@ def __init__(
elif isinstance(filename, HasOpen):
file_object = filename.open()
else:
file_object = open(filename) # type: ignore[arg-type]
file_object = open(filename, encoding="utf_8") # type: ignore[arg-type]

with file_object as _f:
self._to_map = {
@@ -175,7 +175,7 @@ def DirectionalMaps(
elif isinstance(filename, HasRead):
file_object = filename
else:
file_object = open(filename) # type: ignore[arg-type]
file_object = open(filename, encoding="utf_8") # type: ignore[arg-type]

with file_object as _f:
skipinitialspace = True
@@ -211,6 +211,7 @@ def DirectionalMaps(


class DirectionalMap(StrStrMapping):
# pylint: disable-next=redefined-builtin
def __init__(self, name_A: str, name_B: str, map: dict[str, str]) -> None:
"""
Directional map class providing a A -> B mapping.
4 changes: 3 additions & 1 deletion src/particle/data/__init__.py
Original file line number Diff line number Diff line change
@@ -10,7 +10,9 @@
if sys.version_info < (3, 9):
import importlib_resources as resources
else:
import importlib.resources as resources
from importlib import resources

__all__ = ["basepath"]


basepath = resources.files(__name__)
4 changes: 3 additions & 1 deletion src/particle/lhcb/data/__init__.py
Original file line number Diff line number Diff line change
@@ -10,7 +10,9 @@
if sys.version_info < (3, 9):
import importlib_resources as resources
else:
import importlib.resources as resources
from importlib import resources


__all__ = ["basepath"]

basepath = resources.files(__name__)
14 changes: 6 additions & 8 deletions src/particle/particle/convert.py
Original file line number Diff line number Diff line change
@@ -106,7 +106,8 @@ def filter_file(fileobject: StringOrIO) -> TextIO:

if not hasattr(fileobject, "read"):
assert isinstance(fileobject, str)
fileobject = open(fileobject, encoding="utf-8")
with open(fileobject, encoding="utf-8") as f:
return filter_file(f)

assert not isinstance(fileobject, (str, Traversable))

@@ -117,9 +118,6 @@ def filter_file(fileobject: StringOrIO) -> TextIO:
stream.write(line)
stream.seek(0)

if not fileobject.closed:
fileobject.close()

return stream


@@ -145,8 +143,8 @@ def get_from_pdg_extended(
>>> full_table = get_from_pdg_extended('particle/data/mass_width_2008.fwf',
... ['particle/data/pdgid_to_latexname.csv'])
"""
"Read a file, plus a list of LaTeX files, to produce a pandas DataFrame with particle information"

# Read a file, plus a list of LaTeX files, to produce a pandas DataFrame with particle information
def unmap(mapping: dict[str, T]) -> Callable[[str], T]:
return lambda x: mapping[x.strip()]

@@ -373,7 +371,7 @@ def update_from_mcd(


def produce_files(
particle2008: str | Path,
particle2008: str | Path, # pylint: disable=unused-argument
particle2021: str | Path,
version: str,
year: str,
@@ -494,5 +492,5 @@ def run_convert(args: Any) -> None:
)
parser_convert.set_defaults(func=run_convert)

args = parser.parse_args()
args.func(args)
args_ = parser.parse_args()
args_.func(args_)
4 changes: 2 additions & 2 deletions src/particle/particle/kinematics.py
Original file line number Diff line number Diff line change
@@ -52,7 +52,7 @@ def width_to_lifetime(Gamma: float) -> float:

if Gamma < 0.0:
raise ValueError(f"Input provided, {Gamma} <= 0!")
elif Gamma == 0:
if Gamma == 0:
return float("inf")

# Just need to first make sure that the width is in the standard unit MeV
@@ -98,7 +98,7 @@ def lifetime_to_width(tau: float) -> float:

if tau < 0:
raise ValueError(f"Input provided, {tau} <= 0!")
elif tau == 0:
if tau == 0:
return float("inf")

# Just need to first make sure that the lifetime is in the standard unit ns
36 changes: 18 additions & 18 deletions src/particle/particle/particle.py
Original file line number Diff line number Diff line change
@@ -549,7 +549,7 @@ def load_table(
with data.basepath.joinpath("nuclei2020.csv").open() as fb:
cls.load_table(fb, append=True, _name="nuclei2020.csv")
return
elif isinstance(filename, HasRead):
if isinstance(filename, HasRead):
tmp_name = _name or filename.name
cls._table_names.append(tmp_name or f"{filename!r} {len(cls._table_names)}")
open_file = filename
@@ -559,7 +559,7 @@ def load_table(
else:
cls._table_names.append(str(filename))
assert not isinstance(filename, Traversable)
open_file = open(filename)
open_file = open(filename, encoding="utf_8")

with open_file as f:
r = csv.DictReader(line for line in f if not line.startswith("#"))
@@ -812,11 +812,11 @@ def _width_or_lifetime(self) -> str:
"""
if self.width is None:
return "Width = None"
elif self.width == 0:
if self.width == 0:
return "Width = 0.0 MeV"
elif self.width_lower is None or self.width_upper is None:
if self.width_lower is None or self.width_upper is None:
return f"Width < {self.width} MeV"
elif (
if (
self.width < 0.05
): # corresponds to a lifetime of approximately 1.3e-20 seconds
assert self.lifetime is not None
@@ -831,9 +831,9 @@ def _width_or_lifetime(self) -> str:
e = width_to_lifetime(self.width - self.width_lower) - self.lifetime
lifetime = str_with_unc(self.lifetime, e, e)
return f"Lifetime = {lifetime} ns"
else:
width = str_with_unc(self.width, self.width_upper, self.width_lower)
return f"Width = {width} MeV"

width = str_with_unc(self.width, self.width_upper, self.width_lower)
return f"Width = {width} MeV"

def _charge_in_name(self) -> bool:
"""Assess whether the particle charge is part of the particle name.
@@ -859,15 +859,16 @@ def _charge_in_name(self) -> bool:
# Quarkonia never exhibit the 0 charge
# All eta, eta', h, h', omega, phi, f, f' light mesons are supposed to have an s-sbar component (see PDG site),
# but some particles have pdgid.has_strange==False :S! Play it safe ...
elif any(
if any(
chr in self.pdg_name
for chr in ("eta", "h(", "h'(", "omega", "phi", "f", "f'")
):
return False
elif pid.has_strange or pid.has_charm or pid.has_bottom or pid.has_top:
if pid.has_strange or pid.has_charm or pid.has_bottom or pid.has_top:
return False
else: # Light unflavoured mesons
return True

# Light unflavoured mesons
return True
# Lambda baryons
if (
self.pdgid.is_baryon
@@ -904,9 +905,9 @@ def _str_mass(self) -> str:
"""
if self.mass is None:
return "None"
else:
txt = str_with_unc(self.mass, self.mass_upper, self.mass_lower)
return f"{txt} MeV"

txt = str_with_unc(self.mass, self.mass_upper, self.mass_lower)
return f"{txt} MeV"

def describe(self) -> str:
"Make a nice high-density string for a particle's properties."
@@ -1068,7 +1069,7 @@ def finditer(
if particle is not None:
if particle and int(item.pdgid) < 0:
continue
elif (not particle) and int(item.pdgid) > 0:
if (not particle) and int(item.pdgid) > 0:
continue

# If a filter function is passed, evaluate and skip if False
@@ -1176,8 +1177,7 @@ def from_string(cls: type[Self], name: str) -> Self:
matches = cls.from_string_list(name)
if matches:
return matches[0]
else:
raise ParticleNotFound(f"{name} not found in particle table")
raise ParticleNotFound(f"{name} not found in particle table")

@classmethod
def from_string_list(cls: type[Self], name: str) -> list[Self]:
Loading

0 comments on commit 17e6595

Please sign in to comment.