` + sponsors.forEach(function (sponsor) { + html += ` + + + + ` + }); + html += '
' + sponsorsDiv.innerHTML = html; + } + }); +} + +function updateInsidersPage(author_username) { + const sponsorURL = `https://github.com/sponsors/${author_username}` + const dataURL = `https://raw.githubusercontent.com/${author_username}/sponsors/main`; + getJSON(dataURL + '/numbers.json', function (err, numbers) { + document.getElementById('sponsors-count').innerHTML = numbers.count; + Array.from(document.getElementsByClassName('sponsors-total')).forEach(function (element) { + element.innerHTML = '$ ' + humanReadableAmount(numbers.total); + }); + getJSON(dataURL + '/sponsors.json', function (err, sponsors) { + const sponsorsElem = document.getElementById('sponsors'); + const privateSponsors = numbers.count - sponsors.length; + sponsors.forEach(function (sponsor) { + sponsorsElem.innerHTML += ` + + + + `; + }); + if (privateSponsors > 0) { + sponsorsElem.innerHTML += ` + + +${privateSponsors} + + `; + } + }); + }); + updatePremiumSponsors(dataURL, "gold"); + updatePremiumSponsors(dataURL, "silver"); + updatePremiumSponsors(dataURL, "bronze"); +} diff --git a/docs/license.md b/docs/license.md new file mode 100644 index 0000000..e81c0ed --- /dev/null +++ b/docs/license.md @@ -0,0 +1,10 @@ +--- +hide: +- feedback +--- + +# License + +``` +--8<-- "LICENSE" +``` diff --git a/duties.py b/duties.py new file mode 100644 index 0000000..4ed2d1f --- /dev/null +++ b/duties.py @@ -0,0 +1,219 @@ +"""Development tasks.""" + +from __future__ import annotations + +import os +import sys +from contextlib import contextmanager +from importlib.metadata import version as pkgversion +from pathlib import Path +from typing import TYPE_CHECKING, Iterator + +from duty import duty, tools + +if TYPE_CHECKING: + from duty.context import Context + + +PY_SRC_PATHS = (Path(_) for _ in ("src", "tests", "duties.py", "scripts")) +PY_SRC_LIST = tuple(str(_) for _ in PY_SRC_PATHS) +PY_SRC = " ".join(PY_SRC_LIST) +CI = os.environ.get("CI", "0") in {"1", "true", "yes", ""} +WINDOWS = os.name == "nt" +PTY = not WINDOWS and not CI +MULTIRUN = os.environ.get("MULTIRUN", "0") == "1" + + +def pyprefix(title: str) -> str: # noqa: D103 + if MULTIRUN: + prefix = f"(python{sys.version_info.major}.{sys.version_info.minor})" + return f"{prefix:14}{title}" + return title + + +@contextmanager +def material_insiders() -> Iterator[bool]: # noqa: D103 + if "+insiders" in pkgversion("mkdocs-material"): + os.environ["MATERIAL_INSIDERS"] = "true" + try: + yield True + finally: + os.environ.pop("MATERIAL_INSIDERS") + else: + yield False + + +@duty +def changelog(ctx: Context, bump: str = "") -> None: + """Update the changelog in-place with latest commits. + + Parameters: + bump: Bump option passed to git-changelog. 
+ """ + ctx.run(tools.git_changelog(bump=bump or None), title="Updating changelog") + + +@duty(pre=["check_quality", "check_types", "check_docs", "check_dependencies", "check-api"]) +def check(ctx: Context) -> None: # noqa: ARG001 + """Check it all!""" + + +@duty +def check_quality(ctx: Context) -> None: + """Check the code quality.""" + ctx.run( + tools.ruff.check(*PY_SRC_LIST, config="config/ruff.toml"), + title=pyprefix("Checking code quality"), + ) + + +@duty +def check_docs(ctx: Context) -> None: + """Check if the documentation builds correctly.""" + Path("htmlcov").mkdir(parents=True, exist_ok=True) + Path("htmlcov/index.html").touch(exist_ok=True) + with material_insiders(): + ctx.run( + tools.mkdocs.build(strict=True, verbose=True), + title=pyprefix("Building documentation"), + ) + + +@duty +def check_types(ctx: Context) -> None: + """Check that the code is correctly typed.""" + ctx.run( + tools.mypy(*PY_SRC_LIST, config_file="config/mypy.ini"), + title=pyprefix("Type-checking"), + ) + + +@duty +def check_api(ctx: Context, *cli_args: str) -> None: + """Check for API breaking changes.""" + ctx.run( + tools.griffe.check("griffe_autodocstringstyle", search=["src"], color=True).add_args(*cli_args), + title="Checking for API breaking changes", + nofail=True, + ) + + +@duty +def docs(ctx: Context, *cli_args: str, host: str = "127.0.0.1", port: int = 8000) -> None: + """Serve the documentation (localhost:8000). + + Parameters: + host: The host to serve the docs from. + port: The port to serve the docs on. + """ + with material_insiders(): + ctx.run( + tools.mkdocs.serve(dev_addr=f"{host}:{port}").add_args(*cli_args), + title="Serving documentation", + capture=False, + ) + + +@duty +def docs_deploy(ctx: Context) -> None: + """Deploy the documentation to GitHub pages.""" + os.environ["DEPLOY"] = "true" + with material_insiders() as insiders: + if not insiders: + ctx.run(lambda: False, title="Not deploying docs without Material for MkDocs Insiders!") + origin = ctx.run("git config --get remote.origin.url", silent=True) + if "pawamoy-insiders/griffe-autodocstringstyle" in origin: + ctx.run("git remote add upstream git@github.com:mkdocstrings/griffe-autodocstringstyle", silent=True, nofail=True) + ctx.run( + tools.mkdocs.gh_deploy(remote_name="upstream", force=True), + title="Deploying documentation", + ) + else: + ctx.run( + lambda: False, + title="Not deploying docs from public repository (do that from insiders instead!)", + nofail=True, + ) + + +@duty +def format(ctx: Context) -> None: + """Run formatting tools on the code.""" + ctx.run( + tools.ruff.check(*PY_SRC_LIST, config="config/ruff.toml", fix_only=True, exit_zero=True), + title="Auto-fixing code", + ) + ctx.run(tools.ruff.format(*PY_SRC_LIST, config="config/ruff.toml"), title="Formatting code") + + +@duty +def build(ctx: Context) -> None: + """Build source and wheel distributions.""" + ctx.run( + tools.build(), + title="Building source and wheel distributions", + pty=PTY, + ) + + +@duty +def publish(ctx: Context) -> None: + """Publish source and wheel distributions to PyPI.""" + if not Path("dist").exists(): + ctx.run("false", title="No distribution files found") + dists = [str(dist) for dist in Path("dist").iterdir()] + ctx.run( + tools.twine.upload(*dists, skip_existing=True), + title="Publishing source and wheel distributions to PyPI", + pty=PTY, + ) + + +@duty(post=["build", "publish", "docs-deploy"]) +def release(ctx: Context, version: str = "") -> None: + """Release a new Python package. 
+ + Parameters: + version: The new version number to use. + """ + origin = ctx.run("git config --get remote.origin.url", silent=True) + if "pawamoy-insiders/griffe-autodocstringstyle" in origin: + ctx.run( + lambda: False, + title="Not releasing from insiders repository (do that from public repo instead!)", + ) + if not (version := (version or input("> Version to release: ")).strip()): + ctx.run("false", title="A version must be provided") + ctx.run("git add pyproject.toml CHANGELOG.md", title="Staging files", pty=PTY) + ctx.run(["git", "commit", "-m", f"chore: Prepare release {version}"], title="Committing changes", pty=PTY) + ctx.run(f"git tag {version}", title="Tagging commit", pty=PTY) + ctx.run("git push", title="Pushing commits", pty=False) + ctx.run("git push --tags", title="Pushing tags", pty=False) + + +@duty(silent=True, aliases=["cov"]) +def coverage(ctx: Context) -> None: + """Report coverage as text and HTML.""" + ctx.run(tools.coverage.combine(), nofail=True) + ctx.run(tools.coverage.report(rcfile="config/coverage.ini"), capture=False) + ctx.run(tools.coverage.html(rcfile="config/coverage.ini")) + + +@duty +def test(ctx: Context, *cli_args: str, match: str = "") -> None: + """Run the test suite. + + Parameters: + match: A pytest expression to filter selected tests. + """ + py_version = f"{sys.version_info.major}{sys.version_info.minor}" + os.environ["COVERAGE_FILE"] = f".coverage.{py_version}" + ctx.run( + tools.pytest( + "tests", + config_file="config/pytest.ini", + select=match, + color="yes", + ).add_args("-n", "auto", *cli_args), + title=pyprefix("Running tests"), + ) diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 0000000..35ade93 --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,172 @@ +site_name: "griffe-autodocstringstyle" +site_description: "Set docstring style to 'auto' for external packages." 
+site_url: "https://mkdocstrings.github.io/griffe-autodocstringstyle" +repo_url: "https://github.com/mkdocstrings/griffe-autodocstringstyle" +repo_name: "mkdocstrings/griffe-autodocstringstyle" +site_dir: "site" +watch: [mkdocs.yml, README.md, CONTRIBUTING.md, CHANGELOG.md, src/griffe_autodocstringstyle] +copyright: Copyright © 2024 Timothée Mazzucotelli +edit_uri: edit/main/docs/ + +validation: + omitted_files: warn + absolute_links: warn + unrecognized_links: warn + +nav: +- Home: + - Overview: index.md + - Changelog: changelog.md + - Credits: credits.md + - License: license.md +# defer to gen-files + literate-nav +- API reference: + - griffe-autodocstringstyle: reference/ +- Development: + - Contributing: contributing.md + - Code of Conduct: code_of_conduct.md + - Coverage report: coverage.md +- Insiders: + - insiders/index.md + - Getting started: + - Installation: insiders/installation.md + - Changelog: insiders/changelog.md +- Author's website: https://pawamoy.github.io/ + +theme: + name: material + custom_dir: docs/.overrides + icon: + logo: material/currency-sign + features: + - announce.dismiss + - content.action.edit + - content.action.view + - content.code.annotate + - content.code.copy + - content.tooltips + - navigation.footer + - navigation.indexes + - navigation.sections + - navigation.tabs + - navigation.tabs.sticky + - navigation.top + - search.highlight + - search.suggest + - toc.follow + palette: + - media: "(prefers-color-scheme)" + toggle: + icon: material/brightness-auto + name: Switch to light mode + - media: "(prefers-color-scheme: light)" + scheme: default + primary: teal + accent: purple + toggle: + icon: material/weather-sunny + name: Switch to dark mode + - media: "(prefers-color-scheme: dark)" + scheme: slate + primary: black + accent: lime + toggle: + icon: material/weather-night + name: Switch to system preference + +extra_css: +- css/material.css +- css/mkdocstrings.css +- css/insiders.css + +extra_javascript: +- js/feedback.js + +markdown_extensions: +- attr_list +- admonition +- callouts +- footnotes +- pymdownx.emoji: + emoji_index: !!python/name:material.extensions.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg +- pymdownx.magiclink +- pymdownx.snippets: + base_path: [!relative $config_dir] + check_paths: true +- pymdownx.superfences +- pymdownx.tabbed: + alternate_style: true + slugify: !!python/object/apply:pymdownx.slugs.slugify + kwds: + case: lower +- pymdownx.tasklist: + custom_checkbox: true +- toc: + permalink: "¤" + +plugins: +- search +- markdown-exec +- gen-files: + scripts: + - scripts/gen_ref_nav.py +- literate-nav: + nav_file: SUMMARY.md +- coverage +- mkdocstrings: + handlers: + python: + import: + - https://docs.python.org/3/objects.inv + paths: [src] + options: + docstring_options: + ignore_init_summary: true + docstring_section_style: list + filters: ["!^_"] + heading_level: 1 + inherited_members: true + merge_init_into_class: true + separate_signature: true + show_root_heading: true + show_root_full_path: false + show_signature_annotations: true + show_symbol_type_heading: true + show_symbol_type_toc: true + signature_crossrefs: true + summary: true +- git-committers: + enabled: !ENV [DEPLOY, false] + repository: mkdocstrings/griffe-autodocstringstyle +- minify: + minify_html: !ENV [DEPLOY, false] +- group: + enabled: !ENV [MATERIAL_INSIDERS, false] + plugins: + - typeset + +extra: + social: + - icon: fontawesome/brands/github + link: https://github.com/pawamoy + - icon: fontawesome/brands/mastodon + 
link: https://fosstodon.org/@pawamoy + - icon: fontawesome/brands/twitter + link: https://twitter.com/pawamoy + - icon: fontawesome/brands/gitter + link: https://gitter.im/griffe-autodocstringstyle/community + - icon: fontawesome/brands/python + link: https://pypi.org/project/griffe-autodocstringstyle/ + analytics: + feedback: + title: Was this page helpful? + ratings: + - icon: material/emoticon-happy-outline + name: This page was helpful + data: 1 + note: Thanks for your feedback! + - icon: material/emoticon-sad-outline + name: This page could be improved + data: 0 + note: Let us know how we can improve this page. diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..5b8ee12 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,66 @@ +[build-system] +requires = ["pdm-backend"] +build-backend = "pdm.backend" + +[project] +name = "griffe-autodocstringstyle" +description = "Set docstring style to 'auto' for external packages." +authors = [{name = "Timothée Mazzucotelli", email = "dev@pawamoy.fr"}] +license = {text = "ISC"} +readme = "README.md" +requires-python = ">=3.8" +keywords = [] +dynamic = ["version"] +classifiers = [ + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Programming Language :: Python", + "Programming Language :: Python :: 3", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Topic :: Documentation", + "Topic :: Software Development", + "Topic :: Utilities", + "Typing :: Typed", +] +dependencies = [] + +[project.urls] +Homepage = "https://mkdocstrings.github.io/griffe-autodocstringstyle" +Documentation = "https://mkdocstrings.github.io/griffe-autodocstringstyle" +Changelog = "https://mkdocstrings.github.io/griffe-autodocstringstyle/changelog" +Repository = "https://github.com/mkdocstrings/griffe-autodocstringstyle" +Issues = "https://github.com/mkdocstrings/griffe-autodocstringstyle/issues" +Discussions = "https://github.com/mkdocstrings/griffe-autodocstringstyle/discussions" +Gitter = "https://gitter.im/mkdocstrings/griffe-autodocstringstyle" +Funding = "https://github.com/sponsors/pawamoy" + +[tool.pdm] +version = {source = "scm"} + +[tool.pdm.build] +package-dir = "src" +editable-backend = "editables" +excludes = ["**/.pytest_cache"] +source-includes = [ + "config", + "docs", + "scripts", + "share", + "tests", + "devdeps.txt", + "duties.py", + "mkdocs.yml", + "*.md", + "LICENSE", +] + +[tool.pdm.build.wheel-data] +data = [ + {path = "share/**/*", relative-to = "."}, +] diff --git a/scripts/gen_credits.py b/scripts/gen_credits.py new file mode 100644 index 0000000..af3278a --- /dev/null +++ b/scripts/gen_credits.py @@ -0,0 +1,179 @@ +"""Script to generate the project's credits.""" + +from __future__ import annotations + +import os +import sys +from collections import defaultdict +from importlib.metadata import distributions +from itertools import chain +from pathlib import Path +from textwrap import dedent +from typing import Dict, Iterable, Union + +from jinja2 import StrictUndefined +from jinja2.sandbox import SandboxedEnvironment +from packaging.requirements import Requirement + +# TODO: Remove once support for Python 3.10 is dropped. 
+if sys.version_info >= (3, 11): + import tomllib +else: + import tomli as tomllib + +project_dir = Path(os.getenv("MKDOCS_CONFIG_DIR", ".")) +with project_dir.joinpath("pyproject.toml").open("rb") as pyproject_file: + pyproject = tomllib.load(pyproject_file) +project = pyproject["project"] +project_name = project["name"] +with project_dir.joinpath("devdeps.txt").open() as devdeps_file: + devdeps = [line.strip() for line in devdeps_file if line.strip() and not line.strip().startswith(("-e", "#"))] + +PackageMetadata = Dict[str, Union[str, Iterable[str]]] +Metadata = Dict[str, PackageMetadata] + + +def _merge_fields(metadata: dict) -> PackageMetadata: + fields = defaultdict(list) + for header, value in metadata.items(): + fields[header.lower()].append(value.strip()) + return { + field: value if len(value) > 1 or field in ("classifier", "requires-dist") else value[0] + for field, value in fields.items() + } + + +def _norm_name(name: str) -> str: + return name.replace("_", "-").replace(".", "-").lower() + + +def _requirements(deps: list[str]) -> dict[str, Requirement]: + return {_norm_name((req := Requirement(dep)).name): req for dep in deps} + + +def _extra_marker(req: Requirement) -> str | None: + if not req.marker: + return None + try: + return next(marker[2].value for marker in req.marker._markers if getattr(marker[0], "value", None) == "extra") + except StopIteration: + return None + + +def _get_metadata() -> Metadata: + metadata = {} + for pkg in distributions(): + name = _norm_name(pkg.name) # type: ignore[attr-defined,unused-ignore] + metadata[name] = _merge_fields(pkg.metadata) # type: ignore[arg-type] + metadata[name]["spec"] = set() + metadata[name]["extras"] = set() + metadata[name].setdefault("summary", "") + _set_license(metadata[name]) + return metadata + + +def _set_license(metadata: PackageMetadata) -> None: + license_field = metadata.get("license-expression", metadata.get("license", "")) + license_name = license_field if isinstance(license_field, str) else " + ".join(license_field) + check_classifiers = license_name in ("UNKNOWN", "Dual License", "") or license_name.count("\n") + if check_classifiers: + license_names = [] + for classifier in metadata["classifier"]: + if classifier.startswith("License ::"): + license_names.append(classifier.rsplit("::", 1)[1].strip()) + license_name = " + ".join(license_names) + metadata["license"] = license_name or "?" 
+ + +def _get_deps(base_deps: dict[str, Requirement], metadata: Metadata) -> Metadata: + deps = {} + for dep_name, dep_req in base_deps.items(): + if dep_name not in metadata or dep_name == "griffe-autodocstringstyle": + continue + metadata[dep_name]["spec"] |= {str(spec) for spec in dep_req.specifier} # type: ignore[operator] + metadata[dep_name]["extras"] |= dep_req.extras # type: ignore[operator] + deps[dep_name] = metadata[dep_name] + + again = True + while again: + again = False + for pkg_name in metadata: + if pkg_name in deps: + for pkg_dependency in metadata[pkg_name].get("requires-dist", []): + requirement = Requirement(pkg_dependency) + dep_name = _norm_name(requirement.name) + extra_marker = _extra_marker(requirement) + if ( + dep_name in metadata + and dep_name not in deps + and dep_name != project["name"] + and (not extra_marker or extra_marker in deps[pkg_name]["extras"]) + ): + metadata[dep_name]["spec"] |= {str(spec) for spec in requirement.specifier} # type: ignore[operator] + deps[dep_name] = metadata[dep_name] + again = True + + return deps + + +def _render_credits() -> str: + metadata = _get_metadata() + dev_dependencies = _get_deps(_requirements(devdeps), metadata) + prod_dependencies = _get_deps( + _requirements( + chain( # type: ignore[arg-type] + project.get("dependencies", []), + chain(*project.get("optional-dependencies", {}).values()), + ), + ), + metadata, + ) + + template_data = { + "project_name": project_name, + "prod_dependencies": sorted(prod_dependencies.values(), key=lambda dep: str(dep["name"]).lower()), + "dev_dependencies": sorted(dev_dependencies.values(), key=lambda dep: str(dep["name"]).lower()), + "more_credits": "http://pawamoy.github.io/credits/", + } + template_text = dedent( + """ + # Credits + + These projects were used to build *{{ project_name }}*. **Thank you!** + + [Python](https://www.python.org/) | + [uv](https://github.com/astral-sh/uv) | + [copier-uv](https://github.com/pawamoy/copier-uv) + + {% macro dep_line(dep) -%} + [{{ dep.name }}](https://pypi.org/project/{{ dep.name }}/) | {{ dep.summary }} | {{ ("`" ~ dep.spec|sort(reverse=True)|join(", ") ~ "`") if dep.spec else "" }} | `{{ dep.version }}` | {{ dep.license }} + {%- endmacro %} + + {% if prod_dependencies -%} + ### Runtime dependencies + + Project | Summary | Version (accepted) | Version (last resolved) | License + ------- | ------- | ------------------ | ----------------------- | ------- + {% for dep in prod_dependencies -%} + {{ dep_line(dep) }} + {% endfor %} + + {% endif -%} + {% if dev_dependencies -%} + ### Development dependencies + + Project | Summary | Version (accepted) | Version (last resolved) | License + ------- | ------- | ------------------ | ----------------------- | ------- + {% for dep in dev_dependencies -%} + {{ dep_line(dep) }} + {% endfor %} + + {% endif -%} + {% if more_credits %}**[More credits from the author]({{ more_credits }})**{% endif %} + """, + ) + jinja_env = SandboxedEnvironment(undefined=StrictUndefined) + return jinja_env.from_string(template_text).render(**template_data) + + +print(_render_credits()) diff --git a/scripts/gen_ref_nav.py b/scripts/gen_ref_nav.py new file mode 100644 index 0000000..6939e86 --- /dev/null +++ b/scripts/gen_ref_nav.py @@ -0,0 +1,37 @@ +"""Generate the code reference pages and navigation.""" + +from pathlib import Path + +import mkdocs_gen_files + +nav = mkdocs_gen_files.Nav() +mod_symbol = '
'
+
+root = Path(__file__).parent.parent
+src = root / "src"
+
+for path in sorted(src.rglob("*.py")):
+ module_path = path.relative_to(src).with_suffix("")
+ doc_path = path.relative_to(src).with_suffix(".md")
+ full_doc_path = Path("reference", doc_path)
+
+ parts = tuple(module_path.parts)
+
+ if parts[-1] == "__init__":
+ parts = parts[:-1]
+ doc_path = doc_path.with_name("index.md")
+ full_doc_path = full_doc_path.with_name("index.md")
+ elif parts[-1].startswith("_"):
+ continue
+
+ nav_parts = [f"{mod_symbol} {part}" for part in parts]
+ nav[tuple(nav_parts)] = doc_path.as_posix()
+
+ with mkdocs_gen_files.open(full_doc_path, "w") as fd:
+ ident = ".".join(parts)
+ fd.write(f"---\ntitle: {ident}\n---\n\n::: {ident}")
+
+ mkdocs_gen_files.set_edit_path(full_doc_path, ".." / path.relative_to(root))
+
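+# Each generated page is a small stub handing the module over to mkdocstrings,
+# roughly like this for a hypothetical module `package.module` (illustrative):
+#
+#     ---
+#     title: package.module
+#     ---
+#
+#     ::: package.module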
+with mkdocs_gen_files.open("reference/SUMMARY.md", "w") as nav_file:
+ nav_file.writelines(nav.build_literate_nav())
diff --git a/scripts/insiders.py b/scripts/insiders.py
new file mode 100644
index 0000000..1521248
--- /dev/null
+++ b/scripts/insiders.py
@@ -0,0 +1,203 @@
+"""Functions related to Insiders funding goals."""
+
+from __future__ import annotations
+
+import json
+import logging
+import os
+import posixpath
+from dataclasses import dataclass
+from datetime import date, datetime, timedelta
+from itertools import chain
+from pathlib import Path
+from typing import Iterable, cast
+from urllib.error import HTTPError
+from urllib.parse import urljoin
+from urllib.request import urlopen
+
+import yaml
+
+logger = logging.getLogger(f"mkdocs.logs.{__name__}")
+
+
+def human_readable_amount(amount: int) -> str: # noqa: D103
+ str_amount = str(amount)
+ if len(str_amount) >= 4: # noqa: PLR2004
+ return f"{str_amount[:len(str_amount)-3]},{str_amount[-3:]}"
+ return str_amount
+
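+# Example behaviour (illustrative): amounts below four digits pass through
+# unchanged, larger ones get a single thousands separator, e.g.
+# human_readable_amount(950) -> "950", human_readable_amount(12345) -> "12,345".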
+
+@dataclass
+class Project:
+ """Class representing an Insiders project."""
+
+ name: str
+ url: str
+
+
+@dataclass
+class Feature:
+ """Class representing an Insiders feature."""
+
+ name: str
+ ref: str | None
+ since: date | None
+ project: Project | None
+
+ def url(self, rel_base: str = "..") -> str | None: # noqa: D102
+ if not self.ref:
+ return None
+ if self.project:
+ rel_base = self.project.url
+ return posixpath.join(rel_base, self.ref.lstrip("/"))
+
+ def render(self, rel_base: str = "..", *, badge: bool = False) -> None: # noqa: D102
+ new = ""
+ if badge:
+ recent = self.since and date.today() - self.since <= timedelta(days=60) # noqa: DTZ011
+ if recent:
+ ft_date = self.since.strftime("%B %d, %Y") # type: ignore[union-attr]
+ new = f' :material-alert-decagram:{{ .new-feature .vibrate title="Added on {ft_date}" }}'
+ project = f"[{self.project.name}]({self.project.url}) — " if self.project else ""
+ feature = f"[{self.name}]({self.url(rel_base)})" if self.ref else self.name
+ print(f"- [{'x' if self.since else ' '}] {project}{feature}{new}")
+
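+# Feature.render() prints a markdown task-list item; with made-up names and URLs
+# it looks roughly like:
+#
+#     - [x] [some-project](https://example.org) — [Some feature](https://example.org/docs/)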
+
+@dataclass
+class Goal:
+ """Class representing an Insiders goal."""
+
+ name: str
+ amount: int
+ features: list[Feature]
+ complete: bool = False
+
+ @property
+ def human_readable_amount(self) -> str: # noqa: D102
+ return human_readable_amount(self.amount)
+
+ def render(self, rel_base: str = "..") -> None: # noqa: D102
+ print(f"#### $ {self.human_readable_amount} — {self.name}\n")
+ if self.features:
+ for feature in self.features:
+ feature.render(rel_base)
+ print("")
+ else:
+ print("There are no features in this goal for this project. ")
+ print(
+ "[See the features in this goal **for all Insiders projects.**]"
+ f"(https://pawamoy.github.io/insiders/#{self.amount}-{self.name.lower().replace(' ', '-')})",
+ )
+
+
+def load_goals(data: str, funding: int = 0, project: Project | None = None) -> dict[int, Goal]:
+    """Load goals from YAML data.
+
+    Parameters:
+        data: The YAML data.
+        funding: The current total funding, per month.
+        project: The project the goals are defined for.
+
+    Returns:
+        A dictionary of goals, keyed by their target monthly amount.
+    """
+ goals_data = yaml.safe_load(data)["goals"]
+ return {
+ amount: Goal(
+ name=goal_data["name"],
+ amount=amount,
+ complete=funding >= amount,
+ features=[
+ Feature(
+ name=feature_data["name"],
+ ref=feature_data.get("ref"),
+ since=feature_data.get("since") and datetime.strptime(feature_data["since"], "%Y/%m/%d").date(), # noqa: DTZ007
+ project=project,
+ )
+ for feature_data in goal_data["features"]
+ ],
+ )
+ for amount, goal_data in goals_data.items()
+ }
+
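+# The goals data parsed above is expected to look roughly like this
+# (hypothetical values, inferred from the keys accessed in load_goals):
+#
+#     goals:
+#       500:
+#         name: Some goal
+#         features:
+#           - name: Some feature
+#             ref: /insiders/#some-anchor
+#             since: 2024/01/01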
+
+def _load_goals_from_disk(path: str, funding: int = 0) -> dict[int, Goal]:
+ project_dir = os.getenv("MKDOCS_CONFIG_DIR", ".")
+ try:
+ data = Path(project_dir, path).read_text()
+ except OSError as error:
+ raise RuntimeError(f"Could not load data from disk: {path}") from error
+ return load_goals(data, funding)
+
+
+def _load_goals_from_url(source_data: tuple[str, str, str], funding: int = 0) -> dict[int, Goal]:
+ project_name, project_url, data_fragment = source_data
+ data_url = urljoin(project_url, data_fragment)
+ try:
+ with urlopen(data_url) as response: # noqa: S310
+ data = response.read()
+ except HTTPError as error:
+ raise RuntimeError(f"Could not load data from network: {data_url}") from error
+ return load_goals(data, funding, project=Project(name=project_name, url=project_url))
+
+
+def _load_goals(source: str | tuple[str, str, str], funding: int = 0) -> dict[int, Goal]:
+ if isinstance(source, str):
+ return _load_goals_from_disk(source, funding)
+ return _load_goals_from_url(source, funding)
+
+
+def funding_goals(source: str | list[str | tuple[str, str, str]], funding: int = 0) -> dict[int, Goal]:
+    """Load funding goals from a given data source.
+
+    Parameters:
+        source: The data source: a local file path, or a list mixing file paths and (name, url, fragment) tuples.
+        funding: The current total funding, per month.
+
+    Returns:
+        A dictionary of goals, keyed by their target monthly amount.
+    """
+ if isinstance(source, str):
+ return _load_goals_from_disk(source, funding)
+ goals = {}
+ for src in source:
+ source_goals = _load_goals(src, funding)
+ for amount, goal in source_goals.items():
+ if amount not in goals:
+ goals[amount] = goal
+ else:
+ goals[amount].features.extend(goal.features)
+ return {amount: goals[amount] for amount in sorted(goals)}
+
+
+def feature_list(goals: Iterable[Goal]) -> list[Feature]:
+ """Extract feature list from funding goals.
+
+ Parameters:
+ goals: A list of funding goals.
+
+ Returns:
+ A list of features.
+ """
+ return list(chain.from_iterable(goal.features for goal in goals))
+
+
+def load_json(url: str) -> str | list | dict: # noqa: D103
+ with urlopen(url) as response: # noqa: S310
+ return json.loads(response.read().decode())
+
+
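+# The module-level code below runs when this script is executed by markdown-exec
+# in the docs; `data_source` is expected to be injected into `globals()` by the
+# calling snippet (inferred from the lookup below).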
+data_source = globals()["data_source"]
+sponsor_url = "https://github.com/sponsors/pawamoy"
+data_url = "https://raw.githubusercontent.com/pawamoy/sponsors/main"
+numbers: dict[str, int] = load_json(f"{data_url}/numbers.json") # type: ignore[assignment]
+sponsors: list[dict] = load_json(f"{data_url}/sponsors.json") # type: ignore[assignment]
+current_funding = numbers["total"]
+sponsors_count = numbers["count"]
+goals = funding_goals(data_source, funding=current_funding)
+ongoing_goals = [goal for goal in goals.values() if not goal.complete]
+unreleased_features = sorted(
+ (ft for ft in feature_list(ongoing_goals) if ft.since),
+ key=lambda ft: cast(date, ft.since),
+ reverse=True,
+)
diff --git a/scripts/make b/scripts/make
new file mode 100755
index 0000000..d898022
--- /dev/null
+++ b/scripts/make
@@ -0,0 +1,210 @@
+#!/usr/bin/env python3
+"""Management commands."""
+
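+# Typical invocations (illustrative):
+#
+#     scripts/make setup   # create the virtual environments and install dependencies
+#     scripts/make check   # quality/type/docs checks in every venv, API check in the default one
+#     scripts/make docs    # any other task name is forwarded to `duty` in the default venv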
+from __future__ import annotations
+
+import os
+import shutil
+import subprocess
+import sys
+from contextlib import contextmanager
+from pathlib import Path
+from typing import Any, Iterator
+
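+# PYTHON_VERSIONS can be overridden from the environment, e.g.
+# `PYTHON_VERSIONS="3.11 3.12" scripts/make setup` (illustrative), to limit which
+# interpreters get a dedicated virtual environment under .venvs/.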
+PYTHON_VERSIONS = os.getenv("PYTHON_VERSIONS", "3.8 3.9 3.10 3.11 3.12 3.13").split()
+
+exe = ""
+prefix = ""
+
+
+def shell(cmd: str, capture_output: bool = False, **kwargs: Any) -> str | None:
+ """Run a shell command."""
+ if capture_output:
+ return subprocess.check_output(cmd, shell=True, text=True, **kwargs) # noqa: S602
+ subprocess.run(cmd, shell=True, check=True, stderr=subprocess.STDOUT, **kwargs) # noqa: S602
+ return None
+
+
+@contextmanager
+def environ(**kwargs: str) -> Iterator[None]:
+ """Temporarily set environment variables."""
+ original = dict(os.environ)
+ os.environ.update(kwargs)
+ try:
+ yield
+ finally:
+ os.environ.clear()
+ os.environ.update(original)
+
+
+def uv_install() -> None:
+ """Install dependencies using uv."""
+ uv_opts = ""
+ if "UV_RESOLUTION" in os.environ:
+ uv_opts = f"--resolution={os.getenv('UV_RESOLUTION')}"
+ requirements = shell(f"uv pip compile {uv_opts} pyproject.toml devdeps.txt", capture_output=True)
+ shell("uv pip install -r -", input=requirements, text=True)
+ if "CI" not in os.environ:
+ shell("uv pip install --no-deps -e .")
+ else:
+ shell("uv pip install --no-deps .")
+
+
+def setup() -> None:
+    """Set up the project."""
+ if not shutil.which("uv"):
+ raise ValueError("make: setup: uv must be installed, see https://github.com/astral-sh/uv")
+
+ print("Installing dependencies (default environment)") # noqa: T201
+ default_venv = Path(".venv")
+ if not default_venv.exists():
+ shell("uv venv --python python")
+ uv_install()
+
+ if PYTHON_VERSIONS:
+ for version in PYTHON_VERSIONS:
+ print(f"\nInstalling dependencies (python{version})") # noqa: T201
+ venv_path = Path(f".venvs/{version}")
+ if not venv_path.exists():
+ shell(f"uv venv --python {version} {venv_path}")
+ with environ(VIRTUAL_ENV=str(venv_path.resolve())):
+ uv_install()
+
+
+def activate(path: str) -> None:
+ """Activate a virtual environment."""
+ global exe, prefix # noqa: PLW0603
+
+ if (bin := Path(path, "bin")).exists():
+ activate_script = bin / "activate_this.py"
+ elif (scripts := Path(path, "Scripts")).exists():
+ activate_script = scripts / "activate_this.py"
+ exe = ".exe"
+ prefix = f"{path}/Scripts/"
+ else:
+ raise ValueError(f"make: activate: Cannot find activation script in {path}")
+
+ if not activate_script.exists():
+ raise ValueError(f"make: activate: Cannot find activation script in {path}")
+
+ exec(activate_script.read_text(), {"__file__": str(activate_script)}) # noqa: S102
+
+
+def run(version: str, cmd: str, *args: str, **kwargs: Any) -> None:
+ """Run a command in a virtual environment."""
+ kwargs = {"check": True, **kwargs}
+ if version == "default":
+ activate(".venv")
+ subprocess.run([f"{prefix}{cmd}{exe}", *args], **kwargs) # noqa: S603, PLW1510
+ else:
+ activate(f".venvs/{version}")
+ os.environ["MULTIRUN"] = "1"
+ subprocess.run([f"{prefix}{cmd}{exe}", *args], **kwargs) # noqa: S603, PLW1510
+
+
+def multirun(cmd: str, *args: str, **kwargs: Any) -> None:
+ """Run a command for all configured Python versions."""
+ if PYTHON_VERSIONS:
+ for version in PYTHON_VERSIONS:
+ run(version, cmd, *args, **kwargs)
+ else:
+ run("default", cmd, *args, **kwargs)
+
+
+def allrun(cmd: str, *args: str, **kwargs: Any) -> None:
+ """Run a command in all virtual environments."""
+ run("default", cmd, *args, **kwargs)
+ if PYTHON_VERSIONS:
+ multirun(cmd, *args, **kwargs)
+
+
+def clean() -> None:
+    """Delete build artifacts and cache files."""
+    paths_to_clean = ["build", "dist", "htmlcov", "site", ".coverage*", ".pdm-build"]
+    for path in paths_to_clean:
+        shell(f"rm -rf {path}")
+
+    cache_dirs = [".cache", ".pytest_cache", ".mypy_cache", ".ruff_cache", "__pycache__"]
+    for dirpath in Path(".").rglob("*"):
+        # Skip anything inside the virtual environments, then remove matching cache directories.
+        if any(dirpath.match(pattern) for pattern in cache_dirs) and dirpath.parts[0] not in (".venv", ".venvs"):
+            shutil.rmtree(dirpath, ignore_errors=True)
+
+
+def vscode() -> None:
+ """Configure VSCode to work on this project."""
+ Path(".vscode").mkdir(parents=True, exist_ok=True)
+ shell("cp -v config/vscode/* .vscode")
+
+
+def main() -> int:
+ """Main entry point."""
+ args = list(sys.argv[1:])
+ if not args or args[0] == "help":
+ if len(args) > 1:
+ run("default", "duty", "--help", args[1])
+ else:
+ print("Available commands") # noqa: T201
+ print(" help Print this help. Add task name to print help.") # noqa: T201
+ print(" setup Setup all virtual environments (install dependencies).") # noqa: T201
+ print(" run Run a command in the default virtual environment.") # noqa: T201
+ print(" multirun Run a command for all configured Python versions.") # noqa: T201
+ print(" allrun Run a command in all virtual environments.") # noqa: T201
+ print(" 3.x Run a command in the virtual environment for Python 3.x.") # noqa: T201
+ print(" clean Delete build artifacts and cache files.") # noqa: T201
+ print(" vscode Configure VSCode to work on this project.") # noqa: T201
+ try:
+ run("default", "python", "-V", capture_output=True)
+ except (subprocess.CalledProcessError, ValueError):
+ pass
+ else:
+ print("\nAvailable tasks") # noqa: T201
+ run("default", "duty", "--list")
+ return 0
+
+ while args:
+ cmd = args.pop(0)
+
+ if cmd == "run":
+ run("default", *args)
+ return 0
+
+ if cmd == "multirun":
+ multirun(*args)
+ return 0
+
+ if cmd == "allrun":
+ allrun(*args)
+ return 0
+
+ if cmd.startswith("3."):
+ run(cmd, *args)
+ return 0
+
+ opts = []
+ while args and (args[0].startswith("-") or "=" in args[0]):
+ opts.append(args.pop(0))
+
+ if cmd == "clean":
+ clean()
+ elif cmd == "setup":
+ setup()
+ elif cmd == "vscode":
+ vscode()
+ elif cmd == "check":
+ multirun("duty", "check-quality", "check-types", "check-docs")
+ run("default", "duty", "check-api")
+ elif cmd in {"check-quality", "check-docs", "check-types", "test"}:
+ multirun("duty", cmd, *opts)
+ else:
+ run("default", "duty", cmd, *opts)
+
+ return 0
+
+
+if __name__ == "__main__":
+ try:
+ sys.exit(main())
+ except subprocess.CalledProcessError as process:
+ if process.output:
+ print(process.output, file=sys.stderr) # noqa: T201
+ sys.exit(process.returncode)
diff --git a/src/griffe_autodocstringstyle/__init__.py b/src/griffe_autodocstringstyle/__init__.py
new file mode 100644
index 0000000..6dbdb3a
--- /dev/null
+++ b/src/griffe_autodocstringstyle/__init__.py
@@ -0,0 +1,8 @@
+"""griffe-autodocstringstyle package.
+
+Set docstring style to 'auto' for external packages.
+"""
+
+from __future__ import annotations
+
+__all__: list[str] = []
diff --git a/src/griffe_autodocstringstyle/debug.py b/src/griffe_autodocstringstyle/debug.py
new file mode 100644
index 0000000..e1fd30e
--- /dev/null
+++ b/src/griffe_autodocstringstyle/debug.py
@@ -0,0 +1,109 @@
+"""Debugging utilities."""
+
+from __future__ import annotations
+
+import os
+import platform
+import sys
+from dataclasses import dataclass
+from importlib import metadata
+
+
+@dataclass
+class Variable:
+ """Dataclass describing an environment variable."""
+
+ name: str
+ """Variable name."""
+ value: str
+ """Variable value."""
+
+
+@dataclass
+class Package:
+ """Dataclass describing a Python package."""
+
+ name: str
+ """Package name."""
+ version: str
+ """Package version."""
+
+
+@dataclass
+class Environment:
+ """Dataclass to store environment information."""
+
+ interpreter_name: str
+ """Python interpreter name."""
+ interpreter_version: str
+ """Python interpreter version."""
+ interpreter_path: str
+ """Path to Python executable."""
+ platform: str
+ """Operating System."""
+ packages: list[Package]
+ """Installed packages."""
+ variables: list[Variable]
+ """Environment variables."""
+
+
+def _interpreter_name_version() -> tuple[str, str]:
+ if hasattr(sys, "implementation"):
+ impl = sys.implementation.version
+ version = f"{impl.major}.{impl.minor}.{impl.micro}"
+ kind = impl.releaselevel
+ if kind != "final":
+ version += kind[0] + str(impl.serial)
+ return sys.implementation.name, version
+ return "", "0.0.0"
+
+
+def get_version(dist: str = "griffe-autodocstringstyle") -> str:
+ """Get version of the given distribution.
+
+ Parameters:
+ dist: A distribution name.
+
+ Returns:
+ A version number.
+ """
+ try:
+ return metadata.version(dist)
+ except metadata.PackageNotFoundError:
+ return "0.0.0"
+
+
+def get_debug_info() -> Environment:
+ """Get debug/environment information.
+
+ Returns:
+ Environment information.
+ """
+ py_name, py_version = _interpreter_name_version()
+ packages = ["griffe-autodocstringstyle"]
+ variables = ["PYTHONPATH", *[var for var in os.environ if var.startswith("GRIFFE_AUTODOCSTRINGSTYLE")]]
+ return Environment(
+ interpreter_name=py_name,
+ interpreter_version=py_version,
+ interpreter_path=sys.executable,
+ platform=platform.platform(),
+ variables=[Variable(var, val) for var in variables if (val := os.getenv(var))],
+ packages=[Package(pkg, get_version(pkg)) for pkg in packages],
+ )
+
+
+def print_debug_info() -> None:
+ """Print debug/environment information."""
+ info = get_debug_info()
+ print(f"- __System__: {info.platform}")
+ print(f"- __Python__: {info.interpreter_name} {info.interpreter_version} ({info.interpreter_path})")
+ print("- __Environment variables__:")
+ for var in info.variables:
+ print(f" - `{var.name}`: `{var.value}`")
+ print("- __Installed packages__:")
+ for pkg in info.packages:
+ print(f" - `{pkg.name}` v{pkg.version}")
+
+
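+# Running this module directly (e.g. `python -m griffe_autodocstringstyle.debug`)
+# prints a markdown bullet list; the values below are illustrative only:
+#
+#     - __System__: Linux-6.8.0-x86_64
+#     - __Python__: cpython 3.12.1 (/usr/bin/python3)
+#     - __Environment variables__:
+#     - __Installed packages__:
+#       - `griffe-autodocstringstyle` v0.0.0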
+if __name__ == "__main__":
+ print_debug_info()
diff --git a/src/griffe_autodocstringstyle/py.typed b/src/griffe_autodocstringstyle/py.typed
new file mode 100644
index 0000000..e69de29
diff --git a/tests/__init__.py b/tests/__init__.py
new file mode 100644
index 0000000..314ba0f
--- /dev/null
+++ b/tests/__init__.py
@@ -0,0 +1,7 @@
+"""Tests suite for `griffe_autodocstringstyle`."""
+
+from pathlib import Path
+
+TESTS_DIR = Path(__file__).parent
+TMP_DIR = TESTS_DIR / "tmp"
+FIXTURES_DIR = TESTS_DIR / "fixtures"
diff --git a/tests/conftest.py b/tests/conftest.py
new file mode 100644
index 0000000..3be27ba
--- /dev/null
+++ b/tests/conftest.py
@@ -0,0 +1 @@
+"""Configuration for the pytest test suite."""