tricks on coverage: still can't figure out multiprocessing
azuline committed Oct 26, 2023 · 1 parent 5ae96fc · commit 7b67765
Showing 4 changed files with 11 additions and 6 deletions.
Makefile (2 additions, 2 deletions)

@@ -4,11 +4,11 @@ typecheck:
 	mypy .
 
 test:
-	pytest -n logical --cov=. .
+	pytest -n logical .
 	coverage html
 
 test-seq:
-	pytest --cov=. .
+	pytest .
 	coverage html
 
 lintcheck:
flake.nix (0 additions, 1 deletion)

@@ -60,7 +60,6 @@
   echo "$path"
 }
 export ROSE_ROOT="$(find-up flake.nix)"
 # We intentionally do not allow installing Python packages to the
 # global Python environment. Mutable Python installations should be
 # handled via a virtualenv.
rose/__init__.py (1 addition, 1 deletion)

@@ -16,7 +16,7 @@
 
 # Add a logging handler for stdout unless we are testing. Pytest
 # captures logging output on its own, so by default, we do not attach our own.
-if "pytest" not in sys.modules or LOG_EVEN_THOUGH_WERE_IN_TEST:
+if "pytest" not in sys.modules or LOG_EVEN_THOUGH_WERE_IN_TEST:  # pragma: no cover
     stream_formatter = logging.Formatter(
         "[%(asctime)s] %(levelname)s: %(message)s",
         datefmt="%H:%M:%S",
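For context, `# pragma: no cover` is coverage.py's standard exclusion marker: placed on an `if` line, it excludes the entire guarded block from the report, which keeps branches that only run outside the test harness from dragging the percentage down. A minimal standalone sketch of the same pattern (the print is illustrative):

```python
import sys

# coverage.py skips any line marked "# pragma: no cover"; on an if
# statement, the whole guarded block is excluded from the report.
if "pytest" not in sys.modules:  # pragma: no cover
    print("not running under pytest; coverage.py ignores this block")
```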
rose/cache.py (8 additions, 2 deletions)

@@ -290,6 +290,12 @@ def update_cache_for_releases(
         # Create a queue to propagate exceptions back up to the parent.
         error_queue = manager.Queue()
 
+        # Track coverage. This won't do anything in production.
+        with contextlib.suppress(ImportError):
+            from pytest_cov.embed import cleanup_on_sigterm
+
+            cleanup_on_sigterm()
+
         with multiprocessing.Pool(processes=c.max_proc) as pool:
             # At 0, no batch. At 1, 1 batch. At 49, 1 batch. At 50, 1 batch. At 51, 2 batches.
             for i in range(0, len(release_dirs), batch_size):
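The trick here: `multiprocessing.Pool`'s context manager calls `terminate()` on exit, which kills workers with SIGTERM before coverage can flush its data file. `pytest_cov.embed.cleanup_on_sigterm()` is pytest-cov's documented hook for this; it installs a SIGTERM handler that saves coverage data before the worker dies, and registering it in the parent before the pool is created lets forked workers inherit it. A standalone sketch of the guarded-import pattern (the `square` worker is hypothetical):

```python
import contextlib
import multiprocessing

# Guarded import: in production pytest-cov is not installed, so the
# ImportError is suppressed and this block is a no-op.
with contextlib.suppress(ImportError):
    from pytest_cov.embed import cleanup_on_sigterm

    # Pool.__exit__ calls terminate(), which SIGTERMs the workers; this
    # handler flushes coverage data first. Registered before the fork so
    # child processes inherit it.
    cleanup_on_sigterm()


def square(n: int) -> int:
    return n * n


if __name__ == "__main__":
    with multiprocessing.Pool(processes=2) as pool:
        print(pool.map(square, range(8)))
```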
@@ -314,7 +320,7 @@ def _update_cache_for_releases_process(
     force: bool,
     known_virtual_dirnames: dict[str, bool],
     error_queue: "multiprocessing.Queue[Any]",
-) -> None:
+) -> None:  # pragma: no cover
     """General error handling stuff for the cache update subprocess."""
     try:
         return _update_cache_for_releases_executor(c, release_dirs, force, known_virtual_dirnames)
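The error queue exists because an exception raised inside a pool worker is silently dropped unless the parent collects the task's result; shipping it through a managed queue and re-raising in the parent surfaces failures. A minimal sketch of that pattern, with hypothetical names:

```python
import multiprocessing
from typing import Any


def _worker(n: int, error_queue: "multiprocessing.Queue[Any]") -> None:
    # Catch everything and ship it to the parent: uncollected exceptions
    # in pool workers would otherwise vanish silently.
    try:
        if n == 3:
            raise ValueError(f"worker failed on {n}")
    except Exception as e:
        error_queue.put(e)


if __name__ == "__main__":
    with multiprocessing.Manager() as manager:
        error_queue = manager.Queue()
        with multiprocessing.Pool(processes=2) as pool:
            for n in range(5):
                pool.apply_async(_worker, (n, error_queue))
            pool.close()
            pool.join()
        # Re-raise the first worker exception in the parent, if any.
        if not error_queue.empty():
            raise error_queue.get()
```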
@@ -329,7 +335,7 @@ def _update_cache_for_releases_executor(
     release_dirs: list[Path],
     force: bool,
     known_virtual_dirnames: dict[str, bool],
-) -> None:
+) -> None:  # pragma: no cover
     """The implementation logic, split out for multiprocessing."""
     # First, call readdir on every release directory. We store the results in a map of
     # Path Basename -> (Release ID if exists, filenames).
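As an aside on the "still can't figure out multiprocessing" in the commit message: coverage.py's own documented route for measuring subprocesses is to set the COVERAGE_PROCESS_START environment variable to a config file path and call `coverage.process_startup()` at interpreter startup, typically from a `sitecustomize.py` on `sys.path`. A minimal sketch of that alternative (not what this commit does):

```python
# sitecustomize.py — coverage.py's documented subprocess hook, shown here
# as an alternative to excluding worker code with "# pragma: no cover".
import coverage

# A no-op unless COVERAGE_PROCESS_START is set in the environment, so
# shipping this file is harmless when coverage is not in use.
coverage.process_startup()
```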
