diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS new file mode 100644 index 0000000..fe19a31 --- /dev/null +++ b/.github/CODEOWNERS @@ -0,0 +1 @@ +* @altaf-ali diff --git a/.github/workflows/dev.yml b/.github/workflows/dev.yml index 9cc5b82..4013d29 100644 --- a/.github/workflows/dev.yml +++ b/.github/workflows/dev.yml @@ -8,7 +8,7 @@ on: push: branches: [ main ] pull_request: - branches: [ main ] + branches: [ main, develop ] # Allows you to run this workflow manually from the Actions tab workflow_dispatch: @@ -60,7 +60,7 @@ jobs: uses: actions/checkout@v3 with: repository: spack/spack - ref: 0707ffd4e466402bf19dff1add59eaf2b6d9154e + ref: e8658d6493887ef702dd38f0e9ee5870a1651c1e path: spack - name: Update PATH diff --git a/README.md b/README.md index f59b25a..9a1e8dd 100644 --- a/README.md +++ b/README.md @@ -26,6 +26,20 @@ SoftPack Core - GraphQL backend service ## Installation +### External dependencies + +SoftPack Core requires Python version 3.11 or greater. + +This project also relies on Spack. Install that first: + +``` console +$ git clone -c feature.manyFiles=true --depth 1 https://github.com/spack/spack.git +$ source spack/share/spack/setup-env.sh +``` + +To start the service, you will also need to configure a git repository to store +artifacts. + ### Stable release To install SoftPack Core, run this command in your @@ -90,6 +104,48 @@ Run tests with [Tox][] poetry run tox ``` +To run integration tests, you need a git repository set up with token access and +a branch named after your git repo username (stripped of any @domain if your +username is an email address). + +Make sure the artifacts/repo section of ~/.softpack/core/config.yml is +configured correctly: + +``` +artifacts: + repo: + url: https://github.com/[your-org]/development-softpack-artifacts.git + username: [your-username] + author: [your-name] + email: [your-email] + writer: [your-token] +``` + +Then enable the integration tests by supplying --repo to `poetry run pytest`, or +to tox like this: + +``` +poetry run tox -- -- --repo +``` + +To discover all tests and run them (integration tests are skipped unless --repo is supplied): + +``` console +poetry run pytest tests -sv +``` + +To run just the integration tests: + +``` console +poetry run pytest tests/integration -sv --repo +``` + +To run an individual test: + +``` console +poetry run pytest tests/integration/test_artifacts.py::test_clone -sv --repo +``` + Run [MkDocs] server to view documentation: ``` console diff --git a/poetry.lock b/poetry.lock index d9493d8..3a2eacc 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,4 +1,4 @@ -# This file is automatically @generated by Poetry 1.5.1 and should not be changed by hand. +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand.
[[package]] name = "aiosqlite" @@ -978,6 +978,7 @@ files = [ {file = "greenlet-2.0.2-cp27-cp27m-win32.whl", hash = "sha256:6c3acb79b0bfd4fe733dff8bc62695283b57949ebcca05ae5c129eb606ff2d74"}, {file = "greenlet-2.0.2-cp27-cp27m-win_amd64.whl", hash = "sha256:283737e0da3f08bd637b5ad058507e578dd462db259f7f6e4c5c365ba4ee9343"}, {file = "greenlet-2.0.2-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d27ec7509b9c18b6d73f2f5ede2622441de812e7b1a80bbd446cb0633bd3d5ae"}, + {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:d967650d3f56af314b72df7089d96cda1083a7fc2da05b375d2bc48c82ab3f3c"}, {file = "greenlet-2.0.2-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:30bcf80dda7f15ac77ba5af2b961bdd9dbc77fd4ac6105cee85b0d0a5fcf74df"}, {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:26fbfce90728d82bc9e6c38ea4d038cba20b7faf8a0ca53a9c07b67318d46088"}, {file = "greenlet-2.0.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9190f09060ea4debddd24665d6804b995a9c122ef5917ab26e1566dcc712ceeb"}, @@ -986,6 +987,7 @@ files = [ {file = "greenlet-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:76ae285c8104046b3a7f06b42f29c7b73f77683df18c49ab5af7983994c2dd91"}, {file = "greenlet-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:2d4686f195e32d36b4d7cf2d166857dbd0ee9f3d20ae349b6bf8afc8485b3645"}, {file = "greenlet-2.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:c4302695ad8027363e96311df24ee28978162cdcdd2006476c43970b384a244c"}, + {file = "greenlet-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d4606a527e30548153be1a9f155f4e283d109ffba663a15856089fb55f933e47"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c48f54ef8e05f04d6eff74b8233f6063cb1ed960243eacc474ee73a2ea8573ca"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a1846f1b999e78e13837c93c778dcfc3365902cfb8d1bdb7dd73ead37059f0d0"}, {file = "greenlet-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a06ad5312349fec0ab944664b01d26f8d1f05009566339ac6f63f56589bc1a2"}, @@ -1015,6 +1017,7 @@ files = [ {file = "greenlet-2.0.2-cp37-cp37m-win32.whl", hash = "sha256:3f6ea9bd35eb450837a3d80e77b517ea5bc56b4647f5502cd28de13675ee12f7"}, {file = "greenlet-2.0.2-cp37-cp37m-win_amd64.whl", hash = "sha256:7492e2b7bd7c9b9916388d9df23fa49d9b88ac0640db0a5b4ecc2b653bf451e3"}, {file = "greenlet-2.0.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b864ba53912b6c3ab6bcb2beb19f19edd01a6bfcbdfe1f37ddd1778abfe75a30"}, + {file = "greenlet-2.0.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1087300cf9700bbf455b1b97e24db18f2f77b55302a68272c56209d5587c12d1"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:ba2956617f1c42598a308a84c6cf021a90ff3862eddafd20c3333d50f0edb45b"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fc3a569657468b6f3fb60587e48356fe512c1754ca05a564f11366ac9e306526"}, {file = "greenlet-2.0.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8eab883b3b2a38cc1e050819ef06a7e6344d4a990d24d45bc6f2cf959045a45b"}, @@ -1023,6 +1026,7 @@ files = [ {file = "greenlet-2.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:b0ef99cdbe2b682b9ccbb964743a6aca37905fda5e0452e5ee239b1654d37f2a"}, {file = "greenlet-2.0.2-cp38-cp38-win32.whl", hash = 
"sha256:b80f600eddddce72320dbbc8e3784d16bd3fb7b517e82476d8da921f27d4b249"}, {file = "greenlet-2.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:4d2e11331fc0c02b6e84b0d28ece3a36e0548ee1a1ce9ddde03752d9b79bba40"}, + {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8512a0c38cfd4e66a858ddd1b17705587900dd760c6003998e9472b77b56d417"}, {file = "greenlet-2.0.2-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:88d9ab96491d38a5ab7c56dd7a3cc37d83336ecc564e4e8816dbed12e5aaefc8"}, {file = "greenlet-2.0.2-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:561091a7be172ab497a3527602d467e2b3fbe75f9e783d8b8ce403fa414f71a6"}, {file = "greenlet-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:971ce5e14dc5e73715755d0ca2975ac88cfdaefcaab078a284fea6cfabf866df"}, @@ -2605,13 +2609,13 @@ testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "no [[package]] name = "pytest-asyncio" -version = "0.21.0" +version = "0.21.1" description = "Pytest support for asyncio" optional = false python-versions = ">=3.7" files = [ - {file = "pytest-asyncio-0.21.0.tar.gz", hash = "sha256:2b38a496aef56f56b0e87557ec313e11e1ab9276fc3863f6a7be0f1d0e415e1b"}, - {file = "pytest_asyncio-0.21.0-py3-none-any.whl", hash = "sha256:f2b3366b7cd501a4056858bd39349d5af19742aed2d81660b7998b6341c7eb9c"}, + {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, + {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, ] [package.dependencies] @@ -2672,6 +2676,23 @@ pytest = ">=6.0,<8.0" [package.extras] testing = ["pytest-asyncio (==0.20.*)", "pytest-cov (==4.*)"] +[[package]] +name = "pytest-mock" +version = "3.11.1" +description = "Thin-wrapper around the mock package for easier use with pytest" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-mock-3.11.1.tar.gz", hash = "sha256:7f6b125602ac6d743e523ae0bfa71e1a697a2f5534064528c6ff84c2f7c2fc7f"}, + {file = "pytest_mock-3.11.1-py3-none-any.whl", hash = "sha256:21c279fff83d70763b05f8874cc9cfb3fcacd6d354247a976f9529d19f9acf39"}, +] + +[package.dependencies] +pytest = ">=5.0" + +[package.extras] +dev = ["pre-commit", "pytest-asyncio", "tox"] + [[package]] name = "python-box" version = "7.0.1" @@ -2734,6 +2755,20 @@ files = [ pyasn1 = ">=0.3.7" pyasn1_modules = ">=0.1.5" +[[package]] +name = "python-multipart" +version = "0.0.6" +description = "A streaming multipart parser for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "python_multipart-0.0.6-py3-none-any.whl", hash = "sha256:ee698bab5ef148b0a760751c261902cd096e57e10558e11aca17646b74ee1c18"}, + {file = "python_multipart-0.0.6.tar.gz", hash = "sha256:e9925a80bb668529f1b67c7fdb0a5dacdd7cbfc6fb0bff3ea443fe22bdd62132"}, +] + +[package.extras] +dev = ["atomicwrites (==1.2.1)", "attrs (==19.2.0)", "coverage (==6.5.0)", "hatch", "invoke (==1.7.3)", "more-itertools (==4.3.0)", "pbr (==4.3.0)", "pluggy (==1.0.0)", "py (==1.11.0)", "pytest (==7.2.0)", "pytest-cov (==4.0.0)", "pytest-timeout (==2.1.0)", "pyyaml (==5.1)"] + [[package]] name = "python-slugify" version = "8.0.1" @@ -3134,7 +3169,8 @@ files = [ {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win32.whl", hash = "sha256:763d65baa3b952479c4e972669f679fe490eee058d5aa85da483ebae2009d231"}, {file = "ruamel.yaml.clib-0.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:d000f258cf42fec2b1bbf2863c61d7b8918d31ffee905da62dede869254d3b8a"}, {file = 
"ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:045e0626baf1c52e5527bd5db361bc83180faaba2ff586e763d3d5982a876a9e"}, - {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_12_6_arm64.whl", hash = "sha256:721bc4ba4525f53f6a611ec0967bdcee61b31df5a56801281027a3a6d1c2daf5"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-macosx_13_0_arm64.whl", hash = "sha256:1a6391a7cabb7641c32517539ca42cf84b87b667bad38b78d4d42dd23e957c81"}, + {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux2014_aarch64.whl", hash = "sha256:9c7617df90c1365638916b98cdd9be833d31d337dbcd722485597b43c4a215bf"}, {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:41d0f1fa4c6830176eef5b276af04c89320ea616655d01327d5ce65e50575c94"}, {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win32.whl", hash = "sha256:f6d3d39611ac2e4f62c3128a9eed45f19a6608670c5a2f4f07f24e8de3441d38"}, {file = "ruamel.yaml.clib-0.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:da538167284de58a52109a9b89b8f6a53ff8437dd6dc26d33b57bf6699153122"}, @@ -3312,7 +3348,7 @@ files = [ ] [package.dependencies] -greenlet = {version = "!=0.4.17", optional = true, markers = "python_version >= \"3\" and (platform_machine == \"win32\" or platform_machine == \"WIN32\" or platform_machine == \"AMD64\" or platform_machine == \"amd64\" or platform_machine == \"x86_64\" or platform_machine == \"ppc64le\" or platform_machine == \"aarch64\" or extra == \"asyncio\")"} +greenlet = {version = "!=0.4.17", optional = true, markers = "python_version >= \"3\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\" or extra == \"asyncio\")"} [package.extras] aiomysql = ["aiomysql", "greenlet (!=0.4.17)"] @@ -3845,4 +3881,4 @@ testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more [metadata] lock-version = "2.0" python-versions = ">=3.9,<4.0" -content-hash = "edfd9534063f496bf6c7de432111da487ed9fa24990f181ec1472b1c0fdf0038" +content-hash = "275c6e07b55140f0db60185c15cd07c6cc3e6210bb46a879d04aebb528628743" diff --git a/pyproject.toml b/pyproject.toml index 8616ac2..a3cb5d3 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,8 +13,6 @@ classifiers=[ 'License :: OSI Approved :: MIT License', 'Natural Language :: English', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.9', - 'Programming Language :: Python :: 3.10', 'Programming Language :: Python :: 3.11', ] packages = [ @@ -48,6 +46,9 @@ singleton-decorator = "^1.0.0" sqlalchemy = "1.4.45" strawberry-graphql = "^0.177.1" typer = "^0.9.0" +pytest-mock = "^3.11.1" +pytest-asyncio = "^0.21.1" +python-multipart = "^0.0.6" [tool.poetry.group.dev] optional = true @@ -132,11 +133,13 @@ skip_gitignore = true disallow_untyped_calls = true disallow_untyped_defs = true ignore_missing_imports = true +plugins = "strawberry.ext.mypy_plugin" [tool.pytest.ini_options] filterwarnings = [ "ignore::DeprecationWarning:starlette" ] +markers = "repo: mark test as altering a real git repo" [build-system] requires = ["poetry-core>=1.0.0"] diff --git a/softpack_core/artifacts.py b/softpack_core/artifacts.py index 90c19dc..3d9d9b9 100644 --- a/softpack_core/artifacts.py +++ b/softpack_core/artifacts.py @@ -5,22 +5,71 @@ """ import itertools +import shutil from dataclasses import dataclass +from enum import Enum from pathlib 
import Path -from typing import Iterable, Iterator, Optional, cast +from typing import Iterable, Iterator, List, Optional, Tuple, Union import pygit2 +import strawberry from box import Box +from fastapi import UploadFile + +from softpack_core.spack import Spack from .app import app -from .config.models import Credentials from .ldapapi import LDAP +@strawberry.type +class Package(Spack.PackageBase): + """A Strawberry model representing a package.""" + + version: Optional[str] = None + + @classmethod + def from_name(cls, name: str) -> 'Package': + """Makes a new Package based on the name. + + Args: + name (str): Combined name and version string, deliniated by an '@'. + + Returns: + Package: A Package with name set, and version set if given name had + a version. + """ + parts = name.split("@", 2) + + if len(parts) == 2: + return Package(name=parts[0], version=parts[1]) + + return Package(name=name) + + +@strawberry.enum +class State(Enum): + """Environment states.""" + + ready = 'ready' + queued = 'queued' + + class Artifacts: """Artifacts repo access class.""" environments_root = "environments" + environments_file = "softpack.yml" + module_file = "module" + readme_file = "README.md" + built_by_softpack_file = ".built_by_softpack" + built_by_softpack = "softpack" + generated_from_module_file = ".generated_from_module" + generated_from_module = "module" + users_folder_name = "users" + groups_folder_name = "groups" + credentials_callback = None + signature = None @dataclass class Object: @@ -68,13 +117,33 @@ def get(self, key: str) -> "Artifacts.Object": return Artifacts.Object(path=self.path, obj=self.obj[key]) def spec(self) -> Box: - """Get spec dictionary. + """Get dictionary of the softpack.yml file contents. + + Also includes the contents of any README.md file. Returns: Box: A boxed dictionary. """ - spec = self.obj["softpack.yml"] - return Box.from_yaml(spec.data) + info = Box.from_yaml(self.obj[Artifacts.environments_file].data) + + if Artifacts.readme_file in self.obj: + info["readme"] = self.obj[Artifacts.readme_file].data.decode() + + if Artifacts.module_file in self.obj: + info["state"] = State.ready + else: + info["state"] = State.queued + + if Artifacts.generated_from_module_file in self.obj: + info["type"] = Artifacts.generated_from_module + else: + info["type"] = Artifacts.built_by_softpack + + info.packages = list( + map(lambda p: Package.from_name(p), info.packages) + ) + + return info def __iter__(self) -> Iterator["Artifacts.Object"]: """A generator for returning items under an artifacts. 
@@ -97,27 +166,31 @@ def __init__(self) -> None: path = self.settings.artifacts.path.expanduser() / ".git" credentials = None try: - credentials = cast( - Credentials, self.settings.artifacts.repo.reader - ) credentials = pygit2.UserPass( - credentials.username, - credentials.password, + self.settings.artifacts.repo.username, + self.settings.artifacts.repo.writer, ) except Exception as e: print(e) - callbacks = pygit2.RemoteCallbacks(credentials=credentials) + self.credentials_callback = pygit2.RemoteCallbacks( + credentials=credentials + ) + + branch = self.settings.artifacts.repo.branch + if branch is None: + branch = "main" if path.is_dir(): - self.repo = pygit2.Repository(path) - else: - self.repo = pygit2.clone_repository( - self.settings.artifacts.repo.url, - path=path, - callbacks=callbacks, - bare=True, - ) + shutil.rmtree(path) + + self.repo = pygit2.clone_repository( + self.settings.artifacts.repo.url, + path=path, + callbacks=self.credentials_callback, + bare=True, + checkout_branch=branch, + ) self.reference = "/".join( [ @@ -127,6 +200,11 @@ def __init__(self) -> None: ] ) + self.signature = pygit2.Signature( + self.settings.artifacts.repo.author, + self.settings.artifacts.repo.email, + ) + def user_folder(self, user: Optional[str] = None) -> Path: """Get the user folder for a given user. @@ -136,7 +214,7 @@ def user_folder(self, user: Optional[str] = None) -> Path: Returns: Path: A user folder. """ - return self.environments_folder("users", user) + return self.environments_folder(self.users_folder_name, user) def group_folder(self, group: Optional[str] = None) -> Path: """Get the group folder for a given group. @@ -147,7 +225,7 @@ def group_folder(self, group: Optional[str] = None) -> Path: Returns: Path: A group folder. """ - return self.environments_folder("groups", group) + return self.environments_folder(self.groups_folder_name, group) def environments_folder(self, *args: Optional[str]) -> Path: """Get the folder under the environments folder. @@ -160,27 +238,25 @@ def environments_folder(self, *args: Optional[str]) -> Path: """ return Path(self.environments_root, *filter(None, list(args))) - def iter_user(self, user: Optional[str] = None) -> list[pygit2.Tree]: - """Iterate environments for a given user. - - Args: - user: A username or None. + def iter_users(self) -> list[pygit2.Tree]: + """Iterate environments for all users. Returns: list[pygit2.Tree]: List of environments """ - return self.iter_environments(self.user_folder(user)) - - def iter_group(self, group: Optional[str] = None) -> list[pygit2.Tree]: - """Iterate environments for a given group. + return self.iter_environments( + self.environments_folder(self.users_folder_name) + ) - Args: - group: A group name or None. + def iter_groups(self) -> list[pygit2.Tree]: + """Iterate environments for all groups. Returns: list[pygit2.Tree]: List of environments """ - return self.iter_environments(self.group_folder(group)) + return self.iter_environments( + self.environments_folder(self.groups_folder_name) + ) def iter_environments(self, path: Path) -> list[pygit2.Tree]: """Iterate environments under a path. @@ -191,7 +267,10 @@ def iter_environments(self, path: Path) -> list[pygit2.Tree]: Returns: list[pygit2.Tree]: List of environments """ - return [path / folder.name for folder in self.tree(str(path))] + try: + return [path / folder.name for folder in self.tree(str(path))] + except KeyError: + return list(()) def tree(self, path: str) -> pygit2.Tree: """Return a Tree object. 
@@ -205,7 +284,7 @@ def tree(self, path: str) -> pygit2.Tree: return self.repo.lookup_reference(self.reference).peel().tree[path] def environments(self, path: Path) -> Iterable: - """Return a list of environments in the repo. + """Return a list of environments in the repo under the given path. Args: path: a searchable path within the repo @@ -218,29 +297,190 @@ def environments(self, path: Path) -> Iterable: except KeyError: return iter(()) - def iter(self, user: Optional[str] = None) -> Iterable: - """Return am iterator for the specified user. + def iter(self) -> Iterable: + """Return an iterator over all environments. + + Returns: + Iterator: an iterator + """ + folders = self.iter_users() + self.iter_groups() + + return itertools.chain.from_iterable(map(self.environments, folders)) + + def get(self, path: Path, name: str) -> Optional[pygit2.Tree]: + """Return the environment at the specified name and path. Args: - user: a username + path: the path containing the environment folder + name: the name of the environment folder Returns: - Iterator: an iterator + pygit2.Tree: a pygit2.Tree or None """ try: - if user: - folders = list( - itertools.chain( - [self.user_folder(user)], - map(self.group_folder, self.ldap.groups(user) or []), - ) + return self.tree(str(self.environments_folder(str(path), name))) + except KeyError: + return None + + def commit_and_push( + self, tree_oid: pygit2.Oid, message: str + ) -> pygit2.Oid: + """Commit and push current changes to the remote repository. + + Args: + tree_oid: the oid of the tree object that will be committed. The + tree this refers to will replace the entire contents of the repo. + message: the commit message + """ + ref = self.repo.head.name + parents = [self.repo.lookup_reference(ref).target] + oid = self.repo.create_commit( + ref, self.signature, self.signature, message, tree_oid, parents + ) + remote = self.repo.remotes[0] + remote.push([self.repo.head.name], callbacks=self.credentials_callback) + return oid + + def build_tree( + self, + repo: pygit2.Repository, + root_tree: pygit2.Tree, + new_tree: pygit2.Oid, + path: Path, + ) -> pygit2.Oid: + """Expand new/updated sub tree to include the entire repository. + + Args: + repo: a bare repository + root_tree: the tree containing the entire repository + new_tree: the oid of the new/updated sub tree to be added to the + repository + path: the path from root_tree root to new_tree root + """ + while str(path) != ".": + try: + sub_treebuilder = repo.TreeBuilder( + root_tree[str(path.parent)] + if str(path.parent) != "." + else root_tree ) - else: - folders = self.iter_user() + self.iter_group() + except KeyError: + sub_treebuilder = repo.TreeBuilder() - return itertools.chain.from_iterable( - map(self.environments, folders) + sub_treebuilder.insert( + path.name, new_tree, pygit2.GIT_FILEMODE_TREE ) + new_tree = sub_treebuilder.write() + path = path.parent + return new_tree + + def create_file( + self, + folder_path: Path, + file_name: str, + contents: str, + new_folder: bool = False, + overwrite: bool = False, + ) -> pygit2.Oid: + """Create one or more file in the artifacts repo. 
- except KeyError: - return iter(()) + Args: + folder_path: the path to the folder the file will be placed in + file_name: the name of the file + contents: the contents of the file + new_folder: if True, create the file's parent folder as well + overwrite: if True, overwrite the file at the specified path + + Returns: + the OID of the new tree structure of the repository + """ + return self.create_files( + folder_path, [(file_name, contents)], new_folder, overwrite + ) + + def create_files( + self, + folder_path: Path, + files: List[Tuple[str, Union[str, UploadFile]]], + new_folder: bool = False, + overwrite: bool = False, + ) -> pygit2.Oid: + """Create one or more files in the artifacts repo. + + Args: + folder_path: the path to the folder the files will be placed in + files: a list of (file name, contents) tuples. + new_folder: if True, create the files' parent folder as well + overwrite: if True, overwrite the files at the specified paths + + Returns: + the OID of the new tree structure of the repository + """ + for file_name, _ in files: + if not overwrite and self.get(Path(folder_path), file_name): + raise FileExistsError("File already exists") + + root_tree = self.repo.head.peel(pygit2.Tree) + full_path = Path(self.environments_root, folder_path) + + if new_folder: + new_treebuilder = self.repo.TreeBuilder() + else: + folder = root_tree[full_path] + new_treebuilder = self.repo.TreeBuilder(folder) + + for file_name, contents in files: + if isinstance(contents, str): + file_oid = self.repo.create_blob(contents) + else: + file_oid = self.repo.create_blob_fromiobase(contents.file) + new_treebuilder.insert( + file_name, file_oid, pygit2.GIT_FILEMODE_BLOB + ) + + new_tree = new_treebuilder.write() + + # Expand to include the whole repo + full_tree = self.build_tree(self.repo, root_tree, new_tree, full_path) + + # Check for errors in the new tree + new_tree = self.repo.get(full_tree) + diff = self.repo.diff(new_tree, root_tree) + if len(diff) > len(files): + raise RuntimeError("Too many changes to the repo") + elif len(diff) < 1: + raise RuntimeError("No changes made to the environment") + + return full_tree + + def delete_environment( + self, + name: str, + path: str, + ) -> pygit2.Oid: + """Delete an environment folder in the artifacts repo.
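As a rough illustration (not part of this change) of how the new helpers compose — `create_file` builds a blob and tree, `build_tree` expands it to a full repo tree, and `commit_and_push` publishes it — here is a hedged sketch; it assumes a configured, writable artifacts repo, and the environment folder and file contents are made up:

```python
# Sketch only: assumes ~/.softpack/core/config.yml points at a writable
# artifacts repo; the folder path and file contents below are hypothetical.
from pathlib import Path

from softpack_core.artifacts import Artifacts

artifacts = Artifacts()  # clones the configured artifacts repo on construction
tree_oid = artifacts.create_file(
    Path("users/alice/my-env"),  # hypothetical environment folder
    "softpack.yml",
    "description: demo\npackages:\n  - zlib\n",
    new_folder=True,
)
artifacts.commit_and_push(tree_oid, "create demo environment")
```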
+ + Args: + name: the name of the environment + path: the path of the environment + commit_message: the commit message + + Returns: + the OID of the new tree structure of the repository + """ + if len(Path(path).parts) != 2: + raise ValueError("Not a valid environment path") + + # Get repository tree + root_tree = self.repo.head.peel(pygit2.Tree) + # Find environment in the tree + full_path = Path(self.environments_root, path) + target_tree = root_tree[full_path] + # Remove the environment + tree_builder = self.repo.TreeBuilder(target_tree) + tree_builder.remove(name) + new_tree = tree_builder.write() + + return self.build_tree(self.repo, root_tree, new_tree, full_path) diff --git a/softpack_core/config/conf/config.yml b/softpack_core/config/conf/config.yml index dce7203..e61c037 100644 --- a/softpack_core/config/conf/config.yml +++ b/softpack_core/config/conf/config.yml @@ -7,13 +7,17 @@ server: - http://localhost - http://localhost:8080 - http://localhost:3000 + - http://localhost:5173 artifacts: repo: url: https://github.com/softpack-io/softpack-artifacts.git + username: softpack-core + author: softpack + email: softpack@sanger.ac.uk path: ./softpack-artifacts -# LVault Config +# Vault Config # vault: # url: # path: diff --git a/softpack_core/config/models.py b/softpack_core/config/models.py index b0cd305..d054fef 100644 --- a/softpack_core/config/models.py +++ b/softpack_core/config/models.py @@ -31,13 +31,6 @@ class VaultConfig(BaseModel): token: str -class Credentials(BaseModel): - """Credentials model.""" - - username: str - password: str - - class ArtifactsConfig(BaseModel): """Artifacts config model.""" @@ -45,8 +38,12 @@ class Repo(BaseModel): """Repo model.""" url: AnyUrl - reader: Optional[Credentials] - writer: Optional[Credentials] + username: Optional[str] + author: str + email: str + reader: Optional[str] + writer: Optional[str] + branch: Optional[str] path: Path repo: Repo diff --git a/softpack_core/config/settings.py b/softpack_core/config/settings.py index c109265..539cda6 100644 --- a/softpack_core/config/settings.py +++ b/softpack_core/config/settings.py @@ -4,7 +4,6 @@ LICENSE file in the root directory of this source tree. """ -import itertools import sys from pathlib import Path from typing import Any, Optional, Tuple @@ -111,15 +110,13 @@ def get_secret(path: Path, key: str) -> dict[str, Any]: secrets = client.secrets.kv.v1.list_secrets( path=str(vault.path), mount_point="/" ) - merged_secrets = dict( - itertools.chain.from_iterable( - [ - get_secret(vault.path, key).items() - for key in secrets["data"]["keys"] - ] - ) - ) - return {vault.path.name: merged_secrets} + + return { + vault.path.name: { + key: get_secret(vault.path, key) + for key in secrets["data"]["keys"] + } + } except Exception as e: print(e, file=sys.stderr) diff --git a/softpack_core/ldapapi.py b/softpack_core/ldapapi.py index 899face..9c6c2f2 100644 --- a/softpack_core/ldapapi.py +++ b/softpack_core/ldapapi.py @@ -19,8 +19,9 @@ class LDAP: def __init__(self) -> None: """Constructor.""" - self.settings = cast(LDAPConfig, app.settings.ldap) - self.initialize() + if app.settings.ldap is not None: + self.settings = cast(LDAPConfig, app.settings.ldap) + self.initialize() def initialize(self) -> None: """Initialize an LDAP client. diff --git a/softpack_core/module.py b/softpack_core/module.py new file mode 100644 index 0000000..4af4043 --- /dev/null +++ b/softpack_core/module.py @@ -0,0 +1,127 @@ +"""Copyright (c) 2023 Genome Research Ltd. 
+ +This source code is licensed under the MIT license found in the +LICENSE file in the root directory of this source tree. +""" + +import re +from pathlib import Path +from string import Template +from typing import Union, cast + + +def ToSoftpackYML(name: str, contents: Union[bytes, str]) -> bytes: + """Converts an shpc-style module file to a softpack.yml file. + + It should have a format similar to that produced by shpc, with `module + whatis` outputting a "Name: " line, a "Version: " line, and optionally a + "Packages: " line to say what packages are available. Each package should + be separated by a comma. + + `module help` output will be translated into the description in the + softpack.yml. + + Args: + contents (bytes): The byte content of the module file. + + Returns: + bytes: The byte content of the softpack.yml file. + """ + in_help = False + + version = "" + packages: list[str] = [] + description = "" + + contents_bytes: bytes + + if type(contents) == str: + contents_bytes = contents.encode() + else: + contents_bytes = cast(bytes, contents) + + for line in contents_bytes.splitlines(): + line = line.lstrip() + if in_help: + if line == b"}": + in_help = False + elif line.startswith(b"puts stderr "): + line_str = ( + line.removeprefix(b"puts stderr") + .lstrip() + .decode('unicode_escape') + .replace("\\$", "$") + .removeprefix("\"") + .removesuffix("\"") + ) + description += " " + line_str + "\n" + else: + if line.startswith(b"proc ModulesHelp"): + in_help = True + elif line.startswith(b"module-whatis "): + line_str = ( + line.removeprefix(b"module-whatis") + .lstrip() + .decode('unicode_escape') + .removeprefix("\"") + .removesuffix("\"") + .lstrip() + ) + + if line_str.startswith("Name:"): + nv = line_str.removeprefix("Name:") + if nv != "": + name_value = list( + map(lambda x: x.strip().split()[0], nv.split(":")) + ) + + if name_value[0] is not None: + name = name_value[0] + + if len(name_value) > 1 and name_value[1] != "": + version = name_value[1].strip() + elif line_str.startswith("Version:"): + ver = line_str.removeprefix("Version:") + if ver != "": + vers = ver.split()[0] + if vers is not None and vers != "": + version = vers + elif line_str.startswith("Packages:"): + packages = list( + filter( + None, + map( + lambda x: x.strip(), + re.split( + r'[,\s]+', + line_str.removeprefix("Packages:"), + ), + ), + ) + ) + + if version != "": + name += f"@{version}" + + packages.insert(0, name) + + package_str = "\n - ".join(packages) + + return ( + f"description: |\n{description}packages:\n - {package_str}\n".encode() + ) + + +def GenerateEnvReadme(module_path: str) -> bytes: + """Generates a simple README file for the environment. + + Args: + module_path (str): The module path as used by the module command. + + Returns: + bytes: The byte content of the README.md file. + """ + with open(Path(__file__).parent / "templates" / "readme.tmpl", "r") as fh: + tmpl = Template(fh.read()) + + return tmpl.substitute({"module_path": module_path}).encode() diff --git a/softpack_core/schemas/environment.py b/softpack_core/schemas/environment.py index ba6d9e0..cd2b169 100644 --- a/softpack_core/schemas/environment.py +++ b/softpack_core/schemas/environment.py @@ -4,23 +4,170 @@ LICENSE file in the root directory of this source tree. 
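A small hedged sketch of exercising the conversion helpers from softpack_core/module.py above; the fixture path comes from the tests added in this diff, while the fallback name and `module load` path are purely illustrative:

```python
# Sketch only: convert one of the test fixture module files into softpack.yml
# and README contents using the helpers defined in softpack_core/module.py.
from softpack_core.module import GenerateEnvReadme, ToSoftpackYML

with open("tests/files/modules/all_fields.mod", "rb") as fh:
    yml = ToSoftpackYML("fallback-name", fh.read())  # name used if no "Name:" line

readme = GenerateEnvReadme("HGI/common/all_fields")  # hypothetical module path

print(yml.decode())
print(readme.decode())
```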
""" -import os -import uuid +import io from dataclasses import dataclass -from typing import Iterable, Optional +from pathlib import Path +from typing import Iterable, List, Optional, Tuple, Union, cast +import httpx +import starlette.datastructures import strawberry +from fastapi import UploadFile +from strawberry.file_uploads import Upload -from softpack_core.artifacts import Artifacts +from softpack_core.artifacts import Artifacts, Package, State +from softpack_core.module import GenerateEnvReadme, ToSoftpackYML from softpack_core.schemas.base import BaseSchema -from softpack_core.spack import Spack + + +# Interfaces +@strawberry.interface +class Success: + """Interface for successful results.""" + + message: str + + +@strawberry.interface +class Error: + """Interface for errors.""" + + message: str + + +# Success types +@strawberry.type +class CreateEnvironmentSuccess(Success): + """Environment successfully scheduled.""" + + +@strawberry.type +class UpdateEnvironmentSuccess(Success): + """Environment successfully updated.""" + + +@strawberry.type +class DeleteEnvironmentSuccess(Success): + """Environment successfully deleted.""" + + +@strawberry.type +class WriteArtifactSuccess(Success): + """Artifact successfully created.""" + + commit_oid: str + + +# Error types +@strawberry.type +class InvalidInputError(Error): + """Invalid input data.""" @strawberry.type -class Package(Spack.PackageBase): - """A Strawberry model representing a package.""" +class EnvironmentNotFoundError(Error): + """Environment not found.""" + + path: str + name: str + + +@strawberry.type +class EnvironmentAlreadyExistsError(Error): + """Environment name already exists.""" + + path: str + name: str + + +# Unions +CreateResponse = strawberry.union( + "CreateResponse", + [ + CreateEnvironmentSuccess, + InvalidInputError, + EnvironmentAlreadyExistsError, + ], +) + +UpdateResponse = strawberry.union( + "UpdateResponse", + [ + UpdateEnvironmentSuccess, + InvalidInputError, + EnvironmentNotFoundError, + ], +) + +DeleteResponse = strawberry.union( + "DeleteResponse", + [ + DeleteEnvironmentSuccess, + EnvironmentNotFoundError, + ], +) + +WriteArtifactResponse = strawberry.union( + "WriteArtifactResponse", + [ + WriteArtifactSuccess, + InvalidInputError, + ], +) + + +@strawberry.input +class PackageInput(Package): + """A Strawberry input model representing a package.""" + + def to_package(self) -> Package: + """Create a Package object from a PackageInput object. + + Return: a Package object + """ + return Package(**vars(self)) + + +@strawberry.input +class EnvironmentInput: + """A Strawberry input model representing an environment.""" + + name: str + path: str + description: str + packages: list[PackageInput] + + def validate(cls) -> Union[None, InvalidInputError]: + """Validate that all values have been supplied. + + Returns: + None if good, or InvalidInputError if not all values supplied. + """ + if any(len(value) == 0 for value in vars(cls).values()): + return InvalidInputError(message="all fields must be filled in") + + return None + + @classmethod + def from_path(cls, environment_path: str) -> 'EnvironmentInput': + """from_path creates a new EnvironmentInput based on an env path. + + Args: + environment_path (str): path of the environment. + + Returns: + EnvironmentInput: a package-less, description-less + EnvironmentInput. 
+ """ + environment_dirs = environment_path.split("/") + environment_name = environment_dirs.pop() - version: Optional[str] = None + return EnvironmentInput( + name=environment_name, + path="/".join(environment_dirs), + description="", + packages=list(), + ) @strawberry.type @@ -31,26 +178,25 @@ class Environment: name: str path: str description: str + readme: str + type: str packages: list[Package] + state: Optional[State] artifacts = Artifacts() @classmethod - def iter(cls, all: bool = False) -> Iterable["Environment"]: - """Get an iterator over Environment objects. + def iter(cls) -> Iterable["Environment"]: + """Get an iterator over all Environment objects. Returns: Iterable[Environment]: An iterator of Environment objects. """ - user = None - if not user: - # TODO: set username from the environment for now - # eventually this needs to be the name of the authenticated user - user = os.environ["USER"] - environments = cls.artifacts.iter(user=user) - return map(cls.from_artifact, environments) + environment_folders = cls.artifacts.iter() + environment_objects = map(cls.from_artifact, environment_folders) + return filter(None, environment_objects) @classmethod - def from_artifact(cls, obj: Artifacts.Object) -> "Environment": + def from_artifact(cls, obj: Artifacts.Object) -> Optional["Environment"]: """Create an Environment object from an artifact. Args: @@ -59,33 +205,417 @@ def from_artifact(cls, obj: Artifacts.Object) -> "Environment": Returns: Environment: An Environment object. """ - spec = obj.spec() - return Environment( - id=obj.oid, - name=obj.name, - path=obj.path.parent, - description=spec.description, - packages=map( - lambda package: Package(id=package, name=package), - spec.packages, - ), # type: ignore [call-arg] + try: + spec = obj.spec() + return Environment( + id=obj.oid, + name=obj.name, + path=str(obj.path.parent), + description=spec.description, + packages=spec.packages, + state=spec.state, + readme=spec.get("readme", ""), + type=spec.get("type", ""), + ) + except KeyError: + return None + + @classmethod + def create(cls, env: EnvironmentInput) -> CreateResponse: # type: ignore + """Create an Environment. + + Args: + env: Details of the new environment + + Returns: + A message confirming the success or failure of the operation. + """ + result = env.validate() + if result is not None: + return result + + response = cls.create_new_env(env, Artifacts.built_by_softpack_file) + if not isinstance(response, CreateEnvironmentSuccess): + return response + + # TODO: remove hard-coding of URL. + # Send build request + httpx.post( + "http://0.0.0.0:7080/environments/build", + json={ + "name": f"{env.path}/{env.name}", + "model": { + "description": env.description, + "packages": [f"{pkg.name}" for pkg in env.packages], + }, + }, + ) + + return CreateEnvironmentSuccess( + message="Successfully scheduled environment creation" + ) + + @classmethod + def create_new_env( + cls, env: EnvironmentInput, env_type: str + ) -> CreateResponse: # type: ignore + """Create a new environment in the repository. + + Adds an empty .created file in the desired location. Fails if this + already exists. + + Args: + env (EnvironmentInput): Details of the new environment. + env_type (str): One of Artifacts.built_by_softpack_file or + Artifacts.generated_from_module_file that denotes how the + environment was made. + + Returns: + CreateResponse: a CreateEnvironmentSuccess on success, or one of + (InvalidInputError, EnvironmentAlreadyExistsError) on error. + """ + # Check if a valid path has been provided. 
TODO: improve this to check + # that they can only create stuff in their own users folder, or in + # group folders of unix groups they belong to. + valid_dirs = [ + cls.artifacts.users_folder_name, + cls.artifacts.groups_folder_name, + ] + if not any(env.path.startswith(dir) for dir in valid_dirs): + return InvalidInputError(message="Invalid path") + + # Check if an env with same name already exists at given path + if cls.artifacts.get(Path(env.path), env.name): + return EnvironmentAlreadyExistsError( + message="This name is already used in this location", + path=env.path, + name=env.name, + ) + + # Create folder with place-holder file + new_folder_path = Path(env.path, env.name) + try: + tree_oid = cls.artifacts.create_file( + new_folder_path, env_type, "", True + ) + cls.artifacts.commit_and_push( + tree_oid, "create environment folder" + ) + except RuntimeError as e: + return InvalidInputError(message=str(e)) + + return CreateEnvironmentSuccess( + message="Successfully created environment in artifacts repo" ) @classmethod - def create(cls, name: str) -> "Environment": - """Create an Environment object. + def update( + cls, + env: EnvironmentInput, + current_path: str, + current_name: str, + ) -> UpdateResponse: # type: ignore + """Update an Environment. Args: - name: Name for an environment. + env: Details of the updated environment + current_path: The path of the current environment + current_name: The name of the current environment Returns: - Environment: A newly created Environment. + A message confirming the success or failure of the operation. """ - return Environment( - id=uuid.uuid4().hex, + result = env.validate() + if result is not None: + return result + + if current_name == "" or current_path == "": + return InvalidInputError(message="current values must be supplied") + + if env.path != current_path or env.name != current_name: + return InvalidInputError( + message=("change of name or path not currently supported") + ) + + result2 = cls.check_env_exists(Path(current_path, current_name)) + if result2 is not None: + return result2 + + httpx.post( + "http://0.0.0.0:7080/environments/build", + json={ + "name": f"{env.path}/{env.name}", + "model": { + "description": env.description, + "packages": [pkg.name for pkg in env.packages or []], + }, + }, + ) + + # TODO: validate the post worked + + return UpdateEnvironmentSuccess( + message="Successfully updated environment" + ) + + @classmethod + def check_env_exists( + cls, path: Path + ) -> Union[None, EnvironmentNotFoundError]: + """check_env_exists checks if an env with the given path exists. + + Args: + path (Path): path of the environment + + Returns: + Union[None, EnvironmentNotFoundError]: an error if env not found. + """ + if cls.artifacts.get(path.parent, path.name): + return None + + return EnvironmentNotFoundError( + message="No environment with this path and name found.", + path=str(path.parent), + name=path.name, + ) + + @classmethod + def delete(cls, name: str, path: str) -> DeleteResponse: # type: ignore + """Delete an Environment. + + Args: + name: the name of the environment + path: the path of the environment + + Returns: + A message confirming the success or failure of the operation.
+ """ + if cls.artifacts.get(Path(path), name): + tree_oid = cls.artifacts.delete_environment(name, path) + cls.artifacts.commit_and_push(tree_oid, "delete environment") + return DeleteEnvironmentSuccess( + message="Successfully deleted the environment" + ) + + return EnvironmentNotFoundError( + message="No environment with this name found in this location.", + path=path, name=name, - packges=[Package(id="unknown", name="unknown-package")], - ) # type: ignore [call-arg] + ) + + @classmethod + async def create_from_module( + cls, file: Upload, module_path: str, environment_path: str + ) -> CreateResponse: # type: ignore + """Create an Environment based on an existing module. + + The environment will not be built; a "fake" softpack.yml and the + supplied module file will be written as artifacts in a newly created + environment instead, so that they can be discovered. + + Args: + file: the module file to add to the repo, and to parse to fake a + corresponding softpack.yml. It should have a format similar + to that produced by shpc, with `module whatis` outputting + a "Name: " line, a "Version: " line, and optionally a + "Packages: " line to say what packages are available. + `module help` output will be translated into the description + in the softpack.yml. + module_path: the local path that users can `module load` - this is + used to auto-generate usage help text for this + environment. + environment_path: the subdirectories of environments folder that + artifacts will be stored in, eg. + users/username/software_name + + Returns: + A message confirming the success or failure of the operation. + """ + env = EnvironmentInput.from_path(environment_path) + + response = cls.create_new_env( + env, Artifacts.generated_from_module_file + ) + if not isinstance(response, CreateEnvironmentSuccess): + return response + + result = await cls.convert_module_file_to_artifacts( + file, env.name, environment_path, module_path + ) + + if not isinstance(result, WriteArtifactSuccess): + cls.delete(name=env.name, path=environment_path) + return InvalidInputError( + message="Write of module file failed: " + result.message + ) + + return CreateEnvironmentSuccess( + message="Successfully created environment in artifacts repo" + ) + + @classmethod + async def convert_module_file_to_artifacts( + cls, file: Upload, env_name: str, env_path: str, module_path: str + ) -> WriteArtifactResponse: # type: ignore + """convert_module_file_to_artifacts parses a module and writes to repo. + + Args: + file (Upload): shpc-style module file contents. + env_name (str): name of the environment. + env_path (str): path of the environment. + module_path (str): the `module load` path users will use. + + Returns: + WriteArtifactResponse: success or failure indicator. 
+ """ + contents = await file.read() + yml = ToSoftpackYML(env_name, contents) + readme = GenerateEnvReadme(module_path) + + module_file = UploadFile( + filename=cls.artifacts.module_file, file=io.BytesIO(contents) + ) + softpack_file = UploadFile( + filename=cls.artifacts.environments_file, file=io.BytesIO(yml) + ) + readme_file = UploadFile( + filename=cls.artifacts.readme_file, file=io.BytesIO(readme) + ) + + return await cls.write_module_artifacts( + module_file=module_file, + softpack_file=softpack_file, + readme_file=readme_file, + environment_path=env_path, + ) + + @classmethod + async def write_module_artifacts( + cls, + module_file: Upload, + softpack_file: Upload, + readme_file: Upload, + environment_path: str, + ) -> WriteArtifactResponse: # type: ignore + """Writes the given module and softpack files to the artifacts repo. + + Args: + module_file (Upload): An shpc-style module file. + softpack_file (Upload): A "fake" softpack.yml file describing what + the module file offers. + readme_file (Upload): An README.md file containing usage + instructions. + environment_path (str): Path to the environment, eg. + users/user/env. + + Returns: + WriteArtifactResponse: contains message and commit hash of + softpack.yml upload. + """ + module_file.name = cls.artifacts.module_file + readme_file.name = cls.artifacts.readme_file + softpack_file.name = cls.artifacts.environments_file + + return await cls.write_artifacts( + folder_path=environment_path, + files=[module_file, readme_file, softpack_file], + ) + + @classmethod + async def write_artifact( + cls, file: Upload, folder_path: str, file_name: str + ) -> WriteArtifactResponse: # type: ignore + """Add a file to the Artifacts repo. + + Args: + file: the file to be added to the repo. + folder_path: the path to the folder that the file will be added to. + file_name: the name of the file to be added. + """ + file.name = file_name + + return await cls.write_artifacts(folder_path, [file]) + + @classmethod + async def write_artifacts( + cls, folder_path: str, files: list[Union[Upload, UploadFile]] + ) -> WriteArtifactResponse: # type: ignore + """Add one or more files to the Artifacts repo. + + Args: + folder_path: the path to the folder that the file will be added to. + files: the files to add to the repo. + """ + try: + new_files: List[Tuple[str, Union[str, UploadFile]]] = [] + for file in files: + if isinstance(file, starlette.datastructures.UploadFile): + new_files.append( + (file.filename or "", cast(UploadFile, file)) + ) + else: + new_files.append( + (file.name, cast(str, (await file.read()).decode())) + ) + + tree_oid = cls.artifacts.create_files( + Path(folder_path), new_files, overwrite=True + ) + commit_oid = cls.artifacts.commit_and_push( + tree_oid, "write artifact" + ) + return WriteArtifactSuccess( + message="Successfully written artifact(s)", + commit_oid=str(commit_oid), + ) + + except Exception as e: + return InvalidInputError(message=str(e)) + + @classmethod + async def update_from_module( + cls, file: Upload, module_path: str, environment_path: str + ) -> UpdateResponse: # type: ignore + """Update an Environment based on an existing module. + + Same as create_from_module, but only works for an existing environment. + + Args: + file: the module file to add to the repo, and to parse to fake a + corresponding softpack.yml. It should have a format similar + to that produced by shpc, with `module whatis` outputting + a "Name: " line, a "Version: " line, and optionally a + "Packages: " line to say what packages are available. 
+ `module help` output will be translated into the description + in the softpack.yml. + module_path: the local path that users can `module load` - this is + used to auto-generate usage help text for this + environment. + environment_path: the subdirectories of environments folder that + artifacts will be stored in, eg. + users/username/software_name + + Returns: + A message confirming the success or failure of the operation. + """ + env = EnvironmentInput.from_path(environment_path) + + result = cls.check_env_exists(Path(environment_path)) + if result is not None: + return result + + result = await cls.convert_module_file_to_artifacts( + file, env.name, environment_path, module_path + ) + + if not isinstance(result, WriteArtifactSuccess): + return InvalidInputError( + message="Write of module file failed: " + result.message + ) + + return UpdateEnvironmentSuccess( + message="Successfully updated environment in artifacts repo" + ) class EnvironmentSchema(BaseSchema): @@ -101,4 +631,18 @@ class Query: class Mutation: """GraphQL mutation schema.""" - createEnvironment: Environment = Environment.create # type: ignore + createEnvironment: CreateResponse = Environment.create # type: ignore + updateEnvironment: UpdateResponse = Environment.update # type: ignore + deleteEnvironment: DeleteResponse = Environment.delete # type: ignore + # writeArtifact: WriteArtifactResponse = ( # type: ignore + # Environment.write_artifact + # ) + # writeArtifacts: WriteArtifactResponse = ( # type: ignore + # Environment.write_artifacts + # ) + createFromModule: CreateResponse = ( # type: ignore + Environment.create_from_module + ) + updateFromModule: UpdateResponse = ( # type: ignore + Environment.update_from_module + ) diff --git a/softpack_core/schemas/package_collection.py b/softpack_core/schemas/package_collection.py index 289a859..24093de 100644 --- a/softpack_core/schemas/package_collection.py +++ b/softpack_core/schemas/package_collection.py @@ -52,7 +52,7 @@ def from_collection( return PackageCollection( id=collection.id, name=collection.name, - packages=map(cls.from_package, collection.packages), + packages=list(map(cls.from_package, collection.packages)), ) # type: ignore [call-arg] @classmethod @@ -67,7 +67,7 @@ def from_package(cls, package: Spack.Package) -> PackageMultiVersion: """ return PackageMultiVersion( - id=package.id, name=package.name, versions=package.versions + name=package.name, versions=package.versions ) # type: ignore [call-arg] diff --git a/softpack_core/service.py b/softpack_core/service.py index eaafe32..6ea3829 100644 --- a/softpack_core/service.py +++ b/softpack_core/service.py @@ -5,11 +5,22 @@ """ +import urllib.parse +from pathlib import Path + import typer import uvicorn +from fastapi import APIRouter, Request, UploadFile from typer import Typer from typing_extensions import Annotated +from softpack_core.schemas.environment import ( + CreateEnvironmentSuccess, + Environment, + EnvironmentInput, + WriteArtifactSuccess, +) + from .api import API from .app import app @@ -19,6 +30,7 @@ class ServiceAPI(API): prefix = "/service" commands = Typer(help="Commands for managing core service.") + router = APIRouter() @staticmethod @commands.command(help="Start the SoftPack Core API service.") @@ -46,3 +58,37 @@ def run( reload=reload, log_level="debug", ) + + @staticmethod + @router.post("/upload") + async def upload_artifacts( # type: ignore[no-untyped-def] + request: Request, + file: list[UploadFile], + ): + """upload_artifacts is a POST fn that adds files to an environment. 
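For orientation, a hedged sketch of calling the new createEnvironment mutation over HTTP with httpx (already a dependency in this diff); the /graphql endpoint path, port, and the `env` argument name are assumptions based on Strawberry/FastAPI defaults and the `Environment.create` signature, not something this diff pins down:

```python
# Sketch only: endpoint URL and argument/field names are assumptions drawn
# from the schema definitions above and Strawberry's default naming.
import httpx

query = """
mutation($env: EnvironmentInput!) {
  createEnvironment(env: $env) {
    ... on CreateEnvironmentSuccess { message }
    ... on EnvironmentAlreadyExistsError { message path name }
    ... on InvalidInputError { message }
  }
}
"""
variables = {
    "env": {
        "name": "my-env",
        "path": "users/alice",
        "description": "example environment",
        "packages": [{"name": "samtools", "version": "1.17"}],
    }
}

resp = httpx.post(
    "http://localhost:8000/graphql",  # assumed local GraphQL endpoint
    json={"query": query, "variables": variables},
)
print(resp.json())
```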
+ + The environment does not need to exist already. + + Args: + file (List[UploadFile]): The files to be uploaded. + request (Request): The POST request which contains the environment + path in the query. + + Returns: + WriteArtifactResponse + """ + env_path = urllib.parse.unquote(request.url.query) + if Environment.check_env_exists(Path(env_path)) is not None: + create_response = Environment.create_new_env( + EnvironmentInput.from_path(env_path), + Environment.artifacts.built_by_softpack_file, + ) + + if not isinstance(create_response, CreateEnvironmentSuccess): + return create_response + + resp = await Environment.write_artifacts(env_path, file) + if not isinstance(resp, WriteArtifactSuccess): + raise Exception(resp) + + return resp diff --git a/softpack_core/spack.py b/softpack_core/spack.py index 641eab9..6bc13fc 100644 --- a/softpack_core/spack.py +++ b/softpack_core/spack.py @@ -63,7 +63,6 @@ def load_repo_list(self) -> list: class PackageBase: """Wrapper for a spack package.""" - id: str name: str @dataclass @@ -77,7 +76,6 @@ def load_package_list(self) -> list[Package]: return list( map( lambda package: self.Package( - id=uuid.uuid4().hex, name=package.name, versions=[ str(ver) for ver in list(package.versions.keys()) diff --git a/softpack_core/templates/readme.tmpl b/softpack_core/templates/readme.tmpl new file mode 100644 index 0000000..9e41bf0 --- /dev/null +++ b/softpack_core/templates/readme.tmpl @@ -0,0 +1,15 @@ +# Usage + +To use this environment, run: + +``` +module load $module_path +``` + +This will usually add your desired software to your PATH. Check the description +of the environment for more information, which might also be available by +running: + +``` +module help $module_path +``` diff --git a/tests/__init__.py b/tests/__init__.py deleted file mode 100644 index f4b2d43..0000000 --- a/tests/__init__.py +++ /dev/null @@ -1 +0,0 @@ -"""Unit test package for softpack-core.""" diff --git a/tests/conftest.py b/tests/conftest.py index c9b0e64..af3f163 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -4,66 +4,28 @@ LICENSE file in the root directory of this source tree. 
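A hedged client-side sketch of the new upload route defined in service.py above; the host, port, and any mount prefix for ServiceAPI.router are deployment-dependent assumptions, and the environment path travels as the raw query string, matching what the handler expects:

```python
# Sketch only: host/port and route prefix are assumptions; the multipart field
# name "file" matches the handler's `file: list[UploadFile]` parameter.
import httpx

env_path = "users/alice/my-env"  # hypothetical environment path
files = [
    ("file", ("softpack.yml", b"description: demo\npackages:\n  - zlib\n")),
    ("file", ("README.md", b"# demo\n")),
]

resp = httpx.post(f"http://localhost:8000/upload?{env_path}", files=files)
print(resp.status_code, resp.json())
```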
""" - -import time -from multiprocessing import Process -from threading import Thread - -import httpx import pytest -import requests -from fastapi.testclient import TestClient -from typer.testing import CliRunner - -from softpack_core.app import app - - -@pytest.fixture -def client() -> TestClient: - return TestClient(app.router) - - -class CLI: - def __init__(self): - self.runner = CliRunner() - - def invoke(self, *args, **kwargs): - return self.runner.invoke(app.commands, *args, **kwargs) - - -@pytest.fixture -def cli() -> CLI: - return CLI() - - -def service_run(): - cli = CLI() - cli.invoke(["service", "run"]) - - -@pytest.fixture -def service_factory(): - def create_service(module): - service = module(target=service_run, daemon=True) - service.start() - while True: - try: - response = requests.get(app.url()) - if response.status_code == httpx.codes.OK: - break - except requests.ConnectionError: - time.sleep(0.1) - continue - return service - - return create_service - - -@pytest.fixture -def service(service_factory): - return service_factory(Process) -@pytest.fixture -def service_thread(service_factory): - return service_factory(Thread) +def pytest_addoption(parser): + parser.addoption( + "--repo", + action="store_true", + default=False, + help=("run integration tests that alter your real git repo"), + ) + + +def pytest_collection_modifyitems(config, items): + if config.getoption("--repo"): + return + skip_repo = pytest.mark.skip( + reason=( + "specify --repo to run integration " + "tests that will alter your " + "configured git repo" + ) + ) + for item in items: + if "repo" in item.keywords: + item.add_marker(skip_repo) diff --git a/tests/files/modules/all_fields.mod b/tests/files/modules/all_fields.mod new file mode 100644 index 0000000..9d51de2 --- /dev/null +++ b/tests/files/modules/all_fields.mod @@ -0,0 +1,17 @@ +#%Module + +proc ModulesHelp { } { + + puts stderr "Help text line 1" + + puts stderr "" + puts stderr "Help text line 2" + +} + +module-whatis "Name: name_of_container " +module-whatis "Version:1.0.1" + +module-whatis "Foo: bar" + +module-whatis "Packages: pkg1, pkg2,pkg3 pkg4 " \ No newline at end of file diff --git a/tests/files/modules/all_fields.yml b/tests/files/modules/all_fields.yml new file mode 100644 index 0000000..2ddaa4c --- /dev/null +++ b/tests/files/modules/all_fields.yml @@ -0,0 +1,10 @@ +description: | + Help text line 1 + + Help text line 2 +packages: + - name_of_container@1.0.1 + - pkg1 + - pkg2 + - pkg3 + - pkg4 diff --git a/tests/files/modules/bad_name.mod b/tests/files/modules/bad_name.mod new file mode 100644 index 0000000..ef6da5e --- /dev/null +++ b/tests/files/modules/bad_name.mod @@ -0,0 +1,17 @@ +#%Module + +proc ModulesHelp { } { + + puts stderr "Help text line 1" + + puts stderr "" + puts stderr "Help text line 2" + +} + +module-whatis "Name: name_of_container\nmore_name " +module-whatis "Version:1.0.1" + +module-whatis "Foo: bar" + +module-whatis "Packages: pkg1, pkg2,pkg3 pkg4 " \ No newline at end of file diff --git a/tests/files/modules/bad_name.yml b/tests/files/modules/bad_name.yml new file mode 100644 index 0000000..2ddaa4c --- /dev/null +++ b/tests/files/modules/bad_name.yml @@ -0,0 +1,10 @@ +description: | + Help text line 1 + + Help text line 2 +packages: + - name_of_container@1.0.1 + - pkg1 + - pkg2 + - pkg3 + - pkg4 diff --git a/tests/files/modules/bad_packages.mod b/tests/files/modules/bad_packages.mod new file mode 100644 index 0000000..f7018f5 --- /dev/null +++ b/tests/files/modules/bad_packages.mod @@ -0,0 +1,17 @@ +#%Module 
+ +proc ModulesHelp { } { + + puts stderr "Help text line 1" + + puts stderr "" + puts stderr "Help text line 2" + +} + +module-whatis "Name: name_of_container " +module-whatis "Version:1.0.1" + +module-whatis "Foo: bar" + +module-whatis "Packages: pkg1, pkg\n2,pkg3 pkg4 " \ No newline at end of file diff --git a/tests/files/modules/bad_packages.yml b/tests/files/modules/bad_packages.yml new file mode 100644 index 0000000..d57aafd --- /dev/null +++ b/tests/files/modules/bad_packages.yml @@ -0,0 +1,11 @@ +description: | + Help text line 1 + + Help text line 2 +packages: + - name_of_container@1.0.1 + - pkg1 + - pkg + - 2 + - pkg3 + - pkg4 diff --git a/tests/files/modules/bad_version.mod b/tests/files/modules/bad_version.mod new file mode 100644 index 0000000..a9fd5c6 --- /dev/null +++ b/tests/files/modules/bad_version.mod @@ -0,0 +1,17 @@ +#%Module + +proc ModulesHelp { } { + + puts stderr "Help text line 1" + + puts stderr "" + puts stderr "Help text line 2" + +} + +module-whatis "Name: name_of_container " +module-whatis "Version: 1.0.1\na" + +module-whatis "Foo: bar" + +module-whatis "Packages: pkg1, pkg2,pkg3 pkg4 " \ No newline at end of file diff --git a/tests/files/modules/bad_version.yml b/tests/files/modules/bad_version.yml new file mode 100644 index 0000000..2ddaa4c --- /dev/null +++ b/tests/files/modules/bad_version.yml @@ -0,0 +1,10 @@ +description: | + Help text line 1 + + Help text line 2 +packages: + - name_of_container@1.0.1 + - pkg1 + - pkg2 + - pkg3 + - pkg4 diff --git a/tests/files/modules/empty_name.mod b/tests/files/modules/empty_name.mod new file mode 100644 index 0000000..e7b6335 --- /dev/null +++ b/tests/files/modules/empty_name.mod @@ -0,0 +1,17 @@ +#%Module + +proc ModulesHelp { } { + + puts stderr "Help text line 1" + + puts stderr "" + puts stderr "Help text line 2" + +} + +module-whatis "Name:" +module-whatis "Version:1.0.1" + +module-whatis "Foo: bar" + +module-whatis "Packages: pkg1, pkg2,pkg3 pkg4 " \ No newline at end of file diff --git a/tests/files/modules/empty_name.yml b/tests/files/modules/empty_name.yml new file mode 100644 index 0000000..2350c8c --- /dev/null +++ b/tests/files/modules/empty_name.yml @@ -0,0 +1,10 @@ +description: | + Help text line 1 + + Help text line 2 +packages: + - empty_name@1.0.1 + - pkg1 + - pkg2 + - pkg3 + - pkg4 diff --git a/tests/files/modules/minimal.mod b/tests/files/modules/minimal.mod new file mode 100644 index 0000000..e69de29 diff --git a/tests/files/modules/minimal.yml b/tests/files/modules/minimal.yml new file mode 100644 index 0000000..c625ae5 --- /dev/null +++ b/tests/files/modules/minimal.yml @@ -0,0 +1,3 @@ +description: | +packages: + - minimal diff --git a/tests/files/modules/no_description.mod b/tests/files/modules/no_description.mod new file mode 100644 index 0000000..ef5e493 --- /dev/null +++ b/tests/files/modules/no_description.mod @@ -0,0 +1,7 @@ +#%Module + + + +module-whatis "Name: name_of_container " + +module-whatis "Foo: bar" diff --git a/tests/files/modules/no_description.yml b/tests/files/modules/no_description.yml new file mode 100644 index 0000000..0ca0685 --- /dev/null +++ b/tests/files/modules/no_description.yml @@ -0,0 +1,3 @@ +description: | +packages: + - name_of_container diff --git a/tests/files/modules/no_name.mod b/tests/files/modules/no_name.mod new file mode 100644 index 0000000..62c8892 --- /dev/null +++ b/tests/files/modules/no_name.mod @@ -0,0 +1,12 @@ +#%Module + +proc ModulesHelp { } { + + puts stderr "Help text line 1" + + puts stderr "" + puts stderr "Help text line 2" + +} + 
+module-whatis "Foo: bar" diff --git a/tests/files/modules/no_name.yml b/tests/files/modules/no_name.yml new file mode 100644 index 0000000..9a29b05 --- /dev/null +++ b/tests/files/modules/no_name.yml @@ -0,0 +1,6 @@ +description: | + Help text line 1 + + Help text line 2 +packages: + - no_name diff --git a/tests/files/modules/no_pkgs.mod b/tests/files/modules/no_pkgs.mod new file mode 100644 index 0000000..4dd4b62 --- /dev/null +++ b/tests/files/modules/no_pkgs.mod @@ -0,0 +1,15 @@ +#%Module + +proc ModulesHelp { } { + + puts stderr "Help text line 1" + + puts stderr "" + puts stderr "Help text line 2" + +} + +module-whatis "Name: name_of_container " +module-whatis "Version:1.0.1" + +module-whatis "Foo: bar" diff --git a/tests/files/modules/no_pkgs.yml b/tests/files/modules/no_pkgs.yml new file mode 100644 index 0000000..3e07a3d --- /dev/null +++ b/tests/files/modules/no_pkgs.yml @@ -0,0 +1,6 @@ +description: | + Help text line 1 + + Help text line 2 +packages: + - name_of_container@1.0.1 diff --git a/tests/files/modules/no_version.mod b/tests/files/modules/no_version.mod new file mode 100644 index 0000000..cbbd338 --- /dev/null +++ b/tests/files/modules/no_version.mod @@ -0,0 +1,14 @@ +#%Module + +proc ModulesHelp { } { + + puts stderr "Help text line 1" + + puts stderr "" + puts stderr "Help text line 2" + +} + +module-whatis "Name: name_of_container " + +module-whatis "Foo: bar" diff --git a/tests/files/modules/no_version.yml b/tests/files/modules/no_version.yml new file mode 100644 index 0000000..d6afca5 --- /dev/null +++ b/tests/files/modules/no_version.yml @@ -0,0 +1,6 @@ +description: | + Help text line 1 + + Help text line 2 +packages: + - name_of_container diff --git a/tests/files/modules/shpc.mod b/tests/files/modules/shpc.mod new file mode 100644 index 0000000..0feb40c --- /dev/null +++ b/tests/files/modules/shpc.mod @@ -0,0 +1,210 @@ +#%Module + +#===== +# Created by singularity-hpc (https://github.com/singularityhub/singularity-hpc) +# ## +# quay.io/biocontainers/ldsc:1.0.1--pyhdfd78af_2 on 2023-08-15 12:08:41.851818 +#===== + +proc ModulesHelp { } { + + puts stderr "This module is a singularity container wrapper for quay.io/biocontainers/ldsc:1.0.1--pyhdfd78af_2 v1.0.1--pyhdfd78af_2" + + puts stderr "" + puts stderr "Container (available through variable SINGULARITY_CONTAINER):" + puts stderr "" + puts stderr " - /software/hgi/containers/shpc/quay.io/biocontainers/ldsc/1.0.1--pyhdfd78af_2/quay.io-biocontainers-ldsc-1.0.1--pyhdfd78af_2-sha256:308ddebaa643d50306779ce42752eb4c4a3e1635be74531594013959e312af2c.sif" + puts stderr "" + puts stderr "Commands include:" + puts stderr "" + puts stderr " - ldsc-run:" + puts stderr " singularity run -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh \"\$@\"" + puts stderr " - ldsc-shell:" + puts stderr " singularity shell -s /bin/sh -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh " + puts stderr " - ldsc-exec:" + puts stderr " singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh \"\$@\"" + puts stderr " - ldsc-inspect-runscript:" + puts stderr " singularity inspect -r " + puts stderr " - ldsc-inspect-deffile:" + puts stderr " singularity inspect -d " + puts stderr " - ldsc-container:" + puts stderr " echo \"\$SINGULARITY_CONTAINER\"" + puts stderr "" + puts stderr " - ldsc.py:" + puts stderr " singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/ldsc.py \"\$@\"" + puts stderr " - munge_sumstats.py:" + puts stderr " singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/munge_sumstats.py 
\"\$@\"" + puts stderr " - f2py2:" + puts stderr " singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/f2py2 \"\$@\"" + puts stderr " - f2py2.7:" + puts stderr " singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/f2py2.7 \"\$@\"" + puts stderr " - shiftBed:" + puts stderr " singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/shiftBed \"\$@\"" + puts stderr " - annotateBed:" + puts stderr " singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/annotateBed \"\$@\"" + puts stderr " - bamToBed:" + puts stderr " singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/bamToBed \"\$@\"" + puts stderr " - bamToFastq:" + puts stderr " singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/bamToFastq \"\$@\"" + puts stderr " - bed12ToBed6:" + puts stderr " singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/bed12ToBed6 \"\$@\"" + puts stderr " - bedToBam:" + puts stderr " singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/bedToBam \"\$@\"" + puts stderr " - bedToIgv:" + puts stderr " singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/bedToIgv \"\$@\"" + puts stderr " - bedpeToBam:" + puts stderr " singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/bedpeToBam \"\$@\"" + + puts stderr "" + puts stderr "For each of the above, you can export:" + puts stderr "" + puts stderr " - SINGULARITY_OPTS: to define custom options for singularity (e.g., --debug)" + puts stderr " - SINGULARITY_COMMAND_OPTS: to define custom options for the command (e.g., -b)" + puts stderr " - SINGULARITY_CONTAINER: The Singularity (sif) path" + +} + +set view_dir "[file dirname [file dirname ${ModulesCurrentModulefile}] ]" +set view_name "[file tail ${view_dir}]" +set view_module ".view_module" +set view_modulefile "${view_dir}/${view_module}" + +if {[file exists ${view_modulefile}]} { + source ${view_modulefile} +} + +# Environment - only set if not already defined +if { ![info exists ::env(SINGULARITY_OPTS)] } { + setenv SINGULARITY_OPTS "" +} +if { ![info exists ::env(SINGULARITY_COMMAND_OPTS)] } { + setenv SINGULARITY_COMMAND_OPTS "" +} + +# Variables + +set name quay.io/biocontainers/ldsc:1.0.1--pyhdfd78af_2 +set version 1.0.1--pyhdfd78af_2 +set description "$name - $version" +set containerPath /software/hgi/containers/shpc/quay.io/biocontainers/ldsc/1.0.1--pyhdfd78af_2/quay.io-biocontainers-ldsc-1.0.1--pyhdfd78af_2-sha256:308ddebaa643d50306779ce42752eb4c4a3e1635be74531594013959e312af2c.sif + + +set helpcommand "This module is a singularity container wrapper for quay.io/biocontainers/ldsc:1.0.1--pyhdfd78af_2 v1.0.1--pyhdfd78af_2. " +set busybox "BusyBox v1.32.1 (2021-04-13 11:15:36 UTC) multi-call binary." +set deb-list "gcc-8-base_8.3.0-6_amd64.deb, libc6_2.28-10_amd64.deb, libgcc1_1%3a8.3.0-6_amd64.deb, bash_5.0-4_amd64.deb, libc-bin_2.28-10_amd64.deb, libtinfo6_6.1+20181013-2+deb10u2_amd64.deb, ncurses-base_6.1+20181013-2+deb10u2_all.deb, base-files_10.3+deb10u9_amd64.deb" +set glibc "GNU C Library (Debian GLIBC 2.28-10) stable release version 2.28." 
+set io.buildah.version "1.19.6" +set org.label-schema.build-arch "amd64" +set org.label-schema.build-date "Tuesday_15_August_2023_12:8:5_BST" +set org.label-schema.schema-version "1.0" +set org.label-schema.usage.singularity.deffile.bootstrap "docker" +set org.label-schema.usage.singularity.deffile.from "quay.io/biocontainers/ldsc@sha256:308ddebaa643d50306779ce42752eb4c4a3e1635be74531594013959e312af2c" +set org.label-schema.usage.singularity.version "3.10.0" +set pkg-list "gcc-8-base, libc6, libgcc1, bash, libc-bin, libtinfo6, ncurses-base, base-files" + + +# directory containing this modulefile, once symlinks resolved (dynamically defined) +set moduleDir [file dirname [expr { [string equal [file type ${ModulesCurrentModulefile}] "link"] ? [file readlink ${ModulesCurrentModulefile}] : ${ModulesCurrentModulefile} }]] + +# conflict with modules with the same alias name +conflict ldsc +conflict quay.io/biocontainers/ldsc:1.0.1--pyhdfd78af_2 +conflict ldsc.py +conflict munge_sumstats.py +conflict f2py2 +conflict f2py2.7 +conflict shiftBed +conflict annotateBed +conflict bamToBed +conflict bamToFastq +conflict bed12ToBed6 +conflict bedToBam +conflict bedToIgv +conflict bedpeToBam + + +# singularity environment variable to set shell +setenv SINGULARITY_SHELL /bin/sh + +# service environment variable to access full SIF image path +setenv SINGULARITY_CONTAINER "${containerPath}" + +# interactive shell to any container, plus exec for aliases +set shellCmd "singularity \${SINGULARITY_OPTS} shell \${SINGULARITY_COMMAND_OPTS} -s /bin/sh -B ${moduleDir}/99-shpc.sh:/.singularity.d/env/99-shpc.sh ${containerPath}" +set execCmd "singularity \${SINGULARITY_OPTS} exec \${SINGULARITY_COMMAND_OPTS} -B ${moduleDir}/99-shpc.sh:/.singularity.d/env/99-shpc.sh " +set runCmd "singularity \${SINGULARITY_OPTS} run \${SINGULARITY_COMMAND_OPTS} -B ${moduleDir}/99-shpc.sh:/.singularity.d/env/99-shpc.sh ${containerPath}" +set inspectCmd "singularity \${SINGULARITY_OPTS} inspect \${SINGULARITY_COMMAND_OPTS} " + +# if we have any wrapper scripts, add bin to path +prepend-path PATH "${moduleDir}/bin" + +# "aliases" to module commands +if { [ module-info shell bash ] } { + if { [ module-info mode load ] } { + + + + + + + + + + + + + + } + if { [ module-info mode remove ] } { + + + + + + + + + + + + + + } +} else { + + + + + + + + + + + + + +} + + + +#===== +# Module options +#===== +module-whatis " Name: quay.io/biocontainers/ldsc:1.0.1--pyhdfd78af_2" +module-whatis " Version: 1.0.1--pyhdfd78af_2" + + +module-whatis " busybox: BusyBox v1.32.1 (2021-04-13 11:15:36 UTC) multi-call binary." +module-whatis " deb-list: gcc-8-base_8.3.0-6_amd64.deb, libc6_2.28-10_amd64.deb, libgcc1_1%3a8.3.0-6_amd64.deb, bash_5.0-4_amd64.deb, libc-bin_2.28-10_amd64.deb, libtinfo6_6.1+20181013-2+deb10u2_amd64.deb, ncurses-base_6.1+20181013-2+deb10u2_all.deb, base-files_10.3+deb10u9_amd64.deb" +module-whatis " glibc: GNU C Library (Debian GLIBC 2.28-10) stable release version 2.28." 
+module-whatis " io.buildah.version: 1.19.6" +module-whatis " org.label-schema.build-arch: amd64" +module-whatis " org.label-schema.build-date: Tuesday_15_August_2023_12:8:5_BST" +module-whatis " org.label-schema.schema-version: 1.0" +module-whatis " org.label-schema.usage.singularity.deffile.bootstrap: docker" +module-whatis " org.label-schema.usage.singularity.deffile.from: quay.io/biocontainers/ldsc@sha256:308ddebaa643d50306779ce42752eb4c4a3e1635be74531594013959e312af2c" +module-whatis " org.label-schema.usage.singularity.version: 3.10.0" +module-whatis " pkg-list: gcc-8-base, libc6, libgcc1, bash, libc-bin, libtinfo6, ncurses-base, base-files" + +module load /software/modules/ISG/singularity/3.10.0 \ No newline at end of file diff --git a/tests/files/modules/shpc.readme b/tests/files/modules/shpc.readme new file mode 100644 index 0000000..8ecb4f3 --- /dev/null +++ b/tests/files/modules/shpc.readme @@ -0,0 +1,15 @@ +# Usage + +To use this environment, run: + +``` +module load HGI/common/some_environment +``` + +This will usually add your desired software to your PATH. Check the description +of the environment for more information, which might also be available by +running: + +``` +module help HGI/common/some_environment +``` diff --git a/tests/files/modules/shpc.yml b/tests/files/modules/shpc.yml new file mode 100644 index 0000000..81a872f --- /dev/null +++ b/tests/files/modules/shpc.yml @@ -0,0 +1,54 @@ +description: | + This module is a singularity container wrapper for quay.io/biocontainers/ldsc:1.0.1--pyhdfd78af_2 v1.0.1--pyhdfd78af_2 + + Container (available through variable SINGULARITY_CONTAINER): + + - /software/hgi/containers/shpc/quay.io/biocontainers/ldsc/1.0.1--pyhdfd78af_2/quay.io-biocontainers-ldsc-1.0.1--pyhdfd78af_2-sha256:308ddebaa643d50306779ce42752eb4c4a3e1635be74531594013959e312af2c.sif + + Commands include: + + - ldsc-run: + singularity run -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh "$@" + - ldsc-shell: + singularity shell -s /bin/sh -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh + - ldsc-exec: + singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh "$@" + - ldsc-inspect-runscript: + singularity inspect -r + - ldsc-inspect-deffile: + singularity inspect -d + - ldsc-container: + echo "$SINGULARITY_CONTAINER" + + - ldsc.py: + singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/ldsc.py "$@" + - munge_sumstats.py: + singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/munge_sumstats.py "$@" + - f2py2: + singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/f2py2 "$@" + - f2py2.7: + singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/f2py2.7 "$@" + - shiftBed: + singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/shiftBed "$@" + - annotateBed: + singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/annotateBed "$@" + - bamToBed: + singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/bamToBed "$@" + - bamToFastq: + singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/bamToFastq "$@" + - bed12ToBed6: + singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/bed12ToBed6 "$@" + - bedToBam: + singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/bedToBam "$@" + - bedToIgv: + singularity exec -B /99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/bedToIgv "$@" + - bedpeToBam: + singularity exec -B 
/99-shpc.sh:/.singularity.d/env/99-shpc.sh /usr/local/bin/bedpeToBam "$@" + + For each of the above, you can export: + + - SINGULARITY_OPTS: to define custom options for singularity (e.g., --debug) + - SINGULARITY_COMMAND_OPTS: to define custom options for the command (e.g., -b) + - SINGULARITY_CONTAINER: The Singularity (sif) path +packages: + - quay.io/biocontainers/ldsc@1.0.1--pyhdfd78af_2 diff --git a/tests/files/modules/version_in_name.mod b/tests/files/modules/version_in_name.mod new file mode 100644 index 0000000..854b071 --- /dev/null +++ b/tests/files/modules/version_in_name.mod @@ -0,0 +1,15 @@ +#%Module + +proc ModulesHelp { } { + + puts stderr "Help text line 1" + + puts stderr "" + puts stderr "Help text line 2" + +} + +module-whatis "Name: name_of_container:1.0.2 " +module-whatis "Version:" + +module-whatis "Foo: bar" diff --git a/tests/files/modules/version_in_name.yml b/tests/files/modules/version_in_name.yml new file mode 100644 index 0000000..948a7d6 --- /dev/null +++ b/tests/files/modules/version_in_name.yml @@ -0,0 +1,6 @@ +description: | + Help text line 1 + + Help text line 2 +packages: + - name_of_container@1.0.2 diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py new file mode 100644 index 0000000..4a69d90 --- /dev/null +++ b/tests/integration/__init__.py @@ -0,0 +1 @@ +"""Integration tests for softpack-core.""" diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py new file mode 100644 index 0000000..56aee99 --- /dev/null +++ b/tests/integration/conftest.py @@ -0,0 +1,60 @@ +"""Copyright (c) 2023 Genome Research Ltd. + +This source code is licensed under the MIT license found in the +LICENSE file in the root directory of this source tree. +""" + +import os + +import pytest + +from softpack_core.artifacts import Artifacts, Package, app +from softpack_core.schemas.environment import Environment, EnvironmentInput +from tests.integration.utils import ( + get_user_path_without_environments, + new_test_artifacts, +) + + +@pytest.fixture(scope="package", autouse=True) +def testable_artifacts_setup(): + user = app.settings.artifacts.repo.username.split('@', 1)[0] + + if user is None: + user = os.getlogin() + + if user is None or user == "main": + pytest.skip( + ("Your artifacts repo username must be defined in your config.") + ) + + if app.settings.artifacts.repo.writer is None: + pytest.skip( + ("Your artifacts repo writer must be defined in your config.") + ) + + app.settings.artifacts.repo.branch = user + + +@pytest.fixture() +def httpx_post(mocker): + post_mock = mocker.patch('httpx.post') + return post_mock + + +@pytest.fixture +def testable_env_input(mocker) -> EnvironmentInput: + ad = new_test_artifacts() + artifacts: Artifacts = ad["artifacts"] + user = ad["test_user"] + + mocker.patch.object(Environment, 'artifacts', new=artifacts) + + testable_env_input = EnvironmentInput( + name="test_env_create", + path=str(get_user_path_without_environments(artifacts, user)), + description="description", + packages=[Package(name="pkg_test")], + ) + + yield testable_env_input diff --git a/tests/integration/test_artifacts.py b/tests/integration/test_artifacts.py new file mode 100644 index 0000000..92a8923 --- /dev/null +++ b/tests/integration/test_artifacts.py @@ -0,0 +1,227 @@ +"""Copyright (c) 2023 Genome Research Ltd. + +This source code is licensed under the MIT license found in the +LICENSE file in the root directory of this source tree. 
+""" + +import os +import shutil +from pathlib import Path + +import pygit2 +import pytest + +from softpack_core.artifacts import Artifacts, app +from tests.integration.utils import ( + commit_and_push_test_repo_changes, + delete_environments_folder_from_test_repo, + file_in_remote, + file_in_repo, + get_user_path_without_environments, + new_test_artifacts, +) + +pytestmark = pytest.mark.repo + + +def test_clone() -> None: + ad = new_test_artifacts() + artifacts: Artifacts = ad["artifacts"] + path = artifacts.repo.path + assert path.startswith(ad["temp_dir"].name) + + shutil.rmtree(ad["temp_dir"].name) + assert os.path.isdir(path) is False + + artifacts = Artifacts() + assert os.path.isdir(path) is True + + orig_repo_path = app.settings.artifacts.path + ad_for_changing = new_test_artifacts() + artifacts_for_changing: Artifacts = ad_for_changing["artifacts"] + + oid, file_path = add_test_file_to_repo(artifacts_for_changing) + commit_and_push_test_repo_changes(artifacts_for_changing, oid, "add file") + + app.settings.artifacts.path = orig_repo_path + artifacts = Artifacts() + + assert file_in_repo(artifacts, file_path) + + delete_environments_folder_from_test_repo(artifacts) + + try: + artifacts.iter() + except BaseException as e: + print(e) + assert False + + +def test_commit_and_push() -> None: + ad = new_test_artifacts() + artifacts: Artifacts = ad["artifacts"] + old_commit_oid = ad["initial_commit_oid"] + + new_tree, file_path = add_test_file_to_repo(artifacts) + + new_commit_oid = artifacts.commit_and_push(new_tree, "commit new file") + repo_head = artifacts.repo.head.peel(pygit2.Commit).oid + + assert old_commit_oid != new_commit_oid + assert new_commit_oid == repo_head + + assert file_in_remote(file_path) + + +def add_test_file_to_repo(artifacts: Artifacts) -> tuple[pygit2.Oid, Path]: + new_file_name = "new_file.txt" + oid = artifacts.repo.create_blob(b"") + root = artifacts.repo.head.peel(pygit2.Tree) + tree = root[artifacts.environments_root] + tb = artifacts.repo.TreeBuilder(tree) + tb.insert(new_file_name, oid, pygit2.GIT_FILEMODE_BLOB) + oid = tb.write() + tb = artifacts.repo.TreeBuilder(root) + tb.insert(artifacts.environments_root, oid, pygit2.GIT_FILEMODE_TREE) + return tb.write(), Path(artifacts.environments_root, new_file_name) + + +def test_create_file() -> None: + ad = new_test_artifacts() + artifacts: Artifacts = ad["artifacts"] + user = ad["test_user"] + + new_test_env = "test_create_file_env" + + user_envs_tree = get_user_envs_tree( + artifacts, user, artifacts.repo.head.peel(pygit2.Tree).oid + ) + assert new_test_env not in [obj.name for obj in user_envs_tree] + + folder_path = Path( + get_user_path_without_environments(artifacts, user), new_test_env + ) + basename = "create_file.txt" + + oid = artifacts.create_file( + folder_path, basename, "lorem ipsum", True, False + ) + + user_envs_tree = get_user_envs_tree(artifacts, user, oid) + assert new_test_env in [obj.name for obj in user_envs_tree] + assert basename in [obj.name for obj in user_envs_tree[new_test_env]] + + artifacts.commit_and_push(oid, "create file") + + with pytest.raises(RuntimeError) as exc_info: + artifacts.create_file( + folder_path, basename, "lorem ipsum", False, True + ) + assert exc_info.value.args[0] == 'No changes made to the environment' + + basename2 = "create_file2.txt" + with pytest.raises(RuntimeError) as exc_info: + artifacts.create_file( + folder_path, basename2, "lorem ipsum", True, False + ) + assert exc_info.value.args[0] == 'Too many changes to the repo' + + oid = 
artifacts.create_file( + folder_path, basename2, "lorem ipsum", False, False + ) + + artifacts.commit_and_push(oid, "create file2") + + user_envs_tree = get_user_envs_tree(artifacts, user, oid) + assert basename2 in [obj.name for obj in user_envs_tree[new_test_env]] + + with pytest.raises(FileExistsError) as exc_info: + artifacts.create_file( + folder_path, basename, "lorem ipsum", False, False + ) + assert exc_info.value.args[0] == 'File already exists' + + oid = artifacts.create_file(folder_path, basename, "override", False, True) + + artifacts.commit_and_push(oid, "update created file") + + user_envs_tree = get_user_envs_tree(artifacts, user, oid) + assert basename in [obj.name for obj in user_envs_tree[new_test_env]] + assert user_envs_tree[new_test_env][basename].data.decode() == "override" + + assert file_in_remote( + Path(artifacts.environments_root, folder_path, basename), + Path(artifacts.environments_root, folder_path, basename2), + ) + + +def get_user_envs_tree( + artifacts: Artifacts, user: str, oid: pygit2.Oid +) -> pygit2.Tree: + new_tree = artifacts.repo.get(oid) + return new_tree[artifacts.user_folder(user)] + + +def test_delete_environment() -> None: + ad = new_test_artifacts() + artifacts: Artifacts = ad["artifacts"] + user = ad["test_user"] + env_for_deleting = ad["test_environment"] + + user_envs_tree = get_user_envs_tree( + artifacts, user, artifacts.repo.head.peel(pygit2.Tree).oid + ) + assert env_for_deleting in [obj.name for obj in user_envs_tree] + + oid = artifacts.delete_environment( + env_for_deleting, get_user_path_without_environments(artifacts, user) + ) + + artifacts.commit_and_push(oid, "delete new env") + + user_envs_tree = get_user_envs_tree(artifacts, user, oid) + assert env_for_deleting not in [obj.name for obj in user_envs_tree] + + with pytest.raises(ValueError) as exc_info: + artifacts.delete_environment(user, artifacts.users_folder_name) + assert exc_info.value.args[0] == 'Not a valid environment path' + + with pytest.raises(KeyError) as exc_info: + artifacts.delete_environment(env_for_deleting, "foo/bar") + assert exc_info + + +def test_iter() -> None: + ad = new_test_artifacts() + artifacts: Artifacts = ad["artifacts"] + user = ad["test_user"] + + user_found = False + num_user_envs = 0 + num_group_envs = 0 + + envs = artifacts.iter() + + for env in envs: + if str(env.path).startswith(artifacts.users_folder_name): + num_user_envs += 1 + if str(env.path).startswith( + f"{artifacts.users_folder_name}/{user}" + ): + user_found = True + elif str(env.path).startswith(artifacts.groups_folder_name): + num_group_envs += 1 + + assert user_found is True + assert num_user_envs == 1 + assert num_group_envs == 1 + + envs = artifacts.iter() + pkgs = list(envs)[0].spec().packages + assert len(pkgs) == 3 + assert pkgs[0].name == "pck1" + assert pkgs[0].version == "1" + assert pkgs[1].name == "pck2" + assert pkgs[1].version == "v2.0.1" + assert pkgs[2].name == "pck3" + assert pkgs[2].version is None diff --git a/tests/integration/test_builderupload.py b/tests/integration/test_builderupload.py new file mode 100644 index 0000000..b250177 --- /dev/null +++ b/tests/integration/test_builderupload.py @@ -0,0 +1,58 @@ +"""Copyright (c) 2023 Genome Research Ltd. + +This source code is licensed under the MIT license found in the +LICENSE file in the root directory of this source tree. 
+""" + +from pathlib import Path + +import pytest +from fastapi.testclient import TestClient + +from softpack_core.app import app +from softpack_core.schemas.environment import Environment +from softpack_core.service import ServiceAPI +from tests.integration.utils import file_in_repo + +pytestmark = pytest.mark.repo + + +def test_builder_upload(testable_env_input): + ServiceAPI.register() + client = TestClient(app.router) + + env_parent = "groups/hgi" + env_name = "unknown-env" + env_path = env_parent + "/" + env_name + + softpackYaml = "softpack.yaml" + softpackYamlContents = b"softpack yaml file" + + spackLock = "spack.lock" + spackLockContents = b"spack lock file" + + assert Environment.check_env_exists(Path(env_path)) is not None + resp = client.post( + url="/upload?" + env_path, + files=[ + ("file", (softpackYaml, softpackYamlContents)), + ("file", (spackLock, spackLockContents)), + ], + ) + assert resp.status_code == 200 + assert resp.json().get("message") == "Successfully written artifact(s)" + assert Environment.check_env_exists(Path(env_path)) is None + assert file_in_repo( + Environment.artifacts, + Path(Environment.artifacts.environments_root, env_path, softpackYaml), + ) + assert file_in_repo( + Environment.artifacts, + Path(Environment.artifacts.environments_root, env_path, spackLock), + ) + + tree = Environment.artifacts.get(env_parent, env_name) + assert tree is not None + + assert tree[softpackYaml].data == softpackYamlContents + assert tree[spackLock].data == spackLockContents diff --git a/tests/integration/test_environment.py b/tests/integration/test_environment.py new file mode 100644 index 0000000..11678e6 --- /dev/null +++ b/tests/integration/test_environment.py @@ -0,0 +1,352 @@ +"""Copyright (c) 2023 Genome Research Ltd. + +This source code is licensed under the MIT license found in the +LICENSE file in the root directory of this source tree. 
+""" + +import io +from pathlib import Path +from typing import Optional + +import pygit2 +import pytest +from fastapi import UploadFile + +from softpack_core.artifacts import Artifacts +from softpack_core.schemas.environment import ( + CreateEnvironmentSuccess, + DeleteEnvironmentSuccess, + Environment, + EnvironmentAlreadyExistsError, + EnvironmentInput, + EnvironmentNotFoundError, + InvalidInputError, + Package, + State, + UpdateEnvironmentSuccess, + WriteArtifactSuccess, +) +from tests.integration.utils import file_in_remote + +pytestmark = pytest.mark.repo + + +def test_create(httpx_post, testable_env_input: EnvironmentInput) -> None: + result = Environment.create(testable_env_input) + assert isinstance(result, CreateEnvironmentSuccess) + httpx_post.assert_called_once() + builder_called_correctly(httpx_post, testable_env_input) + + result = Environment.create( + EnvironmentInput( + name="test_env_create2", + path="groups/not_already_in_repo", + description="description2", + packages=[Package(name="pkg_test2")], + ) + ) + assert isinstance(result, CreateEnvironmentSuccess) + httpx_post.assert_called() + + path = Path( + Environment.artifacts.environments_root, + testable_env_input.path, + testable_env_input.name, + Environment.artifacts.built_by_softpack_file, + ) + assert file_in_remote(path) + + result = Environment.create(testable_env_input) + assert isinstance(result, EnvironmentAlreadyExistsError) + + orig_name = testable_env_input.name + testable_env_input.name = "" + result = Environment.create(testable_env_input) + assert isinstance(result, InvalidInputError) + + testable_env_input.name = orig_name + testable_env_input.path = "invalid/path" + result = Environment.create(testable_env_input) + assert isinstance(result, InvalidInputError) + + +def builder_called_correctly( + post_mock, testable_env_input: EnvironmentInput +) -> None: + # TODO: don't mock this; actually have a real builder service to test with? + # Also need to not hard-code the url here. 
+ post_mock.assert_called_with( + "http://0.0.0.0:7080/environments/build", + json={ + "name": f"{testable_env_input.path}/{testable_env_input.name}", + "model": { + "description": testable_env_input.description, + "packages": [ + f"{pkg.name}" for pkg in testable_env_input.packages + ], + }, + }, + ) + + +def test_update(httpx_post, testable_env_input) -> None: + result = Environment.create(testable_env_input) + assert isinstance(result, CreateEnvironmentSuccess) + httpx_post.assert_called_once() + + testable_env_input.description = "updated description" + result = Environment.update( + testable_env_input, + testable_env_input.path, + testable_env_input.name, + ) + assert isinstance(result, UpdateEnvironmentSuccess) + + builder_called_correctly(httpx_post, testable_env_input) + + result = Environment.update( + testable_env_input, "invalid/path", "invalid_name" + ) + assert isinstance(result, InvalidInputError) + + testable_env_input.name = "" + result = Environment.update( + testable_env_input, + testable_env_input.path, + testable_env_input.name, + ) + assert isinstance(result, InvalidInputError) + + testable_env_input.name = "invalid_name" + testable_env_input.path = "invalid/path" + result = Environment.update( + testable_env_input, "invalid/path", "invalid_name" + ) + assert isinstance(result, EnvironmentNotFoundError) + + +def test_delete(httpx_post, testable_env_input) -> None: + result = Environment.delete( + testable_env_input.name, testable_env_input.path + ) + assert isinstance(result, EnvironmentNotFoundError) + + result = Environment.create(testable_env_input) + assert isinstance(result, CreateEnvironmentSuccess) + httpx_post.assert_called_once() + + path = Path( + Environment.artifacts.environments_root, + testable_env_input.path, + testable_env_input.name, + Artifacts.built_by_softpack_file, + ) + assert file_in_remote(path) + + result = Environment.delete( + testable_env_input.name, testable_env_input.path + ) + assert isinstance(result, DeleteEnvironmentSuccess) + + assert not file_in_remote(path) + + +@pytest.mark.asyncio +async def test_write_artifact(httpx_post, testable_env_input): + upload = UploadFile(filename="example.txt", file=io.BytesIO(b"mock data")) + + result = await Environment.write_artifact( + file=upload, + folder_path=f"{testable_env_input.path}/{testable_env_input.name}", + file_name=upload.filename, + ) + assert isinstance(result, InvalidInputError) + + result = Environment.create(testable_env_input) + assert isinstance(result, CreateEnvironmentSuccess) + httpx_post.assert_called_once() + + result = await Environment.write_artifact( + file=upload, + folder_path=f"{testable_env_input.path}/{testable_env_input.name}", + file_name=upload.filename, + ) + assert isinstance(result, WriteArtifactSuccess) + + path = Path( + Environment.artifacts.environments_root, + testable_env_input.path, + testable_env_input.name, + upload.filename, + ) + assert file_in_remote(path) + + result = await Environment.write_artifact( + file=upload, + folder_path="invalid/env/path", + file_name=upload.filename, + ) + assert isinstance(result, InvalidInputError) + + +def test_iter(testable_env_input): + envs = Environment.iter() + assert len(list(envs)) == 2 + + +@pytest.mark.asyncio +async def test_states(httpx_post, testable_env_input): + result = Environment.create(testable_env_input) + assert isinstance(result, CreateEnvironmentSuccess) + httpx_post.assert_called_once() + + upload = UploadFile( + filename=Artifacts.environments_file, + file=io.BytesIO( + b"description: test 
env\n" b"packages:\n - zlib@v1.1\n" + ), + ) + + result = await Environment.write_artifact( + file=upload, + folder_path=f"{testable_env_input.path}/{testable_env_input.name}", + file_name=upload.filename, + ) + assert isinstance(result, WriteArtifactSuccess) + + env = get_env_from_iter(testable_env_input.name) + assert env is not None + assert any(p.name == "zlib" for p in env.packages) + assert any(p.version == "v1.1" for p in env.packages) + assert env.type == Artifacts.built_by_softpack + assert env.state == State.queued + + upload = UploadFile( + filename=Artifacts.module_file, file=io.BytesIO(b"#%Module") + ) + + result = await Environment.write_artifact( + file=upload, + folder_path=f"{testable_env_input.path}/{testable_env_input.name}", + file_name=upload.filename, + ) + assert isinstance(result, WriteArtifactSuccess) + + env = get_env_from_iter(testable_env_input.name) + assert env is not None + assert env.type == Artifacts.built_by_softpack + assert env.state == State.ready + + +def get_env_from_iter(name: str) -> Optional[Environment]: + envs = Environment.iter() + return next((env for env in envs if env.name == name), None) + + +@pytest.mark.asyncio +async def test_create_from_module(httpx_post, testable_env_input): + test_files_dir = Path(__file__).parent.parent / "files" / "modules" + test_file_path = test_files_dir / "shpc.mod" + + with open(test_file_path, "rb") as fh: + data = fh.read() + + upload = UploadFile(filename="shpc.mod", file=io.BytesIO(data)) + + env_name = "some-environment" + name = "groups/hgi/" + env_name + module_path = "HGI/common/some_environment" + + result = await Environment.create_from_module( + file=upload, + module_path=module_path, + environment_path=name, + ) + + assert isinstance(result, CreateEnvironmentSuccess) + + upload = UploadFile(filename="shpc.mod", file=io.BytesIO(data)) + + result = await Environment.create_from_module( + file=upload, + module_path=module_path, + environment_path=name, + ) + + assert isinstance(result, EnvironmentAlreadyExistsError) + + parent_path = Path( + Environment.artifacts.group_folder(), + "hgi", + env_name, + ) + + readme_path = Path(parent_path, Environment.artifacts.readme_file) + assert file_in_remote( + Path(parent_path, Environment.artifacts.environments_file), + Path(parent_path, Environment.artifacts.module_file), + readme_path, + Path(parent_path, Environment.artifacts.generated_from_module_file), + ) + + with open(test_files_dir / "shpc.readme", "rb") as fh: + expected_readme_data = fh.read() + + tree = Environment.artifacts.repo.head.peel(pygit2.Tree) + obj = tree[str(readme_path)] + assert obj is not None + assert obj.data == expected_readme_data + + envs = list(Environment.iter()) + assert len(envs) == 3 + + env = get_env_from_iter(env_name) + assert env is not None + + package_name = "quay.io/biocontainers/ldsc" + package_version = "1.0.1--pyhdfd78af_2" + + assert len(env.packages) == 1 + assert env.packages[0].name == package_name + assert env.packages[0].version == package_version + assert "module load " + module_path in env.readme + assert env.type == Artifacts.generated_from_module + assert env.state == State.ready + + test_modifiy_file_path = test_files_dir / "all_fields.mod" + + with open(test_modifiy_file_path, "rb") as fh: + data = fh.read() + + upload = UploadFile(filename="all_fields.mod", file=io.BytesIO(data)) + + module_path = "HGI/common/all_fields" + + result = await Environment.update_from_module( + file=upload, + module_path=module_path, + environment_path=name, + ) + + assert 
isinstance(result, UpdateEnvironmentSuccess) + env = get_env_from_iter(env_name) + assert env is not None + + package_name = "name_of_container" + package_version = "1.0.1" + + assert len(env.packages) == 5 + assert env.packages[0].name == package_name + assert env.packages[0].version == package_version + assert "module load " + module_path in env.readme + assert env.type == Artifacts.generated_from_module + assert env.state == State.ready + + upload = UploadFile(filename="all_fields.mod", file=io.BytesIO(data)) + + result = await Environment.update_from_module( + file=upload, + module_path=module_path, + environment_path="users/non/existant", + ) + assert isinstance(result, EnvironmentNotFoundError) diff --git a/tests/integration/utils.py b/tests/integration/utils.py new file mode 100644 index 0000000..6948588 --- /dev/null +++ b/tests/integration/utils.py @@ -0,0 +1,195 @@ +"""Copyright (c) 2023 Genome Research Ltd. + +This source code is licensed under the MIT license found in the +LICENSE file in the root directory of this source tree. +""" + +import tempfile +from pathlib import Path +from typing import Union + +import pygit2 +import pytest + +from softpack_core.artifacts import Artifacts, app + +artifacts_dict = dict[ + str, + Union[str, pygit2.Oid, Path, Artifacts, tempfile.TemporaryDirectory[str]], +] + + +def new_test_artifacts() -> artifacts_dict: + temp_dir = tempfile.TemporaryDirectory() + app.settings.artifacts.path = Path(temp_dir.name) + + artifacts = Artifacts() + + branch_name = app.settings.artifacts.repo.branch + branch = artifacts.repo.branches.get(branch_name) + + if branch is None or branch_name == "main": + pytest.skip( + ( + "Your artifacts repo must have a branch named after your " + "username." + ) + ) + + dict = reset_test_repo(artifacts) + dict["temp_dir"] = temp_dir + dict["artifacts"] = artifacts + + return dict + + +def reset_test_repo(artifacts: Artifacts) -> artifacts_dict: + delete_environments_folder_from_test_repo(artifacts) + + return create_initial_test_repo_state(artifacts) + + +def delete_environments_folder_from_test_repo(artifacts: Artifacts): + tree = artifacts.repo.head.peel(pygit2.Tree) + if artifacts.environments_root in tree: + treeBuilder = artifacts.repo.TreeBuilder(tree) + treeBuilder.remove(artifacts.environments_root) + oid = treeBuilder.write() + commit_and_push_test_repo_changes( + artifacts, oid, "delete environments" + ) + + +def commit_and_push_test_repo_changes( + artifacts: Artifacts, oid: pygit2.Oid, msg: str +) -> pygit2.Oid: + ref = artifacts.repo.head.name + oid = artifacts.repo.create_commit( + ref, + artifacts.signature, + artifacts.signature, + msg, + oid, + [artifacts.repo.lookup_reference(ref).target], + ) + remote = artifacts.repo.remotes[0] + remote.push( + [artifacts.repo.head.name], callbacks=artifacts.credentials_callback + ) + return oid + + +def create_initial_test_repo_state(artifacts: Artifacts) -> artifacts_dict: + dir_path = app.settings.artifacts.path + test_user = "test_user" + test_group = "test_group" + test_env = "test_environment" + user_env_path = Path( + dir_path, + "environments", + artifacts.users_folder_name, + test_user, + test_env, + ) + group_env_path = Path( + dir_path, + "environments", + artifacts.groups_folder_name, + test_group, + test_env, + ) + file_basename = Artifacts.environments_file + + softpack_yml_data = ( + b"description: \"desc\"\npackages:\n" + b" - pck1@1\n - pck2@v2.0.1\n - pck3" + ) + + oid = artifacts.repo.create_blob(softpack_yml_data) + + userTestEnv = 
artifacts.repo.TreeBuilder() + userTestEnv.insert(file_basename, oid, pygit2.GIT_FILEMODE_BLOB) + + testUser = artifacts.repo.TreeBuilder() + testUser.insert(test_env, userTestEnv.write(), pygit2.GIT_FILEMODE_TREE) + + usersFolder = artifacts.repo.TreeBuilder() + usersFolder.insert(test_user, testUser.write(), pygit2.GIT_FILEMODE_TREE) + + oid = artifacts.repo.create_blob(softpack_yml_data) + + userGroupEnv = artifacts.repo.TreeBuilder() + userGroupEnv.insert(file_basename, oid, pygit2.GIT_FILEMODE_BLOB) + + testGroup = artifacts.repo.TreeBuilder() + testGroup.insert(test_env, userGroupEnv.write(), pygit2.GIT_FILEMODE_TREE) + + groupsFolder = artifacts.repo.TreeBuilder() + groupsFolder.insert( + test_group, testGroup.write(), pygit2.GIT_FILEMODE_TREE + ) + + environments = artifacts.repo.TreeBuilder() + environments.insert( + artifacts.users_folder_name, + usersFolder.write(), + pygit2.GIT_FILEMODE_TREE, + ) + environments.insert( + artifacts.groups_folder_name, + groupsFolder.write(), + pygit2.GIT_FILEMODE_TREE, + ) + + tree = artifacts.repo.head.peel(pygit2.Tree) + treeBuilder = artifacts.repo.TreeBuilder(tree) + treeBuilder.insert( + artifacts.environments_root, + environments.write(), + pygit2.GIT_FILEMODE_TREE, + ) + + oid = commit_and_push_test_repo_changes( + artifacts, treeBuilder.write(), "Add test environments" + ) + + dict: artifacts_dict = { + "initial_commit_oid": oid, + "test_user": test_user, + "test_group": test_group, + "test_environment": test_env, + "user_env_path": user_env_path, + "group_env_path": group_env_path, + "basename": file_basename, + } + return dict + + +def get_user_path_without_environments( + artifacts: Artifacts, user: str +) -> Path: + return Path(*(artifacts.user_folder(user).parts[1:])) + + +def file_in_remote(*paths_with_environment: Union[str, Path]) -> bool: + temp_dir = tempfile.TemporaryDirectory() + app.settings.artifacts.path = Path(temp_dir.name) + artifacts = Artifacts() + + for path_with_environment in paths_with_environment: + path = Path(path_with_environment) + + if not file_in_repo(artifacts, path): + return False + + return True + + +def file_in_repo(artifacts: Artifacts, path: Path) -> bool: + current = artifacts.repo.head.peel(pygit2.Tree) + for part in path.parts: + if part not in current: + return False + current = current[part] + + return True diff --git a/tests/test_module.py b/tests/test_module.py new file mode 100644 index 0000000..4f2006a --- /dev/null +++ b/tests/test_module.py @@ -0,0 +1,43 @@ +"""Copyright (c) 2023 Genome Research Ltd. + +This source code is licensed under the MIT license found in the +LICENSE file in the root directory of this source tree. 
+""" + +from pathlib import Path + +from softpack_core.module import GenerateEnvReadme, ToSoftpackYML + + +def pytest_generate_tests(metafunc): + if "module_input" not in metafunc.fixturenames: + return + + metafunc.parametrize( + "module_input", + list((Path(__file__).parent / "files" / "modules").glob("*.mod")), + ) + + +def test_tosoftpack(module_input: Path) -> None: + output = str(module_input).removesuffix(".mod") + ".yml" + + with open(module_input, "rb") as fh: + module_data = fh.read() + + yml = ToSoftpackYML(module_input.name.removesuffix(".mod"), module_data) + + with open(output, "rb") as fh: + expected_yml = fh.read() + assert yml == expected_yml + + +def test_generate_env_readme() -> None: + test_files_dir = Path(__file__).parent / "files" / "modules" + + readme_data = GenerateEnvReadme("HGI/common/some_environment") + + with open(test_files_dir / "shpc.readme", "rb") as fh: + expected_readme_data = fh.read() + + assert readme_data == expected_readme_data diff --git a/tests/unit/__init__.py b/tests/unit/__init__.py new file mode 100644 index 0000000..1521582 --- /dev/null +++ b/tests/unit/__init__.py @@ -0,0 +1 @@ +"""Unit tests for softpack-core.""" diff --git a/tests/unit/conftest.py b/tests/unit/conftest.py new file mode 100644 index 0000000..c9b0e64 --- /dev/null +++ b/tests/unit/conftest.py @@ -0,0 +1,69 @@ +"""Copyright (c) 2023 Genome Research Ltd. + +This source code is licensed under the MIT license found in the +LICENSE file in the root directory of this source tree. +""" + + +import time +from multiprocessing import Process +from threading import Thread + +import httpx +import pytest +import requests +from fastapi.testclient import TestClient +from typer.testing import CliRunner + +from softpack_core.app import app + + +@pytest.fixture +def client() -> TestClient: + return TestClient(app.router) + + +class CLI: + def __init__(self): + self.runner = CliRunner() + + def invoke(self, *args, **kwargs): + return self.runner.invoke(app.commands, *args, **kwargs) + + +@pytest.fixture +def cli() -> CLI: + return CLI() + + +def service_run(): + cli = CLI() + cli.invoke(["service", "run"]) + + +@pytest.fixture +def service_factory(): + def create_service(module): + service = module(target=service_run, daemon=True) + service.start() + while True: + try: + response = requests.get(app.url()) + if response.status_code == httpx.codes.OK: + break + except requests.ConnectionError: + time.sleep(0.1) + continue + return service + + return create_service + + +@pytest.fixture +def service(service_factory): + return service_factory(Process) + + +@pytest.fixture +def service_thread(service_factory): + return service_factory(Thread) diff --git a/tests/test_app.py b/tests/unit/test_app.py similarity index 100% rename from tests/test_app.py rename to tests/unit/test_app.py diff --git a/tests/test_graphql.py b/tests/unit/test_graphql.py similarity index 100% rename from tests/test_graphql.py rename to tests/unit/test_graphql.py diff --git a/tests/test_main.py b/tests/unit/test_main.py similarity index 100% rename from tests/test_main.py rename to tests/unit/test_main.py diff --git a/tests/test_service.py b/tests/unit/test_service.py similarity index 100% rename from tests/test_service.py rename to tests/unit/test_service.py diff --git a/tox.ini b/tox.ini index 3ccaa24..14d2cf0 100644 --- a/tox.ini +++ b/tox.ini @@ -1,12 +1,10 @@ [tox] isolated_build = true -envlist = py39, py310, py311, format, lint, build +envlist = py311, format, lint, build [gh-actions] python = 3.11: py311, format, 
lint, build - 3.10: py310 - 3.9: py39 [flake8] max-line-length = 79 @@ -63,7 +61,7 @@ commands = --cov-branch \ --cov-report=xml \ --cov-report=term-missing \ - tests + tests {posargs} [testenv:format] skip_install = true