diff --git a/.gitignore b/.gitignore index 673918cb5..f23337518 100644 --- a/.gitignore +++ b/.gitignore @@ -70,4 +70,7 @@ usr/bin/aptly dpkgs/ debian/changelog.dpkg-bak -docs/ +docs/docs.go +docs/swagger.json +docs/swagger.yaml +docs/swagger.conf diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index d4ab7e558..780dbf5db 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -110,15 +110,13 @@ make docker-build To run aptly commands in the development docker container, run: ``` -make docker-aptly +make docker-shell ``` Example: ``` -$ make docker-aptly -bash: cannot set terminal process group (16): Inappropriate ioctl for device -bash: no job control in this shell -aptly@b43e8473ef81:/app$ aptly version +$ make docker-shell +aptly@b43e8473ef81:/work/src$ aptly version aptly version: 1.5.0+189+g0fc90dff ``` @@ -152,41 +150,27 @@ Run `make help` for more information. This section describes local setup to start contributing to aptly. -#### Go & Python - -You would need `Go` (latest version is recommended) and `Python` 3.9 (or newer, the CI currently tests against 3.11). - -If you're new to Go, follow [getting started guide](https://golang.org/doc/install) to install it and perform -initial setup. With Go 1.8+, default `$GOPATH` is `$HOME/go`, so rest of this document assumes that. - -Usually `$GOPATH/bin` is appended to your `$PATH` to make it easier to run built binaries, but you might choose -to prepend it or to skip this test if you're security conscious. - #### Dependencies -You would need some additional tools and Python virtual environment to run tests and checks, install them with: +Building aptly requires go version 1.22. - make prepare dev system/env +On Debian bookworm with backports enabled, go can be installed with: -This is usually one-time action. - -Aptly is using Go modules to manage dependencies, download modules using: - - make modules + apt install -t bookworm-backports golang-go #### Building -If you want to build aptly binary from your current source tree, run: +To build aptly, run: - make install + make build -This would build `aptly` in `$GOPATH/bin`, so depending on your `$PATH`, you should be able to run it immediately with: +Run aptly: - aptly + build/aptly -Or, if it's not on your path: +To install aptly into `$GOPATH/bin`, run: - ~/go/bin/aptly + make install #### Unit-tests @@ -244,26 +228,6 @@ There are some packages available under `system/files/` directory which are used this default location. You can run aptly under different user or by using non-default config location with non-default aptly root directory. -#### Style Checks - -Style checks could be run with: - - make check - -aptly is using [golangci-lint](https://github.com/golangci/golangci-lint) to run style checks on Go code. Configuration -for the linter could be found in [.golangci.yml](.golangci.yml) file. - -Python code (system tests) are linted with [flake8 tool](https://pypi.python.org/pypi/flake8). - -#### Vendored Code - -aptly is using Go vendoring for all the libraries aptly depends upon. `vendor/` directory is checked into the source -repository to avoid any problems if source repositories go away. Go build process will automatically prefer vendored -packages over packages in `$GOPATH`. - -If you want to update vendored dependencies or to introduce new dependency, use [dep tool](https://github.com/golang/dep). -Usually all you need is `dep ensure` or `dep ensure -update`. 
- ### man Page aptly is using combination of [Go templates](http://godoc.org/text/template) and automatically generated text to build `aptly.1` man page. If either source diff --git a/Makefile b/Makefile index 632d7066c..3661948bb 100644 --- a/Makefile +++ b/Makefile @@ -44,7 +44,10 @@ version: ## Print aptly version swagger-install: # Install swag - @test -f $(BINPATH)/swag || GOOS=linux GOARCH=amd64 go install github.com/swaggo/swag/cmd/swag@latest + @test -f $(BINPATH)/swag || GOOS= GOARCH= go install github.com/swaggo/swag/cmd/swag@latest + # Generate swagger.conf + cp docs/swagger.conf.tpl docs/swagger.conf + echo "// @version $(VERSION)" >> docs/swagger.conf azurite-start: azurite & \ @@ -55,16 +58,16 @@ azurite-stop: swagger: swagger-install # Generate swagger docs - @PATH=$(BINPATH)/:$(PATH) swag init --markdownFiles docs + @PATH=$(BINPATH)/:$(PATH) swag init --parseDependency --parseInternal --markdownFiles docs --generalInfo docs/swagger.conf etcd-install: # Install etcd - test -d /srv/etcd || system/t13_etcd/install-etcd.sh + test -d /tmp/aptly-etcd || system/t13_etcd/install-etcd.sh flake8: ## run flake8 on system test python files flake8 system/ -lint: +lint: prepare # Install golangci-lint @test -f $(BINPATH)/golangci-lint || go install github.com/golangci/golangci-lint/cmd/golangci-lint@$(GOLANGCI_LINT_VERSION) # Running lint @@ -76,17 +79,19 @@ build: prepare swagger ## Build aptly install: @echo "\e[33m\e[1mBuilding aptly ...\e[0m" - go generate + # go generate + @go generate + # go install -v @out=`mktemp`; if ! go install -v > $$out 2>&1; then cat $$out; rm -f $$out; echo "\nBuild failed\n"; exit 1; else rm -f $$out; fi test: prepare swagger etcd-install ## Run unit tests @echo "\e[33m\e[1mStarting etcd ...\e[0m" - @mkdir -p /tmp/etcd-data; system/t13_etcd/start-etcd.sh > /tmp/etcd-data/etcd.log 2>&1 & + @mkdir -p /tmp/aptly-etcd-data; system/t13_etcd/start-etcd.sh > /tmp/aptly-etcd-data/etcd.log 2>&1 & @echo "\e[33m\e[1mRunning go test ...\e[0m" go test -v ./... -gocheck.v=true -coverprofile=unit.out; echo $$? 
> .unit-test.ret @echo "\e[33m\e[1mStopping etcd ...\e[0m" @pid=`cat /tmp/etcd.pid`; kill $$pid - @rm -f /tmp/etcd-data/etcd.log + @rm -f /tmp/aptly-etcd-data/etcd.log @ret=`cat .unit-test.ret`; if [ "$$ret" = "0" ]; then echo "\n\e[32m\e[1mUnit Tests SUCCESSFUL\e[0m"; else echo "\n\e[31m\e[1mUnit Tests FAILED\e[0m"; fi; rm -f .unit-test.ret; exit $$ret system-test: prepare swagger etcd-install ## Run system tests @@ -107,7 +112,7 @@ serve: prepare swagger-install ## Run development server (auto recompiling) test -f $(BINPATH)/air || go install github.com/air-verse/air@v1.52.3 cp debian/aptly.conf ~/.aptly.conf sed -i /enableSwaggerEndpoint/s/false/true/ ~/.aptly.conf - PATH=$(BINPATH):$$PATH air -build.pre_cmd 'swag init -q --markdownFiles docs' -build.exclude_dir docs,system,debian,pgp/keyrings,pgp/test-bins,completion.d,man,deb/testdata,console,_man,systemd,obj-x86_64-linux-gnu -- api serve -listen 0.0.0.0:3142 + PATH=$(BINPATH):$$PATH air -build.pre_cmd 'swag init -q --markdownFiles docs --generalInfo docs/swagger.conf' -build.exclude_dir docs,system,debian,pgp/keyrings,pgp/test-bins,completion.d,man,deb/testdata,console,_man,systemd,obj-x86_64-linux-gnu -- api serve -listen 0.0.0.0:3142 dpkg: prepare swagger ## Build debian packages @test -n "$(DEBARCH)" || (echo "please define DEBARCH"; exit 1) @@ -205,7 +210,8 @@ man: ## Create man pages clean: ## remove local build and module cache # Clean all generated and build files - test -d .go/ && chmod u+w -R .go/ && rm -rf .go/ || true + find .go/ -type d ! -perm -u=w -exec chmod u+w {} \; + rm -rf .go/ rm -rf build/ obj-*-linux-gnu* tmp/ rm -f unit.out aptly.test VERSION docs/docs.go docs/swagger.json docs/swagger.yaml docs/swagger.conf find system/ -type d -name __pycache__ -exec rm -rf {} \; 2>/dev/null || true diff --git a/api/api.go b/api/api.go index a52406d80..adeef9d92 100644 --- a/api/api.go +++ b/api/api.go @@ -23,12 +23,43 @@ import ( // 3. SnapshotCollection // 4. 
PublishedRepoCollection -// GET /api/version +type aptlyVersion struct { + // Aptly Version + Version string `json:"Version"` +} + +// @Summary Aptly version +// @Description **Get aptly version** +// @Description +// @Description **Example:** +// @Description ``` +// @Description $ curl http://localhost:8080/api/version +// @Description {"Version":"0.9~dev"} +// @Description ``` +// @Tags Status +// @Produce json +// @Success 200 {object} aptlyVersion +// @Router /api/version [get] func apiVersion(c *gin.Context) { c.JSON(200, gin.H{"Version": aptly.Version}) } -// GET /api/ready +type aptlyStatus struct { + // Aptly Status + Status string `json:"Status" example:"'Aptly is ready', 'Aptly is unavailable', 'Aptly is healthy'"` +} + +// @Summary Ready State +// @Description **Get aptly ready state** +// @Description +// @Description Return aptly ready state: +// @Description - `Aptly is ready` (HTTP 200) +// @Description - `Aptly is unavailable` (HTTP 503) +// @Tags Status +// @Produce json +// @Success 200 {object} aptlyStatus "Aptly is ready" +// @Failure 503 {object} aptlyStatus "Aptly is unavailable" +// @Router /api/ready [get] func apiReady(isReady *atomic.Value) func(*gin.Context) { return func(c *gin.Context) { if isReady == nil || !isReady.Load().(bool) { @@ -40,7 +71,15 @@ func apiReady(isReady *atomic.Value) func(*gin.Context) { } } -// GET /api/healthy +// @Summary Health State +// @Description **Get aptly health state** +// @Description +// @Description Return aptly health state: +// @Description - `Aptly is healthy` (HTTP 200) +// @Tags Status +// @Produce json +// @Success 200 {object} aptlyStatus +// @Router /api/healthy [get] func apiHealthy(c *gin.Context) { c.JSON(200, gin.H{"Status": "Aptly is healthy"}) } diff --git a/api/db.go b/api/db.go index 3f8b826dd..259a94aa4 100644 --- a/api/db.go +++ b/api/db.go @@ -11,9 +11,17 @@ import ( "github.com/gin-gonic/gin" ) -// POST /api/db/cleanup +// @Summary DB Cleanup +// @Description **Cleanup Aptly DB** +// @Description Database cleanup removes information about unreferenced packages and deletes files in the package pool that aren’t used by packages anymore. +// @Description It is a good idea to run this command after massive deletion of mirrors, snapshots or local repos. +// @Tags Database +// @Produce json +// @Param _async query bool false "Run in background and return task object" +// @Success 200 {object} string "Output" +// @Failure 404 {object} Error "Not Found" +// @Router /api/db/cleanup [post] func apiDbCleanup(c *gin.Context) { - resources := []string{string(task.AllResourcesKey)} maybeRunTaskInBackground(c, "Clean up db", resources, func(out aptly.Progress, detail *task.Detail) (*task.ProcessReturnValue, error) { var err error diff --git a/api/files.go b/api/files.go index 7ae682f85..2d042a5d7 100644 --- a/api/files.go +++ b/api/files.go @@ -34,10 +34,16 @@ func verifyDir(c *gin.Context) bool { return true } -// @Summary Get files -// @Description Get list of uploaded files. 
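For reference, the version endpoint documented above returns the `aptlyVersion` shape shown in the curl example (`{"Version":"0.9~dev"}`). A minimal Go sketch of a client consuming it — not part of the patch, and it assumes an aptly API server listening on localhost:8080 as in the examples:

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

// mirrors the documented aptlyVersion response: {"Version":"0.9~dev"}
type aptlyVersion struct {
	Version string `json:"Version"`
}

func main() {
	resp, err := http.Get("http://localhost:8080/api/version")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var v aptlyVersion
	if err := json.NewDecoder(resp.Body).Decode(&v); err != nil {
		panic(err)
	}
	fmt.Println("aptly version:", v.Version)
}
```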
+// @Summary List Directories
+// @Description **Get list of upload directories**
+// @Description
+// @Description **Example:**
+// @Description ```
+// @Description $ curl http://localhost:8080/api/files
+// @Description ["aptly-0.9"]
+// @Description ```
 // @Tags Files
-// @Produce json
+// @Produce json
 // @Success 200 {array} string "List of files"
 // @Router /api/files [get]
 func apiFilesListDirs(c *gin.Context) {
@@ -67,7 +73,27 @@ func apiFilesListDirs(c *gin.Context) {
 	c.JSON(200, list)
 }
 
-// POST /files/:dir/
+// @Summary Upload Files
+// @Description **Upload files to a directory**
+// @Description
+// @Description - one or more files can be uploaded
+// @Description - existing uploaded files are overwritten
+// @Description
+// @Description **Example:**
+// @Description ```
+// @Description $ curl -X POST -F file=@aptly_0.9~dev+217+ge5d646c_i386.deb http://localhost:8080/api/files/aptly-0.9
+// @Description ["aptly-0.9/aptly_0.9~dev+217+ge5d646c_i386.deb"]
+// @Description ```
+// @Tags Files
+// @Accept multipart/form-data
+// @Param dir path string true "Directory to upload files to. Created if does not exist"
+// @Param files formData file true "Files to upload"
+// @Produce json
+// @Success 200 {array} string "list of uploaded files"
+// @Failure 400 {object} Error "Bad Request"
+// @Failure 404 {object} Error "Not Found"
+// @Failure 500 {object} Error "Internal Server Error"
+// @Router /api/files/{dir} [post]
 func apiFilesUpload(c *gin.Context) {
 	if !verifyDir(c) {
 		return
@@ -118,10 +144,23 @@ func apiFilesUpload(c *gin.Context) {
 	apiFilesUploadedCounter.WithLabelValues(c.Params.ByName("dir")).Inc()
 
 	c.JSON(200, stored)
-
 }
 
-// GET /files/:dir
+// @Summary List Files
+// @Description **Show uploaded files in upload directory**
+// @Description
+// @Description **Example:**
+// @Description ```
+// @Description $ curl http://localhost:8080/api/files/aptly-0.9
+// @Description ["aptly_0.9~dev+217+ge5d646c_i386.deb"]
+// @Description ```
+// @Tags Files
+// @Produce json
+// @Param dir path string true "Directory to list"
+// @Success 200 {array} string "Files found in directory"
+// @Failure 404 {object} Error "Not Found"
+// @Failure 500 {object} Error "Internal Server Error"
+// @Router /api/files/{dir} [get]
 func apiFilesListFiles(c *gin.Context) {
 	if !verifyDir(c) {
 		return
@@ -159,7 +198,20 @@ func apiFilesListFiles(c *gin.Context) {
 	c.JSON(200, list)
 }
 
-// DELETE /files/:dir
+// @Summary Delete Directory
+// @Description **Delete upload directory and uploaded files within**
+// @Description
+// @Description **Example:**
+// @Description ```
+// @Description $ curl -X DELETE http://localhost:8080/api/files/aptly-0.9
+// @Description {}
+// @Description ```
+// @Tags Files
+// @Produce json
+// @Param dir path string true "Directory"
+// @Success 200 {object} string "msg"
+// @Failure 500 {object} Error "Internal Server Error"
+// @Router /api/files/{dir} [delete]
 func apiFilesDeleteDir(c *gin.Context) {
	if !verifyDir(c) {
		return
@@ -174,7 +226,22 @@ func apiFilesDeleteDir(c *gin.Context) {
 	c.JSON(200, gin.H{})
 }
 
-// DELETE /files/:dir/:name
+// @Summary Delete File
+// @Description **Delete an uploaded file in upload directory**
+// @Description
+// @Description **Example:**
+// @Description ```
+// @Description $ curl -X DELETE http://localhost:8080/api/files/aptly-0.9/aptly_0.9~dev+217+ge5d646c_i386.deb
+// @Description {}
+// @Description ```
+// @Tags Files
+// @Produce json
+// @Param dir path string true "Directory to delete from"
+// @Param name path string true "File to delete"
+// @Success 200 {object} string "msg"
+// @Failure 400 {object} Error "Bad Request"
+// @Failure 500 {object} Error "Internal Server Error"
+// @Router /api/files/{dir}/{name} [delete]
 func apiFilesDeleteFile(c *gin.Context) {
 	if !verifyDir(c) {
 		return
diff --git a/api/gpg.go b/api/gpg.go
index 72e938d2f..4ea79b68d 100644
--- a/api/gpg.go
+++ b/api/gpg.go
@@ -12,15 +12,29 @@ import (
 	"github.com/gin-gonic/gin"
 )
 
-// POST /api/gpg
-func apiGPGAddKey(c *gin.Context) {
-	var b struct {
-		Keyserver   string
-		GpgKeyID    string
-		GpgKeyArmor string
-		Keyring     string
-	}
+type gpgAddKeyParams struct {
+	// Keyserver, when downloading GpgKeyIDs
+	Keyserver string `json:"Keyserver" example:"hkp://keyserver.ubuntu.com:80"`
+	// GpgKeyIDs to download from Keyserver, comma separated list
+	GpgKeyID string `json:"GpgKeyID" example:"EF0F382A1A7B6500,8B48AD6246925553"`
+	// Armored gpg public key, instead of downloading from keyserver
+	GpgKeyArmor string `json:"GpgKeyArmor" example:""`
+	// Keyring for adding the keys (default: trustedkeys.gpg)
+	Keyring string `json:"Keyring" example:"trustedkeys.gpg"`
+}
+// @Summary Add GPG Keys
+// @Description **Adds GPG keys to aptly keyring**
+// @Description
+// @Description Add GPG public keys for verifying remote repositories for mirroring.
+// @Tags Mirrors
+// @Produce json
+// @Success 200 {object} string "OK"
+// @Failure 400 {object} Error "Bad Request"
+// @Failure 404 {object} Error "Not Found"
+// @Router /api/gpg [post]
+func apiGPGAddKey(c *gin.Context) {
+	b := gpgAddKeyParams{}
 	if c.Bind(&b) != nil {
 		return
 	}
diff --git a/api/graph.go b/api/graph.go
index d04e687f8..78d4958fd 100644
--- a/api/graph.go
+++ b/api/graph.go
@@ -12,7 +12,27 @@ import (
 	"github.com/gin-gonic/gin"
 )
 
-// GET /api/graph.:ext?layout=[vertical|horizontal(default)]
+// @Summary Graph Output
+// @Description **Generate dependency graph**
+// @Description
+// @Description Command graph generates graph of dependencies:
+// @Description
+// @Description * between snapshots and mirrors (what mirror was used to create each snapshot)
+// @Description * between snapshots and local repos (what local repo was used to create snapshot)
+// @Description * between snapshots (pulling, merging, etc.)
+// @Description * between snapshots, local repos and published repositories (how snapshots were published).
+// @Description
+// @Description Graph is rendered to PNG file using graphviz package.
+// @Description
+// @Description Example URL: `http://localhost:8080/api/graph.svg?layout=vertical`
+// @Tags Status
+// @Produce image/png, image/svg+xml
+// @Param ext path string true "ext specifies desired file extension, e.g. .png, .svg."
+// @Param layout query string false "Change between a `horizontal` (default) and a `vertical` graph layout."
+// @Success 200 {object} []byte "Output"
+// @Failure 404 {object} Error "Not Found"
+// @Failure 500 {object} Error "Internal Server Error"
+// @Router /api/graph.{ext} [get]
 func apiGraph(c *gin.Context) {
 	var (
 		err error
diff --git a/api/mirror.go b/api/mirror.go
index 0052f2a59..48a505fec 100644
--- a/api/mirror.go
+++ b/api/mirror.go
@@ -31,7 +31,7 @@ func getVerifier(keyRings []string) (pgp.Verifier, error) {
 	return verifier, nil
 }
 
-// @Summary Get mirrors
+// @Summary Get Mirrors
 // @Description **Show list of currently available mirrors**
 // @Description Each mirror is returned as in “show” API.
// @Tags Mirrors @@ -82,8 +82,8 @@ type mirrorCreateParams struct { IgnoreSignatures bool ` json:"IgnoreSignatures"` } -// @Summary Create mirror -// @Description **Create a mirror** +// @Summary Create Mirror +// @Description **Create a mirror of a remote repository** // @Tags Mirrors // @Consume json // @Param request body mirrorCreateParams true "Parameters" @@ -164,6 +164,7 @@ func apiMirrorsCreate(c *gin.Context) { // @Tags Mirrors // @Param name path string true "mirror name" // @Param force query int true "force: 1 to enable" +// @Param _async query bool false "Run in background and return task object" // @Produce json // @Success 200 {object} task.ProcessReturnValue // @Failure 404 {object} Error "Mirror not found" @@ -364,6 +365,7 @@ type mirrorUpdateParams struct { // @Param name path string true "mirror name to update" // @Consume json // @Param request body mirrorUpdateParams true "Parameters" +// @Param _async query bool false "Run in background and return task object" // @Produce json // @Success 200 {object} task.ProcessReturnValue "Mirror was updated successfully" // @Success 202 {object} task.Task "Mirror is being updated" diff --git a/api/packages.go b/api/packages.go index 09ae785a5..631d30147 100644 --- a/api/packages.go +++ b/api/packages.go @@ -1,10 +1,19 @@ package api import ( + _ "github.com/aptly-dev/aptly/deb" // for swagger "github.com/gin-gonic/gin" ) -// GET /api/packages/:key +// @Summary Show packages +// @Description **Show information about package by package key** +// @Description Package keys could be obtained from various GET .../packages APIs. +// @Tags Packages +// @Produce json +// @Param key path string true "package key (unique package identifier)" +// @Success 200 {object} deb.Package "OK" +// @Failure 404 {object} Error "Not Found" +// @Router /api/packages/{key} [get] func apiPackagesShow(c *gin.Context) { collectionFactory := context.NewCollectionFactory() p, err := collectionFactory.PackageCollection().ByKey([]byte(c.Params.ByName("key"))) diff --git a/api/publish.go b/api/publish.go index 9b10e65fc..453f9a3d7 100644 --- a/api/publish.go +++ b/api/publish.go @@ -179,9 +179,16 @@ type publishedRepoCreateParams struct { // @Description // @Description The prefix may contain a storage specifier, e.g. `s3:packages/`, or it may also be empty to publish to the root directory. 
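Since package keys used by `GET /api/packages/{key}` contain spaces, a client has to percent-encode them. A small sketch under the usual assumptions (local server; the key is a made-up example in the same format shown in the repo package listings):

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	// Package key in the format shown elsewhere in this patch, e.g. "Pi386 aptly 0.8 966561016b44ed80".
	key := "Pi386 aptly 0.8 966561016b44ed80"

	resp, err := http.Get("http://localhost:8080/api/packages/" + url.PathEscape(key))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status)
	fmt.Println(string(body)) // package stanza as JSON on 200, an Error object on 404
}
```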
// @Description +// @Description **Example:** +// @Description ``` +// @Description $ curl -X POST -H 'Content-Type: application/json' --data '{"Distribution": "wheezy", "Sources": [{"Name": "aptly-repo"}]}' http://localhost:8080/api/publish//repos +// @Description {"Architectures":["i386"],"Distribution":"wheezy","Label":"","Origin":"","Prefix":".","SourceKind":"local","Sources":[{"Component":"main","Name":"aptly-repo"}],"Storage":""} +// @Description ``` +// @Description // @Description See also: `aptly publish create` // @Tags Publish // @Param prefix path string true "publishing prefix" +// @Param _async query bool false "Run in background and return task object" // @Consume json // @Param request body publishedRepoCreateParams true "Parameters" // @Produce json @@ -389,6 +396,7 @@ type publishedRepoUpdateSwitchParams struct { // @Produce json // @Param prefix path string true "publishing prefix" // @Param distribution path string true "distribution name" +// @Param _async query bool false "Run in background and return task object" // @Consume json // @Param request body publishedRepoUpdateSwitchParams true "Parameters" // @Produce json @@ -519,6 +527,7 @@ func apiPublishUpdateSwitch(c *gin.Context) { // @Param distribution path string true "distribution name" // @Param force query int true "force: 1 to enable" // @Param skipCleanup query int true "skipCleanup: 1 to enable" +// @Param _async query bool false "Run in background and return task object" // @Success 200 // @Failure 400 {object} Error "Bad Request" // @Failure 404 {object} Error "Published repository not found" @@ -565,6 +574,7 @@ func apiPublishDrop(c *gin.Context) { // @Tags Publish // @Param prefix path string true "publishing prefix" // @Param distribution path string true "distribution name" +// @Param _async query bool false "Run in background and return task object" // @Consume json // @Param request body sourceParams true "Parameters" // @Produce json @@ -682,6 +692,7 @@ func apiPublishListChanges(c *gin.Context) { // @Tags Publish // @Param prefix path string true "publishing prefix" // @Param distribution path string true "distribution name" +// @Param _async query bool false "Run in background and return task object" // @Consume json // @Param request body []sourceParams true "Parameters" // @Produce json @@ -747,6 +758,7 @@ func apiPublishSetSources(c *gin.Context) { // @Tags Publish // @Param prefix path string true "publishing prefix" // @Param distribution path string true "distribution name" +// @Param _async query bool false "Run in background and return task object" // @Produce json // @Success 200 // @Failure 400 {object} Error "Bad Request" @@ -799,6 +811,7 @@ func apiPublishDropChanges(c *gin.Context) { // @Param prefix path string true "publishing prefix" // @Param distribution path string true "distribution name" // @Param component path string true "component name" +// @Param _async query bool false "Run in background and return task object" // @Consume json // @Param request body sourceParams true "Parameters" // @Produce json @@ -878,6 +891,7 @@ func apiPublishUpdateSource(c *gin.Context) { // @Param prefix path string true "publishing prefix" // @Param distribution path string true "distribution name" // @Param component path string true "component name" +// @Param _async query bool false "Run in background and return task object" // @Produce json // @Success 200 // @Failure 400 {object} Error "Bad Request" @@ -954,6 +968,7 @@ type publishedRepoUpdateParams struct { // @Tags Publish // @Param 
prefix path string true "publishing prefix" // @Param distribution path string true "distribution name" +// @Param _async query bool false "Run in background and return task object" // @Consume json // @Param request body publishedRepoUpdateParams true "Parameters" // @Produce json diff --git a/api/repos.go b/api/repos.go index 005c2aaa1..3acc29ea3 100644 --- a/api/repos.go +++ b/api/repos.go @@ -18,7 +18,13 @@ import ( "github.com/gin-gonic/gin" ) -// GET /repos +// @Summary Serve HTML listing of repo +// @Description If ServeInAPIMode is enabled in aptly config, +// @Description this endpoint is enabled which returns an HTML listing of each repo that can be browsed +// @Tags Repos +// @Produce html +// @Success 200 {object} string "HTML" +// @Router /api/repos [get] func reposListInAPIMode(localRepos map[string]utils.FileSystemPublishRoot) gin.HandlerFunc { return func(c *gin.Context) { c.Writer.Header().Set("Content-Type", "text/html; charset=utf-8") @@ -35,7 +41,15 @@ func reposListInAPIMode(localRepos map[string]utils.FileSystemPublishRoot) gin.H } } -// GET /repos/:storage/*pkgPath +// @Summary Serve package in API mode +// @Description If ServeInAPIMode is enabled in aptly config, +// @Description this api serves a specified package from storage +// @Tags Repos +// @Param storage path string true "Storage" +// @Param pkgPath path string true "Package Path" allowReserved=true +// @Produce json +// @Success 200 "" +// @Router /api/{storage}/{pkgPath} [get] func reposServeInAPIMode(c *gin.Context) { pkgpath := c.Param("pkgPath") @@ -51,7 +65,8 @@ func reposServeInAPIMode(c *gin.Context) { } // @Summary Get repos -// @Description Get list of available repos. Each repo is returned as in “show” API. +// @Description **Get list of available repos** +// @Description Each repo is returned as in “show” API. // @Tags Repos // @Produce json // @Success 200 {array} deb.LocalRepo @@ -83,7 +98,14 @@ type repoCreateParams struct { } // @Summary Create repository -// @Description Create a local repository. +// @Description **Create a local repository** +// @Description +// @Description Distribution and component are used as defaults when publishing repo either directly or via snapshot. 
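The `_async` query parameter added across these endpoints follows one pattern: instead of blocking, the call returns a task object, and the task API is used to wait for completion. A hedged client-side sketch (publish body and empty-prefix URL taken from the example above; task fields as in the task API responses):

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

// minimal view of the task object returned by _async calls
type taskInfo struct {
	Name  string `json:"Name"`
	ID    int    `json:"ID"`
	State int    `json:"State"`
}

func main() {
	payload := []byte(`{"Distribution": "wheezy", "Sources": [{"Name": "aptly-repo"}]}`)

	// Kick off publishing in the background.
	resp, err := http.Post("http://localhost:8080/api/publish//repos?_async=true",
		"application/json", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var t taskInfo
	if err := json.NewDecoder(resp.Body).Decode(&t); err != nil {
		panic(err)
	}
	fmt.Println("started task", t.ID, t.Name)

	// Block until the task finishes and print its final state.
	wait, err := http.Get(fmt.Sprintf("http://localhost:8080/api/tasks/%d/wait", t.ID))
	if err != nil {
		panic(err)
	}
	defer wait.Body.Close()
	if err := json.NewDecoder(wait.Body).Decode(&t); err != nil {
		panic(err)
	}
	fmt.Println("task finished with state", t.State)
}
```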
+// @Description
+// @Description ```
+// @Description $ curl -X POST -H 'Content-Type: application/json' --data '{"Name": "aptly-repo"}' http://localhost:8080/api/repos
+// @Description {"Name":"aptly-repo","Comment":"","DefaultDistribution":"","DefaultComponent":""}
+// @Description ```
 // @Tags Repos
 // @Produce json
 // @Consume json
@@ -142,15 +164,28 @@ func apiReposCreate(c *gin.Context) {
 	c.JSON(http.StatusCreated, repo)
 }
 
-// PUT /api/repos/:name
-func apiReposEdit(c *gin.Context) {
-	var b struct {
-		Name                *string
-		Comment             *string
-		DefaultDistribution *string
-		DefaultComponent    *string
-	}
+type reposEditParams struct {
+	// Name of repository to modify
+	Name *string `binding:"required" json:"Name" example:"repo1"`
+	// Change Comment of repository
+	Comment *string ` json:"Comment" example:"example repo"`
+	// Change Default Distribution for publishing
+	DefaultDistribution *string ` json:"DefaultDistribution" example:""`
+	// Change Default Component for publishing
+	DefaultComponent *string ` json:"DefaultComponent" example:""`
+}
+// @Summary Update repo
+// @Description **Update local repository meta information**
+// @Tags Repos
+// @Produce json
+// @Param request body reposEditParams true "Parameters"
+// @Success 200 {object} deb.LocalRepo "msg"
+// @Failure 404 {object} Error "Not Found"
+// @Failure 500 {object} Error "Internal Server Error"
+// @Router /api/repos/{name} [put]
+func apiReposEdit(c *gin.Context) {
+	var b reposEditParams
 	if c.Bind(&b) != nil {
 		return
 	}
@@ -214,7 +249,18 @@ func apiReposShow(c *gin.Context) {
 	c.JSON(200, repo)
 }
 
-// DELETE /api/repos/:name
+// @Summary Drop Repository
+// @Description Drop/delete a repo
+// @Description Cannot drop repos that are published.
+// @Description Needs force=1 to drop repos used as source by other repos.
+// @Tags Repos +// @Produce json +// @Param _async query bool false "Run in background and return task object" +// @Param force query int false "force: 1 to enable" +// @Success 200 {object} task.ProcessReturnValue "Repo object" +// @Failure 404 {object} Error "Not Found" +// @Failure 404 {object} Error "Repo Conflict" +// @Router /api/repos/{name} [delete] func apiReposDrop(c *gin.Context) { force := c.Request.URL.Query().Get("force") == "1" name := c.Params.ByName("name") @@ -249,7 +295,27 @@ func apiReposDrop(c *gin.Context) { }) } -// GET /api/repos/:name/packages +// @Summary List Repo Packages +// @Description **Return a list of packages present in the repo** +// @Description +// @Description If `q` query parameter is missing, return all packages, otherwise return packages that match q +// @Description +// @Description **Example:** +// @Description ``` +// @Description $ curl http://localhost:8080/api/repos/aptly-repo/packages +// @Description ["Pi386 aptly 0.8 966561016b44ed80"] +// @Description ``` +// @Tags Repos +// @Produce json +// @Param name path string true "Snapshot to search" +// @Param q query string true "Package query (e.g Name%20(~%20matlab))" +// @Param withDeps query string true "Set to 1 to include dependencies when evaluating package query" +// @Param format query string true "Set to 'details' to return extra info about each package" +// @Param maximumVersion query string true "Set to 1 to only return the highest version for each package name" +// @Success 200 {object} string "msg" +// @Failure 404 {object} Error "Not Found" +// @Failure 404 {object} Error "Internal Server Error" +// @Router /api/repos/{name}/packages [get] func apiReposPackagesShow(c *gin.Context) { collectionFactory := context.NewCollectionFactory() collection := collectionFactory.LocalRepoCollection() @@ -269,11 +335,14 @@ func apiReposPackagesShow(c *gin.Context) { showPackages(c, repo.RefList(), collectionFactory) } +type reposPackagesAddDeleteParams struct { + // Package Refs + PackageRefs []string `binding:"required" json:"PackageRefs" example:""` +} + // Handler for both add and delete func apiReposPackagesAddDelete(c *gin.Context, taskNamePrefix string, cb func(list *deb.PackageList, p *deb.Package, out aptly.Progress) error) { - var b struct { - PackageRefs []string - } + var b reposPackagesAddDeleteParams if c.Bind(&b) != nil { return @@ -330,7 +399,21 @@ func apiReposPackagesAddDelete(c *gin.Context, taskNamePrefix string, cb func(li }) } -// POST /repos/:name/packages +// @Summary Add Packages by Key +// @Description **Add packages to local repository by package keys.** +// @Description +// @Description Any package can be added that is present in the aptly database (from any mirror, snapshot, local repository). This API combined with package list (search) APIs allows one to implement importing, copying, moving packages around. +// @Description +// @Description API verifies that packages actually exist in aptly database and checks constraint that conflicting packages can’t be part of the same local repository. 
+// @Tags Repos +// @Produce json +// @Param request body reposPackagesAddDeleteParams true "Parameters" +// @Param _async query bool false "Run in background and return task object" +// @Success 200 {object} string "msg" +// @Failure 400 {object} Error "Bad Request" +// @Failure 404 {object} Error "Not Found" +// @Failure 400 {object} Error "Internal Server Error" +// @Router /api/repos/{name}/packages [post] func apiReposPackagesAdd(c *gin.Context) { apiReposPackagesAddDelete(c, "Add packages to repo ", func(list *deb.PackageList, p *deb.Package, out aptly.Progress) error { out.Printf("Adding package %s\n", p.Name) @@ -338,7 +421,19 @@ func apiReposPackagesAdd(c *gin.Context) { }) } -// DELETE /repos/:name/packages +// @Summary Delete Packages by Key +// @Description **Remove packages from local repository by package keys.** +// @Description +// @Description Any package(s) can be removed from a local repository. Package references from a local repository can be retrieved with GET /api/repos/:name/packages. +// @Tags Repos +// @Produce json +// @Param request body reposPackagesAddDeleteParams true "Parameters" +// @Param _async query bool false "Run in background and return task object" +// @Success 200 {object} string "msg" +// @Failure 400 {object} Error "Bad Request" +// @Failure 404 {object} Error "Not Found" +// @Failure 400 {object} Error "Internal Server Error" +// @Router /api/repos/{name}/packages [delete] func apiReposPackagesDelete(c *gin.Context) { apiReposPackagesAddDelete(c, "Delete packages from repo ", func(list *deb.PackageList, p *deb.Package, out aptly.Progress) error { out.Printf("Removing package %s\n", p.Name) @@ -347,13 +442,27 @@ func apiReposPackagesDelete(c *gin.Context) { }) } -// POST /repos/:name/file/:dir/:file +// @Summary Add packages from uploaded file +// @Description Import packages from files (uploaded using File Upload API) to the local repository. If directory specified, aptly would discover package files automatically. +// @Description Adding same package to local repository is not an error. +// @Description By default aptly would try to remove every successfully processed file and directory `dir` (if it becomes empty after import). +// @Tags Repos +// @Param name path string true "Repository name" +// @Param dir path string true "Directory of packages" +// @Param file path string false "Filename (optional)" +// @Param _async query bool false "Run in background and return task object" +// @Produce json +// @Success 200 {string} string "OK" +// @Failure 400 {object} Error "wrong file" +// @Failure 404 {object} Error "Repository not found" +// @Failure 500 {object} Error "Error adding files" +// @Router /api/repos/{name}/file/{dir}/{file} [post] func apiReposPackageFromFile(c *gin.Context) { // redirect all work to dir method apiReposPackageFromDir(c) } -// @Summary Add packages from uploaded file/directory +// @Summary Add packages from uploaded directory // @Description Import packages from files (uploaded using File Upload API) to the local repository. If directory specified, aptly would discover package files automatically. // @Description Adding same package to local repository is not an error. // @Description By default aptly would try to remove every successfully processed file and directory `dir` (if it becomes empty after import). 
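Tying this back to the file-upload API: once files are uploaded, they are imported into a local repo with `POST /api/repos/{name}/file/{dir}`. A minimal sketch, using the illustrative repo and directory names from the patch's examples and the documented `noRemove` parameter:

```go
package main

import (
	"fmt"
	"io"
	"net/http"
)

func main() {
	// Import everything previously uploaded to the "aptly-0.9" upload directory
	// into the local repo "aptly-repo"; noRemove=1 keeps the uploaded files around.
	url := "http://localhost:8080/api/repos/aptly-repo/file/aptly-0.9?noRemove=1"

	resp, err := http.Post(url, "application/json", nil)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, string(body))
}
```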
@@ -363,12 +472,13 @@ func apiReposPackageFromFile(c *gin.Context) { // @Consume json // @Param noRemove query string false "when value is set to 1, don’t remove any files" // @Param forceReplace query string false "when value is set to 1, remove packages conflicting with package being added (in local repository)" +// @Param _async query bool false "Run in background and return task object" // @Produce json // @Success 200 {string} string "OK" // @Failure 400 {object} Error "wrong file" // @Failure 404 {object} Error "Repository not found" // @Failure 500 {object} Error "Error adding files" -// @Router /api/repos/{name}/{dir} [post] +// @Router /api/repos/{name}/file/{dir} [post] func apiReposPackageFromDir(c *gin.Context) { forceReplace := c.Request.URL.Query().Get("forceReplace") == "1" noRemove := c.Request.URL.Query().Get("noRemove") == "1" @@ -487,16 +597,33 @@ func apiReposPackageFromDir(c *gin.Context) { }) } -// POST /repos/:name/copy/:src/:file +type reposCopyPackageParams struct { + // Copy also dependencies + WithDeps bool `json:"with-deps,omitempty"` + // Do not perform operations + DryRun bool `json:"dry-run,omitempty"` +} + +// @Summary Copy Package +// @Description Copies a package from a source to destination repository +// @Tags Repos +// @Produce json +// @Param name path string true "Source repo" +// @Param src path string true "Destination repo" +// @Param file path string true "File/packages to copy" +// @Param _async query bool false "Run in background and return task object" +// @Success 200 {object} task.ProcessReturnValue "msg" +// @Failure 400 {object} Error "Bad Request" +// @Failure 404 {object} Error "Not Found" +// @Failure 422 {object} Error "Unprocessable Entity" +// @Failure 500 {object} Error "Internal Server Error" +// @Router /api/repos/{name}/copy/{src}/{file} [post] func apiReposCopyPackage(c *gin.Context) { dstRepoName := c.Params.ByName("name") srcRepoName := c.Params.ByName("src") fileName := c.Params.ByName("file") - jsonBody := struct { - WithDeps bool `json:"with-deps,omitempty"` - DryRun bool `json:"dry-run,omitempty"` - }{ + jsonBody := reposCopyPackageParams{ WithDeps: false, DryRun: false, } @@ -550,7 +677,6 @@ func apiReposCopyPackage(c *gin.Context) { dstList, err := deb.NewPackageListFromRefList(dstRepo.RefList(), collectionFactory.PackageCollection(), context.Progress()) if err != nil { return &task.ProcessReturnValue{Code: http.StatusInternalServerError, Value: nil}, fmt.Errorf("unable to load packages in dest: %s", err) - } srcList, err := deb.NewPackageListFromRefList(srcRefList, collectionFactory.PackageCollection(), context.Progress()) @@ -626,13 +752,46 @@ func apiReposCopyPackage(c *gin.Context) { }) } -// POST /repos/:name/include/:dir/:file +// @Summary Include Packages from File Upload +// @Description Allows automatic processing of .changes file controlling package upload (uploaded using File Upload API) to the local repository. i.e. Exposes repo include command in api. 
+// @Tags Repos +// @Produce json +// @Param forceReplace query int false "when value is set to 1, when adding package that conflicts with existing package, remove existing package" +// @Param noRemoveFiles query int false "when value is set to 1, don’t remove files that have been imported successfully into repository" +// @Param acceptUnsigned query int false "when value is set to 1, accept unsigned .changes files" +// @Param ignoreSignature query int false "when value is set to 1 disable verification of .changes file signature" +// @Param _async query bool false "Run in background and return task object" +// @Success 200 {object} string "msg" +// @Failure 404 {object} Error "Not Found" +// @Router /api/repos/{name}/include/{dir}/{file} [post] func apiReposIncludePackageFromFile(c *gin.Context) { // redirect all work to dir method apiReposIncludePackageFromDir(c) } -// POST /repos/:name/include/:dir +type reposIncludePackageFromDirReport struct { + Warnings []string + Added []string + Deleted []string +} + +type reposIncludePackageFromDirResponse struct { + Report reposIncludePackageFromDirReport + FailedFiles []string +} + +// @Summary Include Packages from Dir Upload +// @Description Allows automatic processing of .changes file controlling package upload (uploaded using File Upload API) to the local repository. i.e. Exposes repo include command in api. +// @Tags Repos +// @Produce json +// @Param forceReplace query int false "when value is set to 1, when adding package that conflicts with existing package, remove existing package" +// @Param noRemoveFiles query int false "when value is set to 1, don’t remove files that have been imported successfully into repository" +// @Param acceptUnsigned query int false "when value is set to 1, accept unsigned .changes files" +// @Param ignoreSignature query int false "when value is set to 1 disable verification of .changes file signature" +// @Param _async query bool false "Run in background and return task object" +// @Success 200 {object} reposIncludePackageFromDirResponse "Response" +// @Failure 404 {object} Error "Not Found" +// @Router /api/repos/{name}/include/{dir} [post] func apiReposIncludePackageFromDir(c *gin.Context) { forceReplace := c.Request.URL.Query().Get("forceReplace") == "1" noRemoveFiles := c.Request.URL.Query().Get("noRemoveFiles") == "1" @@ -734,6 +893,5 @@ func apiReposIncludePackageFromDir(c *gin.Context) { "Report": reporter, "FailedFiles": failedFiles, }}, nil - }) } diff --git a/api/router.go b/api/router.go index ada14b888..3536ab096 100644 --- a/api/router.go +++ b/api/router.go @@ -12,7 +12,7 @@ import ( "github.com/prometheus/client_golang/prometheus/promhttp" "github.com/rs/zerolog/log" - _ "github.com/aptly-dev/aptly/docs" // import docs + "github.com/aptly-dev/aptly/docs" swaggerFiles "github.com/swaggo/files" ginSwagger "github.com/swaggo/gin-swagger" ) @@ -27,26 +27,22 @@ func apiMetricsGet() gin.HandlerFunc { } func redirectSwagger(c *gin.Context) { + if c.Request.URL.Path == "/docs/index.html" { + c.Redirect(http.StatusMovedPermanently, "/docs.html") + return + } if c.Request.URL.Path == "/docs/" { - c.Redirect(http.StatusMovedPermanently, "/docs/index.html") + c.Redirect(http.StatusMovedPermanently, "/docs.html") + return + } + if c.Request.URL.Path == "/docs" { + c.Redirect(http.StatusMovedPermanently, "/docs.html") return } c.Next() } // Router returns prebuilt with routes http.Handler -// @title Aptly API -// @version 1.0 -// @description Aptly REST API Documentation - -// @contact.name Aptly -// 
@contact.url http://github.com/aptly-dev/aptly -// @contact.email support@aptly.info - -// @license.name MIT License -// @license.url http://www. - -// @BasePath /api func Router(c *ctx.AptlyContext) http.Handler { if aptly.EnableDebug { gin.SetMode(gin.DebugMode) @@ -73,6 +69,9 @@ func Router(c *ctx.AptlyContext) http.Handler { router.Use(gin.Recovery(), gin.ErrorLogger()) if c.Config().EnableSwaggerEndpoint { + router.GET("docs.html", func(c *gin.Context) { + c.Data(http.StatusOK, "text/html; charset=utf-8", docs.DocsHTML) + }) router.Use(redirectSwagger) url := ginSwagger.URL("/docs/doc.json") router.GET("/docs/*any", ginSwagger.WrapHandler(swaggerFiles.Handler, url)) @@ -233,7 +232,6 @@ func Router(c *ctx.AptlyContext) http.Handler { api.GET("/tasks/:id/return_value", apiTasksReturnValueShow) api.GET("/tasks/:id", apiTasksShow) api.DELETE("/tasks/:id", apiTasksDelete) - api.POST("/tasks-dummy", apiTasksDummy) } return router diff --git a/api/s3.go b/api/s3.go index 33ed0c83e..f38b08470 100644 --- a/api/s3.go +++ b/api/s3.go @@ -4,10 +4,12 @@ import ( "github.com/gin-gonic/gin" ) -// @Summary Get S3 buckets -// @Description Get list of S3 buckets. -// @Tags S3 -// @Produce json +// @Summary S3 buckets +// @Description **Get list of S3 buckets** +// @Description +// @Description List configured S3 buckets. +// @Tags Status +// @Produce json // @Success 200 {array} string "List of S3 buckets" // @Router /api/s3 [get] func apiS3List(c *gin.Context) { diff --git a/api/snapshot.go b/api/snapshot.go index 1a9c22786..d020c3dc8 100644 --- a/api/snapshot.go +++ b/api/snapshot.go @@ -14,8 +14,10 @@ import ( "github.com/gin-gonic/gin" ) -// @Summary Get snapshots -// @Description Get list of available snapshots. Each snapshot is returned as in “show” API. +// @Summary List Snapshots +// @Description **Get list of snapshots** +// @Description +// @Description Each snapshot is returned as in “show” API. 
// @Tags Snapshots // @Produce json // @Success 200 {array} deb.Snapshot @@ -39,19 +41,34 @@ func apiSnapshotsList(c *gin.Context) { c.JSON(200, result) } -// POST /api/mirrors/:name/snapshots/ +type snapshotsCreateFromMirrorParams struct { + // Name of snapshot to create + Name string `binding:"required" json:"Name" example:"snap1"` + // Description of snapshot + Description string ` json:"Description"` +} + +// @Summary Snapshot Mirror +// @Description **Create a snapshot of a mirror** +// @Tags Snapshots +// @Produce json +// @Param request body snapshotsCreateFromMirrorParams true "Parameters" +// @Param name path string true "Mirror name" +// @Param _async query bool false "Run in background and return task object" +// @Success 201 {object} deb.Snapshot "Created Snapshot" +// @Failure 400 {object} Error "Bad Request" +// @Failure 404 {object} Error "Mirror Not Found" +// @Failure 409 {object} Error "Conflicting snapshot" +// @Failure 500 {object} Error "Internal Server Error" +// @Router /api/mirrors/{name}/snapshots [post] func apiSnapshotsCreateFromMirror(c *gin.Context) { var ( err error repo *deb.RemoteRepo snapshot *deb.Snapshot + b snapshotsCreateFromMirrorParams ) - var b struct { - Name string `binding:"required"` - Description string - } - if c.Bind(&b) != nil { return } @@ -98,20 +115,37 @@ func apiSnapshotsCreateFromMirror(c *gin.Context) { }) } -// POST /api/snapshots +type snapshotsCreateParams struct { + // Name of snapshot to create + Name string `binding:"required" json:"Name" example:"snap2"` + // Description of snapshot + Description string ` json:"Description"` + // List of source snapshots + SourceSnapshots []string ` json:"SourceSnapshots" example:"snap1"` + // List of package refs + PackageRefs []string ` json:"PackageRefs" example:""` +} + +// @Summary Snapshot Packages +// @Description **Create a snapshot from package refs** +// @Description +// @Description Refs can be obtained from snapshots, local repos, or mirrors +// @Tags Snapshots +// @Param request body snapshotsCreateParams true "Parameters" +// @Param _async query bool false "Run in background and return task object" +// @Produce json +// @Success 201 {object} deb.Snapshot "Created snapshot" +// @Failure 400 {object} Error "Bad Request" +// @Failure 404 {object} Error "Source snapshot or package refs not found" +// @Failure 500 {object} Error "Internal Server Error" +// @Router /api/snapshots [post] func apiSnapshotsCreate(c *gin.Context) { var ( err error snapshot *deb.Snapshot + b snapshotsCreateParams ) - var b struct { - Name string `binding:"required"` - Description string - SourceSnapshots []string - PackageRefs []string - } - if c.Bind(&b) != nil { return } @@ -173,19 +207,35 @@ func apiSnapshotsCreate(c *gin.Context) { }) } -// POST /api/repos/:name/snapshots +type snapshotsCreateFromRepositoryParams struct { + // Name of snapshot to create + Name string `binding:"required" json:"Name" example:"snap1"` + // Description of snapshot + Description string ` json:"Description"` +} + +// @Summary Snapshot Repository +// @Description **Create a snapshot of a repository by name** +// @Tags Snapshots +// @Param name path string true "Repository name" +// @Consume json +// @Param request body snapshotsCreateFromRepositoryParams true "Parameters" +// @Param name path string true "Name of the snapshot" +// @Param _async query bool false "Run in background and return task object" +// @Produce json +// @Success 201 {object} deb.Snapshot "Created snapshot object" +// @Failure 400 {object} Error "Bad Request" 
+// @Failure 500 {object} Error "Internal Server Error" +// @Failure 404 {object} Error "Repo Not Found" +// @Router /api/repos/{name}/snapshots [post] func apiSnapshotsCreateFromRepository(c *gin.Context) { var ( err error repo *deb.LocalRepo snapshot *deb.Snapshot + b snapshotsCreateFromRepositoryParams ) - var b struct { - Name string `binding:"required"` - Description string - } - if c.Bind(&b) != nil { return } @@ -227,18 +277,32 @@ func apiSnapshotsCreateFromRepository(c *gin.Context) { }) } -// PUT /api/snapshots/:name +type snapshotsUpdateParams struct { + // Change Name of snapshot + Name string ` json:"Name" example:"snap2"` + // Change Description of snapshot + Description string `json:"Description"` +} + +// @Summary Update Snapshot +// @Description **Update snapshot metadata (Name, Description)** +// @Tags Snapshots +// @Param request body snapshotsUpdateParams true "Parameters" +// @Param name path string true "Snapshot name" +// @Param _async query bool false "Run in background and return task object" +// @Produce json +// @Success 200 {object} deb.Snapshot "Updated snapshot object" +// @Failure 404 {object} Error "Snapshot Not Found" +// @Failure 409 {object} Error "Conflicting snapshot" +// @Failure 500 {object} Error "Internal Server Error" +// @Router /api/snapshots/{name} [put] func apiSnapshotsUpdate(c *gin.Context) { var ( err error snapshot *deb.Snapshot + b snapshotsUpdateParams ) - var b struct { - Name string - Description string - } - if c.Bind(&b) != nil { return } @@ -277,7 +341,15 @@ func apiSnapshotsUpdate(c *gin.Context) { }) } -// GET /api/snapshots/:name +// @Summary Get snapshot information +// @Description **Query detailed information about a snapshot by name** +// @Tags Snapshots +// @Param name path string true "Name of the snapshot" +// @Produce json +// @Success 200 {object} deb.Snapshot "msg" +// @Failure 404 {object} Error "Snapshot Not Found" +// @Failure 500 {object} Error "Internal Server Error" +// @Router /api/snapshots/{name} [get] func apiSnapshotsShow(c *gin.Context) { collectionFactory := context.NewCollectionFactory() collection := collectionFactory.SnapshotCollection() @@ -297,7 +369,20 @@ func apiSnapshotsShow(c *gin.Context) { c.JSON(200, snapshot) } -// DELETE /api/snapshots/:name +// @Summary Drop Snapshot +// @Description **Drop/delete snapshot by name** +// @Description Cannot drop snapshots that are published. +// @Description Needs force=1 to drop snapshots used as source by other snapshots. +// @Tags Snapshots +// @Param name path string true "Snapshot name" +// @Param force query string false "Force operation" +// @Param _async query bool false "Run in background and return task object" +// @Produce json +// @Success 200 "" +// @Failure 404 {object} Error "Snapshot Not Found" +// @Failure 409 {object} Error "Snapshot in use" +// @Failure 500 {object} Error "Internal Server Error" +// @Router /api/snapshots/{name} [delete] func apiSnapshotsDrop(c *gin.Context) { name := c.Params.ByName("name") force := c.Request.URL.Query().Get("force") == "1" @@ -336,7 +421,19 @@ func apiSnapshotsDrop(c *gin.Context) { }) } -// GET /api/snapshots/:name/diff/:withSnapshot +// @Summary Snapshot diff +// @Description **Return the diff between two snapshots (name & withSnapshot)** +// @Description Provide `onlyMatching=1` to return only packages present in both snapshots. 
+// @Description Otherwise, returns a `left` and `right` result providing packages only in the first and second snapshots +// @Tags Snapshots +// @Produce json +// @Param name path string true "Snapshot name" +// @Param withSnapshot path string true "Snapshot name to diff against" +// @Param onlyMatching query string false "Only return packages present in both snapshots" +// @Success 200 {array} deb.PackageDiff "Package Diff" +// @Failure 404 {object} Error "Snapshot Not Found" +// @Failure 500 {object} Error "Internal Server Error" +// @Router /api/snapshots/{name}/diff/{withSnapshot} [get] func apiSnapshotsDiff(c *gin.Context) { onlyMatching := c.Request.URL.Query().Get("onlyMatching") == "1" @@ -387,7 +484,20 @@ func apiSnapshotsDiff(c *gin.Context) { c.JSON(200, result) } -// GET /api/snapshots/:name/packages +// @Summary List Snapshot Packages +// @Description **List all packages in snapshot or perform search on snapshot contents and return results** +// @Description If `q` query parameter is missing, return all packages, otherwise return packages that match q +// @Tags Snapshots +// @Produce json +// @Param name path string true "Snapshot to search" +// @Param q query string false "Package query (e.g Name%20(~%20matlab))" +// @Param withDeps query string false "Set to 1 to include dependencies when evaluating package query" +// @Param format query string false "Set to 'details' to return extra info about each package" +// @Param maximumVersion query string false "Set to 1 to only return the highest version for each package name" +// @Success 200 {array} string "Package info" +// @Failure 404 {object} Error "Snapshot Not Found" +// @Failure 500 {object} Error "Internal Server Error" +// @Router /api/snapshots/{name}/packages [get] func apiSnapshotsSearchPackages(c *gin.Context) { collectionFactory := context.NewCollectionFactory() collection := collectionFactory.SnapshotCollection() @@ -419,13 +529,14 @@ type snapshotsMergeParams struct { // @Description // @Description If only one snapshot is specified, merge copies source into destination. // @Tags Snapshots +// @Consume json +// @Produce json // @Param name path string true "Name of the snapshot to be created" // @Param latest query int false "merge only the latest version of each package" // @Param no-remove query int false "all versions of packages are preserved during merge" -// @Consume json // @Param request body snapshotsMergeParams true "Parameters" -// @Produce json -// @Success 200 +// @Param _async query bool false "Run in background and return task object" +// @Success 201 {object} deb.Snapshot "Resulting snapshot object" // @Failure 400 {object} Error "Bad Request" // @Failure 404 {object} Error "Not Found" // @Failure 500 {object} Error "Internal Error" @@ -530,15 +641,16 @@ type snapshotsPullParams struct { // @Description // @Description Aptly pulls first package matching each of package queries, but with flag -all-matches all matching packages would be pulled. 
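The snapshot diff endpoint documented above returns a list of package differences. A client can consume it without depending on the exact `deb.PackageDiff` fields by decoding into raw JSON; a sketch with illustrative snapshot names:

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Compare two snapshots; onlyMatching=1 restricts the diff to packages present in both.
	url := "http://localhost:8080/api/snapshots/snap1/diff/snap2?onlyMatching=1"

	resp, err := http.Get(url)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	var diff []json.RawMessage
	if err := json.NewDecoder(resp.Body).Decode(&diff); err != nil {
		panic(err)
	}
	fmt.Printf("%d package differences\n", len(diff))
	for _, d := range diff {
		fmt.Println(string(d))
	}
}
```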
// @Tags Snapshots +// @Param request body snapshotsPullParams true "Parameters" // @Param name path string true "Name of the snapshot to be created" // @Param all-matches query int false "pull all the packages that satisfy the dependency version requirements (default is to pull first matching package): 1 to enable" // @Param dry-run query int false "don’t create destination snapshot, just show what would be pulled: 1 to enable" // @Param no-deps query int false "don’t process dependencies, just pull listed packages: 1 to enable" // @Param no-remove query int false "don’t remove other package versions when pulling package: 1 to enable" +// @Param _async query bool false "Run in background and return task object" // @Consume json -// @Param request body snapshotsPullParams true "Parameters" // @Produce json -// @Success 200 +// @Success 200 {object} deb.Snapshot "Resulting Snapshot object" // @Failure 400 {object} Error "Bad Request" // @Failure 404 {object} Error "Not Found" // @Failure 500 {object} Error "Internal Error" diff --git a/api/storage.go b/api/storage.go index 4d306b0b4..c4f478b81 100644 --- a/api/storage.go +++ b/api/storage.go @@ -18,6 +18,8 @@ type diskFree struct { // @Summary Get Storage Utilization // @Description **Get disk free information of aptly storage** +// @Description +// @Description Units in MiB. // @Tags Status // @Produce json // @Success 200 {object} diskFree "Storage information" diff --git a/api/task.go b/api/task.go index 189d79319..1d1363254 100644 --- a/api/task.go +++ b/api/task.go @@ -1,18 +1,16 @@ package api import ( - "net/http" "strconv" - "github.com/aptly-dev/aptly/aptly" "github.com/aptly-dev/aptly/task" "github.com/gin-gonic/gin" ) // @Summary Get tasks -// @Description Get list of available tasks. Each task is returned as in “show” API. +// @Description **Get list of available tasks. Each task is returned as in “show” API** // @Tags Tasks -// @Produce json +// @Produce json // @Success 200 {array} task.Task // @Router /api/tasks [get] func apiTasksList(c *gin.Context) { @@ -20,21 +18,39 @@ func apiTasksList(c *gin.Context) { c.JSON(200, list.GetTasks()) } -// POST /tasks-clear +// @Summary Clear finished and failed tasks +// @Description **Removes finished and failed tasks from internal task list** +// @Tags Tasks +// @Produce json +// @Success 200 "" +// @Router /api/tasks-clear [post] func apiTasksClear(c *gin.Context) { list := context.TaskList() list.Clear() c.JSON(200, gin.H{}) } -// GET /tasks-wait +// @Summary Wait for task completion +// @Description **Waits for and returns when all running tasks are complete** +// @Tags Tasks +// @Produce json +// @Success 200 "" +// @Router /api/tasks-wait [get] func apiTasksWait(c *gin.Context) { list := context.TaskList() list.Wait() c.JSON(200, gin.H{}) } -// GET /tasks/:id/wait +// @Summary Wait for task to process +// @Description **Waits for and returns when given Task ID is complete** +// @Tags Tasks +// @Produce json +// @Param id path int true "Task ID" +// @Success 200 {object} task.Task +// @Failure 500 {object} Error "invalid syntax, bad id?" 
+// @Failure 400 {object} Error "Task Not Found" +// @Router /api/tasks/{id}/wait [get] func apiTasksWaitForTaskByID(c *gin.Context) { list := context.TaskList() id, err := strconv.ParseInt(c.Params.ByName("id"), 10, 0) @@ -52,7 +68,15 @@ func apiTasksWaitForTaskByID(c *gin.Context) { c.JSON(200, task) } -// GET /tasks/:id +// @Summary Return task information +// @Description **Return task information for a given ID** +// @Tags Tasks +// @Produce plain +// @Param id path int true "Task ID" +// @Success 200 {object} task.Task +// @Failure 500 {object} Error "invalid syntax, bad id?" +// @Failure 404 {object} Error "Task Not Found" +// @Router /api/tasks/{id} [get] func apiTasksShow(c *gin.Context) { list := context.TaskList() id, err := strconv.ParseInt(c.Params.ByName("id"), 10, 0) @@ -71,7 +95,15 @@ func apiTasksShow(c *gin.Context) { c.JSON(200, task) } -// GET /tasks/:id/output +// @Summary Return task output +// @Description **Return task output for a given ID** +// @Tags Tasks +// @Produce plain +// @Param id path int true "Task ID" +// @Success 200 {object} string "Task output" +// @Failure 500 {object} Error "invalid syntax, bad ID?" +// @Failure 404 {object} Error "Task Not Found" +// @Router /api/tasks/{id}/output [get] func apiTasksOutputShow(c *gin.Context) { list := context.TaskList() id, err := strconv.ParseInt(c.Params.ByName("id"), 10, 0) @@ -90,7 +122,15 @@ func apiTasksOutputShow(c *gin.Context) { c.JSON(200, output) } -// GET /tasks/:id/detail +// @Summary Return task detail +// @Description **Return task detail for a given ID** +// @Tags Tasks +// @Produce json +// @Param id path int true "Task ID" +// @Success 200 {object} string "Task detail" +// @Failure 500 {object} Error "invalid syntax, bad ID?" +// @Failure 404 {object} Error "Task Not Found" +// @Router /api/tasks/{id}/detail [get] func apiTasksDetailShow(c *gin.Context) { list := context.TaskList() id, err := strconv.ParseInt(c.Params.ByName("id"), 10, 0) @@ -109,7 +149,15 @@ func apiTasksDetailShow(c *gin.Context) { c.JSON(200, detail) } -// GET /tasks/:id/return_value +// @Summary Return task return value (status code) +// @Description **Return task return value (status code) by given ID** +// @Tags Tasks +// @Produce plain +// @Param id path int true "Task ID" +// @Success 200 {object} string "msg" +// @Failure 500 {object} Error "invalid syntax, bad ID?" +// @Failure 404 {object} Error "Not Found" +// @Router /api/tasks/{id}/return_value [get] func apiTasksReturnValueShow(c *gin.Context) { list := context.TaskList() id, err := strconv.ParseInt(c.Params.ByName("id"), 10, 0) @@ -127,7 +175,15 @@ func apiTasksReturnValueShow(c *gin.Context) { c.JSON(200, output) } -// DELETE /tasks/:id +// @Summary Delete task +// @Description **Delete completed task by given ID. Does not stop task execution** +// @Tags Tasks +// @Produce json +// @Param id path int true "Task ID" +// @Success 200 {object} task.Task +// @Failure 500 {object} Error "invalid syntax, bad ID?" 
+// @Failure 400 {object} Error "Task in progress or not found" +// @Router /api/tasks/{id} [delete] func apiTasksDelete(c *gin.Context) { list := context.TaskList() id, err := strconv.ParseInt(c.Params.ByName("id"), 10, 0) @@ -145,15 +201,3 @@ func apiTasksDelete(c *gin.Context) { c.JSON(200, delTask) } - -// POST /tasks-dummy -func apiTasksDummy(c *gin.Context) { - resources := []string{"dummy"} - taskName := "Dummy task" - maybeRunTaskInBackground(c, taskName, resources, func(out aptly.Progress, detail *task.Detail) (*task.ProcessReturnValue, error) { - out.Printf("Dummy task started\n") - detail.Store([]int{1, 2, 3}) - out.Printf("Dummy task finished\n") - return &task.ProcessReturnValue{Code: http.StatusTeapot, Value: []int{1, 2, 3}}, nil - }) -} diff --git a/api/task_test.go b/api/task_test.go deleted file mode 100644 index 59c981a07..000000000 --- a/api/task_test.go +++ /dev/null @@ -1,75 +0,0 @@ -package api - -import ( - "encoding/json" - "fmt" - "time" - - "github.com/aptly-dev/aptly/task" - - . "gopkg.in/check.v1" -) - -type TaskSuite struct { - ApiSuite -} - -var _ = Suite(&TaskSuite{}) - -func (s *TaskSuite) TestTasksDummy(c *C) { - response, _ := s.HTTPRequest("POST", "/api/tasks-dummy", nil) - c.Check(response.Code, Equals, 418) - c.Check(response.Body.String(), Equals, "[1,2,3]") -} - -func (s *TaskSuite) TestTasksDummyAsync(c *C) { - response, _ := s.HTTPRequest("POST", "/api/tasks-dummy?_async=true", nil) - c.Check(response.Code, Equals, 202) - var t task.Task - err := json.Unmarshal(response.Body.Bytes(), &t) - c.Assert(err, IsNil) - c.Check(t.Name, Equals, "Dummy task") - response, _ = s.HTTPRequest("GET", fmt.Sprintf("/api/tasks/%d/wait", t.ID), nil) - err = json.Unmarshal(response.Body.Bytes(), &t) - c.Assert(err, IsNil) - c.Check(t.State, Equals, task.SUCCEEDED) - response, _ = s.HTTPRequest("GET", fmt.Sprintf("/api/tasks/%d/detail", t.ID), nil) - c.Check(response.Code, Equals, 200) - c.Check(response.Body.String(), Equals, "[1,2,3]") - response, _ = s.HTTPRequest("GET", fmt.Sprintf("/api/tasks/%d/output", t.ID), nil) - c.Check(response.Code, Equals, 200) - c.Check(response.Body.String(), Matches, "\"Dummy task started.*") -} - -func (s *TaskSuite) TestTaskDelete(c *C) { - response, _ := s.HTTPRequest("POST", "/api/tasks-dummy?_async=true", nil) - c.Check(response.Code, Equals, 202) - c.Check(response.Body.String(), Equals, "{\"Name\":\"Dummy task\",\"ID\":1,\"State\":0}") - // Give the task time to start - time.Sleep(time.Second) - response, _ = s.HTTPRequest("DELETE", "/api/tasks/1", nil) - c.Check(response.Code, Equals, 200) -} - -func (s *TaskSuite) TestTasksClear(c *C) { - response, _ := s.HTTPRequest("POST", "/api/tasks-dummy?_async=true", nil) - c.Check(response.Code, Equals, 202) - var t task.Task - err := json.Unmarshal(response.Body.Bytes(), &t) - c.Assert(err, IsNil) - c.Check(t.Name, Equals, "Dummy task") - response, _ = s.HTTPRequest("GET", "/api/tasks-wait", nil) - c.Check(response.Code, Equals, 200) - response, _ = s.HTTPRequest("GET", "/api/tasks", nil) - c.Check(response.Code, Equals, 200) - var ts []task.Task - err = json.Unmarshal(response.Body.Bytes(), &ts) - c.Assert(err, IsNil) - c.Check(len(ts), Equals, 1) - c.Check(ts[0].State, Equals, task.SUCCEEDED) - response, _ = s.HTTPRequest("POST", "/api/tasks-clear", nil) - c.Check(response.Code, Equals, 200) - response, _ = s.HTTPRequest("GET", "/api/tasks", nil) - c.Check(response.Code, Equals, 200) - c.Check(response.Body.String(), Equals, "[]") -} diff --git a/aptly/conf.go b/aptly/conf.go new file 
mode 100644 index 000000000..b39c2b8ba --- /dev/null +++ b/aptly/conf.go @@ -0,0 +1,4 @@ +package aptly + +// Default aptly.conf (filled in at link time) +var AptlyConf []byte diff --git a/cmd/repo_add.go b/cmd/repo_add.go index 8189e7834..e7ccfac70 100644 --- a/cmd/repo_add.go +++ b/cmd/repo_add.go @@ -91,7 +91,7 @@ func aptlyRepoAdd(cmd *commander.Command, args []string) error { func makeCmdRepoAdd() *commander.Command { cmd := &commander.Command{ Run: aptlyRepoAdd, - UsageLine: "add | ...", + UsageLine: "add (|)...", Short: "add packages to local repository", Long: ` Command adds packages to local repository from .deb, .udeb (binary packages) and .dsc (source packages) files. diff --git a/cmd/repo_include.go b/cmd/repo_include.go index 72e24c6da..cba3fb306 100644 --- a/cmd/repo_include.go +++ b/cmd/repo_include.go @@ -86,7 +86,7 @@ func aptlyRepoInclude(cmd *commander.Command, args []string) error { func makeCmdRepoInclude() *commander.Command { cmd := &commander.Command{ Run: aptlyRepoInclude, - UsageLine: "include | ...", + UsageLine: "include (|)...", Short: "add packages to local repositories based on .changes files", Long: ` Command include looks for .changes files in list of arguments or specified directories. Each diff --git a/cmd/snapshot_create.go b/cmd/snapshot_create.go index 000a78d9b..74e3c9667 100644 --- a/cmd/snapshot_create.go +++ b/cmd/snapshot_create.go @@ -84,7 +84,7 @@ func aptlySnapshotCreate(cmd *commander.Command, args []string) error { func makeCmdSnapshotCreate() *commander.Command { cmd := &commander.Command{ Run: aptlySnapshotCreate, - UsageLine: "create from mirror | from repo | empty", + UsageLine: "create (from mirror | from repo | empty)", Short: "creates snapshot of mirror (local repository) contents", Long: ` Command create from mirror makes persistent immutable snapshot of remote diff --git a/cmd/task_run.go b/cmd/task_run.go index ff0e1dd6b..50519fe80 100644 --- a/cmd/task_run.go +++ b/cmd/task_run.go @@ -131,7 +131,7 @@ func formatCommands(args []string) [][]string { func makeCmdTaskRun() *commander.Command { cmd := &commander.Command{ Run: aptlyTaskRun, - UsageLine: "run -filename= | , , ...", + UsageLine: "run (-filename= | ...)", Short: "run aptly tasks", Long: ` Command helps organise multiple aptly commands in one single aptly task, running as single thread. 
diff --git a/context/context.go b/context/context.go index e599102d1..5df5bbfd9 100644 --- a/context/context.go +++ b/context/context.go @@ -3,7 +3,6 @@ package context import ( gocontext "context" - "errors" "fmt" "math/rand" "os" @@ -116,9 +115,11 @@ func (context *AptlyContext) config() *utils.ConfigStructure { if err != nil { fmt.Fprintf(os.Stderr, "Config file not found, creating default config at %s\n\n", homeLocation) - // as this is fresh aptly installation, we don't need to support legacy pool locations - utils.Config.SkipLegacyPool = true - utils.SaveConfig(configLocations[0], &utils.Config) + utils.SaveConfigRaw(homeLocation, aptly.AptlyConf) + err = utils.LoadConfig(homeLocation, &utils.Config) + if err != nil { + Fatal(fmt.Errorf("error loading config file %s: %s", homeLocation, err)) + } } } @@ -293,10 +294,10 @@ func (context *AptlyContext) _database() (database.Storage, error) { var err error switch context.config().DatabaseBackend.Type { case "leveldb": - if len(context.config().DatabaseBackend.DbPath) == 0 { - return nil, errors.New("leveldb databaseBackend config invalid") + dbPath := filepath.Join(context.config().GetRootDir(), "db") + if len(context.config().DatabaseBackend.DbPath) != 0 { + dbPath = context.config().DatabaseBackend.DbPath } - dbPath := filepath.Join(context.config().GetRootDir(), context.config().DatabaseBackend.DbPath) context.database, err = goleveldb.NewDB(dbPath) case "etcd": context.database, err = etcddb.NewDB(context.config().DatabaseBackend.URL) diff --git a/context/context_test.go b/context/context_test.go index 0a48f920c..f83d42f20 100644 --- a/context/context_test.go +++ b/context/context_test.go @@ -1,6 +1,8 @@ package context import ( + "fmt" + "os" "reflect" "testing" @@ -82,5 +84,6 @@ func (s *AptlyContextSuite) TestGetPublishedStorageBadFS(c *C) { // storage never exists. 
c.Assert(func() { s.context.GetPublishedStorage("filesystem:fuji") }, FatalErrorPanicMatches, - &FatalError{ReturnCode: 1, Message: "published local storage fuji not configured"}) + &FatalError{ReturnCode: 1, Message: fmt.Sprintf("error loading config file %s/.aptly.conf: EOF", + os.Getenv("HOME"))}) } diff --git a/debian/aptly.conf b/debian/aptly.conf index d091e4745..6eeed77ec 100644 --- a/debian/aptly.conf +++ b/debian/aptly.conf @@ -1,38 +1,370 @@ +// vim: : filetype=json +// json configuration file with comments +// validate with: sed '/\/\//d' aptly.conf | json_pp { + +// Aptly Configuration File +//////////////////////////// + + // Aptly storage directory for: + // - downloaded packages (`rootDir`/pool) + // - database (`rootDir`/db) + // - published repositories (`rootDir`/public) "rootDir": "~/.aptly", - "downloadConcurrency": 4, - "downloadSpeedLimit": 0, - "downloadRetries": 0, - "downloader": "default", + + // Number of attempts to open database if it's locked by other instance + // * -1 (no retry) "databaseOpenAttempts": -1, + + // Log Level + // * debug + // * info + // * warning + // * error + "logLevel": "info", + + // Log Format + // * default (text) + // * json + "logFormat": "default", + + // Default Architectures + // empty array defaults to all available architectures "architectures": [], + + // Follow contents of `Suggests:` field when processing dependencies for the package "dependencyFollowSuggests": false, + + // Follow contents of `Recommends:` field when processing dependencies for the package "dependencyFollowRecommends": false, + + // When dependency looks like `package-a | package-b`, follow both variants always "dependencyFollowAllVariants": false, + + // Follow dependency from binary package to source package "dependencyFollowSource": false, + + // Log additional details while resolving dependencies (useful for debugging) "dependencyVerboseResolve": false, - "gpgDisableSign": false, - "gpgDisableVerify": false, - "gpgProvider": "gpg", - "downloadSourcePackages": false, - "skipLegacyPool": true, + + // Specifies paramaters for short PPA url expansion + // empty defaults to output of `lsb_release` command "ppaDistributorID": "ubuntu", + + // Codename for short PPA url expansion "ppaCodename": "", - "skipContentsPublishing": false, - "skipBz2Publishing": false, - "FileSystemPublishEndpoints": {}, - "S3PublishEndpoints": {}, - "SwiftPublishEndpoints": {}, - "AzurePublishEndpoints": {}, - "AsyncAPI": false, - "enableMetricsEndpoint": false, - "logLevel": "info", - "logFormat": "default", + + // OBSOLETE + // in aptly up to version 1.0.0, package files were stored in internal package pool + // with MD5-dervied path, since 1.1.0 package pool layout was changed; + // if option is enabled, aptly stops checking for legacy paths; + // by default option is enabled for new aptly installations and disabled when + // upgrading from older versions + "skipLegacyPool": true, + + +// Aptly Server +//////////////// + + // Serve published repos as well as API "serveInAPIMode": false, + + // Enable metrics for Prometheus client + "enableMetricsEndpoint": false, + + // Enable API documentation on /docs + "enableSwaggerEndpoint": false, + + // OBSOLETE: use via url param ?_async=true + "AsyncAPI": false, + + +// Database +//////////// + + // Database backend + // Type must be one of: + // * leveldb (default) + // * etcd "databaseBackend": { - "type": "", - "url": "", + // LevelDB + "type": "leveldb", + // Path to leveldb files + // empty dbPath defaults to `rootDir`/db "dbPath": "" 
+ + // // etcd + // "type": "etcd", + // // URL to db server + // "url": "127.0.0.1:2379" + }, + + +// Mirroring +///////////// + + // Downloader + // * "default" + // * "grab" (more robust) + "downloader": "default", + + // Number of parallel download threads to use when downloading packages + "downloadConcurrency": 4, + + // Limit in kbytes/sec on download speed while mirroring remote repositories + "downloadSpeedLimit": 0, + + // Number of retries for download attempts + "downloadRetries": 0, + + // Download source packages per default + "downloadSourcePackages": false, + + +// Signing +/////////// + + // GPG Provider + // * "internal" (Go internal implementation) + // * "gpg" (External `gpg` utility) + "gpgProvider": "gpg", + + // Disable signing of published repositories + "gpgDisableSign": false, + + // Disable signature verification of remote repositories + "gpgDisableVerify": false, + + +// Publishing +////////////// + + // Do not publish Contents files + "skipContentsPublishing": false, + + // Do not create bz2 files + "skipBz2Publishing": false, + + +// Storage +/////////// + + // Filesystem publishing endpoints + // + // aptly defaults to publish to a single publish directory under `rootDir`/public. For + // a more advanced publishing strategy, you can define one or more filesystem endpoints in the + // `FileSystemPublishEndpoints` list of the aptly configuration file. Each endpoint has a name + // and the following associated settings. + // + // In order to publish to such an endpoint, specify the endpoint as `filesystem:endpoint-name` + // with `endpoint-name` as the name given in the aptly configuration file. For example: + // + // `aptly publish snapshot wheezy-main filesystem:test1:wheezy/daily` + // + "FileSystemPublishEndpoints": { + // // Endpoint Name + // "test1": { + // // Directory for publishing + // "rootDir": "/opt/srv/aptly_public", + + // // File Link Method for linking files from the internal pool to the published directory + // // * hardlink + // // * symlink + // // * copy + // "linkMethod": "hardlink", + + // // File Copare Method for comparing existing links from the internal pool to the published directory + // // Only used when "linkMethod" is set to "copy" + // // * md5 (default: compare md5 sum) + // // * size (compare file size) + // "verifyMethod": "md5" + // } + }, + + // S3 Endpoint Support + // + // cloud storage). First, publishing + // endpoints should be described in aptly configuration file. Each endpoint has name + // and associated settings. + // + // In order to publish to S3, specify endpoint as `s3:endpoint-name:` before + // publishing prefix on the command line, e.g.: + // + // `aptly publish snapshot wheezy-main s3:test:` + // + "S3PublishEndpoints": { + // // Endpoint Name + // "test": { + + // // Amazon region for S3 bucket + // "region": "us-east-1", + + // // Bucket name + // "bucket": "test-bucket", + + // // Endpoint (optional) + // // When using S3-compatible cloud storage, specify hostname of service endpoint here, + // // region is ignored if endpoint is set (set region to some human-readable name) + // // (should be left blank for real Amazon S3) + // "endpoint": "", + + // // Prefix (optional) + // // publishing under specified prefix in the bucket, defaults to + // // no prefix (bucket root) + // "prefix": "", + + // // Default ACLs (optional) + // // assign ACL to published files (one of the canned ACLs in Amazon + // // terminology). 
Useful values: `private` (default), `public-read` (public + // // repository) or `none` (don't set ACL). Public repositories could be consumed by `apt` using + // // HTTP endpoint (Amazon bucket should be configured for "website hosting"), + // // for private repositories special apt S3 transport is required. + // "acl": "private", + + // // Credentials (optional) + // // Amazon credentials to access S3 bucket. If not supplied, + // // environment variables `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` + // // are used. + // "awsAccessKeyID": "", + // "awsSecretAccessKey": "", + + // // Storage Class (optional) + // // Amazon S3 storage class, defaults to `STANDARD`. Other values + // // available: `REDUCED_REDUNDANCY` (lower price, lower redundancy) + // "storageClass": "STANDARD", + + // // Encryption Method (optional) + // // Server-side encryption method, defaults to none. Currently + // // the only available encryption method is `AES256` + // "encryptionMethod": "none", + + // // Plus Workaround (optional) + // // Workaround misbehavior in apt and Amazon S3 for files with `+` in filename by + // // creating two copies of package files with `+` in filename: one original + // // and another one with spaces instead of plus signs + // // With `plusWorkaround` enabled, package files with plus sign + // // would be stored twice. aptly might not cleanup files with spaces when published + // // repository is dropped or updated (switched) to new version of repository (snapshot) + // "plusWorkaround": false, + + // // Disable MultiDel (optional) + // // For S3-compatible cloud storages which do not support `MultiDel` S3 API, + // // enable this setting (file deletion would be slower with this setting enabled) + // "disableMultiDel": false, + + // // ForceSig2 (optional) + // // Disable Signature V4 support, useful with non-AWS S3-compatible object stores + // // which do not support SigV4, shouldn't be enabled for AWS + // "forceSigV2": false, + + // // ForceVirtualHostedStyle (optional) + // // Disable path style visit, useful with non-AWS S3-compatible object stores + // // which only support virtual hosted style + // "forceVirtualHostedStyle": false, + + // // Debug (optional) + // // Enables detailed request/response dump for each S3 operation + // "debug": false + // } + }, + + // Swift Endpoint Support + // + // aptly could be configured to publish repository directly to OpenStack Swift. First, + // publishing endpoints should be described in aptly configuration file. Each endpoint + // has name and associated settings. + // + // In order to publish to Swift, specify endpoint as `swift:endpoint-name:` before + // publishing prefix on the command line, e.g.: + // + // `aptly publish snapshot jessie-main swift:test:` + // + "SwiftPublishEndpoints": { + // Endpoint Name + // "test": { + + // // Container Name + // "container": "taylor1", + + // // Prefix (optional) + // // Publish under specified prefix in the container, defaults to no prefix (container root) + // "prefix": "", + + // // Credentials (optional) + // // OpenStack credentials to access Keystone. If not supplied, environment variables `OS_USERNAME` and `OS_PASSWORD` are used + // "osname": "", + // "password": "", + + // // Tenant (optional) + // // OpenStack tenant name and id (in order to use v2 authentication) + // "tenant": "", + // "tenantid": "", + + // // Auth URL (optional) + // // Full url of Keystone server (including port, and version). 
+ // // Example `http://identity.example.com:5000/v2.0` + // "authurl": "" + // } + }, + + // Azure Endpoint Support + // + // aptly can be configured to publish repositories directly to Microsoft Azure Blob + // Storage. First, publishing endpoints should be described in the aptly + // configuration file. Each endpoint has its name and associated settings. + "AzurePublishEndpoints": { + // // Endpoint Name + // "test": { + + // // Container Name + // "container": "container1", + + // // Prefix (optional) + // // Publishing under specified prefix in the container, defaults to no prefix (container root) + // "prefix": "", + + // // Credentials + // // Azure storage account access key to access blob storage + // "accountName": "", + // "accountKey": "", + + // // Endpoint URL + // // See: Azure documentation https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string + // // defaults to "https://.blob.core.windows.net" + // "endpoint": "" + // } }, - "enableSwaggerEndpoint": false + + // Package Pool + // Location for storing downloaded packages + // Type must be one of: + // * local + // * azure + "packagePoolStorage": { + // Local Pool + "type": "local", + // Local Pool Path + // empty path defaults to `rootDir`/pool + "path": "" + + // // Azure Azure Blob Storage Pool + // "type": "azure", + // "container": "pool1", + + // // Prefix (optional) + // // Publishing under specified prefix in the container, defaults to no prefix (container root) + // "prefix": "", + + // // Credentials + // // Azure storage account access key to access blob storage + // "accountName": "", + // "accountKey": "", + + // // Endpoint URL + // // See: Azure documentation https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string + // // defaults to "https://.blob.core.windows.net" + // "endpoint": "" + } + +// End of config } diff --git a/docs/Database.md b/docs/Database.md new file mode 100644 index 000000000..b4eca8d06 --- /dev/null +++ b/docs/Database.md @@ -0,0 +1,5 @@ +# Maintenance Operations +
+Manage aptly’s internal metadata database and package pool. + +
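As a loose illustration of a database maintenance call, a minimal client sketch; the endpoint path (`POST /api/db/cleanup`) and the server address are assumptions, not taken from this change:

```go
package main

import (
	"fmt"
	"net/http"
)

func main() {
	// Assumed endpoint: trigger a cleanup of unreferenced metadata and pool files.
	resp, err := http.Post("http://localhost:8080/api/db/cleanup", "application/json", nil)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("cleanup returned:", resp.Status)
}
```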
diff --git a/docs/Files.md b/docs/Files.md new file mode 100644 index 000000000..616a64ac8 --- /dev/null +++ b/docs/Files.md @@ -0,0 +1,12 @@ +# Upload Package Files +
+ +In order to add Debian package files to a local repository, the files are first uploaded to a temporary directory. +Then the directory (or a specific file within it) is added to a repository. After adding to a repository, the directory or files are removed by default. + +All uploaded files are stored under the `/upload/` directory. + +For concurrent uploads from CI/CD pipelines, make sure the temporary directory is unique. + + +
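A sketch of this workflow (upload into a unique temporary directory, then add its contents to a local repo), assuming an aptly API on `http://localhost:8080`, a repo named `myrepo`, and the standard file-upload and repo-add endpoints; the package file name is hypothetical:

```go
package main

import (
	"bytes"
	"fmt"
	"mime/multipart"
	"net/http"
	"os"
	"path/filepath"
)

func main() {
	const base = "http://localhost:8080"
	pkg := "hello_1.0_amd64.deb" // hypothetical package file

	// Build a multipart body containing the package file.
	var buf bytes.Buffer
	w := multipart.NewWriter(&buf)
	part, _ := w.CreateFormFile("file", filepath.Base(pkg))
	data, err := os.ReadFile(pkg)
	if err != nil {
		panic(err)
	}
	part.Write(data)
	w.Close()

	// Upload into a unique temporary directory under the upload area.
	resp, err := http.Post(base+"/api/files/ci-build-42", w.FormDataContentType(), &buf)
	if err != nil {
		panic(err)
	}
	resp.Body.Close()

	// Add the uploaded directory to a local repository; the uploaded files
	// are removed afterwards by default.
	resp, err = http.Post(base+"/api/repos/myrepo/file/ci-build-42", "application/json", nil)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("add returned:", resp.Status)
}
```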
diff --git a/docs/Mirrors.md b/docs/Mirrors.md new file mode 100644 index 000000000..4bacaec19 --- /dev/null +++ b/docs/Mirrors.md @@ -0,0 +1,8 @@ +# Manage Remote Repository Mirrors +
+Manage mirrors of remote Debian repositories (HTTP, HTTPS or FTP). + +Flat Debian repositories are supported, as well as mirroring source packages and Debian installers. +
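For illustration, a sketch of creating a mirror via the API; the request body field names (`Name`, `ArchiveURL`, `Distribution`, `Components`) and the server address are assumptions based on the mirror API and are not part of this change:

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	// Assumed body fields, mirroring the `aptly mirror create` parameters.
	params := map[string]interface{}{
		"Name":         "bookworm-main",
		"ArchiveURL":   "http://deb.debian.org/debian",
		"Distribution": "bookworm",
		"Components":   []string{"main"},
	}
	body, _ := json.Marshal(params)
	resp, err := http.Post("http://localhost:8080/api/mirrors", "application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("mirror create returned:", resp.Status)
}
```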
+ diff --git a/docs/Packages.md b/docs/Packages.md new file mode 100644 index 000000000..18385c0e6 --- /dev/null +++ b/docs/Packages.md @@ -0,0 +1,5 @@ +# Search Package Collection +
+Perform operations on the whole collection of packages in the aptly database. +
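As an illustration, a sketch of searching the package collection; the `/api/packages` endpoint with a `q` query parameter and the server address are assumptions, not shown in this change:

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	// Search the whole package collection with a package query.
	q := url.QueryEscape("Name (% aptly*)")
	resp, err := http.Get("http://localhost:8080/api/packages?q=" + q)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	keys, _ := io.ReadAll(resp.Body)
	fmt.Println(string(keys)) // JSON array of package keys
}
```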
+ diff --git a/docs/Publish.md b/docs/Publish.md new file mode 100644 index 000000000..0077f4d9b --- /dev/null +++ b/docs/Publish.md @@ -0,0 +1,21 @@ +# Publish Repositories, Snapshots, Mirrors +
+ +Publish a snapshot or local repository as a Debian repository to be used as an APT source on Debian based systems. + +The published repository is signed with the user's GnuPG key. + +Repositories can be published to local directories, Amazon S3 buckets, Azure or Swift storage. + +#### GPG Keys + +A GPG key is required to sign any published repository. The key pair should be generated before publishing. + +The public part of the key should be exported from your keyring using `gpg --export --armor` and imported on the system which uses the published repository. + +#### Parameters + +The publish APIs use the following convention to identify published repositories: `/api/publish/:prefix/:distribution`. `:distribution` is the distribution name, while `:prefix` is `[<storage>:]<prefix>` (storage is optional and defaults to the empty string). If the publishing prefix contains slashes (`/`), they should be replaced with underscores (`_`), and underscores +should be replaced with double underscores (`__`). To specify the root `:prefix`, use `:.`, as `.` is ambiguous in URLs. + +
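The prefix escaping convention can be expressed as a small helper. This is only a sketch of the rule described in this section (escape existing underscores first, then replace slashes, with `:.` for the root prefix):

```go
package main

import (
	"fmt"
	"strings"
)

// escapePrefix turns a publishing prefix into the form used in
// /api/publish/:prefix/:distribution URLs: existing underscores become
// double underscores, slashes become underscores, and the root prefix
// is written as ":.".
func escapePrefix(prefix string) string {
	if prefix == "" || prefix == "." {
		return ":."
	}
	p := strings.ReplaceAll(prefix, "_", "__")
	return strings.ReplaceAll(p, "/", "_")
}

func main() {
	fmt.Println(escapePrefix("debian/security")) // debian_security
	fmt.Println(escapePrefix("."))               // :.
}
```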
diff --git a/docs/Repos.md b/docs/Repos.md new file mode 100644 index 000000000..05960c033 --- /dev/null +++ b/docs/Repos.md @@ -0,0 +1,9 @@ +# Manage Local Repositories +
+A local repository is a collection of versioned packages (usually custom packages created internally). + +Packages can be added, removed, moved or copied between repos. + +Local repositories can be published (either directly or via a snapshot) to be used as an APT source on a Debian based system. +
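A minimal sketch of creating a local repository through the API; the server address and the repo parameters are assumptions for illustration:

```go
package main

import (
	"fmt"
	"net/http"
	"strings"
)

func main() {
	// Create an empty local repository with default publishing settings.
	body := `{"Name": "myrepo", "Comment": "internal packages", "DefaultDistribution": "bookworm", "DefaultComponent": "main"}`
	resp, err := http.Post("http://localhost:8080/api/repos", "application/json", strings.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println("repo create returned:", resp.Status)
}
```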
+ diff --git a/docs/Snapshots.md b/docs/Snapshots.md new file mode 100644 index 000000000..21d0e5266 --- /dev/null +++ b/docs/Snapshots.md @@ -0,0 +1,8 @@ +# Manage Snapshots +
+ +Local repositories and mirrors can be snapshotted to capture an immutable state. + +Snapshots can be merged, filtered and verified for missing dependencies. Snapshots can be published to be used as an APT source. + +
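For example, a sketch of snapshotting a local repository; the repo and snapshot names and the server address are assumptions:

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
	"strings"
)

func main() {
	body := `{"Name": "myrepo-2024-06-01"}`
	resp, err := http.Post("http://localhost:8080/api/repos/myrepo/snapshots",
		"application/json", strings.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// The response describes the newly created, immutable snapshot.
	var snapshot map[string]interface{}
	if err := json.NewDecoder(resp.Body).Decode(&snapshot); err != nil {
		panic(err)
	}
	fmt.Println("created snapshot:", snapshot["Name"])
}
```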
diff --git a/docs/Status.md b/docs/Status.md new file mode 100644 index 000000000..2aac9e18f --- /dev/null +++ b/docs/Status.md @@ -0,0 +1,5 @@ +# Status Information +
+Status information about the aptly service and its storage. + +
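For example, the storage utilization endpoint documented in this change reports disk free information in MiB. A minimal client sketch; the `/api/storage` path and server address are assumptions, and the response is decoded generically since the field names are not shown here:

```go
package main

import (
	"encoding/json"
	"fmt"
	"net/http"
)

func main() {
	resp, err := http.Get("http://localhost:8080/api/storage")
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// Decode into a generic map; values are reported in MiB.
	var info map[string]interface{}
	if err := json.NewDecoder(resp.Body).Decode(&info); err != nil {
		panic(err)
	}
	fmt.Println("storage:", info)
}
```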
diff --git a/docs/Tasks.md b/docs/Tasks.md new file mode 100644 index 000000000..03668d8e6 --- /dev/null +++ b/docs/Tasks.md @@ -0,0 +1,8 @@ +# Background Tasks +
+ +Several API operations can be run asynchronously in the background as a task. In that case, a Task object with an ID and a State is returned, which can be queried for progress. + +Tasks should be deleted once they are no longer in progress, so that the internal task list does not grow indefinitely. + +
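A sketch of this flow using the task endpoints documented in this change (`/api/tasks/{id}/wait`, `/api/tasks/{id}/output`, and task deletion); the task ID and server address are assumptions, standing in for the Task returned by an earlier `?_async=true` request:

```go
package main

import (
	"encoding/json"
	"fmt"
	"io"
	"net/http"
)

func main() {
	const base = "http://localhost:8080"
	const id = 1 // hypothetical task ID from a previous ?_async=true request

	// Block until the task has finished and inspect its state.
	resp, err := http.Get(fmt.Sprintf("%s/api/tasks/%d/wait", base, id))
	if err != nil {
		panic(err)
	}
	var t struct {
		Name  string
		ID    int
		State int
	}
	if err := json.NewDecoder(resp.Body).Decode(&t); err != nil {
		panic(err)
	}
	resp.Body.Close()
	fmt.Printf("task %d (%q) finished with state %d\n", t.ID, t.Name, t.State)

	// Fetch the task output, then delete the task to free its entry.
	resp, err = http.Get(fmt.Sprintf("%s/api/tasks/%d/output", base, id))
	if err != nil {
		panic(err)
	}
	out, _ := io.ReadAll(resp.Body)
	resp.Body.Close()
	fmt.Println("output:", string(out))

	req, _ := http.NewRequest(http.MethodDelete, fmt.Sprintf("%s/api/tasks/%d", base, id), nil)
	if _, err := http.DefaultClient.Do(req); err != nil {
		panic(err)
	}
}
```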
diff --git a/docs/api.md b/docs/api.md new file mode 100644 index 000000000..aaf87195a --- /dev/null +++ b/docs/api.md @@ -0,0 +1,9 @@ +Aptly operations are also available via REST API served with `aptly api serve`. + +On Debian based systems, a package `aptly-api` is available, which will run aptly as systemd service as dedicated aptly-api user. + +Some configuration changes (S3 publishing endpoints, ...) will require restarting the aptly service in order to take effect. + +The REST API shouldn't be exposed to the Internet as there is no authentication/protection, consider using a HTTP proxy (e.g. nginx) to add https and authentication. + +#### Aptly REST API Documentation diff --git a/docs/docs.html b/docs/docs.html new file mode 100644 index 000000000..d057ee1d3 --- /dev/null +++ b/docs/docs.html @@ -0,0 +1,175 @@ + + + + + Swagger UI + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
+ + + + + + + diff --git a/docs/index.go b/docs/index.go index da325d03a..ca4c914a7 100644 --- a/docs/index.go +++ b/docs/index.go @@ -1,3 +1,10 @@ package docs -import _ "github.com/swaggo/swag" // make sure swag is in go.mod +import ( + _ "embed" // embed html below + + _ "github.com/swaggo/swag" // make sure swag is in go.mod +) + +//go:embed docs.html +var DocsHTML []byte diff --git a/docs/swagger.conf.tpl b/docs/swagger.conf.tpl new file mode 100644 index 000000000..31b0dfbff --- /dev/null +++ b/docs/swagger.conf.tpl @@ -0,0 +1,28 @@ +package docs + +// @title Aptly API +// @description.markdown + +// @contact.name Aptly +// @contact.url http://github.com/aptly-dev/aptly + +// @Tag.name Repos +// @Tag.description.markdown +// @Tag.name Files +// @Tag.description.markdown +// @Tag.name Mirrors +// @Tag.description.markdown +// @Tag.name Snapshots +// @Tag.description.markdown +// @Tag.name Publish +// @Tag.description.markdown +// @Tag.name Packages +// @Tag.description.markdown +// @Tag.name Status +// @Tag.description.markdown +// @Tag.name Database +// @Tag.description.markdown +// @Tag.name Tasks +// @Tag.description.markdown + +// version will be appended here: diff --git a/files/package_pool_test.go b/files/package_pool_test.go index f06d6bcfd..4cd9476b9 100644 --- a/files/package_pool_test.go +++ b/files/package_pool_test.go @@ -120,7 +120,7 @@ func (s *PackagePoolSuite) TestImportOk(c *C) { if isSameDevice(s) { c.Check(info.Sys().(*syscall.Stat_t).Nlink > 1, Equals, true) } else { - c.Check(info.Sys().(*syscall.Stat_t).Nlink, Equals, uint64(1)) + c.Check(info.Sys().(*syscall.Stat_t).Nlink == 1, Equals, true) } // import as different name @@ -359,7 +359,7 @@ func (s *PackagePoolSuite) TestLink(c *C) { if isSameDevice(s) { c.Check(info.Sys().(*syscall.Stat_t).Nlink > 2, Equals, true) } else { - c.Check(info.Sys().(*syscall.Stat_t).Nlink, Equals, uint64(2)) + c.Check(info.Sys().(*syscall.Stat_t).Nlink == 2, Equals, true) } } @@ -377,7 +377,7 @@ func (s *PackagePoolSuite) TestSymlink(c *C) { if isSameDevice(s) { c.Check(info.Sys().(*syscall.Stat_t).Nlink > 2, Equals, true) } else { - c.Check(info.Sys().(*syscall.Stat_t).Nlink, Equals, uint64(1)) + c.Check(info.Sys().(*syscall.Stat_t).Nlink == 1, Equals, true) } info, err = os.Lstat(dstPath) diff --git a/main.go b/main.go index 337575a4e..6a5247cc8 100644 --- a/main.go +++ b/main.go @@ -13,12 +13,16 @@ import ( //go:embed VERSION var Version string +//go:embed debian/aptly.conf +var AptlyConf []byte + func main() { if Version == "" { Version = "unknown" } aptly.Version = Version + aptly.AptlyConf = AptlyConf os.Exit(cmd.Run(cmd.RootCommand(), os.Args[1:], true)) } diff --git a/main_test.go b/main_test.go index 81ef138dd..750073b78 100644 --- a/main_test.go +++ b/main_test.go @@ -51,6 +51,7 @@ func TestRunMain(t *testing.T) { } aptly.Version = Version + aptly.AptlyConf = AptlyConf args := filterOutTestArgs(os.Args[1:]) root := cmd.RootCommand() diff --git a/system/api_lib.py b/system/api_lib.py index a41161ea6..20036b861 100644 --- a/system/api_lib.py +++ b/system/api_lib.py @@ -84,7 +84,7 @@ def post_task(self, uri, *args, **kwargs): self._ensure_async(kwargs) resp = self.post(uri, *args, **kwargs) if resp.status_code != 202: - return resp + raise Exception("async api error: " + resp.text) _id = resp.json()['ID'] resp = self.get("/api/tasks/" + str(_id) + "/wait") diff --git a/system/lib.py b/system/lib.py index 2918d8053..1287f81d5 100644 --- a/system/lib.py +++ b/system/lib.py @@ -242,7 +242,7 @@ def prepare_fixture(self): 
os.environ["HOME"], self.aptlyDir, "pool"), ignore=shutil.ignore_patterns(".git")) if self.databaseType == "etcd": - if not os.path.exists("/srv/etcd"): + if not os.path.exists("/tmp/aptly-etcd"): self.run_cmd([os.path.join(os.path.dirname(inspect.getsourcefile(BaseTest)), "t13_etcd/install-etcd.sh")]) if self.fixtureDB and self.databaseType != "etcd": @@ -253,12 +253,12 @@ def prepare_fixture(self): self.shutdown_etcd() # remove existing database - if os.path.exists("/tmp/etcd-data"): - shutil.rmtree("/tmp/etcd-data") + if os.path.exists("/tmp/aptly-etcd-data"): + shutil.rmtree("/tmp/aptly-etcd-data") if self.fixtureDB: print("import etcd") - self.run_cmd(["/srv/etcd/etcdctl", "--data-dir=/tmp/etcd-data", "snapshot", "restore", os.path.join(os.environ["HOME"], "etcd.db")]) + self.run_cmd(["/tmp/aptly-etcd/etcdctl", "--data-dir=/tmp/aptly-etcd-data", "snapshot", "restore", os.path.join(os.environ["HOME"], "etcd.db")]) print("starting etcd") self.EtcdServer = self._start_process([os.path.join(os.path.dirname(inspect.getsourcefile(BaseTest)), "t13_etcd/start-etcd.sh")], stdout=subprocess.PIPE, stderr=subprocess.PIPE) diff --git a/system/t02_config/CreateConfigTest_gold b/system/t02_config/CreateConfigTest_gold index 181efa4d4..6eeed77ec 100644 --- a/system/t02_config/CreateConfigTest_gold +++ b/system/t02_config/CreateConfigTest_gold @@ -1,39 +1,370 @@ +// vim: : filetype=json +// json configuration file with comments +// validate with: sed '/\/\//d' aptly.conf | json_pp { - "rootDir": "${HOME}/.aptly", - "downloadConcurrency": 4, - "downloadSpeedLimit": 0, - "downloadRetries": 0, - "downloader": "default", + +// Aptly Configuration File +//////////////////////////// + + // Aptly storage directory for: + // - downloaded packages (`rootDir`/pool) + // - database (`rootDir`/db) + // - published repositories (`rootDir`/public) + "rootDir": "~/.aptly", + + // Number of attempts to open database if it's locked by other instance + // * -1 (no retry) "databaseOpenAttempts": -1, + + // Log Level + // * debug + // * info + // * warning + // * error + "logLevel": "info", + + // Log Format + // * default (text) + // * json + "logFormat": "default", + + // Default Architectures + // empty array defaults to all available architectures "architectures": [], + + // Follow contents of `Suggests:` field when processing dependencies for the package "dependencyFollowSuggests": false, + + // Follow contents of `Recommends:` field when processing dependencies for the package "dependencyFollowRecommends": false, + + // When dependency looks like `package-a | package-b`, follow both variants always "dependencyFollowAllVariants": false, + + // Follow dependency from binary package to source package "dependencyFollowSource": false, + + // Log additional details while resolving dependencies (useful for debugging) "dependencyVerboseResolve": false, - "gpgDisableSign": false, - "gpgDisableVerify": false, - "gpgProvider": "gpg", - "downloadSourcePackages": false, - "packagePoolStorage": {}, - "skipLegacyPool": true, + + // Specifies paramaters for short PPA url expansion + // empty defaults to output of `lsb_release` command "ppaDistributorID": "ubuntu", + + // Codename for short PPA url expansion "ppaCodename": "", - "skipContentsPublishing": false, - "skipBz2Publishing": false, - "FileSystemPublishEndpoints": {}, - "S3PublishEndpoints": {}, - "SwiftPublishEndpoints": {}, - "AzurePublishEndpoints": {}, - "AsyncAPI": false, - "enableMetricsEndpoint": false, - "logLevel": "debug", - "logFormat": "default", + + // OBSOLETE 
+ // in aptly up to version 1.0.0, package files were stored in internal package pool + // with MD5-dervied path, since 1.1.0 package pool layout was changed; + // if option is enabled, aptly stops checking for legacy paths; + // by default option is enabled for new aptly installations and disabled when + // upgrading from older versions + "skipLegacyPool": true, + + +// Aptly Server +//////////////// + + // Serve published repos as well as API "serveInAPIMode": false, + + // Enable metrics for Prometheus client + "enableMetricsEndpoint": false, + + // Enable API documentation on /docs + "enableSwaggerEndpoint": false, + + // OBSOLETE: use via url param ?_async=true + "AsyncAPI": false, + + +// Database +//////////// + + // Database backend + // Type must be one of: + // * leveldb (default) + // * etcd "databaseBackend": { - "type": "", - "url": "", + // LevelDB + "type": "leveldb", + // Path to leveldb files + // empty dbPath defaults to `rootDir`/db "dbPath": "" + + // // etcd + // "type": "etcd", + // // URL to db server + // "url": "127.0.0.1:2379" + }, + + +// Mirroring +///////////// + + // Downloader + // * "default" + // * "grab" (more robust) + "downloader": "default", + + // Number of parallel download threads to use when downloading packages + "downloadConcurrency": 4, + + // Limit in kbytes/sec on download speed while mirroring remote repositories + "downloadSpeedLimit": 0, + + // Number of retries for download attempts + "downloadRetries": 0, + + // Download source packages per default + "downloadSourcePackages": false, + + +// Signing +/////////// + + // GPG Provider + // * "internal" (Go internal implementation) + // * "gpg" (External `gpg` utility) + "gpgProvider": "gpg", + + // Disable signing of published repositories + "gpgDisableSign": false, + + // Disable signature verification of remote repositories + "gpgDisableVerify": false, + + +// Publishing +////////////// + + // Do not publish Contents files + "skipContentsPublishing": false, + + // Do not create bz2 files + "skipBz2Publishing": false, + + +// Storage +/////////// + + // Filesystem publishing endpoints + // + // aptly defaults to publish to a single publish directory under `rootDir`/public. For + // a more advanced publishing strategy, you can define one or more filesystem endpoints in the + // `FileSystemPublishEndpoints` list of the aptly configuration file. Each endpoint has a name + // and the following associated settings. + // + // In order to publish to such an endpoint, specify the endpoint as `filesystem:endpoint-name` + // with `endpoint-name` as the name given in the aptly configuration file. For example: + // + // `aptly publish snapshot wheezy-main filesystem:test1:wheezy/daily` + // + "FileSystemPublishEndpoints": { + // // Endpoint Name + // "test1": { + // // Directory for publishing + // "rootDir": "/opt/srv/aptly_public", + + // // File Link Method for linking files from the internal pool to the published directory + // // * hardlink + // // * symlink + // // * copy + // "linkMethod": "hardlink", + + // // File Copare Method for comparing existing links from the internal pool to the published directory + // // Only used when "linkMethod" is set to "copy" + // // * md5 (default: compare md5 sum) + // // * size (compare file size) + // "verifyMethod": "md5" + // } + }, + + // S3 Endpoint Support + // + // cloud storage). First, publishing + // endpoints should be described in aptly configuration file. Each endpoint has name + // and associated settings. 
+ // + // In order to publish to S3, specify endpoint as `s3:endpoint-name:` before + // publishing prefix on the command line, e.g.: + // + // `aptly publish snapshot wheezy-main s3:test:` + // + "S3PublishEndpoints": { + // // Endpoint Name + // "test": { + + // // Amazon region for S3 bucket + // "region": "us-east-1", + + // // Bucket name + // "bucket": "test-bucket", + + // // Endpoint (optional) + // // When using S3-compatible cloud storage, specify hostname of service endpoint here, + // // region is ignored if endpoint is set (set region to some human-readable name) + // // (should be left blank for real Amazon S3) + // "endpoint": "", + + // // Prefix (optional) + // // publishing under specified prefix in the bucket, defaults to + // // no prefix (bucket root) + // "prefix": "", + + // // Default ACLs (optional) + // // assign ACL to published files (one of the canned ACLs in Amazon + // // terminology). Useful values: `private` (default), `public-read` (public + // // repository) or `none` (don't set ACL). Public repositories could be consumed by `apt` using + // // HTTP endpoint (Amazon bucket should be configured for "website hosting"), + // // for private repositories special apt S3 transport is required. + // "acl": "private", + + // // Credentials (optional) + // // Amazon credentials to access S3 bucket. If not supplied, + // // environment variables `AWS_ACCESS_KEY_ID` and `AWS_SECRET_ACCESS_KEY` + // // are used. + // "awsAccessKeyID": "", + // "awsSecretAccessKey": "", + + // // Storage Class (optional) + // // Amazon S3 storage class, defaults to `STANDARD`. Other values + // // available: `REDUCED_REDUNDANCY` (lower price, lower redundancy) + // "storageClass": "STANDARD", + + // // Encryption Method (optional) + // // Server-side encryption method, defaults to none. Currently + // // the only available encryption method is `AES256` + // "encryptionMethod": "none", + + // // Plus Workaround (optional) + // // Workaround misbehavior in apt and Amazon S3 for files with `+` in filename by + // // creating two copies of package files with `+` in filename: one original + // // and another one with spaces instead of plus signs + // // With `plusWorkaround` enabled, package files with plus sign + // // would be stored twice. aptly might not cleanup files with spaces when published + // // repository is dropped or updated (switched) to new version of repository (snapshot) + // "plusWorkaround": false, + + // // Disable MultiDel (optional) + // // For S3-compatible cloud storages which do not support `MultiDel` S3 API, + // // enable this setting (file deletion would be slower with this setting enabled) + // "disableMultiDel": false, + + // // ForceSig2 (optional) + // // Disable Signature V4 support, useful with non-AWS S3-compatible object stores + // // which do not support SigV4, shouldn't be enabled for AWS + // "forceSigV2": false, + + // // ForceVirtualHostedStyle (optional) + // // Disable path style visit, useful with non-AWS S3-compatible object stores + // // which only support virtual hosted style + // "forceVirtualHostedStyle": false, + + // // Debug (optional) + // // Enables detailed request/response dump for each S3 operation + // "debug": false + // } + }, + + // Swift Endpoint Support + // + // aptly could be configured to publish repository directly to OpenStack Swift. First, + // publishing endpoints should be described in aptly configuration file. Each endpoint + // has name and associated settings. 
+ // + // In order to publish to Swift, specify endpoint as `swift:endpoint-name:` before + // publishing prefix on the command line, e.g.: + // + // `aptly publish snapshot jessie-main swift:test:` + // + "SwiftPublishEndpoints": { + // Endpoint Name + // "test": { + + // // Container Name + // "container": "taylor1", + + // // Prefix (optional) + // // Publish under specified prefix in the container, defaults to no prefix (container root) + // "prefix": "", + + // // Credentials (optional) + // // OpenStack credentials to access Keystone. If not supplied, environment variables `OS_USERNAME` and `OS_PASSWORD` are used + // "osname": "", + // "password": "", + + // // Tenant (optional) + // // OpenStack tenant name and id (in order to use v2 authentication) + // "tenant": "", + // "tenantid": "", + + // // Auth URL (optional) + // // Full url of Keystone server (including port, and version). + // // Example `http://identity.example.com:5000/v2.0` + // "authurl": "" + // } + }, + + // Azure Endpoint Support + // + // aptly can be configured to publish repositories directly to Microsoft Azure Blob + // Storage. First, publishing endpoints should be described in the aptly + // configuration file. Each endpoint has its name and associated settings. + "AzurePublishEndpoints": { + // // Endpoint Name + // "test": { + + // // Container Name + // "container": "container1", + + // // Prefix (optional) + // // Publishing under specified prefix in the container, defaults to no prefix (container root) + // "prefix": "", + + // // Credentials + // // Azure storage account access key to access blob storage + // "accountName": "", + // "accountKey": "", + + // // Endpoint URL + // // See: Azure documentation https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string + // // defaults to "https://.blob.core.windows.net" + // "endpoint": "" + // } }, - "enableSwaggerEndpoint": false + + // Package Pool + // Location for storing downloaded packages + // Type must be one of: + // * local + // * azure + "packagePoolStorage": { + // Local Pool + "type": "local", + // Local Pool Path + // empty path defaults to `rootDir`/pool + "path": "" + + // // Azure Azure Blob Storage Pool + // "type": "azure", + // "container": "pool1", + + // // Prefix (optional) + // // Publishing under specified prefix in the container, defaults to no prefix (container root) + // "prefix": "", + + // // Credentials + // // Azure storage account access key to access blob storage + // "accountName": "", + // "accountKey": "", + + // // Endpoint URL + // // See: Azure documentation https://docs.microsoft.com/en-us/azure/storage/common/storage-configure-connection-string + // // defaults to "https://.blob.core.windows.net" + // "endpoint": "" + } + +// End of config } diff --git a/system/t13_etcd/install-etcd.sh b/system/t13_etcd/install-etcd.sh index 163a3f85d..1521511d9 100755 --- a/system/t13_etcd/install-etcd.sh +++ b/system/t13_etcd/install-etcd.sh @@ -4,9 +4,16 @@ ETCD_VER=v3.5.2 DOWNLOAD_URL=https://storage.googleapis.com/etcd -if [ ! -e /tmp/etcd-${ETCD_VER}-linux-amd64.tar.gz ]; then - curl -L ${DOWNLOAD_URL}/${ETCD_VER}/etcd-${ETCD_VER}-linux-amd64.tar.gz -o /tmp/etcd-${ETCD_VER}-linux-amd64.tar.gz +ARCH="" +case $(uname -m) in + x86_64) ARCH="amd64" ;; + aarch64) ARCH="arm64" ;; + *) echo "unsupported cpu arch"; exit 1 ;; +esac + +if [ ! 
-e /tmp/etcd-${ETCD_VER}-linux-$ARCH.tar.gz ]; then + curl -L ${DOWNLOAD_URL}/${ETCD_VER}/etcd-${ETCD_VER}-linux-$ARCH.tar.gz -o /tmp/etcd-${ETCD_VER}-linux-$ARCH.tar.gz fi -mkdir /srv/etcd -tar xf /tmp/etcd-${ETCD_VER}-linux-amd64.tar.gz -C /srv/etcd --strip-components=1 +mkdir /tmp/aptly-etcd +tar xf /tmp/etcd-${ETCD_VER}-linux-$ARCH.tar.gz -C /tmp/aptly-etcd --strip-components=1 diff --git a/system/t13_etcd/start-etcd.sh b/system/t13_etcd/start-etcd.sh index ae6c0a45e..05908defb 100755 --- a/system/t13_etcd/start-etcd.sh +++ b/system/t13_etcd/start-etcd.sh @@ -16,7 +16,7 @@ finish() } trap finish INT -/srv/etcd/etcd --max-request-bytes '1073741824' --data-dir /tmp/etcd-data & +/tmp/aptly-etcd/etcd --max-request-bytes '1073741824' --data-dir /tmp/aptly-etcd-data & echo $! > /tmp/etcd.pid etcdpid=`cat /tmp/etcd.pid` wait $etcdpid diff --git a/utils/config.go b/utils/config.go index b25ba4ea5..4665c8307 100644 --- a/utils/config.go +++ b/utils/config.go @@ -6,6 +6,8 @@ import ( "os" "path/filepath" "strings" + + "github.com/DisposaBoy/JsonConfigReader" ) // ConfigStructure is structure of main configuration @@ -192,7 +194,7 @@ func LoadConfig(filename string, config *ConfigStructure) error { } defer f.Close() - dec := json.NewDecoder(f) + dec := json.NewDecoder(JsonConfigReader.New(f)) return dec.Decode(&config) } @@ -213,6 +215,18 @@ func SaveConfig(filename string, config *ConfigStructure) error { return err } +// SaveConfigRaw write configuration to file +func SaveConfigRaw(filename string, conf []byte) error { + f, err := os.Create(filename) + if err != nil { + return err + } + defer f.Close() + + _, err = f.Write(conf) + return err +} + // GetRootDir returns the RootDir with expanded ~ as home directory func (conf *ConfigStructure) GetRootDir() string { return strings.Replace(conf.RootDir, "~", os.Getenv("HOME"), 1)
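To illustrate the `LoadConfig` change above: wrapping the file in `JsonConfigReader.New` lets the standard JSON decoder consume the commented config format introduced in this diff. A minimal standalone sketch; the struct is a stand-in for illustration, not aptly's `ConfigStructure`:

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"

	"github.com/DisposaBoy/JsonConfigReader"
)

func main() {
	// A fragment in the commented-JSON style used by aptly.conf.
	conf := `{
	    // storage directory
	    "rootDir": "~/.aptly",
	    // log level
	    "logLevel": "info"
	}`

	var cfg struct {
		RootDir  string `json:"rootDir"`
		LogLevel string `json:"logLevel"`
	}

	// JsonConfigReader strips // comments before the JSON decoder sees the input.
	dec := json.NewDecoder(JsonConfigReader.New(strings.NewReader(conf)))
	if err := dec.Decode(&cfg); err != nil {
		panic(err)
	}
	fmt.Println(cfg.RootDir, cfg.LogLevel)
}
```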