diff --git a/.circleci/config.yml b/.circleci/config.yml index 948ef8c..45deaed 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -30,7 +30,10 @@ jobs: MONO_TLS_CA_CERT: "configs/pki/ca.crt" MONO_X_MYSQL_ADDR_HOST: "localhost" MONO_X_NATS_ADDR_URLS: "nats://localhost:4222" + MONO_X_POSTGRES_ADDR_HOST: "localhost" MONO_X_STAN_CLUSTER_ID: "ci" + MONO__AUTH_POSTGRES_AUTH_LOGIN: "auth" + MONO__AUTH_POSTGRES_AUTH_PASS: "authpass" MONO__AUTH_SECRET: "s3cr3t" MONO__AUTH_TLS_CERT: "configs/pki/issued/ms-auth.crt" MONO__AUTH_TLS_CERT_INT: "configs/pki/issued/ms-auth-int.crt" @@ -38,9 +41,78 @@ jobs: MONO__AUTH_TLS_KEY_INT: "configs/pki/private/ms-auth-int.key" MONO__EXAMPLE_MYSQL_AUTH_LOGIN: "root" MONO__EXAMPLE_MYSQL_AUTH_PASS: "" + PGHOST: "localhost" + PGUSER: "postgres" + PGPASSWORD: "postgres" - image: "mysql:5.7" environment: MYSQL_ALLOW_EMPTY_PASSWORD: "yes" + - image: "postgres:11.10" + environment: + POSTGRES_PASSWORD: "postgres" + # configs/pki/issued/postgres.crt + CRT: | + -----BEGIN CERTIFICATE----- + MIIDsjCCApqgAwIBAgIRAMEzbE/A4QDHc5a7EH0yT1YwDQYJKoZIhvcNAQELBQAw + OTE3MDUGA1UEAwwuRGV2IENBIGdpdGh1Yi5jb20vcG93ZXJtYW4vZ28tbW9ub2xp + dGgtZXhhbXBsZTAeFw0yMDExMTMxNTM4MTNaFw0zMDExMTExNTM4MTNaMBMxETAP + BgNVBAMMCHBvc3RncmVzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA + xpyORrle7GqBrlpIhNMMRIBLpNb3tM5shbH5t5FdIPp9nAe1HSL11x1UWUSVVl6l + BSVknLo8Z6kY+keQC+ZLdIVOFNXdKGEm6f4HHjywvMtzX+qzzNwgj+5UPUL+xy5I + qgMiBJFsPeqdqjNq5UIP7LuXbPy4EPY9lIoOJSdF3Wtgm9maBXL4yqz54i+tb+79 + h2SQtRbY3NsWGc6RZoEpPlhODgSUfIQxdBImZgC85j7xmfSmP6ID5hGnXce2EO+3 + Hk8Xx6Uxg4+W+JfdW6e/GHeEmcIWMhX/Fww8kSQWNDhbOPAcIhmtOI8wK7IoPvvJ + NgKfN6jfSMnRt+BM1aNuHQIDAQABo4HaMIHXMAkGA1UdEwQCMAAwHQYDVR0OBBYE + FKTXmcEADRpJSakWqubvm5pINObtMHQGA1UdIwRtMGuAFOz7gh3cy6P5+gKYENe2 + s5rvZ0zvoT2kOzA5MTcwNQYDVQQDDC5EZXYgQ0EgZ2l0aHViLmNvbS9wb3dlcm1h + bi9nby1tb25vbGl0aC1leGFtcGxlghQiwcZHKfcmcZK6HimiKZD01IGu0zATBgNV + HSUEDDAKBggrBgEFBQcDATALBgNVHQ8EBAMCBaAwEwYDVR0RBAwwCoIIcG9zdGdy + ZXMwDQYJKoZIhvcNAQELBQADggEBABeACn6stlZqbtaiveV2gCrw+Lmp319ndYq1 + V30/Nq9xk3/wOERkHIf/VLO3OCSeQS2gF3RKEt0qaaIq66G2fs33hMCf2LYqN/ZX + HfwSuqcrb49mQYD+bLCO8W7id6353yhu2Kfml+mnwcFob/EXfxBAMt5SIrJXvOf7 + V8j1gN4+3PEOnJhKSRm3th4vrs/LKZO/8KamVZUwO3TG0v7AM4Qhydm0V8DDQlWp + PTJRcAMjJaCUA4aWA4FyBkR8BdKaolYO0lxBouqtcucqYvChfM2qz+irJhlVYMT+ + LuA2cFt802y28PXbeGpxCACmsxwriEy2D/hBfoPiScCifWFywPM= + -----END CERTIFICATE----- + # configs/pki/private/postgres.key + KEY: | + -----BEGIN PRIVATE KEY----- + MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDGnI5GuV7saoGu + WkiE0wxEgEuk1ve0zmyFsfm3kV0g+n2cB7UdIvXXHVRZRJVWXqUFJWScujxnqRj6 + R5AL5kt0hU4U1d0oYSbp/gcePLC8y3Nf6rPM3CCP7lQ9Qv7HLkiqAyIEkWw96p2q + M2rlQg/su5ds/LgQ9j2Uig4lJ0Xda2Cb2ZoFcvjKrPniL61v7v2HZJC1Ftjc2xYZ + zpFmgSk+WE4OBJR8hDF0EiZmALzmPvGZ9KY/ogPmEaddx7YQ77ceTxfHpTGDj5b4 + l91bp78Yd4SZwhYyFf8XDDyRJBY0OFs48BwiGa04jzArsig++8k2Ap83qN9IydG3 + 4EzVo24dAgMBAAECggEBAKp1PSr3978iXfB87haJZ9L25XUxRmF4dO195VumN+6Q + 7fNXLhs2oRYhxv3ifeUlz62N/T8CG8u/9n7/omKEIah8rJn3Pxtj3lkfO464+drf + JkNhFTWyi4PcQZJeiHn8gELh83VGNkchsHWeMZVX0IEWM41HjNaJ8ConHqLGhRDV + jmSZQ07y0R35ximoy99y7ItZsNnXBJAS+sDVfmRc8Fz+FK5x8z2M+Qx0c+68vpaL + PWTfHwozp+YaIwUZHR/GV8DgcaYofq9diRI4VRMOoITVFKzPlRNew8ehtrRUv09z + gFO2XYeSRr0cJiVkOfU0VaGmgCZ38RGMOBeyzISz40UCgYEA81PUt3TUKLYUms0D + bEfk+MIKOm22JgOxfQzY0XgQ6+UlM1pTldzFltmqTzjSyeCKtoCZWq77JDD0aXbe + MvHTxI4NvIzAmb1VlgwkxdnzZEADLaNzEoGS2qRWWy0taFsCi2d3pi5kkGtmSG6Z + IGPb4/9Ek+3dGRoY3pjHVcDGWWcCgYEA0PSMf2F/535Z2CM8muhxqX8u/4S8MVIm + DsSHUbdqZZ5/G91bLPyCX4UjsPw2g3L28LmRBQ79Xgk0ctefoQm/Xt3T4oMCLD6i + 
3WRvYZGuXgGyMwsZGqkP+7wDKAXSQsNo+qeUz15AQU0vccGJx6AS8WXbfNqR0JR7 + Hi/Pgoc0ldsCgYBspf7yV7Ev6HV01Zv0qnl79bB5fGl7ueRzkNvzVChCHJQqxOdT + oDt96v0X2KiOSbvGV8+h5UXwWAeaW+BFYSOMvV021qZdP2RK1mtJCNOb7NU+eKA2 + IDNBe2Wv2pFO6mxNcMrFIiv3LG2EZBXtl4kk2eN2Jhy2S6fVJOTEzooCewKBgC14 + 6pJV73W7Q2YL1QaJCyMtsFae1i2NiQaxshkQZzESGus/crYJhX2wBaJgYKSegirE + 11BYgL7UFfvf/LcY3fKRJ6klhXWHc/3l+28qOt3uVI9THPV2USohMjalfapVyCS0 + D2hJRIZxe5+v1IICzdyGpAs5rX3MsjZRhUEMl179AoGBAIyrf0Dh3OQj0/ZA83oc + Y+qRcjG2fDtVxtuJbBrxP7oOT4vbhWl6AYztC++IapT4qPcqkduTYaMSRvSB+FaA + nUq56U6esPDYi3khqh/89xre5sYhghm7V6EBZoyHBfkTZWH6dSF2vy/W17dY94hh + VlUa0UYwNAl97XKKtm+boudR + -----END PRIVATE KEY----- + entrypoint: + - bash + - -c + - | + set -x -e -o pipefail + echo "$CRT" | install -m 0440 -o root -g postgres /dev/stdin /server.crt + echo "$KEY" | install -m 0440 -o root -g postgres /dev/stdin /server.key + exec docker-entrypoint.sh postgres -c ssl=on \ + --ssl_cert_file=/server.crt --ssl_key_file=/server.key - image: "nats:2.1.4" - image: "nats-streaming:0.17.0" command: @@ -74,6 +146,8 @@ jobs: curl -sSfL https://github.com/hadolint/hadolint/releases/download/v${HADOLINT_VER}/hadolint-$(uname)-x86_64 | install /dev/stdin $(go env GOPATH)/bin/hadolint shellcheck --version | tee /dev/stderr | grep -wq $SHELLCHECK_VER || curl -sSfL https://github.com/koalaman/shellcheck/releases/download/v${SHELLCHECK_VER}/shellcheck-v${SHELLCHECK_VER}.$(uname).x86_64.tar.xz | tar xJf - -C $(go env GOPATH)/bin --strip-components=1 shellcheck-v${SHELLCHECK_VER}/shellcheck + sudo apt update + sudo apt install -y postgresql-client - run: name: Ensure API spec match auto-generated code command: | diff --git a/.dockerignore b/.dockerignore index 21fe194..356a1bd 100644 --- a/.dockerignore +++ b/.dockerignore @@ -5,3 +5,4 @@ * !bin !ms/example/internal/migrations/*.sql +!ms/auth/internal/migrations/*.sql diff --git a/.github/workflows/CI&CD.yml b/.github/workflows/CI&CD.yml index 2d686e9..cbd3585 100644 --- a/.github/workflows/CI&CD.yml +++ b/.github/workflows/CI&CD.yml @@ -33,7 +33,10 @@ jobs: MONO_TLS_CA_CERT: 'configs/pki/ca.crt' MONO_X_MYSQL_ADDR_HOST: 'localhost' MONO_X_NATS_ADDR_URLS: 'nats://localhost:4222' + MONO_X_POSTGRES_ADDR_HOST: 'localhost' MONO_X_STAN_CLUSTER_ID: 'ci' + MONO__AUTH_POSTGRES_AUTH_LOGIN: 'auth' + MONO__AUTH_POSTGRES_AUTH_PASS: 'authpass' MONO__AUTH_SECRET: 's3cr3t' MONO__AUTH_TLS_CERT: 'configs/pki/issued/ms-auth.crt' MONO__AUTH_TLS_CERT_INT: 'configs/pki/issued/ms-auth-int.crt' @@ -41,6 +44,9 @@ jobs: MONO__AUTH_TLS_KEY_INT: 'configs/pki/private/ms-auth-int.key' MONO__EXAMPLE_MYSQL_AUTH_LOGIN: 'root' MONO__EXAMPLE_MYSQL_AUTH_PASS: '' + PGHOST: 'localhost' + PGUSER: 'postgres' + PGPASSWORD: 'postgres' steps: - name: Run STAN service run: | @@ -66,6 +72,20 @@ jobs: - uses: actions/checkout@v2 + - name: Run PostgreSQL service + run: | + docker run -d --net=host \ + -e POSTGRES_PASSWORD=postgres \ + -v $PWD/configs/pki/issued/postgres.crt:/dev.crt \ + -v $PWD/configs/pki/private/postgres.key:/dev.key \ + --entrypoint=bash postgres:11.10 -c ' + set -x -e -o pipefail + install -m 0440 -o root -g postgres /dev.crt /server.crt + install -m 0440 -o root -g postgres /dev.key /server.key + exec docker-entrypoint.sh postgres -c ssl=on \ + --ssl_cert_file=/server.crt --ssl_key_file=/server.key + ' + - name: Fetch master branch (to compare .proto files) run: | if ! 
git show-branch master >/dev/null 2>&1; then @@ -97,6 +117,8 @@ jobs: curl -sSfL https://github.com/hadolint/hadolint/releases/download/v${HADOLINT_VER}/hadolint-$(uname)-x86_64 | install /dev/stdin $(go env GOPATH)/bin/hadolint shellcheck --version | tee /dev/stderr | grep -wq $SHELLCHECK_VER || curl -sSfL https://github.com/koalaman/shellcheck/releases/download/v${SHELLCHECK_VER}/shellcheck-v${SHELLCHECK_VER}.$(uname).x86_64.tar.xz | tar xJf - -C $(go env GOPATH)/bin --strip-components=1 shellcheck-v${SHELLCHECK_VER}/shellcheck + sudo apt update + sudo apt install -y postgresql-client - name: Ensure API spec match auto-generated code run: | diff --git a/README.md b/README.md index 0ae1a01..37a2ac5 100644 --- a/README.md +++ b/README.md @@ -97,6 +97,7 @@ for more details. - [X] Embedded [Swagger UI](https://swagger.io/tools/swagger-ui/). - [X] Example DAL (data access layer): - [X] MySQL 5.7 (strictest SQL mode). + - [X] PostgreSQL 11 (secure schema usage pattern). - [X] Example tests, both unit and integration. - [X] Production logging using [structlog](https://github.com/powerman/structlog). - [X] Production metrics using Prometheus. @@ -145,6 +146,7 @@ $ /path/to/easyrsa init-pki $ echo Dev CA $(go list -m) | /path/to/easyrsa build-ca nopass $ /path/to/easyrsa --days=3650 "--subject-alt-name=DNS:localhost" build-server-full ms-auth nopass $ /path/to/easyrsa --days=3650 "--subject-alt-name=IP:127.0.0.1" build-server-full ms-auth-int nopass +$ /path/to/easyrsa --days=3650 "--subject-alt-name=DNS:postgres" build-server-full postgres nopass ``` ### Usage @@ -278,7 +280,6 @@ $ ./bin/mono serve - [ ] Add security-related headers for HTTPS endpoints (HSTS, CSP, etc.), also move default host from localhost to avoid poisoning it with HSTS. -- [ ] Add DAL using PostgreSQL 11. - [ ] Embed https://github.com/powerman/go-service-example as an example of embedding microservices from another repo. - [ ] Add example of `internal/svc/*` adapters calling some other services. 
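Note (illustrative, not part of the change): the pieces added above — a TLS server certificate issued for DNS:postgres, the MONO_X_POSTGRES_ADDR_* and MONO__AUTH_POSTGRES_AUTH_* settings, and the lib/pq driver added to go.mod further below — can be exercised with a minimal client such as the sketch that follows. It uses plain lib/pq connection-string parameters and sslmode=verify-ca, since the dev certificate's SAN is DNS:postgres and would not pass verify-full when connecting to localhost.

    package main

    import (
    	"database/sql"
    	"fmt"
    	"log"
    	"os"

    	_ "github.com/lib/pq" // PostgreSQL driver added to go.mod by this change.
    )

    func main() {
    	// Values come from the environment introduced above (env.sh.dist / CI config).
    	dsn := fmt.Sprintf(
    		"host=%s port=5432 dbname=postgres user=%s password=%s sslmode=verify-ca sslrootcert=%s",
    		os.Getenv("MONO_X_POSTGRES_ADDR_HOST"),
    		os.Getenv("MONO__AUTH_POSTGRES_AUTH_LOGIN"),
    		os.Getenv("MONO__AUTH_POSTGRES_AUTH_PASS"),
    		os.Getenv("MONO_TLS_CA_CERT"),
    	)
    	db, err := sql.Open("postgres", dsn)
    	if err != nil {
    		log.Fatal(err)
    	}
    	defer db.Close()
    	if err := db.Ping(); err != nil { // Forces the TLS handshake and password auth.
    		log.Fatal(err)
    	}
    	log.Print("connected to PostgreSQL over TLS")
    }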
diff --git a/configs/pki/certs_by_serial/C1336C4FC0E100C77396BB107D324F56.pem b/configs/pki/certs_by_serial/C1336C4FC0E100C77396BB107D324F56.pem new file mode 100644 index 0000000..abe8cc8 --- /dev/null +++ b/configs/pki/certs_by_serial/C1336C4FC0E100C77396BB107D324F56.pem @@ -0,0 +1,88 @@ +Certificate: + Data: + Version: 3 (0x2) + Serial Number: + c1:33:6c:4f:c0:e1:00:c7:73:96:bb:10:7d:32:4f:56 + Signature Algorithm: sha256WithRSAEncryption + Issuer: CN=Dev CA github.com/powerman/go-monolith-example + Validity + Not Before: Nov 13 15:38:13 2020 GMT + Not After : Nov 11 15:38:13 2030 GMT + Subject: CN=postgres + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + RSA Public-Key: (2048 bit) + Modulus: + 00:c6:9c:8e:46:b9:5e:ec:6a:81:ae:5a:48:84:d3: + 0c:44:80:4b:a4:d6:f7:b4:ce:6c:85:b1:f9:b7:91: + 5d:20:fa:7d:9c:07:b5:1d:22:f5:d7:1d:54:59:44: + 95:56:5e:a5:05:25:64:9c:ba:3c:67:a9:18:fa:47: + 90:0b:e6:4b:74:85:4e:14:d5:dd:28:61:26:e9:fe: + 07:1e:3c:b0:bc:cb:73:5f:ea:b3:cc:dc:20:8f:ee: + 54:3d:42:fe:c7:2e:48:aa:03:22:04:91:6c:3d:ea: + 9d:aa:33:6a:e5:42:0f:ec:bb:97:6c:fc:b8:10:f6: + 3d:94:8a:0e:25:27:45:dd:6b:60:9b:d9:9a:05:72: + f8:ca:ac:f9:e2:2f:ad:6f:ee:fd:87:64:90:b5:16: + d8:dc:db:16:19:ce:91:66:81:29:3e:58:4e:0e:04: + 94:7c:84:31:74:12:26:66:00:bc:e6:3e:f1:99:f4: + a6:3f:a2:03:e6:11:a7:5d:c7:b6:10:ef:b7:1e:4f: + 17:c7:a5:31:83:8f:96:f8:97:dd:5b:a7:bf:18:77: + 84:99:c2:16:32:15:ff:17:0c:3c:91:24:16:34:38: + 5b:38:f0:1c:22:19:ad:38:8f:30:2b:b2:28:3e:fb: + c9:36:02:9f:37:a8:df:48:c9:d1:b7:e0:4c:d5:a3: + 6e:1d + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Basic Constraints: + CA:FALSE + X509v3 Subject Key Identifier: + A4:D7:99:C1:00:0D:1A:49:49:A9:16:AA:E6:EF:9B:9A:48:34:E6:ED + X509v3 Authority Key Identifier: + keyid:EC:FB:82:1D:DC:CB:A3:F9:FA:02:98:10:D7:B6:B3:9A:EF:67:4C:EF + DirName:/CN=Dev CA github.com/powerman/go-monolith-example + serial:22:C1:C6:47:29:F7:26:71:92:BA:1E:29:A2:29:90:F4:D4:81:AE:D3 + + X509v3 Extended Key Usage: + TLS Web Server Authentication + X509v3 Key Usage: + Digital Signature, Key Encipherment + X509v3 Subject Alternative Name: + DNS:postgres + Signature Algorithm: sha256WithRSAEncryption + 17:80:0a:7e:ac:b6:56:6a:6e:d6:a2:bd:e5:76:80:2a:f0:f8: + b9:a9:df:5f:67:75:8a:b5:57:7d:3f:36:af:71:93:7f:f0:38: + 44:64:1c:87:ff:54:b3:b7:38:24:9e:41:2d:a0:17:74:4a:12: + dd:2a:69:a2:2a:eb:a1:b6:7e:cd:f7:84:c0:9f:d8:b6:2a:37: + f6:57:1d:fc:12:ba:a7:2b:6f:8f:66:41:80:fe:6c:b0:8e:f1: + 6e:e2:77:ad:f9:df:28:6e:d8:a7:e6:97:e9:a7:c1:c1:68:6f: + f1:17:7f:10:40:32:de:52:22:b2:57:bc:e7:fb:57:c8:f5:80: + de:3e:dc:f1:0e:9c:98:4a:49:19:b7:b6:1e:2f:ae:cf:cb:29: + 93:bf:f0:a6:a6:55:95:30:3b:74:c6:d2:fe:c0:33:84:21:c9: + d9:b4:57:c0:c3:42:55:a9:3d:32:51:70:03:23:25:a0:94:03: + 86:96:03:81:72:06:44:7c:05:d2:9a:a2:56:0e:d2:5c:41:a2: + ea:ad:72:e7:2a:62:f0:a1:7c:cd:aa:cf:e8:ab:26:19:55:60: + c4:fe:2e:e0:36:70:5b:7c:d3:6c:b6:f0:f5:db:78:6a:71:08: + 00:a6:b3:1c:2b:88:4c:b6:0f:f8:41:7e:83:e2:49:c0:a2:7d: + 61:72:c0:f3 +-----BEGIN CERTIFICATE----- +MIIDsjCCApqgAwIBAgIRAMEzbE/A4QDHc5a7EH0yT1YwDQYJKoZIhvcNAQELBQAw +OTE3MDUGA1UEAwwuRGV2IENBIGdpdGh1Yi5jb20vcG93ZXJtYW4vZ28tbW9ub2xp +dGgtZXhhbXBsZTAeFw0yMDExMTMxNTM4MTNaFw0zMDExMTExNTM4MTNaMBMxETAP +BgNVBAMMCHBvc3RncmVzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +xpyORrle7GqBrlpIhNMMRIBLpNb3tM5shbH5t5FdIPp9nAe1HSL11x1UWUSVVl6l +BSVknLo8Z6kY+keQC+ZLdIVOFNXdKGEm6f4HHjywvMtzX+qzzNwgj+5UPUL+xy5I +qgMiBJFsPeqdqjNq5UIP7LuXbPy4EPY9lIoOJSdF3Wtgm9maBXL4yqz54i+tb+79 +h2SQtRbY3NsWGc6RZoEpPlhODgSUfIQxdBImZgC85j7xmfSmP6ID5hGnXce2EO+3 
+Hk8Xx6Uxg4+W+JfdW6e/GHeEmcIWMhX/Fww8kSQWNDhbOPAcIhmtOI8wK7IoPvvJ +NgKfN6jfSMnRt+BM1aNuHQIDAQABo4HaMIHXMAkGA1UdEwQCMAAwHQYDVR0OBBYE +FKTXmcEADRpJSakWqubvm5pINObtMHQGA1UdIwRtMGuAFOz7gh3cy6P5+gKYENe2 +s5rvZ0zvoT2kOzA5MTcwNQYDVQQDDC5EZXYgQ0EgZ2l0aHViLmNvbS9wb3dlcm1h +bi9nby1tb25vbGl0aC1leGFtcGxlghQiwcZHKfcmcZK6HimiKZD01IGu0zATBgNV +HSUEDDAKBggrBgEFBQcDATALBgNVHQ8EBAMCBaAwEwYDVR0RBAwwCoIIcG9zdGdy +ZXMwDQYJKoZIhvcNAQELBQADggEBABeACn6stlZqbtaiveV2gCrw+Lmp319ndYq1 +V30/Nq9xk3/wOERkHIf/VLO3OCSeQS2gF3RKEt0qaaIq66G2fs33hMCf2LYqN/ZX +HfwSuqcrb49mQYD+bLCO8W7id6353yhu2Kfml+mnwcFob/EXfxBAMt5SIrJXvOf7 +V8j1gN4+3PEOnJhKSRm3th4vrs/LKZO/8KamVZUwO3TG0v7AM4Qhydm0V8DDQlWp +PTJRcAMjJaCUA4aWA4FyBkR8BdKaolYO0lxBouqtcucqYvChfM2qz+irJhlVYMT+ +LuA2cFt802y28PXbeGpxCACmsxwriEy2D/hBfoPiScCifWFywPM= +-----END CERTIFICATE----- diff --git a/configs/pki/index.txt b/configs/pki/index.txt index 9c81d9b..ebe81c9 100644 --- a/configs/pki/index.txt +++ b/configs/pki/index.txt @@ -1,2 +1,3 @@ V 301104101400Z F4E16ED052D9E165912065596B3E9F89 unknown /CN=ms-auth V 301104101415Z 03403AF3482DAEBB5519F1F229B6B180 unknown /CN=ms-auth-int +V 301111153813Z C1336C4FC0E100C77396BB107D324F56 unknown /CN=postgres diff --git a/configs/pki/index.txt.old b/configs/pki/index.txt.old index 97938a2..9c81d9b 100644 --- a/configs/pki/index.txt.old +++ b/configs/pki/index.txt.old @@ -1 +1,2 @@ V 301104101400Z F4E16ED052D9E165912065596B3E9F89 unknown /CN=ms-auth +V 301104101415Z 03403AF3482DAEBB5519F1F229B6B180 unknown /CN=ms-auth-int diff --git a/configs/pki/issued/postgres.crt b/configs/pki/issued/postgres.crt new file mode 100644 index 0000000..abe8cc8 --- /dev/null +++ b/configs/pki/issued/postgres.crt @@ -0,0 +1,88 @@ +Certificate: + Data: + Version: 3 (0x2) + Serial Number: + c1:33:6c:4f:c0:e1:00:c7:73:96:bb:10:7d:32:4f:56 + Signature Algorithm: sha256WithRSAEncryption + Issuer: CN=Dev CA github.com/powerman/go-monolith-example + Validity + Not Before: Nov 13 15:38:13 2020 GMT + Not After : Nov 11 15:38:13 2030 GMT + Subject: CN=postgres + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + RSA Public-Key: (2048 bit) + Modulus: + 00:c6:9c:8e:46:b9:5e:ec:6a:81:ae:5a:48:84:d3: + 0c:44:80:4b:a4:d6:f7:b4:ce:6c:85:b1:f9:b7:91: + 5d:20:fa:7d:9c:07:b5:1d:22:f5:d7:1d:54:59:44: + 95:56:5e:a5:05:25:64:9c:ba:3c:67:a9:18:fa:47: + 90:0b:e6:4b:74:85:4e:14:d5:dd:28:61:26:e9:fe: + 07:1e:3c:b0:bc:cb:73:5f:ea:b3:cc:dc:20:8f:ee: + 54:3d:42:fe:c7:2e:48:aa:03:22:04:91:6c:3d:ea: + 9d:aa:33:6a:e5:42:0f:ec:bb:97:6c:fc:b8:10:f6: + 3d:94:8a:0e:25:27:45:dd:6b:60:9b:d9:9a:05:72: + f8:ca:ac:f9:e2:2f:ad:6f:ee:fd:87:64:90:b5:16: + d8:dc:db:16:19:ce:91:66:81:29:3e:58:4e:0e:04: + 94:7c:84:31:74:12:26:66:00:bc:e6:3e:f1:99:f4: + a6:3f:a2:03:e6:11:a7:5d:c7:b6:10:ef:b7:1e:4f: + 17:c7:a5:31:83:8f:96:f8:97:dd:5b:a7:bf:18:77: + 84:99:c2:16:32:15:ff:17:0c:3c:91:24:16:34:38: + 5b:38:f0:1c:22:19:ad:38:8f:30:2b:b2:28:3e:fb: + c9:36:02:9f:37:a8:df:48:c9:d1:b7:e0:4c:d5:a3: + 6e:1d + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Basic Constraints: + CA:FALSE + X509v3 Subject Key Identifier: + A4:D7:99:C1:00:0D:1A:49:49:A9:16:AA:E6:EF:9B:9A:48:34:E6:ED + X509v3 Authority Key Identifier: + keyid:EC:FB:82:1D:DC:CB:A3:F9:FA:02:98:10:D7:B6:B3:9A:EF:67:4C:EF + DirName:/CN=Dev CA github.com/powerman/go-monolith-example + serial:22:C1:C6:47:29:F7:26:71:92:BA:1E:29:A2:29:90:F4:D4:81:AE:D3 + + X509v3 Extended Key Usage: + TLS Web Server Authentication + X509v3 Key Usage: + Digital Signature, Key Encipherment + X509v3 Subject Alternative Name: + DNS:postgres + Signature 
Algorithm: sha256WithRSAEncryption + 17:80:0a:7e:ac:b6:56:6a:6e:d6:a2:bd:e5:76:80:2a:f0:f8: + b9:a9:df:5f:67:75:8a:b5:57:7d:3f:36:af:71:93:7f:f0:38: + 44:64:1c:87:ff:54:b3:b7:38:24:9e:41:2d:a0:17:74:4a:12: + dd:2a:69:a2:2a:eb:a1:b6:7e:cd:f7:84:c0:9f:d8:b6:2a:37: + f6:57:1d:fc:12:ba:a7:2b:6f:8f:66:41:80:fe:6c:b0:8e:f1: + 6e:e2:77:ad:f9:df:28:6e:d8:a7:e6:97:e9:a7:c1:c1:68:6f: + f1:17:7f:10:40:32:de:52:22:b2:57:bc:e7:fb:57:c8:f5:80: + de:3e:dc:f1:0e:9c:98:4a:49:19:b7:b6:1e:2f:ae:cf:cb:29: + 93:bf:f0:a6:a6:55:95:30:3b:74:c6:d2:fe:c0:33:84:21:c9: + d9:b4:57:c0:c3:42:55:a9:3d:32:51:70:03:23:25:a0:94:03: + 86:96:03:81:72:06:44:7c:05:d2:9a:a2:56:0e:d2:5c:41:a2: + ea:ad:72:e7:2a:62:f0:a1:7c:cd:aa:cf:e8:ab:26:19:55:60: + c4:fe:2e:e0:36:70:5b:7c:d3:6c:b6:f0:f5:db:78:6a:71:08: + 00:a6:b3:1c:2b:88:4c:b6:0f:f8:41:7e:83:e2:49:c0:a2:7d: + 61:72:c0:f3 +-----BEGIN CERTIFICATE----- +MIIDsjCCApqgAwIBAgIRAMEzbE/A4QDHc5a7EH0yT1YwDQYJKoZIhvcNAQELBQAw +OTE3MDUGA1UEAwwuRGV2IENBIGdpdGh1Yi5jb20vcG93ZXJtYW4vZ28tbW9ub2xp +dGgtZXhhbXBsZTAeFw0yMDExMTMxNTM4MTNaFw0zMDExMTExNTM4MTNaMBMxETAP +BgNVBAMMCHBvc3RncmVzMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA +xpyORrle7GqBrlpIhNMMRIBLpNb3tM5shbH5t5FdIPp9nAe1HSL11x1UWUSVVl6l +BSVknLo8Z6kY+keQC+ZLdIVOFNXdKGEm6f4HHjywvMtzX+qzzNwgj+5UPUL+xy5I +qgMiBJFsPeqdqjNq5UIP7LuXbPy4EPY9lIoOJSdF3Wtgm9maBXL4yqz54i+tb+79 +h2SQtRbY3NsWGc6RZoEpPlhODgSUfIQxdBImZgC85j7xmfSmP6ID5hGnXce2EO+3 +Hk8Xx6Uxg4+W+JfdW6e/GHeEmcIWMhX/Fww8kSQWNDhbOPAcIhmtOI8wK7IoPvvJ +NgKfN6jfSMnRt+BM1aNuHQIDAQABo4HaMIHXMAkGA1UdEwQCMAAwHQYDVR0OBBYE +FKTXmcEADRpJSakWqubvm5pINObtMHQGA1UdIwRtMGuAFOz7gh3cy6P5+gKYENe2 +s5rvZ0zvoT2kOzA5MTcwNQYDVQQDDC5EZXYgQ0EgZ2l0aHViLmNvbS9wb3dlcm1h +bi9nby1tb25vbGl0aC1leGFtcGxlghQiwcZHKfcmcZK6HimiKZD01IGu0zATBgNV +HSUEDDAKBggrBgEFBQcDATALBgNVHQ8EBAMCBaAwEwYDVR0RBAwwCoIIcG9zdGdy +ZXMwDQYJKoZIhvcNAQELBQADggEBABeACn6stlZqbtaiveV2gCrw+Lmp319ndYq1 +V30/Nq9xk3/wOERkHIf/VLO3OCSeQS2gF3RKEt0qaaIq66G2fs33hMCf2LYqN/ZX +HfwSuqcrb49mQYD+bLCO8W7id6353yhu2Kfml+mnwcFob/EXfxBAMt5SIrJXvOf7 +V8j1gN4+3PEOnJhKSRm3th4vrs/LKZO/8KamVZUwO3TG0v7AM4Qhydm0V8DDQlWp +PTJRcAMjJaCUA4aWA4FyBkR8BdKaolYO0lxBouqtcucqYvChfM2qz+irJhlVYMT+ +LuA2cFt802y28PXbeGpxCACmsxwriEy2D/hBfoPiScCifWFywPM= +-----END CERTIFICATE----- diff --git a/configs/pki/private/postgres.key b/configs/pki/private/postgres.key new file mode 100644 index 0000000..77e27c6 --- /dev/null +++ b/configs/pki/private/postgres.key @@ -0,0 +1,28 @@ +-----BEGIN PRIVATE KEY----- +MIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQDGnI5GuV7saoGu +WkiE0wxEgEuk1ve0zmyFsfm3kV0g+n2cB7UdIvXXHVRZRJVWXqUFJWScujxnqRj6 +R5AL5kt0hU4U1d0oYSbp/gcePLC8y3Nf6rPM3CCP7lQ9Qv7HLkiqAyIEkWw96p2q +M2rlQg/su5ds/LgQ9j2Uig4lJ0Xda2Cb2ZoFcvjKrPniL61v7v2HZJC1Ftjc2xYZ +zpFmgSk+WE4OBJR8hDF0EiZmALzmPvGZ9KY/ogPmEaddx7YQ77ceTxfHpTGDj5b4 +l91bp78Yd4SZwhYyFf8XDDyRJBY0OFs48BwiGa04jzArsig++8k2Ap83qN9IydG3 +4EzVo24dAgMBAAECggEBAKp1PSr3978iXfB87haJZ9L25XUxRmF4dO195VumN+6Q +7fNXLhs2oRYhxv3ifeUlz62N/T8CG8u/9n7/omKEIah8rJn3Pxtj3lkfO464+drf +JkNhFTWyi4PcQZJeiHn8gELh83VGNkchsHWeMZVX0IEWM41HjNaJ8ConHqLGhRDV +jmSZQ07y0R35ximoy99y7ItZsNnXBJAS+sDVfmRc8Fz+FK5x8z2M+Qx0c+68vpaL +PWTfHwozp+YaIwUZHR/GV8DgcaYofq9diRI4VRMOoITVFKzPlRNew8ehtrRUv09z +gFO2XYeSRr0cJiVkOfU0VaGmgCZ38RGMOBeyzISz40UCgYEA81PUt3TUKLYUms0D +bEfk+MIKOm22JgOxfQzY0XgQ6+UlM1pTldzFltmqTzjSyeCKtoCZWq77JDD0aXbe +MvHTxI4NvIzAmb1VlgwkxdnzZEADLaNzEoGS2qRWWy0taFsCi2d3pi5kkGtmSG6Z +IGPb4/9Ek+3dGRoY3pjHVcDGWWcCgYEA0PSMf2F/535Z2CM8muhxqX8u/4S8MVIm +DsSHUbdqZZ5/G91bLPyCX4UjsPw2g3L28LmRBQ79Xgk0ctefoQm/Xt3T4oMCLD6i 
+3WRvYZGuXgGyMwsZGqkP+7wDKAXSQsNo+qeUz15AQU0vccGJx6AS8WXbfNqR0JR7 +Hi/Pgoc0ldsCgYBspf7yV7Ev6HV01Zv0qnl79bB5fGl7ueRzkNvzVChCHJQqxOdT +oDt96v0X2KiOSbvGV8+h5UXwWAeaW+BFYSOMvV021qZdP2RK1mtJCNOb7NU+eKA2 +IDNBe2Wv2pFO6mxNcMrFIiv3LG2EZBXtl4kk2eN2Jhy2S6fVJOTEzooCewKBgC14 +6pJV73W7Q2YL1QaJCyMtsFae1i2NiQaxshkQZzESGus/crYJhX2wBaJgYKSegirE +11BYgL7UFfvf/LcY3fKRJ6klhXWHc/3l+28qOt3uVI9THPV2USohMjalfapVyCS0 +D2hJRIZxe5+v1IICzdyGpAs5rX3MsjZRhUEMl179AoGBAIyrf0Dh3OQj0/ZA83oc +Y+qRcjG2fDtVxtuJbBrxP7oOT4vbhWl6AYztC++IapT4qPcqkduTYaMSRvSB+FaA +nUq56U6esPDYi3khqh/89xre5sYhghm7V6EBZoyHBfkTZWH6dSF2vy/W17dY94hh +VlUa0UYwNAl97XKKtm+boudR +-----END PRIVATE KEY----- diff --git a/configs/pki/reqs/postgres.req b/configs/pki/reqs/postgres.req new file mode 100644 index 0000000..aab50aa --- /dev/null +++ b/configs/pki/reqs/postgres.req @@ -0,0 +1,16 @@ +-----BEGIN CERTIFICATE REQUEST----- +MIICfjCCAWYCAQAwEzERMA8GA1UEAwwIcG9zdGdyZXMwggEiMA0GCSqGSIb3DQEB +AQUAA4IBDwAwggEKAoIBAQDGnI5GuV7saoGuWkiE0wxEgEuk1ve0zmyFsfm3kV0g ++n2cB7UdIvXXHVRZRJVWXqUFJWScujxnqRj6R5AL5kt0hU4U1d0oYSbp/gcePLC8 +y3Nf6rPM3CCP7lQ9Qv7HLkiqAyIEkWw96p2qM2rlQg/su5ds/LgQ9j2Uig4lJ0Xd +a2Cb2ZoFcvjKrPniL61v7v2HZJC1Ftjc2xYZzpFmgSk+WE4OBJR8hDF0EiZmALzm +PvGZ9KY/ogPmEaddx7YQ77ceTxfHpTGDj5b4l91bp78Yd4SZwhYyFf8XDDyRJBY0 +OFs48BwiGa04jzArsig++8k2Ap83qN9IydG34EzVo24dAgMBAAGgJjAkBgkqhkiG +9w0BCQ4xFzAVMBMGA1UdEQQMMAqCCHBvc3RncmVzMA0GCSqGSIb3DQEBCwUAA4IB +AQDByrTWZSDL1lE8xFSFkhLT0/LaiBEFYxNjzcEL+JCzN68mvvgqMiBQbRXilk8n +bWSqppGQAjNsrkXC8COz+Pw0LRGWtoMTbW8DJ56RyMto3pCS6nbSmuOK1ZTys1Jg +sK+BpeyQSwCeRD6FBWuuUnJlUIins3GHQSg7lftwF9aGkNpCRsRGS3nTok+965Ml ++FxHz4GR41oEn+P/Fhu7TLIxL2ytFgei3dkbceWdpYF1ROShfrbps41e8Id/HwZX +vhtzKxGBqYjY7eIJHfvLRaq5J3GZxRaagNaZT0te0K4zSVgs/Q9uS9lOPAHNf6li +5Jo73gNLJBVHcP2TPjHaLIi1 +-----END CERTIFICATE REQUEST----- diff --git a/configs/pki/serial b/configs/pki/serial index 239548e..b6057ea 100644 --- a/configs/pki/serial +++ b/configs/pki/serial @@ -1 +1 @@ -03403AF3482DAEBB5519F1F229B6B181 +C1336C4FC0E100C77396BB107D324F57 diff --git a/configs/pki/serial.old b/configs/pki/serial.old index 2ac4225..b70e097 100644 --- a/configs/pki/serial.old +++ b/configs/pki/serial.old @@ -1 +1 @@ -03403af3482daebb5519f1f229b6b180 +c1336c4fc0e100c77396bb107d324f56 diff --git a/docker-compose.yml b/docker-compose.yml index e6e9624..4139c71 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -2,6 +2,7 @@ version: "3.8" volumes: mysql: + postgres: stan: services: @@ -17,6 +18,54 @@ services: environment: MYSQL_ALLOW_EMPTY_PASSWORD: "yes" + postgres: + image: "postgres:11.10" # We're using 11. + container_name: "mono_postgres" + restart: "always" + shm_size: "256m" + ports: + - "${mono_postgres_addr_port:-0}:5432" + volumes: + - "postgres:/var/lib/postgresql/data" + - "${mono_postgres_tls_cert}:/dev.crt" + - "${mono_postgres_tls_key}:/dev.key" + environment: + POSTGRES_PASSWORD: "postgres" + entrypoint: + - bash + - -c + - | + set -x -e -o pipefail + install -m 0440 -o root -g postgres /dev.crt /server.crt + install -m 0440 -o root -g postgres /dev.key /server.key + exec docker-entrypoint.sh postgres -c ssl=on \ + --ssl_cert_file=/server.crt --ssl_key_file=/server.key + + postgres-setup: + depends_on: + - postgres + image: "postgres:11.10" # We're using 11. 
+ container_name: "mono_postgres-setup" + restart: "no" + volumes: + - "./scripts/postgres-setup:/usr/local/bin/postgres-setup" + environment: + MONO__AUTH_POSTGRES_AUTH_LOGIN: "${MONO__AUTH_POSTGRES_AUTH_LOGIN}" + MONO__AUTH_POSTGRES_AUTH_PASS: "${MONO__AUTH_POSTGRES_AUTH_PASS}" + PGHOST: "postgres" + PGUSER: "postgres" + PGPASSWORD: "postgres" + command: + - bash + - -c + - | + while ! timeout --foreground 0.5 bash -c "echo -n >/dev/tcp/$${PGHOST}/$${PGPORT:-5432}" 2>/dev/null; do + echo waiting for postgres + sleep 0.5 + done + set -x -e -o pipefail + postgres-setup | psql + nats: image: "nats:2.1.4" # This is the latest version at the moment. container_name: mono_nats @@ -61,10 +110,27 @@ services: - "${MONO_AUTH_METRICS_ADDR_PORT:-0}:17005" - "${MONO_EXAMPLE_ADDR_PORT:-0}:17001" - "${MONO_EXAMPLE_METRICS_ADDR_PORT:-0}:17002" + volumes: + - "${MONO_TLS_CA_CERT}:/ca.crt" + - "${MONO__AUTH_TLS_CERT}:/auth.crt" + - "${MONO__AUTH_TLS_CERT_INT}:/auth-int.crt" + - "${MONO__AUTH_TLS_KEY}:/auth.key" + - "${MONO__AUTH_TLS_KEY_INT}:/auth-int.key" environment: + MONO_ADDR_HOST: "${MONO_ADDR_HOST}" + MONO_ADDR_HOST_INT: "${MONO_ADDR_HOST_INT}" + MONO_AUTH_ADDR_HOST_INT: "${MONO_AUTH_ADDR_HOST_INT}" + MONO_TLS_CA_CERT: "/ca.crt" MONO_X_MYSQL_ADDR_HOST: "mysql" MONO_X_NATS_ADDR_URLS: "nats://nats:4222" + MONO_X_POSTGRES_ADDR_HOST: "postgres" MONO_X_STAN_CLUSTER_ID: "local" - MONO__AUTH_SECRET: "s3cr3t" + MONO__AUTH_POSTGRES_AUTH_LOGIN: "${MONO__AUTH_POSTGRES_AUTH_LOGIN}" + MONO__AUTH_POSTGRES_AUTH_PASS: "${MONO__AUTH_POSTGRES_AUTH_PASS}" + MONO__AUTH_SECRET: "${MONO__AUTH_SECRET}" + MONO__AUTH_TLS_CERT: "/auth.crt" + MONO__AUTH_TLS_CERT_INT: "/auth-int.crt" + MONO__AUTH_TLS_KEY: "/auth.key" + MONO__AUTH_TLS_KEY_INT: "/auth-int.key" MONO__EXAMPLE_MYSQL_AUTH_LOGIN: "${MONO__EXAMPLE_MYSQL_AUTH_LOGIN}" MONO__EXAMPLE_MYSQL_AUTH_PASS: "${MONO__EXAMPLE_MYSQL_AUTH_PASS}" diff --git a/env.sh.dist b/env.sh.dist index 4022df3..8b65b99 100644 --- a/env.sh.dist +++ b/env.sh.dist @@ -9,6 +9,9 @@ # provide reusable values for project's upper-case variables defined below. export mono_mysql_addr_port="3306" export mono_nats_addr_port="4222" +export mono_postgres_addr_port="5432" +export mono_postgres_tls_cert="configs/pki/issued/postgres.crt" +export mono_postgres_tls_key="configs/pki/private/postgres.key" # Variables required to run and test project. # Should be kept in sorted order. 
@@ -36,7 +39,11 @@ export MONO_TLS_CA_CERT="configs/pki/ca.crt" export MONO_X_MYSQL_ADDR_HOST="localhost" export MONO_X_MYSQL_ADDR_PORT="${mono_mysql_addr_port}" export MONO_X_NATS_ADDR_URLS="nats://localhost:${mono_nats_addr_port}" +export MONO_X_POSTGRES_ADDR_HOST="localhost" +export MONO_X_POSTGRES_ADDR_PORT="${mono_postgres_addr_port}" export MONO_X_STAN_CLUSTER_ID="local" +export MONO__AUTH_POSTGRES_AUTH_LOGIN="auth" +export MONO__AUTH_POSTGRES_AUTH_PASS="authpass" export MONO__AUTH_SECRET="s3cr3t" export MONO__AUTH_TLS_CERT="configs/pki/issued/ms-auth.crt" export MONO__AUTH_TLS_CERT_INT="configs/pki/issued/ms-auth-int.crt" diff --git a/go.mod b/go.mod index a591eb5..47313be 100644 --- a/go.mod +++ b/go.mod @@ -15,6 +15,7 @@ require ( github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 github.com/grpc-ecosystem/grpc-gateway/v2 v2.0.1 github.com/jmoiron/sqlx v1.2.1-0.20190826204134-d7d95172beb5 + github.com/lib/pq v1.8.0 github.com/mattn/goveralls v0.0.7 github.com/nats-io/nats-streaming-server v0.18.0 // indirect github.com/nats-io/nats.go v1.10.0 @@ -23,12 +24,13 @@ require ( github.com/parnurzeal/gorequest v0.2.16 github.com/powerman/appcfg v0.5.0 github.com/powerman/check v1.3.0 - github.com/powerman/dockerize v0.13.0 + github.com/powerman/dockerize v0.13.1 github.com/powerman/getenv v0.1.0 github.com/powerman/goose/v2 v2.7.0 github.com/powerman/must v0.1.0 github.com/powerman/mysqlx v0.3.3 github.com/powerman/narada4d v1.7.1 + github.com/powerman/pqx v0.7.0 github.com/powerman/rpc-codec v1.2.2 github.com/powerman/sensitive v0.0.4 github.com/powerman/sqlxx v0.2.0 diff --git a/go.sum b/go.sum index ba977d5..7d2fd03 100644 --- a/go.sum +++ b/go.sum @@ -592,8 +592,8 @@ github.com/powerman/check v1.1.0/go.mod h1:nX1qs/UsVgnQrkebazvKUJzMrtOItv4O0+Y79 github.com/powerman/check v1.2.1/go.mod h1:IW+hYd9ihaKn7ri1+NGS8WWwrKdytbU8hgNC1wbcurc= github.com/powerman/check v1.3.0 h1:GsncV4RbbC4nZEzLTVpUIdp1rgZil4N36D/XuPZUA60= github.com/powerman/check v1.3.0/go.mod h1:k/8NCUQwepaKJKctBBKjQo84jvGEvKiumD9pDl87RB0= -github.com/powerman/dockerize v0.13.0 h1:zzFj55fDFnaAZv/BhbQhuu5JA/S/f3STbJn/uanegg4= -github.com/powerman/dockerize v0.13.0/go.mod h1:YkBME3AB6+8VK6ixwAOCUTHACucQG0H/LabRErUzA48= +github.com/powerman/dockerize v0.13.1 h1:FKmOVH6hef9e95VclWCjl2oveMg+I71dEq+pAqMM6uY= +github.com/powerman/dockerize v0.13.1/go.mod h1:YkBME3AB6+8VK6ixwAOCUTHACucQG0H/LabRErUzA48= github.com/powerman/getenv v0.1.0 h1:GTqwBYwtjoxjK/kB+qDyfUiqSz7vZ5431x002ftW13s= github.com/powerman/getenv v0.1.0/go.mod h1:kTSy/ckmNA/gYTbH6+XOUaGWUCeS9k7QzSol+LsOsZM= github.com/powerman/goose v2.7.0-rc4.0.20200329145851-5c15923690fa+incompatible h1:50EFIy0MsWcUXblSAkgRxbJRA67gz+LPSmwLaqgMHhI= @@ -609,7 +609,10 @@ github.com/powerman/mysqlx v0.3.3 h1:eBTeJvrLY+mcacJZekzM3VKCUT8gj/5KKG8f2KYXqzg github.com/powerman/mysqlx v0.3.3/go.mod h1:ZSl17PWU2DgwM2KjN8OrdAq9tYC5p4Xm3lTN6W7j3SA= github.com/powerman/narada4d v1.7.1 h1:G6hkEQxfCBWIsrUtB4MyWq4c84qcS/3XsBQLWUuUyik= github.com/powerman/narada4d v1.7.1/go.mod h1:LUOXFkHgtSwPqtrZx+F/Ab1PqOiL96O3GfYnoKfg6T0= +github.com/powerman/pqx v0.6.1 h1:tlFOKQvVTiWG3FCb1jmGUzxcNzw7Ep1K5KBbSa5If/g= github.com/powerman/pqx v0.6.1/go.mod h1:EJGFN0+vvTWFqwT1bTrnVWrJ8hO7RAW6rp2JVhYwrVE= +github.com/powerman/pqx v0.7.0 h1:2oOW6h3IIx/WGkY9tlfJjhovBQ6zv9bEYXXtW/zUf3U= +github.com/powerman/pqx v0.7.0/go.mod h1:iLUZqSDdr+p1j+Ut6UPb2KHmaMziUHWb3L6pSSIwoHI= github.com/powerman/rpc-codec v1.2.2 h1:BK0JScZivljhwW/vLLhZLtUgqSxc/CD3sHEs8LiwwKw= github.com/powerman/rpc-codec v1.2.2/go.mod 
h1:3Qr/y/+u3CwcSww9tfJMRn/95lB2qUdUeIQe7BYlLDo= github.com/powerman/sensitive v0.0.4 h1:gHwPy0Zq8byP/x7EJHezWM5+7AeQyOq8ENzeHTZfRVE= diff --git a/internal/config/config.go b/internal/config/config.go index b7a1d2e..7ace216 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -33,6 +33,9 @@ type Shared struct { XMySQLAddrHost appcfg.NotEmptyString `env:"X_MYSQL_ADDR_HOST"` XMySQLAddrPort appcfg.Port `env:"X_MYSQL_ADDR_PORT"` XNATSAddrUrls appcfg.NotEmptyString `env:"X_NATS_ADDR_URLS"` + XPostgresAddrHost appcfg.NotEmptyString `env:"X_POSTGRES_ADDR_HOST"` + XPostgresAddrPort appcfg.Port `env:"X_POSTGRES_ADDR_PORT"` + XPostgresDBName appcfg.NotEmptyString `env:"X_POSTGRES_DB_NAME"` XSTANClusterID appcfg.NotEmptyString `env:"X_STAN_CLUSTER_ID"` } @@ -58,6 +61,8 @@ var shared = &Shared{ //nolint:gochecknoglobals // Config is global anyway. ExampleAddrPort: appcfg.MustPort(strconv.Itoa(ExamplePort)), ExampleMetricsAddrPort: appcfg.MustPort(strconv.Itoa(ExampleMetricsPort)), XMySQLAddrPort: appcfg.MustPort("3306"), + XPostgresAddrPort: appcfg.MustPort("5432"), + XPostgresDBName: appcfg.MustNotEmptyString("postgres"), } // Get updates config defaults (from env) and returns shared config. diff --git a/ms/auth/init_test.go b/ms/auth/init_test.go index e1ec40a..b7c249c 100644 --- a/ms/auth/init_test.go +++ b/ms/auth/init_test.go @@ -7,19 +7,24 @@ import ( "github.com/powerman/go-monolith-example/ms/auth/internal/app" "github.com/powerman/go-monolith-example/ms/auth/internal/config" + "github.com/powerman/go-monolith-example/ms/auth/internal/dal" "github.com/powerman/go-monolith-example/ms/auth/internal/srv/grpc" "github.com/powerman/go-monolith-example/pkg/def" ) func TestMain(m *testing.M) { def.Init() - // dal.InitMetrics(reg) // TODO + dal.InitMetrics(reg) app.InitMetrics(reg) grpc.InitMetrics(reg) cfg = config.MustGetServeTest() check.TestMain(m) } +type tLogger check.C + +func (l tLogger) Print(v ...interface{}) { l.Log(v...) } + // Const shared by tests. Recommended naming scheme: . var ( cfg *config.ServeConfig diff --git a/ms/auth/internal/app/app.go b/ms/auth/internal/app/app.go index f5a7b5c..427d023 100644 --- a/ms/auth/internal/app/app.go +++ b/ms/auth/internal/app/app.go @@ -23,10 +23,10 @@ type Ctx = context.Context // Errors. var ( ErrAccessDenied = errors.New("access denied") - ErrNotFound = errors.New("not found") ErrAlreadyExist = errors.New("already exists") - ErrWrongPassword = errors.New("wrong password") + ErrNotFound = errors.New("not found") ErrValidate = errors.New("validate") + ErrWrongPassword = errors.New("wrong password") ) // Appl provides application features (use cases) service. @@ -70,7 +70,7 @@ type Repo interface { GetUserByAccessToken(Ctx, AccessToken) (*User, error) // AddAccessToken creates and returns AccessToken for given user. // Errors: ErrNotFound. - AddAccessToken(Ctx, dom.UserName) (AccessToken, error) + AddAccessToken(Ctx, AccessToken, dom.UserName) error // DelAccessToken deletes given AccessToken. // Errors: none. 
DelAccessToken(Ctx, AccessToken) error diff --git a/ms/auth/internal/app/auth.go b/ms/auth/internal/app/auth.go index 496bc96..7bf7aa3 100644 --- a/ms/auth/internal/app/auth.go +++ b/ms/auth/internal/app/auth.go @@ -38,7 +38,12 @@ func (a *App) LoginByUserID(ctx Ctx, userID string, password sensitive.String) ( if !a.equalPassHash(password, user.PassHash) { return "", ErrWrongPassword } - return a.repo.AddAccessToken(ctx, user.Name) + accessToken := AccessToken(dom.NewID()) + err = a.repo.AddAccessToken(ctx, accessToken, user.Name) + if err != nil { + return "", err + } + return accessToken, nil } func (a *App) LoginByEmail(ctx Ctx, email string, password sensitive.String) (AccessToken, error) { diff --git a/ms/auth/internal/app/auth_test.go b/ms/auth/internal/app/auth_test.go index f351d12..0e47907 100644 --- a/ms/auth/internal/app/auth_test.go +++ b/ms/auth/internal/app/auth_test.go @@ -79,19 +79,18 @@ func TestLoginByUserID(tt *testing.T) { t.Nil(a.Register(ctx, "admin", "secret", uAdmin)) mockRepo.EXPECT().GetUser(gomock.Any(), uAdmin.Name).Return(uAdmin, nil).AnyTimes() mockRepo.EXPECT().GetUser(gomock.Any(), gomock.Any()).Return(nil, app.ErrNotFound) - mockRepo.EXPECT().AddAccessToken(gomock.Any(), uAdmin.Name).Return(app.AccessToken("token"), nil) - mockRepo.EXPECT().AddAccessToken(gomock.Any(), uAdmin.Name).Return(app.AccessToken(""), io.EOF) + mockRepo.EXPECT().AddAccessToken(gomock.Any(), gomock.Any(), uAdmin.Name).Return(nil) + mockRepo.EXPECT().AddAccessToken(gomock.Any(), gomock.Any(), uAdmin.Name).Return(io.EOF) tests := []struct { userID string pass string - want string wantErr error }{ - {"user", "", "", app.ErrNotFound}, - {"admin", "wrong", "", app.ErrWrongPassword}, - {"admin", "secret", "token", nil}, - {"admin", "secret", "", io.EOF}, + {"user", "", app.ErrNotFound}, + {"admin", "wrong", app.ErrWrongPassword}, + {"admin", "secret", nil}, + {"admin", "secret", io.EOF}, } for _, tc := range tests { tc := tc @@ -99,7 +98,11 @@ func TestLoginByUserID(tt *testing.T) { t := check.T(tt) res, err := a.LoginByUserID(ctx, tc.userID, sensitive.String(tc.pass)) t.Err(err, tc.wantErr) - t.Equal(res, app.AccessToken(tc.want)) + if tc.wantErr == nil { + t.Len(res, 26) + } else { + t.Len(res, 0) + } }) } } @@ -116,19 +119,18 @@ func TestLoginByEmail(tt *testing.T) { mockRepo.EXPECT().GetUserByEmail(gomock.Any(), uAdmin.Email).Return(uAdmin, nil).AnyTimes() mockRepo.EXPECT().GetUserByEmail(gomock.Any(), gomock.Any()).Return(nil, app.ErrNotFound) mockRepo.EXPECT().GetUser(gomock.Any(), uAdmin.Name).Return(uAdmin, nil).AnyTimes() - mockRepo.EXPECT().AddAccessToken(gomock.Any(), uAdmin.Name).Return(app.AccessToken("token"), nil) - mockRepo.EXPECT().AddAccessToken(gomock.Any(), uAdmin.Name).Return(app.AccessToken(""), io.EOF) + mockRepo.EXPECT().AddAccessToken(gomock.Any(), gomock.Any(), uAdmin.Name).Return(nil) + mockRepo.EXPECT().AddAccessToken(gomock.Any(), gomock.Any(), uAdmin.Name).Return(io.EOF) tests := []struct { email string pass string - want string wantErr error }{ - {"user@host", "", "", app.ErrNotFound}, - {"admin@host", "wrong", "", app.ErrWrongPassword}, - {"admin@host", "secret", "token", nil}, - {"admin@host", "secret", "", io.EOF}, + {"user@host", "", app.ErrNotFound}, + {"admin@host", "wrong", app.ErrWrongPassword}, + {"admin@host", "secret", nil}, + {"admin@host", "secret", io.EOF}, } for _, tc := range tests { tc := tc @@ -136,7 +138,11 @@ func TestLoginByEmail(tt *testing.T) { t := check.T(tt) res, err := a.LoginByEmail(ctx, tc.email, sensitive.String(tc.pass)) 
t.Err(err, tc.wantErr) - t.Equal(res, app.AccessToken(tc.want)) + if tc.wantErr == nil { + t.Len(res, 26) + } else { + t.Len(res, 0) + } }) } } diff --git a/ms/auth/internal/app/mock.app.go b/ms/auth/internal/app/mock.app.go index 7ad65f0..0c591c1 100644 --- a/ms/auth/internal/app/mock.app.go +++ b/ms/auth/internal/app/mock.app.go @@ -204,18 +204,17 @@ func (mr *MockRepoMockRecorder) GetUserByAccessToken(arg0, arg1 interface{}) *go } // AddAccessToken mocks base method -func (m *MockRepo) AddAccessToken(arg0 Ctx, arg1 dom.UserName) (AccessToken, error) { +func (m *MockRepo) AddAccessToken(arg0 Ctx, arg1 AccessToken, arg2 dom.UserName) error { m.ctrl.T.Helper() - ret := m.ctrl.Call(m, "AddAccessToken", arg0, arg1) - ret0, _ := ret[0].(AccessToken) - ret1, _ := ret[1].(error) - return ret0, ret1 + ret := m.ctrl.Call(m, "AddAccessToken", arg0, arg1, arg2) + ret0, _ := ret[0].(error) + return ret0 } // AddAccessToken indicates an expected call of AddAccessToken -func (mr *MockRepoMockRecorder) AddAccessToken(arg0, arg1 interface{}) *gomock.Call { +func (mr *MockRepoMockRecorder) AddAccessToken(arg0, arg1, arg2 interface{}) *gomock.Call { mr.mock.ctrl.T.Helper() - return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AddAccessToken", reflect.TypeOf((*MockRepo)(nil).AddAccessToken), arg0, arg1) + return mr.mock.ctrl.RecordCallWithMethodType(mr.mock, "AddAccessToken", reflect.TypeOf((*MockRepo)(nil).AddAccessToken), arg0, arg1, arg2) } // DelAccessToken mocks base method diff --git a/ms/auth/internal/config/config.go b/ms/auth/internal/config/config.go index 7b01e6f..5d48544 100644 --- a/ms/auth/internal/config/config.go +++ b/ms/auth/internal/config/config.go @@ -15,11 +15,14 @@ import ( "strings" "github.com/powerman/appcfg" + "github.com/powerman/pqx" "github.com/spf13/pflag" "golang.org/x/text/unicode/norm" "github.com/powerman/go-monolith-example/internal/config" "github.com/powerman/go-monolith-example/ms/auth/internal/app" + "github.com/powerman/go-monolith-example/pkg/cobrax" + "github.com/powerman/go-monolith-example/pkg/def" "github.com/powerman/go-monolith-example/pkg/netx" ) @@ -35,17 +38,23 @@ var shared *SharedCfg //nolint:gochecknoglobals // Config is global anyway. // If microservice may runs in different ways (e.g. using CLI subcommands) // then these subcommands may use subset of these values. var own = &struct { //nolint:gochecknoglobals // Config is global anyway. - Secret appcfg.NotEmptyString `env:"SECRET"` - TLSCert appcfg.NotEmptyString `env:"TLS_CERT"` - TLSCertInt appcfg.NotEmptyString `env:"TLS_CERT_INT"` - TLSKey appcfg.NotEmptyString `env:"TLS_KEY"` - TLSKeyInt appcfg.NotEmptyString `env:"TLS_KEY_INT"` + PostgresUser appcfg.NotEmptyString `env:"POSTGRES_AUTH_LOGIN"` + PostgresPass appcfg.NotEmptyString `env:"POSTGRES_AUTH_PASS"` + GoosePostgresDir appcfg.NotEmptyString + Secret appcfg.NotEmptyString `env:"SECRET"` + TLSCert appcfg.NotEmptyString `env:"TLS_CERT"` + TLSCertInt appcfg.NotEmptyString `env:"TLS_CERT_INT"` + TLSKey appcfg.NotEmptyString `env:"TLS_KEY"` + TLSKeyInt appcfg.NotEmptyString `env:"TLS_KEY_INT"` }{ // Defaults, if any: + PostgresUser: appcfg.MustNotEmptyString(app.ServiceName), + GoosePostgresDir: appcfg.MustNotEmptyString(fmt.Sprintf("ms/%s/internal/migrations", app.ServiceName)), } // FlagSets for all CLI subcommands which use flags to set config values. type FlagSets struct { - Serve *pflag.FlagSet + Serve *pflag.FlagSet + GoosePostgres *pflag.FlagSet } var fs FlagSets //nolint:gochecknoglobals // Flags are global anyway. 
@@ -62,7 +71,18 @@ func Init(sharedCfg *SharedCfg, flagsets FlagSets) error { return err } + appcfg.AddPFlag(fs.GoosePostgres, &shared.XPostgresAddrHost, "postgres.host", "host to connect to PostgreSQL") + appcfg.AddPFlag(fs.GoosePostgres, &shared.XPostgresAddrPort, "postgres.port", "port to connect to PostgreSQL") + appcfg.AddPFlag(fs.GoosePostgres, &shared.XPostgresDBName, "postgres.dbname", "PostgreSQL database name") + appcfg.AddPFlag(fs.GoosePostgres, &own.PostgresUser, "postgres.user", "PostgreSQL username") + appcfg.AddPFlag(fs.GoosePostgres, &own.PostgresPass, "postgres.pass", "PostgreSQL password") + pfx := app.ServiceName + "." + appcfg.AddPFlag(fs.Serve, &shared.XPostgresAddrHost, "postgres.host", "host to connect to PostgreSQL") + appcfg.AddPFlag(fs.Serve, &shared.XPostgresAddrPort, "postgres.port", "port to connect to PostgreSQL") + appcfg.AddPFlag(fs.Serve, &shared.XPostgresDBName, "postgres.dbname", "PostgreSQL database name") + appcfg.AddPFlag(fs.Serve, &own.PostgresUser, pfx+"postgres.user", "PostgreSQL username") + appcfg.AddPFlag(fs.Serve, &own.PostgresPass, pfx+"postgres.pass", "PostgreSQL password") appcfg.AddPFlag(fs.Serve, &shared.AddrHost, "host", "host to serve") appcfg.AddPFlag(fs.Serve, &shared.AddrHostInt, "host-int", "internal host to serve") appcfg.AddPFlag(fs.Serve, &shared.AuthAddrPort, pfx+"port", "port to serve") @@ -76,16 +96,18 @@ func Init(sharedCfg *SharedCfg, flagsets FlagSets) error { // ServeConfig contains configuration for subcommand. type ServeConfig struct { - Addr netx.Addr - AddrInt netx.Addr - GRPCGWAddr netx.Addr - MetricsAddr netx.Addr - Secret []byte - TLSCACert string - TLSCert string - TLSCertInt string - TLSKey string - TLSKeyInt string + Postgres *def.PostgresConfig + GoosePostgresDir string + Addr netx.Addr + AddrInt netx.Addr + GRPCGWAddr netx.Addr + MetricsAddr netx.Addr + Secret []byte + TLSCACert string + TLSCert string + TLSCertInt string + TLSKey string + TLSKeyInt string } // GetServe validates and returns configuration for subcommand. 
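Note (illustrative, not part of the change): the Postgres and GoosePostgresDir fields added to ServeConfig are consumed by the new dal.New (see ms/auth/internal/dal/dal.go below), which runs the goose migrations and opens the pqx-based connection. A hypothetical wiring sketch, assuming the serve subcommand follows the usual pattern — the actual ms/auth service wiring is not part of this excerpt:

    package auth

    import (
    	"context"

    	"github.com/powerman/go-monolith-example/ms/auth/internal/config"
    	"github.com/powerman/go-monolith-example/ms/auth/internal/dal"
    )

    // newRepo is illustrative only: it reads the validated ServeConfig and
    // hands its PostgreSQL settings to the DAL.
    func newRepo(ctx context.Context) (*dal.Repo, error) {
    	cfg, err := config.GetServe() // Env/flags registered in Init above.
    	if err != nil {
    		return nil, err
    	}
    	// dal.New applies migrations from GoosePostgresDir, then connects
    	// using the def.PostgresConfig built from the X_POSTGRES_* settings.
    	return dal.New(ctx, cfg.GoosePostgresDir, cfg.Postgres)
    }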
@@ -93,16 +115,25 @@ func GetServe() (c *ServeConfig, err error) { defer cleanup() c = &ServeConfig{ - Addr: netx.NewAddr(shared.AddrHost.Value(&err), shared.AuthAddrPort.Value(&err)), - AddrInt: netx.NewAddr(shared.AddrHostInt.Value(&err), shared.AuthAddrPortInt.Value(&err)), - GRPCGWAddr: netx.NewAddr(shared.AddrHost.Value(&err), shared.AuthGRPCGWAddrPort.Value(&err)), - MetricsAddr: netx.NewAddr(shared.AddrHostInt.Value(&err), shared.AuthMetricsAddrPort.Value(&err)), - Secret: norm.NFD.Bytes([]byte(own.Secret.Value(&err))), - TLSCACert: shared.TLSCACert.Value(&err), - TLSCert: own.TLSCert.Value(&err), - TLSCertInt: own.TLSCertInt.Value(&err), - TLSKey: own.TLSKey.Value(&err), - TLSKeyInt: own.TLSKeyInt.Value(&err), + Postgres: def.NewPostgresConfig(pqx.Config{ + Host: shared.XPostgresAddrHost.Value(&err), + Port: shared.XPostgresAddrPort.Value(&err), + DBName: shared.XPostgresDBName.Value(&err), + User: own.PostgresUser.Value(&err), + Pass: own.PostgresPass.Value(&err), + SSLRootCert: shared.TLSCACert.Value(&err), + }), + GoosePostgresDir: own.GoosePostgresDir.Value(&err), + Addr: netx.NewAddr(shared.AddrHost.Value(&err), shared.AuthAddrPort.Value(&err)), + AddrInt: netx.NewAddr(shared.AddrHostInt.Value(&err), shared.AuthAddrPortInt.Value(&err)), + GRPCGWAddr: netx.NewAddr(shared.AddrHost.Value(&err), shared.AuthGRPCGWAddrPort.Value(&err)), + MetricsAddr: netx.NewAddr(shared.AddrHostInt.Value(&err), shared.AuthMetricsAddrPort.Value(&err)), + Secret: norm.NFD.Bytes([]byte(own.Secret.Value(&err))), + TLSCACert: shared.TLSCACert.Value(&err), + TLSCert: own.TLSCert.Value(&err), + TLSCertInt: own.TLSCertInt.Value(&err), + TLSKey: own.TLSKey.Value(&err), + TLSKeyInt: own.TLSKeyInt.Value(&err), } if err != nil { return nil, appcfg.WrapPErr(err, fs.Serve, own, shared) @@ -110,6 +141,26 @@ func GetServe() (c *ServeConfig, err error) { return c, nil } +func GetGoosePostgres() (c *cobrax.GoosePostgresConfig, err error) { + defer cleanup() + + c = &cobrax.GoosePostgresConfig{ + Postgres: def.NewPostgresConfig(pqx.Config{ + Host: shared.XPostgresAddrHost.Value(&err), + Port: shared.XPostgresAddrPort.Value(&err), + DBName: shared.XPostgresDBName.Value(&err), + User: own.PostgresUser.Value(&err), + Pass: own.PostgresPass.Value(&err), + SSLRootCert: shared.TLSCACert.Value(&err), + }), + GoosePostgresDir: own.GoosePostgresDir.Value(&err), + } + if err != nil { + return nil, appcfg.WrapPErr(err, fs.GoosePostgres, own, shared) + } + return c, nil +} + // Cleanup must be called by all Get* functions to ensure second call to // any of them will panic. 
func cleanup() { diff --git a/ms/auth/internal/config/config_test.go b/ms/auth/internal/config/config_test.go index 0b12edd..d5967eb 100644 --- a/ms/auth/internal/config/config_test.go +++ b/ms/auth/internal/config/config_test.go @@ -5,6 +5,7 @@ import ( "testing" "github.com/powerman/check" + "github.com/powerman/pqx" "github.com/powerman/go-monolith-example/internal/config" "github.com/powerman/go-monolith-example/pkg/def" @@ -13,16 +14,25 @@ import ( func Test(t *testing.T) { want := &ServeConfig{ - Addr: netx.NewAddr(def.Hostname, config.AuthPort), - AddrInt: netx.NewAddr(def.Hostname, config.AuthPortInt), - GRPCGWAddr: netx.NewAddr(def.Hostname, config.AuthGRPCGWPort), - MetricsAddr: netx.NewAddr(def.Hostname, config.AuthMetricsPort), - Secret: []byte("s3cr3t"), - TLSCACert: "ca.crt", - TLSCert: "tls.crt", - TLSCertInt: "tls-int.crt", - TLSKey: "tls.key", - TLSKeyInt: "tls-int.key", + Postgres: def.NewPostgresConfig(pqx.Config{ + Host: "postgres", + Port: 5432, + DBName: "postgres", + User: "auth", + Pass: "authpass", + SSLRootCert: "ca.crt", + }), + GoosePostgresDir: "ms/auth/internal/migrations", + Addr: netx.NewAddr(def.Hostname, config.AuthPort), + AddrInt: netx.NewAddr(def.Hostname, config.AuthPortInt), + GRPCGWAddr: netx.NewAddr(def.Hostname, config.AuthGRPCGWPort), + MetricsAddr: netx.NewAddr(def.Hostname, config.AuthMetricsPort), + Secret: []byte("s3cr3t"), + TLSCACert: "ca.crt", + TLSCert: "tls.crt", + TLSCertInt: "tls-int.crt", + TLSKey: "tls.key", + TLSKeyInt: "tls-int.key", } t.Run("required", func(tt *testing.T) { @@ -37,6 +47,8 @@ func Test(t *testing.T) { os.Setenv("MONO__AUTH_TLS_CERT", "tls.crt") require(t, "Secret") os.Setenv("MONO__AUTH_SECRET", "s3cr3t") + require(t, "PostgresPass") + os.Setenv("MONO__AUTH_POSTGRES_AUTH_PASS", "authpass") }) t.Run("default", func(tt *testing.T) { t := check.T(tt) @@ -46,6 +58,8 @@ func Test(t *testing.T) { }) t.Run("constraint", func(tt *testing.T) { t := check.T(tt) + constraint(t, "MONO__AUTH_POSTGRES_AUTH_LOGIN", "", `^PostgresUser .* empty`) + constraint(t, "MONO__AUTH_POSTGRES_AUTH_PASS", "", `^PostgresPass .* empty`) constraint(t, "MONO__AUTH_SECRET", "", `^Secret .* empty`) constraint(t, "MONO__AUTH_TLS_CERT", "", `^TLSCert .* empty`) constraint(t, "MONO__AUTH_TLS_CERT_INT", "", `^TLSCertInt .* empty`) @@ -54,6 +68,8 @@ func Test(t *testing.T) { }) t.Run("env", func(tt *testing.T) { t := check.T(tt) + os.Setenv("MONO__AUTH_POSTGRES_AUTH_LOGIN", "auth3") + os.Setenv("MONO__AUTH_POSTGRES_AUTH_PASS", "authpass3") os.Setenv("MONO__AUTH_SECRET", "secret3") os.Setenv("MONO__AUTH_TLS_CERT", "tls3.crt") os.Setenv("MONO__AUTH_TLS_CERT_INT", "tls3-int.crt") @@ -61,6 +77,8 @@ func Test(t *testing.T) { os.Setenv("MONO__AUTH_TLS_KEY_INT", "tls3-int.key") c, err := testGetServe() t.Nil(err) + want.Postgres.User = "auth3" + want.Postgres.Pass = "authpass3" want.Secret = []byte("secret3") want.TLSCert = "tls3.crt" want.TLSCertInt = "tls3-int.crt" @@ -71,6 +89,11 @@ func Test(t *testing.T) { t.Run("flag", func(tt *testing.T) { t := check.T(tt) c, err := testGetServe( + "--postgres.host=localhost4", + "--postgres.port=45432", + "--postgres.dbname=postgres4", + "--auth.postgres.user=auth4", + "--auth.postgres.pass=authpass4", "--host=host4", "--host-int=hostint4", "--auth.port=8004", @@ -80,6 +103,11 @@ func Test(t *testing.T) { "--auth.secret=secret4", // TODO Test norm.NFD. 
) t.Nil(err) + want.Postgres.Host = "localhost4" + want.Postgres.Port = 45432 + want.Postgres.DBName = "postgres4" + want.Postgres.User = "auth4" + want.Postgres.Pass = "authpass4" want.Addr = netx.NewAddr("host4", 8004) want.AddrInt = netx.NewAddr("hostint4", 9004) want.GRPCGWAddr = netx.NewAddr("host4", 7004) diff --git a/ms/auth/internal/config/init_test.go b/ms/auth/internal/config/init_test.go index 569928d..03e2006 100644 --- a/ms/auth/internal/config/init_test.go +++ b/ms/auth/internal/config/init_test.go @@ -16,7 +16,8 @@ var ( testShared *SharedCfg testOwn = own testFlagsets = FlagSets{ - Serve: pflag.NewFlagSet("", 0), + Serve: pflag.NewFlagSet("", 0), + GoosePostgres: pflag.NewFlagSet("", 0), } ) @@ -24,6 +25,7 @@ func TestMain(m *testing.M) { def.Init() os.Clearenv() os.Setenv("MONO_TLS_CA_CERT", "ca.crt") + os.Setenv("MONO_X_POSTGRES_ADDR_HOST", "postgres") testShared, _ = config.Get() check.TestMain(m) } diff --git a/ms/auth/internal/config/testing.go b/ms/auth/internal/config/testing.go index 46187da..2abf86e 100644 --- a/ms/auth/internal/config/testing.go +++ b/ms/auth/internal/config/testing.go @@ -17,7 +17,8 @@ func MustGetServeTest() *ServeConfig { sharedCfg, err := config.Get() must.NoErr(err) err = Init(sharedCfg, FlagSets{ - Serve: pflag.NewFlagSet("", pflag.ContinueOnError), + Serve: pflag.NewFlagSet("", pflag.ContinueOnError), + GoosePostgres: pflag.NewFlagSet("", pflag.ContinueOnError), }) must.NoErr(err) cfg, err := GetServe() @@ -37,6 +38,8 @@ func MustGetServeTest() *ServeConfig { } for _, path := range []*string{ + &cfg.Postgres.Config.SSLRootCert, + &cfg.GoosePostgresDir, &cfg.TLSCACert, &cfg.TLSCert, &cfg.TLSCertInt, diff --git a/ms/auth/internal/dal/dal.go b/ms/auth/internal/dal/dal.go index ff49cbe..ccf9eff 100644 --- a/ms/auth/internal/dal/dal.go +++ b/ms/auth/internal/dal/dal.go @@ -1,100 +1,49 @@ +// Package dal implements Data Access Layer using PostgreSQL DB. package dal import ( "context" - "sync" + "time" - "github.com/powerman/go-monolith-example/internal/dom" "github.com/powerman/go-monolith-example/ms/auth/internal/app" + "github.com/powerman/go-monolith-example/ms/auth/internal/migrations" + "github.com/powerman/go-monolith-example/pkg/def" + "github.com/powerman/go-monolith-example/pkg/repo" +) + +const ( + schemaVersion = 4 + dbMaxOpenConns = 100 / 10 // Use up to 1/10 of server's max_connections. + dbMaxIdleConns = 5 // A bit more than default (2). ) type Ctx = context.Context type Repo struct { - sync.Mutex - users map[dom.UserName]app.User - tokens map[app.AccessToken]dom.UserName -} - -func New() *Repo { - return &Repo{ - users: make(map[dom.UserName]app.User), - tokens: make(map[app.AccessToken]dom.UserName), - } -} - -func (r *Repo) AddUser(ctx Ctx, user app.User) error { - r.Lock() - defer r.Unlock() - if _, ok := r.users[user.Name]; ok { - return app.ErrAlreadyExist - } - for i := range r.users { - if user.Email == r.users[i].Email { - return app.ErrAlreadyExist - } - } - r.users[user.Name] = user - return nil -} - -func (r *Repo) GetUser(ctx Ctx, userName dom.UserName) (*app.User, error) { - r.Lock() - defer r.Unlock() - user, ok := r.users[userName] - if !ok { - return nil, app.ErrNotFound - } - return &user, nil -} - -func (r *Repo) GetUserByEmail(ctx Ctx, email string) (*app.User, error) { - r.Lock() - defer r.Unlock() - for _, user := range r.users { //nolint:gocritic // rangeValCopy. 
- if user.Email == email { - return &user, nil - } - } - return nil, app.ErrNotFound -} - -func (r *Repo) GetUserByAccessToken(ctx Ctx, accessToken app.AccessToken) (*app.User, error) { - r.Lock() - defer r.Unlock() - userName, ok := r.tokens[accessToken] - if !ok { - return nil, app.ErrNotFound - } - user, ok := r.users[userName] - if !ok { - return nil, app.ErrNotFound - } - return &user, nil -} - -func (r *Repo) AddAccessToken(ctx Ctx, userName dom.UserName) (app.AccessToken, error) { - r.Lock() - defer r.Unlock() - accessToken := app.AccessToken(dom.NewID()) - r.tokens[accessToken] = userName // XXX May overwrite existing record. - return accessToken, nil -} - -func (r *Repo) DelAccessToken(ctx Ctx, accessToken app.AccessToken) error { - r.Lock() - defer r.Unlock() - delete(r.tokens, accessToken) - return nil -} - -func (r *Repo) DelAccessTokens(ctx Ctx, userName dom.UserName) error { - r.Lock() - defer r.Unlock() - for accessToken, name := range r.tokens { //nolint:gocritic // rangeValCopy. - if name.String() == userName.String() { - delete(r.tokens, accessToken) - } - } - return nil + *repo.Repo +} + +// New creates and returns new Repo. +// It will also run required DB migrations and connects to DB. +func New(ctx Ctx, dir string, cfg *def.PostgresConfig) (_ *Repo, err error) { + returnErrs := []error{ // List of app.Err… returned by Repo methods. + app.ErrAlreadyExist, + app.ErrNotFound, + } + + r := &Repo{} + r.Repo, err = repo.NewPostgres(ctx, migrations.Goose(), repo.PostgresConfig{ + Postgres: cfg, + GoosePostgresDir: dir, + SchemaVersion: schemaVersion, + Metric: metric, + ReturnErrs: returnErrs, + }) + if err != nil { + return nil, err + } + r.DB.SetMaxOpenConns(dbMaxOpenConns) + r.DB.SetMaxIdleConns(dbMaxIdleConns) + r.SchemaVer.HoldSharedLock(ctx, time.Second) + return r, nil } diff --git a/ms/auth/internal/dal/init_integration_test.go b/ms/auth/internal/dal/init_integration_test.go new file mode 100644 index 0000000..1198b96 --- /dev/null +++ b/ms/auth/internal/dal/init_integration_test.go @@ -0,0 +1,63 @@ +// +build integration + +package dal_test + +import ( + "runtime" + "strings" + "testing" + + "github.com/powerman/check" + "github.com/powerman/pqx" + "github.com/prometheus/client_golang/prometheus" + + "github.com/powerman/go-monolith-example/ms/auth/internal/app" + "github.com/powerman/go-monolith-example/ms/auth/internal/config" + "github.com/powerman/go-monolith-example/ms/auth/internal/dal" + "github.com/powerman/go-monolith-example/pkg/def" +) + +func TestMain(m *testing.M) { + def.Init() + reg := prometheus.NewPedanticRegistry() + app.InitMetrics(reg) + dal.InitMetrics(reg) + cfg = config.MustGetServeTest() + check.TestMain(m) +} + +type tLogger check.C + +func (t tLogger) Print(args ...interface{}) { t.Log(args...) 
} + +var ( + ctx = def.NewContext(app.ServiceName) + cfg *config.ServeConfig +) + +func newTestRepo(t *check.C) (cleanup func(), r *dal.Repo) { + t.Helper() + + pc, _, _, _ := runtime.Caller(1) + suffix := runtime.FuncForPC(pc).Name() + suffix = suffix[:strings.LastIndex(suffix, ".")] + suffix += "_" + t.Name() + const maxIdentLen = 63 + if maxLen := maxIdentLen - len(cfg.Postgres.DBName) - 1; len(suffix) > maxLen { + suffix = suffix[len(suffix)-maxLen:] + } + + dbCfg := cfg.Postgres.Clone() + _, cleanupDB, err := pqx.EnsureTempDB(tLogger(*t), suffix, dbCfg.Config) + t.Must(t.Nil(err)) + tempDBCfg := dbCfg.Clone() + tempDBCfg.DBName += "_" + suffix + r, err = dal.New(ctx, cfg.GoosePostgresDir, tempDBCfg) + t.Must(t.Nil(err)) + + cleanup = func() { + r.Close() + cleanupDB() + } + return cleanup, r +} diff --git a/ms/auth/internal/dal/methods.go b/ms/auth/internal/dal/methods.go new file mode 100644 index 0000000..eed1fd7 --- /dev/null +++ b/ms/auth/internal/dal/methods.go @@ -0,0 +1,114 @@ +package dal + +import ( + "database/sql" + "errors" + + "github.com/powerman/sqlxx" + + "github.com/powerman/go-monolith-example/internal/dom" + "github.com/powerman/go-monolith-example/ms/auth/internal/app" + "github.com/powerman/go-monolith-example/pkg/repo" +) + +func (r *Repo) AddUser(ctx Ctx, user app.User) error { + return r.Tx(ctx, nil, func(tx *sqlxx.Tx) error { + _, err := tx.NamedExecContext(ctx, sqlUsersAdd, argUsersAdd{ + ID: user.Name.ID(), + PassSalt: user.PassHash.Salt, + PassHash: user.PassHash.Hash, + Email: user.Email, + DisplayName: user.DisplayName, + Role: dalRole(user.Role), + }) + if repo.PostgresErrName(err, repo.PostgresUniqueViolation) { + return app.ErrAlreadyExist + } + return err + }) +} + +func (r *Repo) GetUser(ctx Ctx, userName dom.UserName) (res *app.User, err error) { + err = r.Tx(ctx, &sql.TxOptions{ReadOnly: true}, func(tx *sqlxx.Tx) error { + var resUsersGet rowUsersGet + err := tx.NamedGetContext(ctx, &resUsersGet, sqlUsersGet, argUsersGet{ + ID: userName.ID(), + }) + switch { + case errors.Is(err, sql.ErrNoRows): + return app.ErrNotFound + case err != nil: + return err + } + res = appUserWithPass(resUsersGet) + return nil + }) + return +} + +func (r *Repo) GetUserByEmail(ctx Ctx, email string) (res *app.User, err error) { + err = r.Tx(ctx, &sql.TxOptions{ReadOnly: true}, func(tx *sqlxx.Tx) error { + var resUsersGetByEmail rowUsersGetByEmail + err := tx.NamedGetContext(ctx, &resUsersGetByEmail, sqlUsersGetByEmail, argUsersGetByEmail{ + Email: email, + }) + switch { + case errors.Is(err, sql.ErrNoRows): + return app.ErrNotFound + case err != nil: + return err + } + res = appUserWithPass(rowUsersGet(resUsersGetByEmail)) + return nil + }) + return +} + +func (r *Repo) GetUserByAccessToken(ctx Ctx, accessToken app.AccessToken) (res *app.User, err error) { + err = r.Tx(ctx, &sql.TxOptions{ReadOnly: true}, func(tx *sqlxx.Tx) error { + var resGetUserByAccessToken rowGetUserByAccessToken + err := tx.NamedGetContext(ctx, &resGetUserByAccessToken, sqlGetUserByAccessToken, argGetUserByAccessToken{ + AccessToken: string(accessToken), + }) + switch { + case errors.Is(err, sql.ErrNoRows): + return app.ErrNotFound + case err != nil: + return err + } + res = appUser(resGetUserByAccessToken) + return nil + }) + return +} + +func (r *Repo) AddAccessToken(ctx Ctx, accessToken app.AccessToken, userName dom.UserName) error { + return r.Tx(ctx, nil, func(tx *sqlxx.Tx) error { + _, err := tx.NamedExecContext(ctx, sqlAccessTokensAdd, argAccessTokensAdd{ + AccessToken: string(accessToken), + 
UserID: userName.ID(), + }) + if repo.PostgresErrName(err, repo.PostgresForeignKeyViolation) { + return app.ErrNotFound + } + return err + }) +} + +func (r *Repo) DelAccessToken(ctx Ctx, accessToken app.AccessToken) error { + return r.Tx(ctx, nil, func(tx *sqlxx.Tx) error { + _, err := tx.NamedExecContext(ctx, sqlAccessTokensDel, argAccessTokensDel{ + AccessToken: string(accessToken), + }) + return err + }) +} + +func (r *Repo) DelAccessTokens(ctx Ctx, userName dom.UserName) error { + return r.Tx(ctx, nil, func(tx *sqlxx.Tx) error { + _, err := tx.NamedExecContext(ctx, sqlAccessTokensDelByUser, argAccessTokensDelByUser{ + UserID: userName.ID(), + }) + return err + }) +} diff --git a/ms/auth/internal/dal/methods_integration_test.go b/ms/auth/internal/dal/methods_integration_test.go new file mode 100644 index 0000000..ea957ba --- /dev/null +++ b/ms/auth/internal/dal/methods_integration_test.go @@ -0,0 +1,187 @@ +// build integration + +package dal_test + +import ( + "errors" + "testing" + "time" + + "github.com/lib/pq" + "github.com/powerman/check" + + "github.com/powerman/go-monolith-example/internal/dom" + "github.com/powerman/go-monolith-example/ms/auth/internal/app" +) + +var ( + errDupToken = errors.New(`duplicate key value violates unique constraint "access_tokens_pkey"`) + now = time.Now().Truncate(time.Second) + tmplUAdmin = app.User{ + Name: dom.NewUserName("admin"), + PassHash: app.PassHash{ + Salt: []byte("saltAdmin"), + Hash: []byte("hashAdmin"), + }, + Email: "root@localhost", + Role: app.RoleAdmin, + } + tmplU1 = app.User{ + Name: dom.NewUserName("user1"), + PassHash: app.PassHash{ + Salt: []byte("salt1"), + Hash: []byte("hash1"), + }, + Email: "user1@localhost", + DisplayName: "User 1", + Role: app.RoleUser, + } + tmplU2 = app.User{ + Name: dom.NewUserName("user2"), + Email: "user2@localhost", + Role: app.RoleUser, + } +) + +func TestUser(tt *testing.T) { + t := check.T(tt) + t.Parallel() + cleanup, r := newTestRepo(t) + defer cleanup() + + var ( + uAdmin = tmplUAdmin + u1 = tmplU1 + u2 = tmplU2 + u1DupName = app.User{ + Name: dom.NewUserName("user1"), + Email: "user1dup@localhost", + Role: app.RoleUser, + } + u1DupEmail = app.User{ + Name: dom.NewUserName("user1dup"), + Email: "User1@LocalHost", + Role: app.RoleUser, + } + ) + + res, err := r.GetUser(ctx, u1.Name) + t.Err(err, app.ErrNotFound) + t.Nil(res) + res, err = r.GetUserByEmail(ctx, u1.Email) + t.Err(err, app.ErrNotFound) + t.Nil(res) + + tests := []struct { + given app.User + wantErr error + }{ + {uAdmin, nil}, + {uAdmin, app.ErrAlreadyExist}, + {u1, nil}, + {u1DupName, app.ErrAlreadyExist}, + {u1DupEmail, app.ErrAlreadyExist}, + {u2, nil}, + } + for _, tc := range tests { + tc := tc + t.Run("", func(tt *testing.T) { + t := check.T(tt) + err := r.AddUser(ctx, tc.given) + t.Err(err, tc.wantErr) + if err == nil { + if tc.given.PassHash.Salt == nil { + tc.given.PassHash.Salt = []byte{} + tc.given.PassHash.Hash = []byte{} + } + res, err := r.GetUser(ctx, tc.given.Name) + t.Nil(err) + t.GE(res.CreateTime, now) + tc.given.CreateTime = res.CreateTime + t.DeepEqual(res, &tc.given) + + res, err = r.GetUserByEmail(ctx, tc.given.Email) + t.Nil(err) + t.DeepEqual(res, &tc.given) + } + }) + } +} + +func TestAccessToken(tt *testing.T) { + t := check.T(tt) + t.Parallel() + cleanup, r := newTestRepo(t) + defer cleanup() + + var ( + uAdmin = tmplUAdmin + u1 = tmplU1 + u2 = tmplU2 + ) + for _, u := range []*app.User{&uAdmin, &u1, &u2} { + t.Nil(r.AddUser(ctx, *u)) + res, err := r.GetUser(ctx, u.Name) + t.Nil(err) + u.CreateTime = 
+		u.CreateTime = res.CreateTime
+	}
+
+	tests := []struct {
+		AccessToken app.AccessToken
+		userName    dom.UserName
+		wantErr     error
+	}{
+		{"admintoken1", uAdmin.Name, nil},
+		{"admintoken1", uAdmin.Name, errDupToken},
+		{"admintoken2", uAdmin.Name, nil},
+		{"u1token1", u1.Name, nil},
+		{"u1token2", u1.Name, nil},
+		{"u2token1", u2.Name, nil},
+		{"u3token1", dom.NewUserName("user3"), app.ErrNotFound},
+	}
+	for _, tc := range tests {
+		tc := tc
+		t.Run("", func(tt *testing.T) {
+			t := check.T(tt) //nolint:govet // False positive.
+			err := r.AddAccessToken(ctx, tc.AccessToken, tc.userName)
+			if pqErr := new(*pq.Error); errors.As(err, pqErr) {
+				t.Match(err, tc.wantErr.Error())
+			} else {
+				t.Err(err, tc.wantErr)
+			}
+		})
+	}
+
+	t.Nil(r.DelAccessTokens(ctx, uAdmin.Name))
+	t.Nil(r.DelAccessTokens(ctx, uAdmin.Name))
+	t.Nil(r.DelAccessTokens(ctx, dom.NewUserName("nosuch")))
+	t.Nil(r.DelAccessToken(ctx, "u1token1"))
+	t.Nil(r.DelAccessToken(ctx, "u1token1"))
+	t.Nil(r.DelAccessToken(ctx, "nosuch"))
+
+	u1.PassHash = app.PassHash{}
+	u2.PassHash = app.PassHash{}
+
+	tests2 := []struct {
+		AccessToken app.AccessToken
+		want        *app.User
+		wantErr     error
+	}{
+		{"nosuch", nil, app.ErrNotFound},
+		{"admintoken1", nil, app.ErrNotFound},
+		{"admintoken2", nil, app.ErrNotFound},
+		{"u1token1", nil, app.ErrNotFound},
+		{"u1token2", &u1, nil},
+		{"u1token2", &u1, nil},
+		{"u2token1", &u2, nil},
+	}
+	for _, tc := range tests2 {
+		tc := tc
+		t.Run("", func(tt *testing.T) {
+			t := check.T(tt)
+			res, err := r.GetUserByAccessToken(ctx, tc.AccessToken)
+			t.Err(err, tc.wantErr)
+			t.DeepEqual(res, tc.want)
+		})
+	}
+}
diff --git a/ms/auth/internal/dal/metrics.go b/ms/auth/internal/dal/metrics.go
new file mode 100644
index 0000000..9285411
--- /dev/null
+++ b/ms/auth/internal/dal/metrics.go
@@ -0,0 +1,18 @@
+package dal
+
+import (
+	"github.com/prometheus/client_golang/prometheus"
+
+	"github.com/powerman/go-monolith-example/ms/auth/internal/app"
+	"github.com/powerman/go-monolith-example/pkg/repo"
+)
+
+var metric repo.Metrics //nolint:gochecknoglobals // Metrics are global anyway.
+
+// InitMetrics must be called once before using this package.
+// It registers and initializes metrics used by this package.
+func InitMetrics(reg *prometheus.Registry) {
+	const subsystem = "dal_postgres"
+
+	metric = repo.NewMetrics(reg, app.ServiceName, subsystem, new(app.Repo))
+}
diff --git a/ms/auth/internal/dal/models.go b/ms/auth/internal/dal/models.go
new file mode 100644
index 0000000..314bb8b
--- /dev/null
+++ b/ms/auth/internal/dal/models.go
@@ -0,0 +1,61 @@
+package dal
+
+import (
+	"fmt"
+
+	"github.com/powerman/go-monolith-example/internal/dom"
+	"github.com/powerman/go-monolith-example/ms/auth/internal/app"
+)
+
+type dbRole string
+
+const (
+	roleAdmin dbRole = "admin"
+	roleUser  dbRole = "user"
+)
+
+func dalRole(role app.Role) dbRole {
+	switch role {
+	case app.RoleAdmin:
+		return roleAdmin
+	case app.RoleUser:
+		return roleUser
+	default:
+		panic(fmt.Sprintf("unknown app.Role: %v", role))
+	}
+}
+
+func appRole(role dbRole) app.Role {
+	switch role {
+	case roleAdmin:
+		return app.RoleAdmin
+	case roleUser:
+		return app.RoleUser
+	default:
+		panic(fmt.Sprintf("unknown dal.Role: %v", role))
+	}
+}
+
+func appUserWithPass(row rowUsersGet) *app.User {
+	return &app.User{
+		Name: dom.NewUserName(row.ID),
+		PassHash: app.PassHash{
+			Salt: row.PassSalt,
+			Hash: row.PassHash,
+		},
+		Email:       row.Email,
+		DisplayName: row.DisplayName,
+		Role:        appRole(row.Role),
+		CreateTime:  row.CreatedAt,
+	}
+}
+
+func appUser(row rowGetUserByAccessToken) *app.User {
+	return &app.User{
+		Name:        dom.NewUserName(row.ID),
+		Email:       row.Email,
+		DisplayName: row.DisplayName,
+		Role:        appRole(row.Role),
+		CreateTime:  row.CreatedAt,
+	}
+}
diff --git a/ms/auth/internal/dal/sql.go b/ms/auth/internal/dal/sql.go
new file mode 100644
index 0000000..e3df0e2
--- /dev/null
+++ b/ms/auth/internal/dal/sql.go
@@ -0,0 +1,101 @@
+package dal
+
+import (
+	"time"
+)
+
+//nolint:gosec // False positive.
+const (
+	sqlUsersAdd = `
+INSERT INTO users (id, pass_salt, pass_hash, email, display_name, role)
+VALUES (:id, :pass_salt, :pass_hash, :email, :display_name, :role)
+	`
+	sqlUsersGet = `
+SELECT id, pass_salt, pass_hash, email, display_name, role, created_at
+FROM users
+WHERE id = :id
+	`
+	sqlUsersGetByEmail = `
+SELECT id, pass_salt, pass_hash, email, display_name, role, created_at
+FROM users
+WHERE LOWER(email) = LOWER(:email)
+	`
+	sqlGetUserByAccessToken = `
+SELECT id, email, display_name, role, u.created_at
+FROM access_tokens AS t LEFT JOIN users AS u ON (t.user_id = u.id)
+WHERE access_token = :access_token
+	`
+	sqlAccessTokensAdd = `
+INSERT INTO access_tokens (access_token, user_id)
+VALUES (:access_token, :user_id)
+	`
+	sqlAccessTokensDel = `
+DELETE FROM access_tokens
+WHERE access_token = :access_token
+	`
+	sqlAccessTokensDelByUser = `
+DELETE FROM access_tokens
+WHERE user_id = :user_id
+	`
+)
+
+type (
+	argUsersAdd struct {
+		ID          string
+		PassSalt    []byte
+		PassHash    []byte
+		Email       string
+		DisplayName string
+		Role        dbRole
+	}
+
+	argUsersGet struct {
+		ID string
+	}
+	rowUsersGet struct {
+		ID          string
+		PassSalt    []byte
+		PassHash    []byte
+		Email       string
+		DisplayName string
+		Role        dbRole
+		CreatedAt   time.Time
+	}
+
+	argUsersGetByEmail struct {
+		Email string
+	}
+	rowUsersGetByEmail struct {
+		ID          string
+		PassSalt    []byte
+		PassHash    []byte
+		Email       string
+		DisplayName string
+		Role        dbRole
+		CreatedAt   time.Time
+	}
+
+	argGetUserByAccessToken struct {
+		AccessToken string
+	}
+	rowGetUserByAccessToken struct {
+		ID          string
+		Email       string
+		DisplayName string
+		Role        dbRole
+		CreatedAt   time.Time
+	}
+
+	argAccessTokensAdd struct {
+		AccessToken string
+		UserID      string
+	}
+
+	argAccessTokensDel struct {
+		AccessToken string
+	}
+
+	argAccessTokensDelByUser struct {
+		UserID string
+	}
+)
diff --git a/ms/auth/internal/dal/test.goconvey b/ms/auth/internal/dal/test.goconvey
new file mode 100644
index 0000000..1cff4fb
--- /dev/null
+++ b/ms/auth/internal/dal/test.goconvey
@@ -0,0 +1 @@
+-tags=integration
diff --git a/ms/auth/internal/migrations/00001_down_not_supported.sql b/ms/auth/internal/migrations/00001_down_not_supported.sql
new file mode 100644
index 0000000..e606949
--- /dev/null
+++ b/ms/auth/internal/migrations/00001_down_not_supported.sql
@@ -0,0 +1,17 @@
+-- +goose Up
+-- SQL in this section is executed when the migration is applied.
+
+-- Usage:
+-- -- +goose Down
+-- SELECT down_not_supported();
+-- +goose StatementBegin
+CREATE FUNCTION down_not_supported() RETURNS void LANGUAGE plpgsql AS $$
+	BEGIN
+		RAISE EXCEPTION 'downgrade is not supported, restore from backup instead';
+	END;
+$$;
+-- +goose StatementEnd
+
+-- +goose Down
+-- SQL in this section is executed when the migration is rolled back.
+DROP FUNCTION down_not_supported;
diff --git a/ms/auth/internal/migrations/00002_noop.go b/ms/auth/internal/migrations/00002_noop.go
new file mode 100644
index 0000000..ead4861
--- /dev/null
+++ b/ms/auth/internal/migrations/00002_noop.go
@@ -0,0 +1,19 @@
+package migrations
+
+import (
+	"database/sql"
+)
+
+func init() {
+	goose.AddMigration(upNoop, downNoop)
+}
+
+func upNoop(tx *sql.Tx) error {
+	// This code is executed when the migration is applied.
+	return nil
+}
+
+func downNoop(tx *sql.Tx) error {
+	// This code is executed when the migration is rolled back.
+	return nil // migrate.ErrDownNotSupported
+}
diff --git a/ms/auth/internal/migrations/00003_add-func-trigger_set_updated_at.sql b/ms/auth/internal/migrations/00003_add-func-trigger_set_updated_at.sql
new file mode 100644
index 0000000..7d32970
--- /dev/null
+++ b/ms/auth/internal/migrations/00003_add-func-trigger_set_updated_at.sql
@@ -0,0 +1,20 @@
+-- +goose Up
+
+-- Usage:
+-- CREATE TABLE {table} (
+--   updated_at TIMESTAMPTZ DEFAULT now() NOT NULL,
+-- );
+-- CREATE TRIGGER {table}_updated_at
+--   BEFORE UPDATE ON {table}
+--   FOR EACH ROW EXECUTE FUNCTION trigger_set_updated_at();
+-- +goose StatementBegin
+CREATE FUNCTION trigger_set_updated_at() RETURNS trigger LANGUAGE plpgsql AS $$
+	BEGIN
+		NEW.updated_at := now();
+		RETURN NEW;
+	END;
+$$;
+-- +goose StatementEnd
+
+-- +goose Down
+DROP FUNCTION trigger_set_updated_at;
diff --git a/ms/auth/internal/migrations/00004_add-table-users-access_tokens.sql b/ms/auth/internal/migrations/00004_add-table-users-access_tokens.sql
new file mode 100644
index 0000000..ce1e74e
--- /dev/null
+++ b/ms/auth/internal/migrations/00004_add-table-users-access_tokens.sql
@@ -0,0 +1,34 @@
+-- +goose Up
+CREATE TYPE role AS ENUM ('admin', 'user');
+
+CREATE TABLE users (
+    id TEXT NOT NULL CHECK (id ~ '^[a-z0-9-]{4,63}$'),
+    pass_salt BYTEA NOT NULL,
+    pass_hash BYTEA NOT NULL,
+    email TEXT NOT NULL,
+    display_name TEXT NOT NULL,
+    role role NOT NULL,
+    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
+    updated_at TIMESTAMPTZ NOT NULL DEFAULT now(),
+    PRIMARY KEY (id)
+);
+
+CREATE UNIQUE INDEX users_unique_lower_email_idx ON users (LOWER(email));
+
+CREATE TRIGGER users_updated_at
+    BEFORE UPDATE ON users
+    FOR EACH ROW EXECUTE FUNCTION trigger_set_updated_at();
+
+CREATE TABLE access_tokens (
+    access_token TEXT NOT NULL,
+    user_id TEXT NOT NULL REFERENCES users(id) ON DELETE CASCADE,
+    created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
+    PRIMARY KEY (access_token)
+);
+
+CREATE INDEX access_tokens_user_id_idx ON access_tokens (user_id);
+
+-- +goose Down
+DROP TABLE access_tokens;
+DROP TABLE users;
+DROP TYPE role;
diff --git a/ms/auth/internal/migrations/goose.go b/ms/auth/internal/migrations/goose.go
new file mode 100644
index 0000000..c6a2c1a
--- /dev/null
+++ b/ms/auth/internal/migrations/goose.go
@@ -0,0 +1,15 @@
+// Package migrations provides goose migrations.
+package migrations
+
+import (
+	goosepkg "github.com/powerman/goose/v2"
+
+	"github.com/powerman/go-monolith-example/ms/auth/internal/app"
+	"github.com/powerman/go-monolith-example/pkg/def"
+)
+
+//nolint:gochecknoglobals // Force code generated by `goose create` to use instance.
+var goose = def.NewGoose(app.ServiceName)
+
+// Goose returns goose instance with Go migrations defined in the package.
+func Goose() *goosepkg.Instance { return goose }
diff --git a/ms/auth/internal/migrations/integration_test.go b/ms/auth/internal/migrations/integration_test.go
new file mode 100644
index 0000000..8bb1b0b
--- /dev/null
+++ b/ms/auth/internal/migrations/integration_test.go
@@ -0,0 +1,29 @@
+// +build integration
+
+package migrations_test
+
+import (
+	"testing"
+
+	"github.com/powerman/check"
+
+	"github.com/powerman/go-monolith-example/ms/auth/internal/app"
+	"github.com/powerman/go-monolith-example/ms/auth/internal/config"
+	"github.com/powerman/go-monolith-example/ms/auth/internal/migrations"
+	"github.com/powerman/go-monolith-example/pkg/def"
+	"github.com/powerman/go-monolith-example/pkg/migrate"
+)
+
+var cfg *config.ServeConfig
+
+func TestMain(m *testing.M) {
+	def.Init()
+	cfg = config.MustGetServeTest()
+	check.TestMain(m)
+}
+
+func Test(tt *testing.T) {
+	t := check.T(tt)
+	ctx := def.NewContext(app.ServiceName)
+	migrate.PostgresUpDownTest(t, ctx, migrations.Goose(), ".", cfg.Postgres)
+}
diff --git a/ms/auth/internal/migrations/test.goconvey b/ms/auth/internal/migrations/test.goconvey
new file mode 100644
index 0000000..1cff4fb
--- /dev/null
+++ b/ms/auth/internal/migrations/test.goconvey
@@ -0,0 +1 @@
+-tags=integration
diff --git a/ms/auth/service.go b/ms/auth/service.go
index 7111ff3..96dffa1 100644
--- a/ms/auth/service.go
+++ b/ms/auth/service.go
@@ -15,9 +15,12 @@ import (
 	"github.com/powerman/go-monolith-example/ms/auth/internal/app"
 	"github.com/powerman/go-monolith-example/ms/auth/internal/config"
 	"github.com/powerman/go-monolith-example/ms/auth/internal/dal"
+	"github.com/powerman/go-monolith-example/ms/auth/internal/migrations"
 	"github.com/powerman/go-monolith-example/ms/auth/internal/srv/grpc"
 	"github.com/powerman/go-monolith-example/ms/auth/internal/srv/grpcgw"
+	"github.com/powerman/go-monolith-example/pkg/cobrax"
 	"github.com/powerman/go-monolith-example/pkg/concurrent"
+	"github.com/powerman/go-monolith-example/pkg/def"
 	"github.com/powerman/go-monolith-example/pkg/netx"
 	"github.com/powerman/go-monolith-example/pkg/serve"
 )
@@ -45,12 +48,17 @@ func (s *Service) Name() string { return app.ServiceName }
 
 // Init implements main.embeddedService interface.
 func (s *Service) Init(sharedCfg *config.SharedCfg, cmd, serveCmd *cobra.Command) error {
-	// dal.InitMetrics(reg) TODO
+	dal.InitMetrics(reg)
 	app.InitMetrics(reg)
 	grpc.InitMetrics(reg)
+
+	ctx := def.NewContext(app.ServiceName)
+	goosePostgresCmd := cobrax.NewGoosePostgresCmd(ctx, migrations.Goose(), config.GetGoosePostgres)
+	cmd.AddCommand(goosePostgresCmd)
+
 	return config.Init(sharedCfg, config.FlagSets{
-		Serve: serveCmd.Flags(),
+		Serve:         serveCmd.Flags(),
+		GoosePostgres: goosePostgresCmd.Flags(),
 	})
 }
 
@@ -117,7 +125,7 @@
 }
 
 func (s *Service) connectRepo(ctx Ctx) (interface{}, error) {
-	return dal.New(), nil
+	return dal.New(ctx, s.cfg.GoosePostgresDir, s.cfg.Postgres)
 }
 
 func (s *Service) serveMetrics(ctx Ctx) error {
diff --git a/ms/auth/service_integration_test.go b/ms/auth/service_integration_test.go
index aea902a..c5db169 100644
--- a/ms/auth/service_integration_test.go
+++ b/ms/auth/service_integration_test.go
@@ -7,12 +7,14 @@ import (
 	"crypto/tls"
 	"fmt"
 	"net/http"
+	"runtime"
 	"strings"
 	"testing"
 
 	"github.com/golang/mock/gomock"
 	"github.com/parnurzeal/gorequest"
 	"github.com/powerman/check"
+	"github.com/powerman/pqx"
 	"golang.org/x/oauth2"
 	grpcpkg "google.golang.org/grpc"
 	"google.golang.org/grpc/codes"
@@ -47,6 +49,14 @@ func TestSmoke(tt *testing.T) {
 
 	s := &Service{cfg: cfg}
 
+	pc, _, _, _ := runtime.Caller(1)
+	suffix := runtime.FuncForPC(pc).Name()
+	suffix = suffix[:strings.LastIndex(suffix, ".")]
+	_, cleanup, err := pqx.EnsureTempDB(tLogger(*t), suffix, cfg.Postgres.Config)
+	t.Must(t.Nil(err))
+	defer cleanup()
+	cfg.Postgres.DBName += "_" + suffix // Assign to cfg and not s.cfg as a reminder: they are the same.
+
 	ctxStartup, cancel := context.WithTimeout(ctx, def.TestTimeout)
 	defer cancel()
 	ctxShutdown, shutdown := context.WithCancel(ctx)
@@ -55,9 +65,9 @@
 	defer func() {
 		shutdown()
 		t.Nil(<-errc, "RunServe")
-		// if s.repo != nil {
-		// 	s.repo.Close() // TODO
-		// }
+		if s.repo != nil {
+			s.repo.Close()
+		}
 	}()
 	t.Must(t.Nil(netx.WaitTCPPort(ctxStartup, cfg.Addr), "connect to gRPC service"))
 	t.Must(t.Nil(netx.WaitTCPPort(ctxStartup, cfg.AddrInt), "connect to internal gRPC service"))
diff --git a/pkg/cobrax/goose-postgres.go b/pkg/cobrax/goose-postgres.go
new file mode 100644
index 0000000..a670cd8
--- /dev/null
+++ b/pkg/cobrax/goose-postgres.go
@@ -0,0 +1,45 @@
+package cobrax
+
+import (
+	"context"
+	"fmt"
+	"strings"
+
+	goosepkg "github.com/powerman/goose/v2"
+	"github.com/spf13/cobra"
+
+	"github.com/powerman/go-monolith-example/pkg/def"
+	"github.com/powerman/go-monolith-example/pkg/migrate"
+)
+
+// GoosePostgresConfig contains configuration for the goose command.
+type GoosePostgresConfig struct {
+	Postgres         *def.PostgresConfig
+	GoosePostgresDir string
+}
+
+// NewGoosePostgresCmd creates a new goose command executed by run.
+func NewGoosePostgresCmd(ctx context.Context, goose *goosepkg.Instance, getCfg func() (*GoosePostgresConfig, error)) *cobra.Command {
+	cmd := &cobra.Command{
+		Use:   "goose-postgres",
+		Short: "Migrate PostgreSQL database schema",
+		Args:  gooseArgs,
+		RunE: func(cmd *cobra.Command, args []string) error {
+			gooseCmd := strings.Join(args, " ")
+
+			cfg, err := getCfg()
+			if err != nil {
+				return fmt.Errorf("failed to get config: %w", err)
+			}
+
+			connector := &migrate.Postgres{PostgresConfig: cfg.Postgres}
+			err = migrate.Run(ctx, goose, cfg.GoosePostgresDir, gooseCmd, connector)
+			if err != nil {
+				return fmt.Errorf("failed to run goose %s: %w", gooseCmd, err)
+			}
+			return nil
+		},
+	}
+	cmd.SetUsageTemplate(gooseUsageTemplate)
+	return cmd
+}
diff --git a/pkg/def/postgres.go b/pkg/def/postgres.go
new file mode 100644
index 0000000..d0f47ea
--- /dev/null
+++ b/pkg/def/postgres.go
@@ -0,0 +1,72 @@
+package def
+
+import (
+	"context"
+	"database/sql"
+	"time"
+
+	"github.com/powerman/pqx"
+)
+
+// Default timeouts for PostgreSQL.
+const (
+	PostgresDefaultStatementTimeout                = 3 * time.Second
+	PostgresDefaultLockTimeout                     = 3 * time.Second
+	PostgresDefaultIdleInTransactionSessionTimeout = 10 * time.Second
+)
+
+// PostgresConfig describes connection parameters for github.com/lib/pq.
+type PostgresConfig struct {
+	pqx.Config
+}
+
+// NewPostgresConfig creates a new default config for PostgreSQL.
+func NewPostgresConfig(cfg pqx.Config) *PostgresConfig {
+	c := (&PostgresConfig{Config: cfg}).Clone()
+	// Enforce SSL.
+	if c.SSLMode != pqx.SSLVerifyFull {
+		c.SSLMode = pqx.SSLVerifyCA
+	}
+	// Extra protection in case secure schema usage pattern isn't used on this server
+	// https://www.postgresql.org/docs/11/ddl-schemas.html#DDL-SCHEMAS-PATTERNS.
+	c.SearchPath = `"$user"`
+	// In modern PostgreSQL serializable is fast enough, use it by default.
+	if c.DefaultTransactionIsolation == sql.LevelDefault {
+		c.DefaultTransactionIsolation = sql.LevelSerializable
+	}
+	// Sane timeout defaults:
+	if c.StatementTimeout == 0 {
+		c.StatementTimeout = PostgresDefaultStatementTimeout
+	}
+	if c.LockTimeout == 0 {
+		c.LockTimeout = PostgresDefaultLockTimeout
+	}
+	if c.IdleInTransactionSessionTimeout == 0 {
+		c.IdleInTransactionSessionTimeout = PostgresDefaultIdleInTransactionSessionTimeout
+	}
+	return c
+}
+
+// Clone returns a deep copy.
+func (c *PostgresConfig) Clone() *PostgresConfig {
+	clone := *c
+	clone.Other = make(map[string]string, len(c.Other))
+	for k, v := range c.Other {
+		clone.Other[k] = v
+	}
+	return &clone
+}
+
+// UpdateConnectTimeout updates c according to ctx.Deadline if
+// c.ConnectTimeout isn't set or is larger than ctx.Deadline.
+func (c *PostgresConfig) UpdateConnectTimeout(ctx context.Context) error {
+	if deadline, ok := ctx.Deadline(); ok {
+		if c.ConnectTimeout == 0 || time.Until(deadline) < c.ConnectTimeout {
+			c.ConnectTimeout = time.Until(deadline)
+			if c.ConnectTimeout <= 0 {
+				return context.DeadlineExceeded
+			}
+		}
+	}
+	return nil
+}
diff --git a/pkg/migrate/goose-mysql.go b/pkg/migrate/goose-mysql.go
index 782434d..3fcb553 100644
--- a/pkg/migrate/goose-mysql.go
+++ b/pkg/migrate/goose-mysql.go
@@ -71,6 +71,8 @@ func (c *MySQL) Connect(ctx Ctx, goose *goosepkg.Instance) (db *sql.DB, ver *sch
 		err = nextErr
 	}
 
+	gooseMu.Lock()
+	defer gooseMu.Unlock()
 	must.NoErr(goose.SetDialect("mysql"))
 	_, _ = goose.EnsureDBVersion(db) // Race on CREATE TABLE, so allowed to fail.
 
diff --git a/pkg/migrate/goose-postgres.go b/pkg/migrate/goose-postgres.go
new file mode 100644
index 0000000..d88eb00
--- /dev/null
+++ b/pkg/migrate/goose-postgres.go
@@ -0,0 +1,81 @@
+package migrate
+
+import (
+	"context"
+	"database/sql"
+	"errors"
+	"fmt"
+	"time"
+
+	goosepkg "github.com/powerman/goose/v2"
+	"github.com/powerman/must"
+
+	// Driver.
+	_ "github.com/powerman/narada4d/protocol/goose-postgres"
+	"github.com/powerman/narada4d/schemaver"
+	"github.com/powerman/structlog"
+
+	"github.com/powerman/go-monolith-example/pkg/def"
+)
+
+const (
+	postgresStatementTimeout               = time.Second
+	postgresLockTimeout                    = time.Second
+	postgresIdleInTransactionSessionTimeout = time.Second
+)
+
+// Postgres implements Connector interface.
+type Postgres struct {
+	*def.PostgresConfig
+}
+
+// Connect to PostgreSQL. Will initialize schemaver if needed.
+func (c *Postgres) Connect(ctx Ctx, goose *goosepkg.Instance) (db *sql.DB, ver *schemaver.SchemaVer, err error) {
+	log := structlog.FromContext(ctx, nil)
+
+	cfg := c.PostgresConfig.Clone()
+	cfg.DefaultTransactionIsolation = sql.LevelDefault
+	cfg.StatementTimeout = postgresStatementTimeout
+	cfg.LockTimeout = postgresLockTimeout
+	cfg.IdleInTransactionSessionTimeout = postgresIdleInTransactionSessionTimeout
+	err = cfg.UpdateConnectTimeout(ctx)
+	if err != nil {
+		return nil, nil, err
+	}
+
+	db, err = sql.Open("postgres", cfg.FormatDSN())
+	if err != nil {
+		return nil, nil, fmt.Errorf("sql.Open: %w", err)
+	}
+	defer func() {
+		if err != nil {
+			log.WarnIfFail(db.Close)
+		}
+	}()
+
+	if cfg.ConnectTimeout != 0 {
+		var cancel func()
+		ctx, cancel = context.WithTimeout(ctx, cfg.ConnectTimeout)
+		defer cancel()
+	}
+	err = db.PingContext(ctx)
+	for err != nil {
+		nextErr := db.PingContext(ctx)
+		if errors.Is(nextErr, context.DeadlineExceeded) || errors.Is(nextErr, context.Canceled) {
+			return nil, nil, fmt.Errorf("db.Ping: %w", err)
+		}
+		err = nextErr
+	}
+
+	gooseMu.Lock()
+	defer gooseMu.Unlock()
+	must.NoErr(goose.SetDialect("postgres"))
+	_, _ = goose.EnsureDBVersion(db) // Race on CREATE TABLE, so allowed to fail.
+
+	ver, err = schemaver.NewAt("goose-" + cfg.FormatURL())
+	if err != nil {
+		return nil, nil, err
+	}
+
+	return db, ver, nil
+}
diff --git a/pkg/migrate/goose.go b/pkg/migrate/goose.go
index e72bd56..6af0999 100644
--- a/pkg/migrate/goose.go
+++ b/pkg/migrate/goose.go
@@ -7,6 +7,7 @@ import (
 	"errors"
 	"fmt"
 	"strings"
+	"sync"
 
 	goosepkg "github.com/powerman/goose/v2"
 	"github.com/powerman/narada4d/schemaver"
@@ -18,6 +19,11 @@ type Ctx = context.Context
 var errSelfCheck = errors.New("unexpected db schema version")
 
+// Tests often run in parallel using the same goose instance and may trigger
+// the -race detector on SetDialect, so use this mutex to work around it.
+//nolint:gochecknoglobals // By design.
+var gooseMu sync.Mutex
+
 // Connector provides a way to connect to any database with schemaver.
 type Connector interface {
 	Connect(Ctx, *goosepkg.Instance) (*sql.DB, *schemaver.SchemaVer, error)
 }
diff --git a/pkg/migrate/testing-postgres.go b/pkg/migrate/testing-postgres.go
new file mode 100644
index 0000000..78f3f5e
--- /dev/null
+++ b/pkg/migrate/testing-postgres.go
@@ -0,0 +1,28 @@
+package migrate
+
+import (
+	"runtime"
+	"strings"
+
+	"github.com/powerman/check"
+	goosepkg "github.com/powerman/goose/v2"
+	"github.com/powerman/pqx"
+
+	"github.com/powerman/go-monolith-example/pkg/def"
+)
+
+// PostgresUpDownTest creates a temporary database, tests given migrations, and removes
+// the temporary database.
+func PostgresUpDownTest(t *check.C, ctx Ctx, goose *goosepkg.Instance, dir string, cfg *def.PostgresConfig) {
+	pc, _, _, _ := runtime.Caller(1)
+	suffix := runtime.FuncForPC(pc).Name()
+	suffix = suffix[:strings.LastIndex(suffix, ".")]
+
+	_, cleanup, err := pqx.EnsureTempDB(tLogger(*t), suffix, cfg.Config)
+	t.Must(t.Nil(err))
+	defer cleanup()
+
+	connector := &Postgres{PostgresConfig: cfg.Clone()}
+	connector.PostgresConfig.DBName += "_" + suffix
+	upDownTest(t, ctx, goose, dir, connector)
+}
diff --git a/pkg/repo/repo-postgres.go b/pkg/repo/repo-postgres.go
new file mode 100644
index 0000000..f527fb1
--- /dev/null
+++ b/pkg/repo/repo-postgres.go
@@ -0,0 +1,97 @@
+package repo
+
+import (
+	"context"
+	"database/sql"
+	"errors"
+	"fmt"
+	"strconv"
+
+	"github.com/jmoiron/sqlx"
+	"github.com/lib/pq"
+	goosepkg "github.com/powerman/goose/v2"
+	"github.com/powerman/pqx"
+	"github.com/powerman/sqlxx"
+	"github.com/powerman/structlog"
+
+	"github.com/powerman/go-monolith-example/pkg/def"
+	"github.com/powerman/go-monolith-example/pkg/migrate"
+)
+
+// Error names.
+const (
+	PostgresUniqueViolation     = "unique_violation"
+	PostgresForeignKeyViolation = "foreign_key_violation"
+)
+
+// PostgresErrName returns true if err is a PostgreSQL error with the given name.
+func PostgresErrName(err error, name string) bool {
+	pqErr := new(pq.Error)
+	return errors.As(err, &pqErr) && pqErr.Code.Name() == name
+}
+
+// PostgresConfig contains repo configuration.
+type PostgresConfig struct {
+	Postgres         *def.PostgresConfig
+	GoosePostgresDir string
+	SchemaVersion    int64
+	Metric           Metrics
+	ReturnErrs       []error // List of app.Err… returned by DAL methods.
+}
+
+// NewPostgres creates and returns a new Repo.
+// It will also run required DB migrations and connect to the DB.
+func NewPostgres(ctx Ctx, goose *goosepkg.Instance, cfg PostgresConfig) (_ *Repo, err error) {
+	log := structlog.FromContext(ctx, nil)
+
+	connector := &migrate.Postgres{PostgresConfig: cfg.Postgres}
+	schemaVer, err := migrate.UpTo(ctx, goose, cfg.GoosePostgresDir, cfg.SchemaVersion, connector)
+	if err != nil {
+		return nil, fmt.Errorf("migration: %w", err)
+	}
+	defer func() {
+		if err != nil {
+			log.WarnIfFail(schemaVer.Close)
+		}
+	}()
+
+	err = cfg.Postgres.UpdateConnectTimeout(ctx)
+	if err != nil {
+		return nil, err
+	}
+
+	db, err := sql.Open("postgres", cfg.Postgres.FormatDSN())
+	if err != nil {
+		return nil, fmt.Errorf("sql.Open: %w", err)
+	}
+	defer func() {
+		if err != nil {
+			log.WarnIfFail(db.Close)
+		}
+	}()
+
+	if cfg.Postgres.ConnectTimeout != 0 {
+		var cancel func()
+		ctx, cancel = context.WithTimeout(ctx, cfg.Postgres.ConnectTimeout)
+		defer cancel()
+	}
+	err = db.PingContext(ctx)
+	for err != nil {
+		nextErr := db.PingContext(ctx)
+		if errors.Is(nextErr, context.DeadlineExceeded) || errors.Is(nextErr, context.Canceled) {
+			return nil, fmt.Errorf("db.Ping: %w", err)
+		}
+		err = nextErr
+	}
+
+	r := &Repo{
+		DB:            sqlxx.NewDB(sqlx.NewDb(db, "postgres")),
+		SchemaVer:     schemaVer,
+		schemaVersion: strconv.Itoa(int(cfg.SchemaVersion)),
+		returnErrs:    cfg.ReturnErrs,
+		metric:        cfg.Metric,
+		log:           log,
+		serialize:     pqx.Serialize,
+	}
+	return r, nil
+}
diff --git a/pkg/repo/repo.go b/pkg/repo/repo.go
index 1e75e76..c0119ac 100644
--- a/pkg/repo/repo.go
+++ b/pkg/repo/repo.go
@@ -8,6 +8,7 @@ import (
 	"fmt"
 
 	"github.com/go-sql-driver/mysql"
+	"github.com/lib/pq"
 	"github.com/powerman/narada4d/schemaver"
 	"github.com/powerman/sqlxx"
 	"github.com/powerman/structlog"
@@ -48,6 +49,7 @@ func (r *Repo) strict(err error) error {
 	switch {
 	case err == nil:
 	case errors.As(err, new(*mysql.MySQLError)):
+	case errors.As(err, new(*pq.Error)):
 	case errors.Is(err, ErrSchemaVer):
 	case errors.Is(err, sql.ErrNoRows):
 	case errors.Is(err, context.Canceled):
diff --git a/scripts/postgres-setup b/scripts/postgres-setup
new file mode 100755
index 0000000..0397637
--- /dev/null
+++ b/scripts/postgres-setup
@@ -0,0 +1,23 @@
+#!/bin/sh
+# Enforce secure schema usage pattern https://www.postgresql.org/docs/11/ddl-schemas.html#DDL-SCHEMAS-PATTERNS
+cat <&2
+ exit 1
+ fi
+ pass=$(printenv "${pfx}_PASS" | sed "s/'/''/g")
+ cat <