ci: use reusable workflows to delegate Jobs
boromir674 committed Nov 13, 2023
1 parent c93a86b commit fb70d19
Showing 1 changed file with 4 additions and 85 deletions.
89 changes: 4 additions & 85 deletions .github/workflows/test.yaml
@@ -446,57 +446,20 @@ jobs:
distro_name: "artificial_artwork"
distro_version: "${{ needs.test_suite.outputs.SEMVER_PIP_FORMAT }}"
should_trigger: ${{ needs.check_which_git_branch_we_are_on.outputs.AUTOMATED_DEPLOY == 'true' }}
# should_trigger: true
pypi_env: "${{ needs.check_which_git_branch_we_are_on.outputs.ENVIRONMENT_NAME }}"
artifacts_path: ${{ needs.test_suite.outputs.ARTIFACTS }}
require_wheel: true
allow_existing: true
secrets:
# This magically works, and the environment secret will be loaded:
# it feels odd to pass a secret here, because it looks as if it comes from the
# repository secrets rather than from the environment. But it doesn't!
# (A sketch of the called workflow's possible declaration follows below.)
TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
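# A minimal sketch (an assumption, not the actual file) of what the called
# reusable workflow (pypi_env.yml) would declare: it accepts the environment
# name as an input and binds its own job to that GitHub Environment, which is
# consistent with the observation above that the environment secret gets
# loaded. Job and step names here are illustrative.
#
#   on:
#     workflow_call:
#       inputs:
#         pypi_env:
#           required: true
#           type: string
#       secrets:
#         TWINE_PASSWORD:
#           required: true
#
#   jobs:
#     pypi_upload:
#       runs-on: ubuntu-latest
#       environment: ${{ inputs.pypi_env }}
#       steps:
#         - run: echo "twine upload runs here, with TWINE_PASSWORD in scope"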

# if: startsWith(github.event.ref, 'refs/tags/v') && needs.check_which_git_branch_we_are_on.outputs.AUTOMATED_DEPLOY
# uses: boromir674/automated-workflows/.github/workflows/pypi_env.yml@test
# with:
# distro_name: "artificial_artwork"
# distro_version: ${{ needs.test_suite.outputs.SEMVER_PIP_FORMAT }}
# should_trigger: true
# # does not affect the job, but is used to name the artifacts folder
# artifacts_path: "downloaded-artifacts"
# pypi_env: ${{ needs.check_which_git_branch_we_are_on.outputs.ENVIRONMENT_NAME }}
# # optional
# dist_folder: "dist"

### PYPI UPLOAD JOB ###
# pypi_publish:
# needs: [test_suite, check_which_git_branch_we_are_on]
# # needs: check_which_git_branch_we_are_on
# runs-on: ubuntu-latest
# # run only if we are on a tag starting with "v" and on the master or release branch
# if: ${{ startsWith(github.event.ref, 'refs/tags/v') && needs.check_which_git_branch_we_are_on.outputs.AUTOMATED_DEPLOY == 'true' }}
# environment:
# name: ${{ needs.check_which_git_branch_we_are_on.outputs.ENVIRONMENT_NAME }}
# env:
# DIST_DIR: dist
# DISTRO_SEMVER: ${{ needs.test_suite.outputs.SEMVER_PIP_FORMAT }} # env var required by the deploy script (tox -e deploy)
# TWINE_USERNAME: ${{ vars.TWINE_USERNAME }}
# TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
# PYPI_SERVER: ${{ vars.PYPI_SERVER }}
# steps:
# - uses: actions/checkout@v3
# - name: Download Source & Wheel distributions
# uses: actions/download-artifact@v3
# with:
# path: downloaded-artifacts
# - name: Get Publishable files from the Artifacts
# run: |
# TAG="${GITHUB_REF_NAME}"
# TAG_SEM_VER="${TAG:1}" # remove the first character (v)

# # Make sure the distro semver ($DISTRO_SEMVER), which is identical to the
# # version string in the source code when its format is Major.Minor.Patch
# # (and slightly different when its format is M.m.p-prerelease), is likewise
# # either identical to or slightly different from the git tag ($TAG_SEM_VER)

# # For now, only the Major.Minor.Patch parts MUST match
# # (a sketch of such a check follows this step)

# # Extract Major.Minor.Patch parts from DISTRO_SEMVER and TAG_SEM_VER
@@ -513,50 +476,6 @@
# exit 1
# fi
# echo "PACKAGE_DIST_VERSION=$DISTRO_SEMVER" >> $GITHUB_ENV
# - run: mkdir ${DIST_DIR}
# - run: |
# # Get Source Distribution (tar.gz of source code)
# source_distributions=$(find downloaded-artifacts -type f -name artificial_artwork*.tar.gz)
# source_distributions_array=($source_distributions)
# source_distribution=${source_distributions_array[0]} # a *.tar.gz file path
# # Extract the base name (without the path)
# source_distribution_name=$(basename "$source_distribution")
# # Check if all source distribution (.tar.gz) files have the same name
# for file in "${source_distributions_array[@]}"; do
# if [ "$source_distribution_name" != "$(basename "$file")" ]; then
# echo "Error: Not all Source Distribution .tar.gz files have the same name!"
# exit 1
# fi
# done
# echo " -- Will Upload : $source_distribution_name -- "
# echo "source_distribution=$source_distribution" >> $GITHUB_ENV
# cp "$source_distribution" ${DIST_DIR}

# - run: |
# # Get all built Wheels and copy to dist folder
# for f in `find downloaded-artifacts -type f -name artificial_artwork*.whl`; do
# echo "F: $f";
# # TODO: check for duplicates, which would mean our build matrix produces the same wheel; building distinct wheels would need an interpreter/compiler other than CPython, such as PyPy or Cython, etc. (a sketch of such a check follows this step)
# cp $f ${DIST_DIR}
# done
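# # A possible shape for that duplicate check (a sketch, not part of the
# # original job): fail if two matrix builds produce a wheel with the same name
# #   declare -A seen
# #   for f in $(find downloaded-artifacts -type f -name "artificial_artwork*.whl"); do
# #     name=$(basename "$f")
# #     if [ -n "${seen[$name]:-}" ]; then
# #       echo "Error: duplicate wheel $name produced by the build matrix"
# #       exit 1
# #     fi
# #     seen[$name]=1
# #   done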
# - name: Install Dependencies
# run: pip install tox==3.28

# - run: echo "Publishing $PACKAGE_DIST_VERSION to $PYPI_SERVER PyPI"

# - name: Publish to PyPI
# # env:
# # TWINE_USERNAME: ${{ vars.TWINE_USERNAME }}
# # TWINE_PASSWORD: ${{ secrets.TWINE_PASSWORD }}
# # PYPI_SERVER: ${{ vars.PYPI_SERVER }}
# # run: tox -vv -s false -e deploy
# run: tox -vv -s false -e deploy -- upload --non-interactive --skip-existing

# # runs: twine upload --non-interactive --repository testpypi --skip-existing ./dist/PY_PACKAGE-:Version.tar.gz --verbose

# # runs: twine upload --non-interactive --skip-existing --repository testpypi ./dist/PY_PACKAGE-:Version.tar.gz --verbose

# - run: echo "Published :\)"


## AUTOMATED DOCKER BUILD and PUBLISH ON DOCKERHUB ##
