diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index d3dc729..fe626eb 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,45 +1,53 @@ // For format details, see https://aka.ms/devcontainer.json. For config options, see the // README at: https://github.com/devcontainers/templates/tree/main/src/ubuntu { - "name": "Ubuntu", - // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile - "build": { - "dockerfile": "Dockerfile", - "context": "..", - "args": { - "DOCKER_GID": "${env:DOCKER_GID:}" - } - }, - "mounts": [ - "source=${env:HOME}${env:USERPROFILE}/.aws,target=/home/vscode/.aws,type=bind", - "source=${env:HOME}${env:USERPROFILE}/.ssh,target=/home/vscode/.ssh,type=bind", - "source=${env:HOME}${env:USERPROFILE}/.gnupg,target=/home/vscode/.gnupg,type=bind", - "source=${env:HOME}${env:USERPROFILE}/.npmrc,target=/home/vscode/.npmrc,type=bind" - ], - "containerUser": "vscode", - "remoteEnv": { "LOCAL_WORKSPACE_FOLDER": "${localWorkspaceFolder}" }, - "postAttachCommand": "docker build -f /workspaces/eps-common-workflows/dockerfiles/nhsd-git-secrets.dockerfile -t git-secrets . && pre-commit install --install-hooks -f", - "features": { - "ghcr.io/devcontainers/features/docker-outside-of-docker:1": { - "version": "latest", - "moby": "true", - "installDockerBuildx": "true" - } - }, - "customizations": { - "vscode": { - "extensions": [ - "AmazonWebServices.aws-toolkit-vscode", - "redhat.vscode-yaml", - "eamodio.gitlens", - "github.vscode-pull-request-github", - "streetsidesoftware.code-spell-checker", - "timonwong.shellcheck", - "github.vscode-github-actions" - ], - "settings": { - "cSpell.words": ["fhir", "Formik", "pino", "serialisation"] - } + "name": "Ubuntu", + // Or use a Dockerfile or Docker Compose file. More info: https://containers.dev/guide/dockerfile + "build": { + "dockerfile": "Dockerfile", + "context": "..", + "args": { + "DOCKER_GID": "${env:DOCKER_GID:}" + } + }, + "mounts": [ + "source=${env:HOME}${env:USERPROFILE}/.aws,target=/home/vscode/.aws,type=bind", + "source=${env:HOME}${env:USERPROFILE}/.ssh,target=/home/vscode/.ssh,type=bind", + "source=${env:HOME}${env:USERPROFILE}/.gnupg,target=/home/vscode/.gnupg,type=bind", + "source=${env:HOME}${env:USERPROFILE}/.npmrc,target=/home/vscode/.npmrc,type=bind" + ], + "containerUser": "vscode", + "remoteEnv": { + "LOCAL_WORKSPACE_FOLDER": "${localWorkspaceFolder}" + }, + "postAttachCommand": "docker build -f /workspaces/eps-common-workflows/dockerfiles/nhsd-git-secrets.dockerfile -t git-secrets . && pre-commit install --install-hooks -f", + "features": { + "ghcr.io/devcontainers/features/github-cli:1": {}, + "ghcr.io/devcontainers/features/docker-outside-of-docker:1": { + "version": "latest", + "moby": "true", + "installDockerBuildx": "true" + } + }, + "customizations": { + "vscode": { + "extensions": [ + "AmazonWebServices.aws-toolkit-vscode", + "redhat.vscode-yaml", + "eamodio.gitlens", + "github.vscode-pull-request-github", + "streetsidesoftware.code-spell-checker", + "timonwong.shellcheck", + "github.vscode-github-actions" + ], + "settings": { + "cSpell.words": [ + "fhir", + "Formik", + "pino", + "serialisation" + ] } } } +} diff --git a/.gitallowed b/.gitallowed index e7593cf..462bb67 100644 --- a/.gitallowed +++ b/.gitallowed @@ -1,3 +1,7 @@ token: ?"?\$\{\{\s*secrets\.GITHUB_TOKEN\s*\}\}"? 
 .*\.gitallowed.*
 id-token: write
+def __init__\(self, token: str, owner: str, repo: str
+token = os.environ\.get\("GH_TOKEN"\)
+self\.token = token
+password: \${{ secrets.GITHUB_TOKEN }}
diff --git a/README.md b/README.md
index bd91fa5..0cd4152 100644
--- a/README.md
+++ b/README.md
@@ -214,3 +214,39 @@ repos:
           - 'docker run -v "$LOCAL_WORKSPACE_FOLDER:/src" git-secrets --pre_commit_hook'
         language: system
 ```
+
+## Run all releases
+
+There are some scripts that can be used to trigger releases for all of our repos.
+The process is invoked by running `./scripts/run_all_release.sh`.
+This first authenticates to GitHub using the GitHub CLI to get a valid GitHub token.
+
+It then loops through an array of repos, asking for confirmation that you want to run the deployment for each one.
+
+For each repo you answer yes to, it calls the python script `scripts/trigger_release.py`.
+
+The python script will trigger the release.yml workflow for that repo and monitor the resulting run.
+When the run reaches one of the steps release_qa, release_ref or release_int, it will approve the release to that environment.
+Once the run reaches the release_prod step, the python script will exit.
+The python script will also exit if the GitHub run fails or is cancelled at any step, or if there is an unexpected response from GitHub (e.g. the user does not have permission to approve a deployment).
+When the python script finishes, it logs the run URL, the tag and a summary of what happened.
+Python logs go to the console and to a timestamped file in the logs folder.
+
+When all runs of the python script have finished, the shell script exits, showing a summary of failed and successful runs.
+
+If a run fails on a step BEFORE the tag_release step, and the failure is transient (e.g. quality checks fail to install dependencies because npm is down), then the whole release workflow can be rerun - either via this script or using the GitHub website.
+
+If a run fails on a step AFTER the tag_release step, and the failure is transient (e.g. a regression test failure), then the failing step can be re-run manually via the GitHub website.
+
+If a run fails due to a code or CloudFormation/CDK issue, then a new pull request should be created to fix it, merged to main, and a new release triggered.
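+
+For reference, the python script can also be run directly for a single repository, without the interactive shell script. The repository below is illustrative, and `release.yml` and `main` are the script defaults, so both flags can be omitted:
+
+```bash
+# get a token from the GitHub CLI, as run_all_release.sh does
+export GH_TOKEN=$(gh auth token)
+
+poetry run python3 scripts/trigger_release.py NHSDigital/eps-vpc-resources \
+  --workflow release.yml \
+  --branch main
+```
diff --git a/poetry.lock b/poetry.lock
index ea77425..74e62ec 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,5 +1,17 @@
 # This file is automatically @generated by Poetry 2.2.1 and should not be changed by hand.
 
+[[package]]
+name = "certifi"
+version = "2025.11.12"
+description = "Python package for providing Mozilla's CA Bundle."
+optional = false
+python-versions = ">=3.7"
+groups = ["main"]
+files = [
+    {file = "certifi-2025.11.12-py3-none-any.whl", hash = "sha256:97de8790030bbd5c2d96b7ec782fc2f7820ef8dba6db909ccf95449f2d062d4b"},
+    {file = "certifi-2025.11.12.tar.gz", hash = "sha256:d8ab5478f2ecd78af242878415affce761ca6bc54a22a27e026d7c25357c3316"},
+]
+
 [[package]]
 name = "cfgv"
 version = "3.4.0"
@@ -12,6 +24,129 @@ files = [
     {file = "cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560"},
 ]
 
+[[package]]
+name = "charset-normalizer"
+version = "3.4.4"
+description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet."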
+optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "charset_normalizer-3.4.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e824f1492727fa856dd6eda4f7cee25f8518a12f3c4a56a74e8095695089cf6d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4bd5d4137d500351a30687c2d3971758aac9a19208fc110ccb9d7188fbe709e8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:027f6de494925c0ab2a55eab46ae5129951638a49a34d87f4c3eda90f696b4ad"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f820802628d2694cb7e56db99213f930856014862f3fd943d290ea8438d07ca8"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:798d75d81754988d2565bff1b97ba5a44411867c0cf32b77a7e8f8d84796b10d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9d1bb833febdff5c8927f922386db610b49db6e0d4f4ee29601d71e7c2694313"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:9cd98cdc06614a2f768d2b7286d66805f94c48cde050acdbbb7db2600ab3197e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:077fbb858e903c73f6c9db43374fd213b0b6a778106bc7032446a8e8b5b38b93"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:244bfb999c71b35de57821b8ea746b24e863398194a4014e4c76adc2bbdfeff0"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:64b55f9dce520635f018f907ff1b0df1fdc31f2795a922fb49dd14fbcdf48c84"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:faa3a41b2b66b6e50f84ae4a68c64fcd0c44355741c6374813a800cd6695db9e"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:6515f3182dbe4ea06ced2d9e8666d97b46ef4c75e326b79bb624110f122551db"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:cc00f04ed596e9dc0da42ed17ac5e596c6ccba999ba6bd92b0e0aef2f170f2d6"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win32.whl", hash = "sha256:f34be2938726fc13801220747472850852fe6b1ea75869a048d6f896838c896f"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_amd64.whl", hash = "sha256:a61900df84c667873b292c3de315a786dd8dac506704dea57bc957bd31e22c7d"}, + {file = "charset_normalizer-3.4.4-cp310-cp310-win_arm64.whl", hash = "sha256:cead0978fc57397645f12578bfd2d5ea9138ea0fac82b2f63f7f7c6877986a69"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:6e1fcf0720908f200cd21aa4e6750a48ff6ce4afe7ff5a79a90d5ed8a08296f8"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5f819d5fe9234f9f82d75bdfa9aef3a3d72c4d24a6e57aeaebba32a704553aa0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:a59cb51917aa591b1c4e6a43c132f0cdc3c76dbad6155df4e28ee626cc77a0a3"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = 
"sha256:8ef3c867360f88ac904fd3f5e1f902f13307af9052646963ee08ff4f131adafc"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d9e45d7faa48ee908174d8fe84854479ef838fc6a705c9315372eacbc2f02897"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:840c25fb618a231545cbab0564a799f101b63b9901f2569faecd6b222ac72381"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ca5862d5b3928c4940729dacc329aa9102900382fea192fc5e52eb69d6093815"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d9c7f57c3d666a53421049053eaacdd14bbd0a528e2186fcb2e672effd053bb0"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:277e970e750505ed74c832b4bf75dac7476262ee2a013f5574dd49075879e161"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:31fd66405eaf47bb62e8cd575dc621c56c668f27d46a61d975a249930dd5e2a4"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:0d3d8f15c07f86e9ff82319b3d9ef6f4bf907608f53fe9d92b28ea9ae3d1fd89"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9f7fcd74d410a36883701fafa2482a6af2ff5ba96b9a620e9e0721e28ead5569"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ebf3e58c7ec8a8bed6d66a75d7fb37b55e5015b03ceae72a8e7c74495551e224"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win32.whl", hash = "sha256:eecbc200c7fd5ddb9a7f16c7decb07b566c29fa2161a16cf67b8d068bd21690a"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_amd64.whl", hash = "sha256:5ae497466c7901d54b639cf42d5b8c1b6a4fead55215500d2f486d34db48d016"}, + {file = "charset_normalizer-3.4.4-cp311-cp311-win_arm64.whl", hash = "sha256:65e2befcd84bc6f37095f5961e68a6f077bf44946771354a28ad434c2cce0ae1"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525"}, + {file = "charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = 
"sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14"}, + {file = "charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c"}, + {file = "charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ce8a0633f41a967713a59c4139d29110c07e826d131a316b50ce11b1d79b4f84"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:eaabd426fe94daf8fd157c32e571c85cb12e66692f15516a83a03264b08d06c3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c4ef880e27901b6cc782f1b95f82da9313c0eb95c3af699103088fa0ac3ce9ac"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2aaba3b0819274cc41757a1da876f810a3e4d7b6eb25699253a4effef9e8e4af"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:778d2e08eda00f4256d7f672ca9fef386071c9202f5e4607920b86d7803387f2"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f155a433c2ec037d4e8df17d18922c3a0d9b3232a396690f17175d2946f0218d"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a8bf8d0f749c5757af2142fe7903a9df1d2e8aa3841559b2bad34b08d0e2bcf3"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:194f08cbb32dc406d6e1aea671a68be0823673db2832b38405deba2fb0d88f63"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_armv7l.whl", hash = "sha256:6aee717dcfead04c6eb1ce3bd29ac1e22663cdea57f943c87d1eab9a025438d7"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:cd4b7ca9984e5e7985c12bc60a6f173f3c958eae74f3ef6624bb6b26e2abbae4"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_riscv64.whl", hash = "sha256:b7cf1017d601aa35e6bb650b6ad28652c9cd78ee6caff19f3c28d03e1c80acbf"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:e912091979546adf63357d7e2ccff9b44f026c075aeaf25a52d0e95ad2281074"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:5cb4d72eea50c8868f5288b7f7f33ed276118325c1dfd3957089f6b519e1382a"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win32.whl", hash = "sha256:837c2ce8c5a65a2035be9b3569c684358dfbf109fd3b6969630a87535495ceaa"}, + {file = "charset_normalizer-3.4.4-cp38-cp38-win_amd64.whl", hash = "sha256:44c2a8734b333e0578090c4cd6b16f275e07aa6614ca8715e6c038e865e70576"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:a9768c477b9d7bd54bc0c86dbaebdec6f03306675526c9927c0e8a04e8f94af9"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1bee1e43c28aa63cb16e5c14e582580546b08e535299b8b6158a7c9c768a1f3d"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:fd44c878ea55ba351104cb93cc85e74916eb8fa440ca7903e57575e97394f608"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:0f04b14ffe5fdc8c4933862d8306109a2c51e0704acfa35d51598eb45a1e89fc"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:cd09d08005f958f370f539f186d10aec3377d55b9eeb0d796025d4886119d76e"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4fe7859a4e3e8457458e2ff592f15ccb02f3da787fcd31e0183879c3ad4692a1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = 
"sha256:fa09f53c465e532f4d3db095e0c55b615f010ad81803d383195b6b5ca6cbf5f3"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:7fa17817dc5625de8a027cb8b26d9fefa3ea28c8253929b8d6649e705d2835b6"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:5947809c8a2417be3267efc979c47d76a079758166f7d43ef5ae8e9f92751f88"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:4902828217069c3c5c71094537a8e623f5d097858ac6ca8252f7b4d10b7560f1"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_riscv64.whl", hash = "sha256:7c308f7e26e4363d79df40ca5b2be1c6ba9f02bdbccfed5abddb7859a6ce72cf"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:2c9d3c380143a1fedbff95a312aa798578371eb29da42106a29019368a475318"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:cb01158d8b88ee68f15949894ccc6712278243d95f344770fa7593fa2d94410c"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win32.whl", hash = "sha256:2677acec1a2f8ef614c6888b5b4ae4060cc184174a938ed4e8ef690e15d3e505"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_amd64.whl", hash = "sha256:f8e160feb2aed042cd657a72acc0b481212ed28b1b9a95c0cee1621b524e1966"}, + {file = "charset_normalizer-3.4.4-cp39-cp39-win_arm64.whl", hash = "sha256:b5d84d37db046c5ca74ee7bb47dd6cbc13f80665fdde3e8040bdd3fb015ecb50"}, + {file = "charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f"}, + {file = "charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a"}, +] + [[package]] name = "distlib" version = "0.4.0" @@ -51,6 +186,21 @@ files = [ [package.extras] license = ["ukkonen"] +[[package]] +name = "idna" +version = "3.11" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea"}, + {file = "idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + [[package]] name = "nodeenv" version = "1.9.1" @@ -218,6 +368,46 @@ files = [ {file = "pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f"}, ] +[[package]] +name = "requests" +version = "2.32.5" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6"}, + {file = "requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset_normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "urllib3" +version = "2.5.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc"}, + {file = "urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + [[package]] name = "virtualenv" version = "20.35.3" @@ -254,4 +444,4 @@ files = [ [metadata] lock-version = "2.1" python-versions = "^3.14" -content-hash = "65888fe086520d278cfdba8bfe6bd23c0232d0c258e5c9cff9300a7aae092b02" +content-hash = "9ba842d3ea00f95c4480983baeddb6b8e26649e93a3759822609e5577f0127af" diff --git a/pyproject.toml b/pyproject.toml index 87cf178..29d06aa 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -18,6 +18,7 @@ repository = "https://github.com/NHSDigital/eps-common-workflows" [tool.poetry.dependencies] python = "^3.14" pre-commit = "^4.4.0" +requests = "^2.32.5" [tool.poetry.group.dev.dependencies] pip-licenses = "^5.0.0" diff --git a/scripts/run_all_release.sh b/scripts/run_all_release.sh new file mode 100755 index 0000000..254b291 --- /dev/null +++ b/scripts/run_all_release.sh @@ -0,0 +1,83 @@ +#!/usr/bin/env bash + +# this script runs the python script to trigger releases for multiple repositories +# it uses gh cli to authenticate and get the token + +if ! gh auth status &> /dev/null; then + gh auth login +fi + +# disable shellcheck as this var is needed in the python script +# shellcheck disable=SC2155 +export GH_TOKEN=$(gh auth token) + +repos=( + "NHSDigital/eps-prescription-tracker-ui" + "NHSDigital/prescriptionsforpatients" + "NHSDigital/eps-prescription-status-update-api" + "NHSDigital/eps-FHIR-validator-lambda" + "NHSDigital/eps-vpc-resources" + "NHSDigital/eps-aws-dashboards" + "NHSDigital/electronic-prescription-service-clinical-prescription-tracker" +) + +# Array to store repos that user wants to release +selected_repos=() + +# Ask user for each repo +for repo in "${repos[@]}"; do + read -r -p "Do you want to run the release for $repo? (y/n): " answer + if [[ "$answer" == "y" || "$answer" == "Y" ]]; then + selected_repos+=("$repo") + fi +done + +# Check if any repos were selected +if [ ${#selected_repos[@]} -eq 0 ]; then + echo "No repositories selected for release." + exit 0 +fi + +echo "" +echo "Starting releases for ${#selected_repos[@]} repository(ies)..." +echo "" + +# Array to store background process IDs +pids=() + +# Launch releases in parallel +for repo in "${selected_repos[@]}"; do + echo "Starting release for $repo..." + poetry run python3 scripts/trigger_release.py "$repo" & + pids+=($!) +done + +echo "" +echo "All releases triggered. Waiting for completion..." +echo "" + +# Wait for all background processes to complete and track their exit codes +failed_count=0 +success_count=0 + +for pid in "${pids[@]}"; do + if wait "$pid"; then + ((success_count++)) + else + ((failed_count++)) + fi +done + +echo "" +echo "========================================" +echo "All releases completed!" 
+echo "Successful: $success_count" +echo "Failed: $failed_count" +echo "========================================" + +# Exit with error if any releases failed +if [ $failed_count -gt 0 ]; then + exit 1 +fi + +exit 0 diff --git a/scripts/trigger_release.py b/scripts/trigger_release.py new file mode 100755 index 0000000..1ab9d53 --- /dev/null +++ b/scripts/trigger_release.py @@ -0,0 +1,483 @@ +#!/usr/bin/env python3 +""" +Script to trigger and monitor the GitHub release workflow. +Requires GH_TOKEN environment variable with permissions to trigger workflows. +""" + +import os +import sys +import time +import logging +from datetime import datetime, timedelta, timezone +from typing import Optional, Dict, Any +import requests + + +class GitHubWorkflowMonitor: + """Monitor and control GitHub Actions workflow runs.""" + + def __init__(self, token: str, owner: str, repo: str, logger: logging.Logger): + self.token = token + self.owner = owner + self.repo = repo + self.logger = logger + self.base_url = f"https://api.github.com/repos/{owner}/{repo}" + self.headers = { + "Accept": "application/vnd.github+json", + "Authorization": f"Bearer {token}", + "X-GitHub-Api-Version": "2022-11-28" + } + self.run_id: Optional[int] = None + self.run_url: Optional[str] = None + self.version_tag: Optional[str] = None + self.jobs_requiring_approval = [ + "release_ref", + "release_qa", + "release_int" + ] + self.approved_jobs = set() + self.completed_jobs = set() + self.last_status_time = time.time() + + def trigger_workflow( + self, workflow_file: str, branch: str = "main" + ) -> bool: + """Trigger the workflow dispatch event.""" + url = f"{self.base_url}/actions/workflows/{workflow_file}/dispatches" + data = { + "ref": branch + } + + self.logger.info( + f"šŸš€ Triggering workflow '{workflow_file}' on branch '{branch}' " + f"for repo '{self.owner}/{self.repo}'..." 
+ ) + response = requests.post(url, headers=self.headers, json=data) + + if response.status_code == 204: + self.logger.info("āœ… Workflow triggered successfully") + return True + else: + self.logger.error(f"āŒ Failed to trigger workflow: {response.status_code}") + self.logger.error(f"Response: {response.text}") + return False + + def get_latest_run( + self, workflow_file: str, minutes: int = 2 + ) -> Optional[Dict[Any, Any]]: + """Get the most recent workflow run started within the last N + minutes.""" + url = f"{self.base_url}/actions/workflows/{workflow_file}/runs" + params = { + "per_page": 10, + "status": "in_progress" + } + + response = requests.get(url, headers=self.headers, params=params) + + if response.status_code != 200: + self.logger.error(f"āŒ Failed to get workflow runs: {response.status_code}") + return None + + data = response.json() + cutoff_time = datetime.now(timezone.utc) - timedelta(minutes=minutes) + + for run in data.get("workflow_runs", []): + run_created = datetime.strptime( + run["created_at"], "%Y-%m-%dT%H:%M:%SZ" + ).replace(tzinfo=timezone.utc) + if run_created >= cutoff_time: + return run + + return None + + def get_run_details(self, run_id: int) -> Optional[Dict[Any, Any]]: + """Get details of a specific workflow run.""" + url = f"{self.base_url}/actions/runs/{run_id}" + response = requests.get(url, headers=self.headers) + + if response.status_code == 200: + return response.json() + return None + + def get_run_jobs(self, run_id: int) -> Optional[Dict[Any, Any]]: + """Get all jobs for a specific workflow run.""" + url = f"{self.base_url}/actions/runs/{run_id}/jobs" + response = requests.get(url, headers=self.headers) + + if response.status_code == 200: + return response.json() + return None + + def get_pending_deployments(self, run_id: int) -> Optional[list]: + """Get pending deployment reviews for a workflow run.""" + url = f"{self.base_url}/actions/runs/{run_id}/pending_deployments" + response = requests.get(url, headers=self.headers) + + if response.status_code == 200: + return response.json() + return None + + def approve_deployment(self, run_id: int, environment_ids: list) -> bool: + """Approve a pending deployment.""" + url = f"{self.base_url}/actions/runs/{run_id}/pending_deployments" + data = { + "environment_ids": environment_ids, + "state": "approved", + "comment": "Auto-approved by trigger_release.py script" + } + + response = requests.post(url, headers=self.headers, json=data) + + if response.status_code == 200: + return True + else: + self.logger.warning(f"āš ļø Failed to approve deployment: {response.status_code}") + self.logger.warning(f"Response: {response.text}") + return False + + def check_for_errors( + self, run_details: Dict[Any, Any], jobs_data: Dict[Any, Any] + ) -> Optional[str]: + """Check if any job has failed.""" + if run_details.get("conclusion") == "failure": + return "Workflow run failed" + + for job in jobs_data.get("jobs", []): + if job["conclusion"] == "failure": + return f"Job '{job['name']}' failed" + if job["conclusion"] == "cancelled": + return f"Job '{job['name']}' was cancelled" + + return None + + def monitor_and_approve(self) -> bool: + """Main monitoring loop.""" + self.logger.info(f"\nšŸ“Š Monitoring workflow run: {self.run_url}\n") + + tag_release_completed = False + + while True: + # Get current run details + run_details = self.get_run_details(self.run_id) + if not run_details: + self.logger.error("āŒ Failed to get run details") + return False + + jobs_data = self.get_run_jobs(self.run_id) + if not jobs_data: + 
self.logger.error("āŒ Failed to get jobs data") + return False + + # Check for errors + error = self.check_for_errors(run_details, jobs_data) + if error: + self.logger.error(f"\nāŒ ERROR: {error}") + self.print_summary(error) + return False + + # Track job statuses and detect changes + current_job_status = {} + for job in jobs_data.get("jobs", []): + job_name = job["name"] + job_status = job["status"] + job_conclusion = job.get("conclusion") + + current_job_status[job_name] = (job_status, job_conclusion) + + # Detect newly completed jobs + if (job_conclusion == "success" and + job_name not in self.completed_jobs): + self.completed_jobs.add(job_name) + self.logger.info(f"āœ… Job completed: {job_name}") + + # Check if tag_release completed + if job_name == "tag_release / tag_release" and not tag_release_completed: + tag_release_completed = True + # Try to extract version from subsequent + # jobs that use it + self._extract_version_from_jobs(jobs_data) + if self.version_tag: + self.logger.info(f"šŸ·ļø Version tag: {self.version_tag}") + + # Check for pending deployments + pending = self.get_pending_deployments(self.run_id) + if pending: + for deployment in pending: + env_name = deployment["environment"]["name"] + env_id = deployment["environment"]["id"] + + # Check if this is release_prod + if env_name == "prod": + self.logger.info( + f"\nšŸ›‘ Reached production deployment for " + f"environment '{env_name}'" + ) + self.print_summary( + "Stopped at production deployment" + ) + return True + + # Auto-approve other environments + job_name = f"release_{env_name}" + if (job_name in self.jobs_requiring_approval and + job_name not in self.approved_jobs): + self.logger.info( + f"āœ“ Approving deployment to environment " + f"'{env_name}'..." + ) + if self.approve_deployment(self.run_id, [env_id]): + self.approved_jobs.add(job_name) + self.logger.info(f"āœ… Approved: {job_name}") + else: + self.logger.warning(f"āš ļø Failed to approve: {job_name}") + + # Check if workflow is complete + if run_details.get("status") == "completed": + if run_details.get("conclusion") == "success": + self.logger.info("\nāœ… Workflow completed successfully") + self.print_summary("Completed successfully") + return True + else: + conclusion = run_details.get("conclusion", "unknown") + self.logger.warning( + f"\nāš ļø Workflow completed with conclusion: " + f"{conclusion}" + ) + self.print_summary( + f"Completed with conclusion: {conclusion}" + ) + return False + + # Print status update every 30 seconds + current_time = time.time() + if current_time - self.last_status_time >= 30: + self.print_status_update(run_details, jobs_data) + self.last_status_time = current_time + + # Sleep before next check + time.sleep(10) + + def _extract_version_from_jobs(self, jobs_data: Dict[Any, Any]) -> None: + """Try to extract version tag using GitHub CLI.""" + if self.version_tag: + return # Already have it + + try: + import subprocess + # Use gh CLI to get workflow run details with outputs + result = subprocess.run( + [ + "gh", "run", "view", str(self.run_id), + "--repo", f"{self.owner}/{self.repo}", + "--json", "jobs" + ], + capture_output=True, + text=True, + timeout=10 + ) + + if result.returncode == 0: + import json + data = json.loads(result.stdout) + # Look through jobs for tag_release and + # find jobs that depend on it + for job in data.get("jobs", []): + # Look for jobs that use VERSION_NUMBER input + if "package_code" in job.get("name", ""): + # Try to extract from job name or check if + # we can get it from logs + # The 
version will be in the inputs but + # not directly available + pass + + # Try alternative: check recent tags + tag_result = subprocess.run( + [ + "gh", "api", + f"/repos/{self.owner}/{self.repo}/tags", + "--jq", ".[0].name" + ], + capture_output=True, + text=True, + timeout=10 + ) + if tag_result.returncode == 0: + latest_tag = tag_result.stdout.strip() + if latest_tag: + self.version_tag = latest_tag + + except Exception: + # If gh CLI fails, we'll just skip version extraction + pass + + def print_status_update( + self, run_details: Dict[Any, Any], jobs_data: Dict[Any, Any] + ) -> None: + """Print a status update.""" + status = run_details.get("status", "unknown") + + # Count jobs by status + in_progress = sum( + 1 for j in jobs_data.get("jobs", []) + if j["status"] == "in_progress" + ) + completed = sum( + 1 for j in jobs_data.get("jobs", []) + if j["status"] == "completed" + ) + queued = sum( + 1 for j in jobs_data.get("jobs", []) + if j["status"] == "queued" + ) + + self.logger.info( + f"ā³ Workflow still running... [Status: {status}, " + f"Jobs: {completed} completed, {in_progress} in progress, " + f"{queued} queued]" + ) + + def print_summary(self, outcome: str) -> None: + """Print final summary.""" + self.logger.info("\n" + "="*70) + self.logger.info(f"šŸ“‹ RELEASE WORKFLOW SUMMARY {self.repo}") + self.logger.info("="*70) + self.logger.info(f"Workflow URL: {self.run_url}") + self.logger.info(f"Version Tag: {self.version_tag or 'N/A'}") + self.logger.info(f"Outcome: {outcome}") + approved = ', '.join(sorted(self.approved_jobs)) or 'None' + self.logger.info(f"Approved: {approved}") + self.logger.info("="*70 + "\n") + + +def setup_logger(repo_name: str) -> logging.Logger: + """Set up logger to write to file and console.""" + # Create logs directory if it doesn't exist + log_dir = "logs" + os.makedirs(log_dir, exist_ok=True) + + # Create timestamp for log filename + timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") + + # Sanitize repo name for filename (replace / with _) + safe_repo_name = repo_name.replace("/", "_") + + # Create log filename + log_file = os.path.join(log_dir, f"release_{safe_repo_name}_{timestamp}.log") + + # Create logger + logger = logging.getLogger("trigger_release") + logger.setLevel(logging.INFO) + + # Remove any existing handlers + logger.handlers.clear() + + # Create file handler + file_handler = logging.FileHandler(log_file, encoding='utf-8') + file_handler.setLevel(logging.INFO) + + # Create console handler + console_handler = logging.StreamHandler(sys.stdout) + console_handler.setLevel(logging.INFO) + + # Create formatters - console includes repo name, file doesn't need it + file_formatter = logging.Formatter( + '%(asctime)s - %(name)s - %(levelname)s - %(message)s', + datefmt='%Y-%m-%d %H:%M:%S' + ) + + console_formatter = logging.Formatter( + f'%(asctime)s - [{repo_name}] - %(levelname)s - %(message)s', + datefmt='%Y-%m-%d %H:%M:%S' + ) + + # Add formatter to handlers + file_handler.setFormatter(file_formatter) + console_handler.setFormatter(console_formatter) + + # Add handlers to logger + logger.addHandler(file_handler) + logger.addHandler(console_handler) + + logger.info(f"Logging to file: {log_file}") + + return logger + + +def main(): + """Main entry point.""" + import argparse + + parser = argparse.ArgumentParser( + description='Trigger and monitor GitHub release workflow' + ) + parser.add_argument( + 'repo', + help='Repository in format owner/repo ' + '(e.g., NHSDigital/eps-vpc-resources)' + ) + parser.add_argument( + '--workflow', + 
default='release.yml', + help='Workflow file name (default: release.yml)' + ) + parser.add_argument( + '--branch', + default='main', + help='Branch to trigger workflow on (default: main)' + ) + + args = parser.parse_args() + + # Parse repository + try: + owner, repo = args.repo.split('/') + except ValueError: + # Use basic print here since logger isn't set up yet + print( + "āŒ Error: Repository must be in format owner/repo " + "(e.g., NHSDigital/eps-vpc-resources)" + ) + sys.exit(1) + + # Set up logger + logger = setup_logger(args.repo) + + # Get GitHub token + token = os.environ.get("GH_TOKEN") + if not token: + logger.error("āŒ Error: GH_TOKEN environment variable not set") + sys.exit(1) + + workflow_file = args.workflow + branch = args.branch + + # Create monitor instance + monitor = GitHubWorkflowMonitor(token, owner, repo, logger) + + # Trigger the workflow + if not monitor.trigger_workflow(workflow_file, branch): + sys.exit(1) + + # Wait a moment for the run to be created + logger.info("ā³ Waiting for workflow run to start...") + for attempt in range(12): # Try for up to 2 minutes + time.sleep(10) + run = monitor.get_latest_run(workflow_file, minutes=3) + if run: + monitor.run_id = run["id"] + monitor.run_url = run["html_url"] + logger.info(f"āœ… Found workflow run: {monitor.run_url}") + break + + if not monitor.run_id: + logger.error("āŒ Failed to find the triggered workflow run") + sys.exit(1) + + # Monitor and approve deployments + success = monitor.monitor_and_approve() + sys.exit(0 if success else 1) + + +if __name__ == "__main__": + main()