diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
new file mode 100644
index 000000000000..07c99d806a44
--- /dev/null
+++ b/.github/workflows/test.yml
@@ -0,0 +1,389 @@
+name: TestBuild
+
+env:
+ # Force the stdout and stderr streams to be unbuffered
+ PYTHONUNBUFFERED: 1
+
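+# "on" is quoted so the YAML parser does not coerce the bare key into a boolean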
+"on":
+ workflow_dispatch:
+
+jobs:
+ PythonUnitTests:
+ runs-on: [self-hosted, style-checker]
+ steps:
+ - name: Clear repository
+ run: |
+ sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Python unit tests
+ run: |
+ cd "$GITHUB_WORKSPACE/tests/ci"
+ python3 -m unittest discover -s . -p '*_test.py'
+ DockerHubPushAarch64:
+ runs-on: [self-hosted, style-checker-aarch64]
+ steps:
+ - name: Clear repository
+ run: |
+ sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Images check
+ run: |
+ cd "$GITHUB_WORKSPACE/tests/ci"
+ python3 docker_images_check.py --suffix aarch64
+ - name: Upload images files to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: changed_images_aarch64
+ path: ${{ runner.temp }}/docker_images_check/changed_images_aarch64.json
+ DockerHubPushAmd64:
+ runs-on: [self-hosted, style-checker]
+ steps:
+ - name: Clear repository
+ run: |
+ sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Images check
+ run: |
+ cd "$GITHUB_WORKSPACE/tests/ci"
+ python3 docker_images_check.py --suffix amd64
+ - name: Upload images files to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: changed_images_amd64
+ path: ${{ runner.temp }}/docker_images_check/changed_images_amd64.json
+ DockerHubPush:
+ needs: [DockerHubPushAmd64, DockerHubPushAarch64, PythonUnitTests]
+ runs-on: [self-hosted, style-checker]
+ steps:
+ - name: Clear repository
+ run: |
+ sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Download changed aarch64 images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images_aarch64
+ path: ${{ runner.temp }}
+ - name: Download changed amd64 images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images_amd64
+ path: ${{ runner.temp }}
+ - name: Images check
+ run: |
+ cd "$GITHUB_WORKSPACE/tests/ci"
+ python3 docker_manifests_merge.py --suffix amd64 --suffix aarch64
+ - name: Upload images files to artifacts
+ uses: actions/upload-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ runner.temp }}/changed_images.json
+ StyleCheck:
+ needs: DockerHubPush
+ runs-on: [self-hosted, style-checker]
+ if: ${{ success() || failure() }}
+ steps:
+ - name: Set envs
+ run: |
+ cat >> "$GITHUB_ENV" << 'EOF'
+ TEMP_PATH=${{ runner.temp }}/style_check
+ EOF
+ - name: Download changed images
+      # continue even if the artifact does not exist, e.g. on a `do not test` label or a failed Docker job
+ continue-on-error: true
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ env.TEMP_PATH }}
+ - name: Clear repository
+ run: |
+ sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Style Check
+ run: |
+ cd "$GITHUB_WORKSPACE/tests/ci"
+ python3 style_check.py
+ - name: Cleanup
+ if: always()
+ run: |
+ # shellcheck disable=SC2046
+ docker kill $(docker ps -q) ||:
+ # shellcheck disable=SC2046
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr "$TEMP_PATH"
+ CompatibilityCheck:
+ needs: [BuilderDebRelease]
+ runs-on: [self-hosted, style-checker]
+ steps:
+ - name: Set envs
+ run: |
+ cat >> "$GITHUB_ENV" << 'EOF'
+ TEMP_PATH=${{runner.temp}}/compatibility_check
+ REPO_COPY=${{runner.temp}}/compatibility_check/ClickHouse
+ REPORTS_PATH=${{runner.temp}}/reports_dir
+ EOF
+ - name: Clear repository
+ run: |
+ sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{ env.REPORTS_PATH }}
+ - name: CompatibilityCheck
+ run: |
+ sudo rm -fr "$TEMP_PATH"
+ mkdir -p "$TEMP_PATH"
+ cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+ cd "$REPO_COPY/tests/ci" && python3 compatibility_check.py
+ - name: Cleanup
+ if: always()
+ run: |
+ # shellcheck disable=SC2046
+ docker kill $(docker ps -q) ||:
+ # shellcheck disable=SC2046
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr "$TEMP_PATH"
+# SplitBuildSmokeTest:
+# needs: [BuilderDebSplitted]
+# runs-on: [self-hosted, style-checker]
+# steps:
+# - name: Set envs
+# run: |
+# cat >> "$GITHUB_ENV" << 'EOF'
+# TEMP_PATH=${{runner.temp}}/split_build_check
+# REPO_COPY=${{runner.temp}}/split_build_check/ClickHouse
+# REPORTS_PATH=${{runner.temp}}/reports_dir
+# EOF
+# - name: Clear repository
+# run: |
+# sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
+# - name: Check out repository code
+# uses: actions/checkout@v2
+# - name: Download json reports
+# uses: actions/download-artifact@v2
+# with:
+# path: ${{ env.REPORTS_PATH }}
+# - name: Split build check
+# run: |
+# sudo rm -fr "$TEMP_PATH"
+# mkdir -p "$TEMP_PATH"
+# cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+# cd "$REPO_COPY/tests/ci" && python3 split_build_smoke_check.py
+# - name: Cleanup
+# if: always()
+# run: |
+# # shellcheck disable=SC2046
+# docker kill $(docker ps -q) ||:
+# # shellcheck disable=SC2046
+# docker rm -f $(docker ps -a -q) ||:
+# sudo rm -fr "$TEMP_PATH"
+ BuilderDebRelease:
+ needs: [DockerHubPush]
+ runs-on: [self-hosted, builder]
+ steps:
+ - name: Set envs
+ run: |
+ cat >> "$GITHUB_ENV" << 'EOF'
+ TEMP_PATH=${{runner.temp}}/build_check
+ IMAGES_PATH=${{runner.temp}}/images_path
+ REPO_COPY=${{runner.temp}}/build_check/ClickHouse
+ CACHES_PATH=${{runner.temp}}/../ccaches
+ BUILD_NAME=package_release
+ EOF
+ - name: Download changed images
+ uses: actions/download-artifact@v2
+ with:
+ name: changed_images
+ path: ${{ env.IMAGES_PATH }}
+ - name: Clear repository
+ run: |
+ sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ with:
+ fetch-depth: 0 # For a proper version and performance artifacts
+ - name: Build
+ run: |
+ git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+ git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
+ sudo rm -fr "$TEMP_PATH"
+ mkdir -p "$TEMP_PATH"
+ cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+ cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
+ - name: Upload build URLs to artifacts
+ if: ${{ success() || failure() }}
+ uses: actions/upload-artifact@v2
+ with:
+ name: ${{ env.BUILD_URLS }}
+ path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
+ - name: Cleanup
+ if: always()
+ run: |
+ # shellcheck disable=SC2046
+ docker kill $(docker ps -q) ||:
+ # shellcheck disable=SC2046
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr "$TEMP_PATH"
+ FunctionalStatelessTestRelease:
+    needs: [BuilderDebRelease]
+    runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Set envs
+ run: |
+ cat >> "$GITHUB_ENV" << 'EOF'
+ TEMP_PATH=${{runner.temp}}/stateless_release
+ REPORTS_PATH=${{runner.temp}}/reports_dir
+ CHECK_NAME=Stateless tests (release, actions)
+ REPO_COPY=${{runner.temp}}/stateless_release/ClickHouse
+ KILL_TIMEOUT=10800
+ EOF
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{ env.REPORTS_PATH }}
+ - name: Clear repository
+ run: |
+ sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ run: |
+ sudo rm -fr "$TEMP_PATH"
+ mkdir -p "$TEMP_PATH"
+ cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+ cd "$REPO_COPY/tests/ci"
+ python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
+ - name: Cleanup
+ if: always()
+ run: |
+ # shellcheck disable=SC2046
+ docker kill $(docker ps -q) ||:
+ # shellcheck disable=SC2046
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr "$TEMP_PATH"
+ FunctionalStatelessTestReleaseDatabaseOrdinary:
+    needs: [BuilderDebRelease]
+    runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Set envs
+ run: |
+ cat >> "$GITHUB_ENV" << 'EOF'
+ TEMP_PATH=${{runner.temp}}/stateless_release_database_ordinary
+ REPORTS_PATH=${{runner.temp}}/reports_dir
+ CHECK_NAME=Stateless tests (release, DatabaseOrdinary, actions)
+ REPO_COPY=${{runner.temp}}/stateless_release_database_ordinary/ClickHouse
+ KILL_TIMEOUT=10800
+ EOF
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{ env.REPORTS_PATH }}
+ - name: Clear repository
+ run: |
+ sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ run: |
+ sudo rm -fr "$TEMP_PATH"
+ mkdir -p "$TEMP_PATH"
+ cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+ cd "$REPO_COPY/tests/ci"
+ python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
+ - name: Cleanup
+ if: always()
+ run: |
+ # shellcheck disable=SC2046
+ docker kill $(docker ps -q) ||:
+ # shellcheck disable=SC2046
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr "$TEMP_PATH"
+ FunctionalStatelessTestReleaseS3:
+    needs: [BuilderDebRelease]
+    runs-on: [self-hosted, func-tester]
+ steps:
+ - name: Set envs
+ run: |
+ cat >> "$GITHUB_ENV" << 'EOF'
+ TEMP_PATH=${{runner.temp}}/stateless_s3_storage
+ REPORTS_PATH=${{runner.temp}}/reports_dir
+ CHECK_NAME=Stateless tests (release, s3 storage, actions)
+ REPO_COPY=${{runner.temp}}/stateless_s3_storage/ClickHouse
+ KILL_TIMEOUT=10800
+ EOF
+ - name: Download json reports
+ uses: actions/download-artifact@v2
+ with:
+ path: ${{ env.REPORTS_PATH }}
+ - name: Clear repository
+ run: |
+ sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
+ - name: Check out repository code
+ uses: actions/checkout@v2
+ - name: Functional test
+ run: |
+ sudo rm -fr "$TEMP_PATH"
+ mkdir -p "$TEMP_PATH"
+ cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+ cd "$REPO_COPY/tests/ci"
+ python3 functional_test_check.py "$CHECK_NAME" "$KILL_TIMEOUT"
+ - name: Cleanup
+ if: always()
+ run: |
+ # shellcheck disable=SC2046
+ docker kill $(docker ps -q) ||:
+ # shellcheck disable=SC2046
+ docker rm -f $(docker ps -a -q) ||:
+ sudo rm -fr "$TEMP_PATH"
+# BuilderDebSplitted:
+# needs: [DockerHubPush]
+# runs-on: [self-hosted, builder]
+# steps:
+# - name: Set envs
+# run: |
+# cat >> "$GITHUB_ENV" << 'EOF'
+# TEMP_PATH=${{runner.temp}}/build_check
+# IMAGES_PATH=${{runner.temp}}/images_path
+# REPO_COPY=${{runner.temp}}/build_check/ClickHouse
+# CACHES_PATH=${{runner.temp}}/../ccaches
+# BUILD_NAME=binary_splitted
+# EOF
+# - name: Download changed images
+# uses: actions/download-artifact@v2
+# with:
+# name: changed_images
+# path: ${{ env.IMAGES_PATH }}
+# - name: Clear repository
+# run: |
+# sudo rm -fr "$GITHUB_WORKSPACE" && mkdir "$GITHUB_WORKSPACE"
+# - name: Check out repository code
+# uses: actions/checkout@v2
+# - name: Build
+# run: |
+# git -C "$GITHUB_WORKSPACE" submodule sync --recursive
+# git -C "$GITHUB_WORKSPACE" submodule update --depth=1 --recursive --init --jobs=10
+# sudo rm -fr "$TEMP_PATH"
+# mkdir -p "$TEMP_PATH"
+# cp -r "$GITHUB_WORKSPACE" "$TEMP_PATH"
+# cd "$REPO_COPY/tests/ci" && python3 build_check.py "$BUILD_NAME"
+# - name: Upload build URLs to artifacts
+# if: ${{ success() || failure() }}
+# uses: actions/upload-artifact@v2
+# with:
+# name: ${{ env.BUILD_URLS }}
+# path: ${{ env.TEMP_PATH }}/${{ env.BUILD_URLS }}.json
+# - name: Cleanup
+# if: always()
+# run: |
+# # shellcheck disable=SC2046
+# docker kill $(docker ps -q) ||:
+# # shellcheck disable=SC2046
+# docker rm -f $(docker ps -a -q) ||:
+# sudo rm -fr "$TEMP_PATH" "$CACHES_PATH"
diff --git a/docker/packager/packager b/docker/packager/packager
index 7c0f046b76c8..e14bbe5990a2 100755
--- a/docker/packager/packager
+++ b/docker/packager/packager
@@ -365,10 +365,20 @@ if __name__ == "__main__":
parser.add_argument(
"--as-root", action="store_true", help="if the container should run as root"
)
+ parser.add_argument(
+ "--docker-repo", default="docker.io", help="docker repository"
+ )
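+    # argparse.SUPPRESS keeps the attribute off the namespace entirely when the
+    # flag is not passed; the login step below probes for it with getattr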
+ parser.add_argument(
+ "--docker-user", default=argparse.SUPPRESS, help="docker user"
+ )
+ parser.add_argument(
+ "--docker-password", default=argparse.SUPPRESS, help="docker password"
+ )
+
args = parser.parse_args()
- image_name = f"clickhouse/{IMAGE_TYPE}-builder"
+ image_name = f"{args.docker_repo}/clickhouse/{IMAGE_TYPE}-builder"
ch_root = args.clickhouse_repo_path
@@ -402,6 +412,13 @@ if __name__ == "__main__":
args.with_coverage,
args.with_binaries,
)
+    if getattr(args, "docker_user", None):
+        subprocess.check_output(  # pylint: disable=unexpected-keyword-arg
+            "docker login {} --username '{}' --password-stdin".format(
+                args.docker_repo, args.docker_user
+            ),
+            input=getattr(args, "docker_password", ""),
+            encoding="utf-8",
+            shell=True,
+        )
pre_build(args.clickhouse_repo_path, env_prepared)
run_docker_image_with_env(
diff --git a/docker/test/base/Dockerfile b/docker/test/base/Dockerfile
index ca44354620fa..bf00e3e094c5 100644
--- a/docker/test/base/Dockerfile
+++ b/docker/test/base/Dockerfile
@@ -1,7 +1,8 @@
# rebuild in #33610
# docker build -t clickhouse/test-base .
ARG FROM_TAG=latest
-FROM clickhouse/test-util:$FROM_TAG
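+# DOCKER_REPO lets the base image be pulled from a mirror or private registry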
+ARG DOCKER_REPO=docker.io
+FROM $DOCKER_REPO/clickhouse/test-util:$FROM_TAG
# ARG for quick switch to a given ubuntu mirror
ARG apt_archive="http://archive.ubuntu.com"
diff --git a/docker/test/keeper-jepsen/Dockerfile b/docker/test/keeper-jepsen/Dockerfile
index a794e076ec02..b0425847048d 100644
--- a/docker/test/keeper-jepsen/Dockerfile
+++ b/docker/test/keeper-jepsen/Dockerfile
@@ -1,7 +1,8 @@
# rebuild in #33610
# docker build -t clickhouse/keeper-jepsen-test .
ARG FROM_TAG=latest
-FROM clickhouse/test-base:$FROM_TAG
+ARG DOCKER_REPO=docker.io
+FROM $DOCKER_REPO/clickhouse/test-base:$FROM_TAG
ENV DEBIAN_FRONTEND=noninteractive
ENV CLOJURE_VERSION=1.10.3.814
diff --git a/docker/test/split_build_smoke_test/Dockerfile b/docker/test/split_build_smoke_test/Dockerfile
index 5f84eb42216c..49bac3fb8b9a 100644
--- a/docker/test/split_build_smoke_test/Dockerfile
+++ b/docker/test/split_build_smoke_test/Dockerfile
@@ -1,7 +1,8 @@
# rebuild in #33610
# docker build -t clickhouse/split-build-smoke-test .
ARG FROM_TAG=latest
-FROM clickhouse/binary-builder:$FROM_TAG
+ARG DOCKER_REPO=docker.io
+FROM $DOCKER_REPO/clickhouse/binary-builder:$FROM_TAG
COPY run.sh /run.sh
COPY process_split_build_smoke_test_result.py /
diff --git a/docker/test/stateful/Dockerfile b/docker/test/stateful/Dockerfile
index ef375f8b2067..6a8e31690714 100644
--- a/docker/test/stateful/Dockerfile
+++ b/docker/test/stateful/Dockerfile
@@ -1,7 +1,8 @@
# rebuild in #33610
# docker build -t clickhouse/stateful-test .
ARG FROM_TAG=latest
-FROM clickhouse/stateless-test:$FROM_TAG
+ARG DOCKER_REPO=docker.io
+FROM $DOCKER_REPO/clickhouse/stateless-test:$FROM_TAG
RUN apt-get update -y \
&& env DEBIAN_FRONTEND=noninteractive \
diff --git a/docker/test/stateless/Dockerfile b/docker/test/stateless/Dockerfile
index 9141e89d744e..d878e8947b62 100644
--- a/docker/test/stateless/Dockerfile
+++ b/docker/test/stateless/Dockerfile
@@ -1,7 +1,8 @@
# rebuild in #33610
# docker build -t clickhouse/stateless-test .
ARG FROM_TAG=latest
-FROM clickhouse/test-base:$FROM_TAG
+ARG DOCKER_REPO=docker.io
+FROM $DOCKER_REPO/clickhouse/test-base:$FROM_TAG
ARG odbc_driver_url="https://github.com/ClickHouse/clickhouse-odbc/releases/download/v1.1.4.20200302/clickhouse-odbc-1.1.4-Linux.tar.gz"
diff --git a/docker/test/stateless_pytest/Dockerfile b/docker/test/stateless_pytest/Dockerfile
index 789ee0e9b308..69d53ce6f7e3 100644
--- a/docker/test/stateless_pytest/Dockerfile
+++ b/docker/test/stateless_pytest/Dockerfile
@@ -1,7 +1,8 @@
# rebuild in #33610
# docker build -t clickhouse/stateless-pytest .
ARG FROM_TAG=latest
-FROM clickhouse/test-base:$FROM_TAG
+ARG DOCKER_REPO=docker.io
+FROM $DOCKER_REPO/clickhouse/test-base:$FROM_TAG
RUN apt-get update -y && \
apt-get install -y --no-install-recommends \
diff --git a/docker/test/stress/Dockerfile b/docker/test/stress/Dockerfile
index 393508fd551b..2a0bc6ec1c25 100644
--- a/docker/test/stress/Dockerfile
+++ b/docker/test/stress/Dockerfile
@@ -1,7 +1,8 @@
# rebuild in #33610
# docker build -t clickhouse/stress-test .
ARG FROM_TAG=latest
-FROM clickhouse/stateful-test:$FROM_TAG
+ARG DOCKER_REPO=docker.io
+FROM $DOCKER_REPO/clickhouse/stateful-test:$FROM_TAG
RUN apt-get update -y \
&& env DEBIAN_FRONTEND=noninteractive \
diff --git a/docker/test/unit/Dockerfile b/docker/test/unit/Dockerfile
index b75bfb6661cc..d389e0d3f341 100644
--- a/docker/test/unit/Dockerfile
+++ b/docker/test/unit/Dockerfile
@@ -1,7 +1,8 @@
# rebuild in #33610
# docker build -t clickhouse/unit-test .
ARG FROM_TAG=latest
-FROM clickhouse/stateless-test:$FROM_TAG
+ARG DOCKER_REPO=docker.io
+FROM $DOCKER_REPO/clickhouse/stateless-test:$FROM_TAG
RUN apt-get install gdb
diff --git a/tests/ci/ast_fuzzer_check.py b/tests/ci/ast_fuzzer_check.py
index 9ccae89b4036..5b5a6b8a496c 100644
--- a/tests/ci/ast_fuzzer_check.py
+++ b/tests/ci/ast_fuzzer_check.py
@@ -117,7 +117,7 @@ def get_commit(gh, commit_sha):
"core.gz": os.path.join(workspace_path, "core.gz"),
}
- s3_helper = S3Helper("https://s3.amazonaws.com")
+ s3_helper = S3Helper()
for f in paths:
try:
paths[f] = s3_helper.upload_test_report_to_s3(paths[f], s3_prefix + "/" + f)
@@ -173,4 +173,4 @@ def get_commit(gh, commit_sha):
logging.info("Result: '%s', '%s', '%s'", status, description, report_url)
print(f"::notice ::Report url: {report_url}")
- post_commit_status(gh, pr_info.sha, check_name, description, status, report_url)
+ # post_commit_status(gh, pr_info.sha, check_name, description, status, report_url)
diff --git a/tests/ci/build_check.py b/tests/ci/build_check.py
index 3976e2ba9168..619ef4da363a 100644
--- a/tests/ci/build_check.py
+++ b/tests/ci/build_check.py
@@ -15,8 +15,12 @@
IMAGES_PATH,
REPO_COPY,
S3_BUILDS_BUCKET,
+ S3_ENDPOINT,
TEMP_PATH,
+ DOCKER_USER,
+ DOCKER_REPO,
)
+from get_robot_token import get_parameter_from_ssm
from s3_helper import S3Helper
from pr_info import PRInfo
from version_helper import (
@@ -30,7 +34,7 @@
from docker_pull_helper import get_image_with_version
from tee_popen import TeePopen
-IMAGE_NAME = "clickhouse/binary-builder"
+IMAGE_NAME = f"{DOCKER_REPO}/clickhouse/binary-builder"
BUILD_LOG_NAME = "build_log.log"
@@ -81,6 +85,9 @@ def get_packager_cmd(
cmd += f" --docker-image-version={image_version}"
cmd += f" --version={build_version}"
+ cmd += f" --docker-repo={DOCKER_REPO}"
+ cmd += f" --docker-user={DOCKER_USER}"
+    cmd += " --docker-password={}".format(
+        get_parameter_from_ssm("dockerhub_robot_password")
+    )
if _can_export_binaries(build_config):
cmd += " --with-binaries=tests"
@@ -142,10 +149,10 @@ def check_for_success_run(
for url in build_results:
url_escaped = url.replace("+", "%2B").replace(" ", "%20")
if BUILD_LOG_NAME in url:
- log_url = f"https://s3.amazonaws.com/{S3_BUILDS_BUCKET}/{url_escaped}"
+ log_url = f"{S3_ENDPOINT}/{S3_BUILDS_BUCKET}/{url_escaped}"
else:
build_urls.append(
- f"https://s3.amazonaws.com/{S3_BUILDS_BUCKET}/{url_escaped}"
+ f"{S3_ENDPOINT}/{S3_BUILDS_BUCKET}/{url_escaped}"
)
if not log_url:
# log is uploaded the last, so if there's no log we need to rerun the build
@@ -250,7 +257,7 @@ def main():
logging.info("Repo copy path %s", REPO_COPY)
- s3_helper = S3Helper("https://s3.amazonaws.com")
+ s3_helper = S3Helper()
version = get_version_from_repo(git=Git(True))
release_or_pr, performance_pr = get_release_or_pr(pr_info, version)
@@ -265,6 +272,13 @@ def main():
# put them as github actions artifact (result)
check_for_success_run(s3_helper, s3_path_prefix, build_name, build_config)
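+    # Log in to the configured registry before pulling the builder image,
+    # so pulls from a private DOCKER_REPO are authenticated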
+ subprocess.check_output( # pylint: disable=unexpected-keyword-arg
+ "docker login {} --username '{}' --password-stdin".format(DOCKER_REPO, DOCKER_USER),
+ input=get_parameter_from_ssm("dockerhub_robot_password"),
+ encoding="utf-8",
+ shell=True,
+ )
+
docker_image = get_image_with_version(IMAGES_PATH, IMAGE_NAME)
image_version = docker_image.version
diff --git a/tests/ci/build_report_check.py b/tests/ci/build_report_check.py
index dbf5adfe1747..88a17dd1199d 100644
--- a/tests/ci/build_report_check.py
+++ b/tests/ci/build_report_check.py
@@ -231,7 +231,7 @@ def main():
logging.error("No success builds, failing check")
sys.exit(1)
- s3_helper = S3Helper("https://s3.amazonaws.com")
+ s3_helper = S3Helper()
branch_url = f"{GITHUB_SERVER_URL}/{GITHUB_REPOSITORY}/commits/master"
branch_name = "master"
diff --git a/tests/ci/ccache_utils.py b/tests/ci/ccache_utils.py
index bd155b02cb4b..797edcd2fb12 100644
--- a/tests/ci/ccache_utils.py
+++ b/tests/ci/ccache_utils.py
@@ -5,11 +5,13 @@
import sys
import os
import shutil
+
from pathlib import Path
import requests
from compress_files import decompress_fast, compress_fast
+from env_helper import S3_ENDPOINT
DOWNLOAD_RETRIES_COUNT = 5
@@ -73,7 +75,7 @@ def get_ccache_if_not_exists(
for obj in objects:
if ccache_name in obj:
logging.info("Found ccache on path %s", obj)
- url = "https://s3.amazonaws.com/clickhouse-builds/" + obj
+ url = f"{S3_ENDPOINT}/clickhouse-builds/" + obj
compressed_cache = os.path.join(temp_path, os.path.basename(obj))
dowload_file_with_progress(url, compressed_cache)
diff --git a/tests/ci/clickhouse_helper.py b/tests/ci/clickhouse_helper.py
index c595dc559df7..7751d9141579 100644
--- a/tests/ci/clickhouse_helper.py
+++ b/tests/ci/clickhouse_helper.py
@@ -4,8 +4,12 @@
import json
import requests # type: ignore
+from env_helper import GITHUB_REPOSITORY
from get_robot_token import get_parameter_from_ssm
class InsertException(Exception):
pass
@@ -131,7 +135,7 @@ def prepare_tests_results_for_clickhouse(
check_name,
):
- pull_request_url = "https://github.com/ClickHouse/ClickHouse/commits/master"
+    pull_request_url = f"https://github.com/{GITHUB_REPOSITORY}/commits/master"
base_ref = "master"
head_ref = "master"
base_repo = pr_info.repo_full_name
diff --git a/tests/ci/codebrowser_check.py b/tests/ci/codebrowser_check.py
index 48c92e9f6acc..86f55b26cda7 100644
--- a/tests/ci/codebrowser_check.py
+++ b/tests/ci/codebrowser_check.py
@@ -7,7 +7,7 @@
from github import Github
-from env_helper import IMAGES_PATH, REPO_COPY
+from env_helper import IMAGES_PATH, REPO_COPY, S3_ENDPOINT
from stopwatch import Stopwatch
from upload_result_helper import upload_results
from s3_helper import S3Helper
@@ -23,7 +23,7 @@ def get_run_command(repo_path, output_path, image):
cmd = (
"docker run " + f"--volume={repo_path}:/repo_folder "
f"--volume={output_path}:/test_output "
- f"-e 'DATA=https://s3.amazonaws.com/clickhouse-test-reports/codebrowser/data' {image}"
+ f"-e 'DATA={S3_ENDPOINT}/clickhouse-test-reports/codebrowser/data' {image}"
)
return cmd
@@ -41,7 +41,7 @@ def get_run_command(repo_path, output_path, image):
os.makedirs(temp_path)
docker_image = get_image_with_version(IMAGES_PATH, "clickhouse/codebrowser")
- s3_helper = S3Helper("https://s3.amazonaws.com")
+ s3_helper = S3Helper()
result_path = os.path.join(temp_path, "result_path")
if not os.path.exists(result_path):
@@ -69,7 +69,7 @@ def get_run_command(repo_path, output_path, image):
report_path, s3_path_prefix, "clickhouse-test-reports"
)
-    index_html = '<a href="https://s3.amazonaws.com/clickhouse-test-reports/codebrowser/index.html">HTML report</a>'
+    index_html = f'<a href="{S3_ENDPOINT}/clickhouse-test-reports/codebrowser/index.html">HTML report</a>'
test_results = [(index_html, "Look at the report")]
diff --git a/tests/ci/compatibility_check.py b/tests/ci/compatibility_check.py
index 5490f162e42a..983e9e458811 100644
--- a/tests/ci/compatibility_check.py
+++ b/tests/ci/compatibility_check.py
@@ -8,9 +8,9 @@
from github import Github
-from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH
+from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH, DOCKER_REPO, DOCKER_USER
from s3_helper import S3Helper
-from get_robot_token import get_best_robot_token
+from get_robot_token import get_best_robot_token, get_parameter_from_ssm
from pr_info import PRInfo
from build_download_helper import download_builds_filter
from upload_result_helper import upload_results
@@ -24,8 +24,8 @@
from stopwatch import Stopwatch
from rerun_helper import RerunHelper
-IMAGE_UBUNTU = "clickhouse/test-old-ubuntu"
-IMAGE_CENTOS = "clickhouse/test-old-centos"
+IMAGE_UBUNTU = f"{DOCKER_REPO}/clickhouse/test-old-ubuntu"
+IMAGE_CENTOS = f"{DOCKER_REPO}/clickhouse/test-old-centos"
MAX_GLIBC_VERSION = "2.4"
DOWNLOAD_RETRIES_COUNT = 5
CHECK_NAME = "Compatibility check (actions)"
@@ -118,13 +118,19 @@ def get_run_commands(
reports_path = REPORTS_PATH
pr_info = PRInfo()
-
gh = Github(get_best_robot_token())
rerun_helper = RerunHelper(gh, pr_info, CHECK_NAME)
if rerun_helper.is_already_finished_by_status():
logging.info("Check is already finished according to github status, exiting")
sys.exit(0)
+
+ subprocess.check_output( # pylint: disable=unexpected-keyword-arg
+ "docker login {} --username '{}' --password-stdin".format(DOCKER_REPO, DOCKER_USER),
+ input=get_parameter_from_ssm("dockerhub_robot_password"),
+ encoding="utf-8",
+ shell=True,
+ )
docker_images = get_images_with_versions(reports_path, [IMAGE_CENTOS, IMAGE_UBUNTU])
@@ -169,7 +175,7 @@ def url_filter(url):
subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True)
- s3_helper = S3Helper("https://s3.amazonaws.com")
+ s3_helper = S3Helper()
state, description, test_results, additional_logs = process_result(
result_path, server_log_path
)
diff --git a/tests/ci/docker_images_check.py b/tests/ci/docker_images_check.py
index 2e181f678dd5..d6c68d6ea211 100644
--- a/tests/ci/docker_images_check.py
+++ b/tests/ci/docker_images_check.py
@@ -14,7 +14,7 @@
from clickhouse_helper import ClickHouseHelper, prepare_tests_results_for_clickhouse
from commit_status_helper import post_commit_status
-from env_helper import GITHUB_WORKSPACE, RUNNER_TEMP, GITHUB_RUN_URL
+from env_helper import GITHUB_WORKSPACE, RUNNER_TEMP, GITHUB_RUN_URL, DOCKER_USER, DOCKER_REPO
from get_robot_token import get_best_robot_token, get_parameter_from_ssm
from pr_info import PRInfo
from s3_helper import S3Helper
@@ -91,7 +91,7 @@ def get_images_dict(repo_path: str, image_file_path: str) -> ImagesDict:
def get_changed_docker_images(
- pr_info: PRInfo, images_dict: ImagesDict
+    pr_info: PRInfo, images_dict: ImagesDict, docker_repo: str
) -> Set[DockerImage]:
if not images_dict:
@@ -111,7 +111,7 @@ def get_changed_docker_images(
for dockerfile_dir, image_description in images_dict.items():
for f in files_changed:
if f.startswith(dockerfile_dir):
- name = image_description["name"]
+ name = docker_repo + "/" + image_description["name"]
only_amd64 = image_description.get("only_amd64", False)
logging.info(
"Found changed file '%s' which affects "
@@ -135,7 +135,7 @@ def get_changed_docker_images(
dependent,
image,
)
- name = images_dict[dependent]["name"]
+ name = docker_repo + "/" + images_dict[dependent]["name"]
only_amd64 = images_dict[dependent].get("only_amd64", False)
changed_images.append(DockerImage(dependent, name, only_amd64, image))
index += 1
@@ -248,6 +248,7 @@ def build_and_push_one_image(
"docker buildx build --builder default "
f"--label build-url={GITHUB_RUN_URL} "
f"{from_tag_arg}"
+ f"--build-arg DOCKER_REPO={DOCKER_REPO} "
# A hack to invalidate cache, grep for it in docker/ dir
f"--build-arg CACHE_INVALIDATOR={GITHUB_RUN_URL} "
f"--tag {image.repo}:{version_string} "
@@ -405,13 +405,12 @@ def main():
else:
changed_json = os.path.join(TEMP_PATH, "changed_images.json")
- if args.push:
- subprocess.check_output( # pylint: disable=unexpected-keyword-arg
- "docker login --username 'robotclickhouse' --password-stdin",
- input=get_parameter_from_ssm("dockerhub_robot_password"),
- encoding="utf-8",
- shell=True,
- )
+ subprocess.check_output( # pylint: disable=unexpected-keyword-arg
+ "docker login {} --username '{}' --password-stdin".format(DOCKER_REPO, DOCKER_USER),
+ input=get_parameter_from_ssm("dockerhub_robot_password"),
+ encoding="utf-8",
+ shell=True,
+ )
if os.path.exists(TEMP_PATH):
shutil.rmtree(TEMP_PATH)
@@ -431,7 +430,7 @@ def main():
# If the event does not contain diff, nothing will be built
pass
- changed_images = get_changed_docker_images(pr_info, images_dict)
+ changed_images = get_changed_docker_images(pr_info, images_dict, DOCKER_REPO)
if changed_images:
logging.info(
"Has changed images: %s", ", ".join([im.path for im in changed_images])
@@ -460,7 +459,7 @@ def main():
with open(changed_json, "w", encoding="utf-8") as images_file:
json.dump(result_images, images_file)
- s3_helper = S3Helper("https://s3.amazonaws.com")
+ s3_helper = S3Helper()
s3_path_prefix = (
str(pr_info.number) + "/" + pr_info.sha + "/" + NAME.lower().replace(" ", "_")
diff --git a/tests/ci/docker_manifests_merge.py b/tests/ci/docker_manifests_merge.py
index 9371440346e7..d63f371ab7ce 100644
--- a/tests/ci/docker_manifests_merge.py
+++ b/tests/ci/docker_manifests_merge.py
@@ -11,7 +11,7 @@
from clickhouse_helper import ClickHouseHelper, prepare_tests_results_for_clickhouse
from commit_status_helper import post_commit_status
-from env_helper import RUNNER_TEMP
+from env_helper import RUNNER_TEMP, DOCKER_USER, DOCKER_REPO
from get_robot_token import get_best_robot_token, get_parameter_from_ssm
from pr_info import PRInfo
from s3_helper import S3Helper
@@ -171,13 +171,13 @@ def main():
stopwatch = Stopwatch()
args = parse_args()
- if args.push:
- subprocess.check_output( # pylint: disable=unexpected-keyword-arg
- "docker login --username 'robotclickhouse' --password-stdin",
- input=get_parameter_from_ssm("dockerhub_robot_password"),
- encoding="utf-8",
- shell=True,
- )
+
+ subprocess.check_output( # pylint: disable=unexpected-keyword-arg
+ "docker login {} --username '{}' --password-stdin".format(DOCKER_REPO, DOCKER_USER),
+ input=get_parameter_from_ssm("dockerhub_robot_password"),
+ encoding="utf-8",
+ shell=True,
+ )
to_merge = {}
for suf in args.suffixes:
@@ -203,7 +203,7 @@ def main():
json.dump(changed_images, ci)
pr_info = PRInfo()
- s3_helper = S3Helper("https://s3.amazonaws.com")
+ s3_helper = S3Helper()
url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME)
diff --git a/tests/ci/docker_server.py b/tests/ci/docker_server.py
index a54a89895654..15e95565ce30 100644
--- a/tests/ci/docker_server.py
+++ b/tests/ci/docker_server.py
@@ -16,7 +16,7 @@
from clickhouse_helper import ClickHouseHelper, prepare_tests_results_for_clickhouse
from commit_status_helper import post_commit_status
from docker_images_check import DockerImage
-from env_helper import CI, GITHUB_RUN_URL, RUNNER_TEMP, S3_BUILDS_BUCKET
+from env_helper import CI, DOCKER_USER, DOCKER_REPO, GITHUB_RUN_URL, RUNNER_TEMP, S3_BUILDS_BUCKET, S3_ENDPOINT
from get_robot_token import get_best_robot_token, get_parameter_from_ssm
from git_helper import Git
from pr_info import PRInfo
@@ -309,18 +309,17 @@ def main():
pr_info = PRInfo()
release_or_pr, _ = get_release_or_pr(pr_info, args.version)
args.bucket_prefix = (
- f"https://s3.amazonaws.com/{S3_BUILDS_BUCKET}/"
+ f"{S3_ENDPOINT}/{S3_BUILDS_BUCKET}/"
f"{release_or_pr}/{pr_info.sha}"
)
- if args.push:
- subprocess.check_output( # pylint: disable=unexpected-keyword-arg
- "docker login --username 'robotclickhouse' --password-stdin",
- input=get_parameter_from_ssm("dockerhub_robot_password"),
- encoding="utf-8",
- shell=True,
- )
- NAME = f"Docker image {image.repo} build and push (actions)"
+ subprocess.check_output( # pylint: disable=unexpected-keyword-arg
+ "docker login {} --username '{}' --password-stdin".format(DOCKER_REPO, DOCKER_USER),
+ input=get_parameter_from_ssm("dockerhub_robot_password"),
+ encoding="utf-8",
+ shell=True,
+ )
+ NAME = f"Docker image {image.repo} build and push (actions)"
logging.info("Following tags will be created: %s", ", ".join(tags))
status = "success"
@@ -336,7 +335,7 @@ def main():
status = "failure"
pr_info = pr_info or PRInfo()
- s3_helper = S3Helper("https://s3.amazonaws.com")
+ s3_helper = S3Helper()
url = upload_results(s3_helper, pr_info.number, pr_info.sha, test_results, [], NAME)
diff --git a/tests/ci/docker_test.py b/tests/ci/docker_test.py
index 32df6d5f1d07..e7c3bc2bfcc9 100644
--- a/tests/ci/docker_test.py
+++ b/tests/ci/docker_test.py
@@ -4,7 +4,7 @@
import unittest
from unittest.mock import patch, MagicMock
-from env_helper import GITHUB_RUN_URL
+from env_helper import GITHUB_RUN_URL, DOCKER_REPO
from pr_info import PRInfo
import docker_images_check as di
@@ -30,7 +30,7 @@ def test_get_changed_docker_images(self):
images = sorted(
list(
di.get_changed_docker_images(
- pr_info, di.get_images_dict("/", self.docker_images_path)
+ pr_info, di.get_images_dict("/", self.docker_images_path), DOCKER_REPO
)
)
)
@@ -124,6 +124,7 @@ def test_build_and_push_one_image(self, mock_machine, mock_popen, mock_open):
self.assertIn(
f"docker buildx build --builder default --label build-url={GITHUB_RUN_URL} "
"--build-arg FROM_TAG=version "
+ f"--build-arg DOCKER_REPO={DOCKER_REPO} "
f"--build-arg CACHE_INVALIDATOR={GITHUB_RUN_URL} "
"--tag name:version --cache-from type=registry,ref=name:version "
"--cache-from type=registry,ref=name:latest "
@@ -143,6 +144,7 @@ def test_build_and_push_one_image(self, mock_machine, mock_popen, mock_open):
self.assertIn(
f"docker buildx build --builder default --label build-url={GITHUB_RUN_URL} "
"--build-arg FROM_TAG=version2 "
+ f"--build-arg DOCKER_REPO={DOCKER_REPO} "
f"--build-arg CACHE_INVALIDATOR={GITHUB_RUN_URL} "
"--tag name:version2 --cache-from type=registry,ref=name:version2 "
"--cache-from type=registry,ref=name:latest "
@@ -161,6 +163,7 @@ def test_build_and_push_one_image(self, mock_machine, mock_popen, mock_open):
mock_machine.assert_not_called()
self.assertIn(
f"docker buildx build --builder default --label build-url={GITHUB_RUN_URL} "
+ f"--build-arg DOCKER_REPO={DOCKER_REPO} "
f"--build-arg CACHE_INVALIDATOR={GITHUB_RUN_URL} "
"--tag name:version2 --cache-from type=registry,ref=name:version2 "
"--cache-from type=registry,ref=name:latest "
@@ -181,6 +184,7 @@ def test_build_and_push_one_image(self, mock_machine, mock_popen, mock_open):
mock_machine.assert_not_called()
self.assertIn(
f"docker buildx build --builder default --label build-url={GITHUB_RUN_URL} "
+ f"--build-arg DOCKER_REPO={DOCKER_REPO} "
f"--build-arg CACHE_INVALIDATOR={GITHUB_RUN_URL} "
"--tag name:version2 --cache-from type=registry,ref=name:version2 "
"--cache-from type=registry,ref=name:latest "
diff --git a/tests/ci/docs_check.py b/tests/ci/docs_check.py
index c67e9dcc99dd..d858d2c2f756 100644
--- a/tests/ci/docs_check.py
+++ b/tests/ci/docs_check.py
@@ -6,7 +6,7 @@
import sys
from github import Github
-from env_helper import TEMP_PATH, REPO_COPY
+from env_helper import TEMP_PATH, REPO_COPY, DOCKER_REPO
from s3_helper import S3Helper
from pr_info import PRInfo
from get_robot_token import get_best_robot_token
@@ -70,7 +70,7 @@
if not os.path.exists(temp_path):
os.makedirs(temp_path)
- docker_image = get_image_with_version(temp_path, "clickhouse/docs-builder")
+ docker_image = get_image_with_version(temp_path, f"{DOCKER_REPO}/clickhouse/docs-builder")
test_output = os.path.join(temp_path, "docs_check_log")
if not os.path.exists(test_output):
@@ -120,7 +120,7 @@
else:
lines.append(("Non zero exit code", "FAIL"))
- s3_helper = S3Helper("https://s3.amazonaws.com")
+ s3_helper = S3Helper()
ch_helper = ClickHouseHelper()
report_url = upload_results(
diff --git a/tests/ci/docs_release.py b/tests/ci/docs_release.py
index 806db28c1b1d..143d5ea3dd4f 100644
--- a/tests/ci/docs_release.py
+++ b/tests/ci/docs_release.py
@@ -106,7 +106,7 @@ def parse_args() -> argparse.Namespace:
else:
lines.append(("Non zero exit code", "FAIL"))
- s3_helper = S3Helper("https://s3.amazonaws.com")
+ s3_helper = S3Helper()
report_url = upload_results(
s3_helper, pr_info.number, pr_info.sha, lines, additional_files, NAME
diff --git a/tests/ci/env_helper.py b/tests/ci/env_helper.py
index dd081523db11..bddd86c763d3 100644
--- a/tests/ci/env_helper.py
+++ b/tests/ci/env_helper.py
@@ -11,6 +11,8 @@
CLOUDFLARE_TOKEN = os.getenv("CLOUDFLARE_TOKEN")
GITHUB_EVENT_PATH = os.getenv("GITHUB_EVENT_PATH", "")
GITHUB_JOB = os.getenv("GITHUB_JOB", "local")
+DOCKER_REPO = os.getenv("DOCKER_REPO", "docker.io")
+DOCKER_USER = os.getenv("DOCKER_USER", "robotclickhouse")
GITHUB_REPOSITORY = os.getenv("GITHUB_REPOSITORY", "ClickHouse/ClickHouse")
GITHUB_RUN_ID = os.getenv("GITHUB_RUN_ID", "0")
GITHUB_SERVER_URL = os.getenv("GITHUB_SERVER_URL", "https://github.com")
@@ -20,5 +22,11 @@
REPORTS_PATH = os.getenv("REPORTS_PATH", p.abspath(p.join(module_dir, "./reports")))
REPO_COPY = os.getenv("REPO_COPY", git_root)
RUNNER_TEMP = os.getenv("RUNNER_TEMP", p.abspath(p.join(module_dir, "./tmp")))
+S3_REGION = os.getenv("S3_REGION", "us-east-1")
S3_BUILDS_BUCKET = os.getenv("S3_BUILDS_BUCKET", "clickhouse-builds")
S3_TEST_REPORTS_BUCKET = os.getenv("S3_TEST_REPORTS_BUCKET", "clickhouse-test-reports")
+S3_ENDPOINT = os.getenv("S3_ENDPOINT", "https://s3.amazonaws.com")
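+# Optional HashiCorp Vault settings; when VAULT_URL is unset, get_robot_token falls back to AWS SSM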
+VAULT_PATH = os.getenv("VAULT_PATH")
+VAULT_TOKEN = os.getenv("VAULT_TOKEN")
+VAULT_URL = os.getenv("VAULT_URL")
+VAULT_MOUNT_POINT = os.getenv("VAULT_MOUNT_POINT", "secret")
diff --git a/tests/ci/fast_test_check.py b/tests/ci/fast_test_check.py
index ce5a4195ceb6..b756a5c709f8 100644
--- a/tests/ci/fast_test_check.py
+++ b/tests/ci/fast_test_check.py
@@ -99,7 +99,7 @@ def process_results(result_folder):
docker_image = get_image_with_version(temp_path, "clickhouse/fasttest")
- s3_helper = S3Helper("https://s3.amazonaws.com")
+ s3_helper = S3Helper()
workspace = os.path.join(temp_path, "fasttest-workspace")
if not os.path.exists(workspace):
diff --git a/tests/ci/functional_test_check.py b/tests/ci/functional_test_check.py
index b73e6f9d7080..09716ff80da9 100644
--- a/tests/ci/functional_test_check.py
+++ b/tests/ci/functional_test_check.py
@@ -9,9 +9,9 @@
from github import Github
-from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH
+from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH, DOCKER_REPO, DOCKER_USER
from s3_helper import S3Helper
-from get_robot_token import get_best_robot_token
+from get_robot_token import get_best_robot_token, get_parameter_from_ssm
from pr_info import FORCE_TESTS_LABEL, PRInfo
from build_download_helper import download_all_deb_packages
from download_release_packets import download_last_release
@@ -56,9 +56,9 @@ def get_additional_envs(check_name, run_by_hash_num, run_by_hash_total):
def get_image_name(check_name):
if "stateless" in check_name.lower():
- return "clickhouse/stateless-test"
+ return f"{DOCKER_REPO}/clickhouse/stateless-test"
if "stateful" in check_name.lower():
- return "clickhouse/stateful-test"
+ return f"{DOCKER_REPO}/clickhouse/stateful-test"
else:
raise Exception(f"Cannot deduce image name based on check name {check_name}")
@@ -258,6 +259,13 @@ def parse_args():
)
sys.exit(0)
+ subprocess.check_output( # pylint: disable=unexpected-keyword-arg
+ "docker login {} --username '{}' --password-stdin".format(DOCKER_REPO, DOCKER_USER),
+ input=get_parameter_from_ssm("dockerhub_robot_password"),
+ encoding="utf-8",
+ shell=True,
+ )
+
image_name = get_image_name(check_name)
docker_image = get_image_with_version(reports_path, image_name)
@@ -310,7 +318,7 @@ def parse_args():
subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True)
- s3_helper = S3Helper("https://s3.amazonaws.com")
+ s3_helper = S3Helper()
state, description, test_results, additional_logs = process_results(
result_path, server_log_path
diff --git a/tests/ci/get_robot_token.py b/tests/ci/get_robot_token.py
index cb79d9ae01ac..7bdac36a052c 100644
--- a/tests/ci/get_robot_token.py
+++ b/tests/ci/get_robot_token.py
@@ -1,16 +1,27 @@
#!/usr/bin/env python3
import boto3 # type: ignore
+import hvac # type: ignore
from github import Github # type: ignore
 
+from env_helper import VAULT_URL, VAULT_TOKEN, VAULT_PATH, VAULT_MOUNT_POINT
+
def get_parameter_from_ssm(name, decrypt=True, client=None):
- if not client:
- client = boto3.client("ssm", region_name="us-east-1")
- return client.get_parameter(Name=name, WithDecryption=decrypt)["Parameter"]["Value"]
-
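+    # Prefer Vault when configured; otherwise fall back to AWS SSM Parameter Store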
+    if VAULT_URL:
+        if not client:
+            client = hvac.Client(url=VAULT_URL, token=VAULT_TOKEN)
+        secret = client.secrets.kv.v2.read_secret_version(
+            mount_point=VAULT_MOUNT_POINT, path=VAULT_PATH
+        )
+        parameter = secret["data"]["data"][name]
+    else:
+        if not client:
+            client = boto3.client("ssm", region_name="us-east-1")
+        parameter = client.get_parameter(Name=name, WithDecryption=decrypt)[
+            "Parameter"
+        ]["Value"]
+    return parameter
+
def get_best_robot_token(token_prefix_env_name="github_robot_token_", total_tokens=4):
- client = boto3.client("ssm", region_name="us-east-1")
+    if VAULT_URL:
+        client = hvac.Client(url=VAULT_URL, token=VAULT_TOKEN)
+    else:
+        client = boto3.client("ssm", region_name="us-east-1")
tokens = {}
for i in range(1, total_tokens + 1):
token_name = token_prefix_env_name + str(i)
diff --git a/tests/ci/git_helper.py b/tests/ci/git_helper.py
index e3ad0eb39c03..2f6019732c19 100644
--- a/tests/ci/git_helper.py
+++ b/tests/ci/git_helper.py
@@ -9,7 +9,7 @@
# \A and \Z match only start and end of the whole string
RELEASE_BRANCH_REGEXP = r"\A\d+[.]\d+\Z"
TAG_REGEXP = (
- r"\Av\d{2}[.][1-9]\d*[.][1-9]\d*[.][1-9]\d*-(testing|prestable|stable|lts)\Z"
+ r"\Av\d{2}[.][1-9]\d*[.][1-9]\d*[.][1-9]\d*-(testing|prestable|stable|lts|clib)\Z"
)
SHA_REGEXP = r"\A([0-9]|[a-f]){40}\Z"
diff --git a/tests/ci/integration_test_check.py b/tests/ci/integration_test_check.py
index 1c53247c072c..67e0fb9be892 100644
--- a/tests/ci/integration_test_check.py
+++ b/tests/ci/integration_test_check.py
@@ -249,7 +249,7 @@ def parse_args():
ch_helper = ClickHouseHelper()
mark_flaky_tests(ch_helper, check_name, test_results)
- s3_helper = S3Helper("https://s3.amazonaws.com")
+ s3_helper = S3Helper()
report_url = upload_results(
s3_helper,
pr_info.number,
diff --git a/tests/ci/keeper_jepsen_check.py b/tests/ci/keeper_jepsen_check.py
index 14c31927b75e..2cd069d23997 100644
--- a/tests/ci/keeper_jepsen_check.py
+++ b/tests/ci/keeper_jepsen_check.py
@@ -9,7 +9,7 @@
from github import Github
import requests
-from env_helper import REPO_COPY, TEMP_PATH
+from env_helper import REPO_COPY, S3_ENDPOINT, TEMP_PATH
from stopwatch import Stopwatch
from upload_result_helper import upload_results
from s3_helper import S3Helper
@@ -192,7 +192,7 @@ def get_run_command(
# run (see .github/workflows/jepsen.yml) So we cannot add explicit
# dependency on a build job and using busy loop on it's results. For the
# same reason we are using latest docker image.
- build_url = f"https://s3.amazonaws.com/clickhouse-builds/{release_or_pr}/{pr_info.sha}/{build_name}/clickhouse"
+ build_url = f"{S3_ENDPOINT}/clickhouse-builds/{release_or_pr}/{pr_info.sha}/{build_name}/clickhouse"
head = requests.head(build_url)
counter = 0
while head.status_code != 200:
@@ -248,7 +248,7 @@ def get_run_command(
description = "No Jepsen output log"
test_result = [("No Jepsen output log", "FAIL")]
- s3_helper = S3Helper("https://s3.amazonaws.com")
+ s3_helper = S3Helper()
report_url = upload_results(
s3_helper,
pr_info.number,
diff --git a/tests/ci/performance_comparison_check.py b/tests/ci/performance_comparison_check.py
index baf2593130ad..2d754c71dbd9 100644
--- a/tests/ci/performance_comparison_check.py
+++ b/tests/ci/performance_comparison_check.py
@@ -15,7 +15,7 @@
from commit_status_helper import get_commit, post_commit_status
from ci_config import CI_CONFIG
from docker_pull_helper import get_image_with_version
-from env_helper import GITHUB_EVENT_PATH, GITHUB_RUN_URL
+from env_helper import GITHUB_EVENT_PATH, GITHUB_RUN_URL, S3_ENDPOINT
from get_robot_token import get_best_robot_token, get_parameter_from_ssm
from pr_info import PRInfo
from rerun_helper import RerunHelper
@@ -86,7 +86,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
docker_env = ""
- docker_env += " -e S3_URL=https://s3.amazonaws.com/clickhouse-builds"
+ docker_env += f" -e S3_URL={S3_ENDPOINT}/clickhouse-builds"
docker_env += f" -e BUILD_NAME={required_build}"
if pr_info.number == 0:
@@ -197,7 +197,7 @@ def __exit__(self, exc_type, exc_val, exc_tb):
}
s3_prefix = f"{pr_info.number}/{pr_info.sha}/{check_name_prefix}/"
- s3_helper = S3Helper("https://s3.amazonaws.com")
+ s3_helper = S3Helper()
uploaded = {} # type: Dict[str, str]
for name, path in paths.items():
try:
diff --git a/tests/ci/s3_helper.py b/tests/ci/s3_helper.py
index 91e67135f6f1..1f4533f5e661 100644
--- a/tests/ci/s3_helper.py
+++ b/tests/ci/s3_helper.py
@@ -9,7 +9,7 @@
import boto3 # type: ignore
-from env_helper import S3_TEST_REPORTS_BUCKET, S3_BUILDS_BUCKET, RUNNER_TEMP, CI
+from env_helper import S3_TEST_REPORTS_BUCKET, S3_BUILDS_BUCKET, S3_REGION, S3_ENDPOINT, RUNNER_TEMP, CI
from compress_files import compress_file_fast
@@ -33,9 +33,10 @@ def _flatten_list(lst):
class S3Helper:
- def __init__(self, host):
- self.session = boto3.session.Session(region_name="us-east-1")
- self.client = self.session.client("s3", endpoint_url=host)
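+    # Endpoint and region come from env_helper so the helper can target any S3-compatible store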
+ def __init__(self, endpoint=S3_ENDPOINT):
+ self.session = boto3.session.Session(region_name=S3_REGION)
+ self.client = self.session.client("s3", endpoint_url=endpoint)
+ self.endpoint = endpoint
def _upload_file_to_s3(self, bucket_name, file_path, s3_path):
logging.debug(
@@ -99,8 +100,8 @@ def _upload_file_to_s3(self, bucket_name, file_path, s3_path):
# last two replacements are specifics of AWS urls:
# https://jamesd3142.wordpress.com/2018/02/28/amazon-s3-and-the-plus-symbol/
return (
- "https://s3.amazonaws.com/{bucket}/{path}".format(
- bucket=bucket_name, path=s3_path
+ "{host}/{bucket}/{path}".format(
+ host=self.endpoint, bucket=bucket_name, path=s3_path
)
.replace("+", "%2B")
.replace(" ", "%20")
@@ -175,8 +176,8 @@ def upload_task(file_path):
t = time.time()
except Exception as ex:
logging.critical("Failed to upload file, expcetion %s", ex)
- return "https://s3.amazonaws.com/{bucket}/{path}".format(
- bucket=bucket_name, path=s3_path
+ return "{host}/{bucket}/{path}".format(
+ host=self.endpoint, bucket=bucket_name, path=s3_path
)
p = Pool(256)
diff --git a/tests/ci/split_build_smoke_check.py b/tests/ci/split_build_smoke_check.py
index 210a6f9ea86b..8cdc0d0a2999 100644
--- a/tests/ci/split_build_smoke_check.py
+++ b/tests/ci/split_build_smoke_check.py
@@ -7,9 +7,9 @@
from github import Github
-from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH
+from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH, DOCKER_REPO, DOCKER_USER
from s3_helper import S3Helper
-from get_robot_token import get_best_robot_token
+from get_robot_token import get_best_robot_token, get_parameter_from_ssm
from pr_info import PRInfo
from build_download_helper import download_shared_build
from upload_result_helper import upload_results
@@ -20,7 +20,7 @@
from rerun_helper import RerunHelper
-DOCKER_IMAGE = "clickhouse/split-build-smoke-test"
+DOCKER_IMAGE = f"{DOCKER_REPO}/clickhouse/split-build-smoke-test"
DOWNLOAD_RETRIES_COUNT = 5
RESULT_LOG_NAME = "run.log"
CHECK_NAME = "Split build smoke test (actions)"
@@ -89,6 +88,13 @@ def get_run_command(build_path, result_folder, server_log_folder, docker_image):
images_path = os.path.join(root, "changed_images.json")
break
+ subprocess.check_output( # pylint: disable=unexpected-keyword-arg
+ "docker login {} --username '{}' --password-stdin".format(DOCKER_REPO, DOCKER_USER),
+ input=get_parameter_from_ssm("dockerhub_robot_password"),
+ encoding="utf-8",
+ shell=True,
+ )
+
docker_image = get_image_with_version(reports_path, DOCKER_IMAGE)
packages_path = os.path.join(temp_path, "packages")
@@ -126,7 +132,7 @@ def get_run_command(build_path, result_folder, server_log_folder, docker_image):
)
ch_helper = ClickHouseHelper()
- s3_helper = S3Helper("https://s3.amazonaws.com")
+ s3_helper = S3Helper()
report_url = upload_results(
s3_helper,
pr_info.number,
diff --git a/tests/ci/stress_check.py b/tests/ci/stress_check.py
index 6bd4c580687f..67c296d6693b 100644
--- a/tests/ci/stress_check.py
+++ b/tests/ci/stress_check.py
@@ -148,7 +147,7 @@ def process_results(result_folder, server_log_path, run_log_path):
subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True)
- s3_helper = S3Helper("https://s3.amazonaws.com")
+ s3_helper = S3Helper()
state, description, test_results, additional_logs = process_results(
result_path, server_log_path, run_log_path
)
diff --git a/tests/ci/style_check.py b/tests/ci/style_check.py
index 84ed9e5a124d..182032b13035 100644
--- a/tests/ci/style_check.py
+++ b/tests/ci/style_check.py
@@ -12,10 +11,12 @@
GITHUB_WORKSPACE,
GITHUB_REPOSITORY,
GITHUB_SERVER_URL,
+ DOCKER_USER,
+ DOCKER_REPO,
)
from s3_helper import S3Helper
from pr_info import PRInfo, SKIP_SIMPLE_CHECK_LABEL
-from get_robot_token import get_best_robot_token
+from get_robot_token import get_best_robot_token, get_parameter_from_ssm
from upload_result_helper import upload_results
from docker_pull_helper import get_image_with_version
from commit_status_helper import post_commit_status, get_commit
@@ -78,6 +79,13 @@ def process_result(result_folder):
pr_info = PRInfo()
+ subprocess.check_output( # pylint: disable=unexpected-keyword-arg
+ "docker login {} --username '{}' --password-stdin".format(DOCKER_REPO, DOCKER_USER),
+ input=get_parameter_from_ssm("dockerhub_robot_password"),
+ encoding="utf-8",
+ shell=True,
+ )
+
gh = Github(get_best_robot_token())
rerun_helper = RerunHelper(gh, pr_info, NAME)
@@ -88,8 +96,8 @@ def process_result(result_folder):
if not os.path.exists(temp_path):
os.makedirs(temp_path)
- docker_image = get_image_with_version(temp_path, "clickhouse/style-test")
- s3_helper = S3Helper("https://s3.amazonaws.com")
+    docker_image = get_image_with_version(temp_path, f"{DOCKER_REPO}/clickhouse/style-test")
+ s3_helper = S3Helper()
cmd = (
f"docker run -u $(id -u ${{USER}}):$(id -g ${{USER}}) --cap-add=SYS_PTRACE "
diff --git a/tests/ci/unit_tests_check.py b/tests/ci/unit_tests_check.py
index c2329fab955a..323918b56fc6 100644
--- a/tests/ci/unit_tests_check.py
+++ b/tests/ci/unit_tests_check.py
@@ -7,9 +7,9 @@
from github import Github
-from env_helper import TEMP_PATH, REPO_COPY, REPORTS_PATH
+from env_helper import DOCKER_USER, DOCKER_REPO, TEMP_PATH, REPO_COPY, REPORTS_PATH
from s3_helper import S3Helper
-from get_robot_token import get_best_robot_token
+from get_robot_token import get_best_robot_token, get_parameter_from_ssm
from pr_info import PRInfo
from build_download_helper import download_unit_tests
from upload_result_helper import upload_results
@@ -121,6 +121,13 @@ def process_result(result_folder):
logging.info("Check is already finished according to github status, exiting")
sys.exit(0)
+ subprocess.check_output( # pylint: disable=unexpected-keyword-arg
+ "docker login {} --username '{}' --password-stdin".format(DOCKER_REPO, DOCKER_USER),
+ input=get_parameter_from_ssm("dockerhub_robot_password"),
+ encoding="utf-8",
+ shell=True,
+ )
+
docker_image = get_image_with_version(reports_path, IMAGE_NAME)
download_unit_tests(check_name, reports_path, temp_path)
@@ -147,7 +154,7 @@ def process_result(result_folder):
subprocess.check_call(f"sudo chown -R ubuntu:ubuntu {temp_path}", shell=True)
- s3_helper = S3Helper("https://s3.amazonaws.com")
+ s3_helper = S3Helper()
state, description, test_results, additional_logs = process_result(test_output)
ch_helper = ClickHouseHelper()
diff --git a/tests/ci/worker/init_runner_ibm.sh b/tests/ci/worker/init_runner_ibm.sh
new file mode 100644
index 000000000000..65dad7e53cbd
--- /dev/null
+++ b/tests/ci/worker/init_runner_ibm.sh
@@ -0,0 +1,51 @@
+#!/usr/bin/env bash
+set -uo pipefail
+
+####################################
+# IMPORTANT! #
+# VPC instance should have #
+# `github-runner-type:` tag #
+# set accordingly to a runner role #
+####################################
+
+# Set API_KEY and RUNNER_TOKEN prior to running this
+
+echo "Running init script"
+export DEBIAN_FRONTEND=noninteractive
+export RUNNER_HOME=/home/ubuntu/actions-runner
+
+export RUNNER_URL="https://github.com/ClibMouse"
+
+# Funny fact: the metadata service has a fixed IP
+ACCESS_TOKEN=$(curl -s -X PUT "http://169.254.169.254/instance_identity/v1/token?version=2022-03-08" -H "Metadata-Flavor: ibm" -d '{"expires_in": 3600}' | jq -r '(.access_token)')
+IAM_TOKEN=$(curl -s -X POST "https://iam.cloud.ibm.com/identity/token" -H "content-type: application/x-www-form-urlencoded" -H "accept: application/json" -d "grant_type=urn%3Aibm%3Aparams%3Aoauth%3Agrant-type%3Aapikey&apikey=$API_KEY" | jq -r '(.access_token)')
+CRN=$(curl -s -X GET "http://169.254.169.254/metadata/v1/instance?version=2022-03-08" -H "Accept:application/json" -H "Authorization: Bearer $ACCESS_TOKEN" | jq -r '(.crn)')
+
+INSTANCE_ID=$(curl -s -X GET "http://169.254.169.254/metadata/v1/instance?version=2022-03-08" -H "Accept:application/json" -H "Authorization: Bearer $ACCESS_TOKEN" | jq -r '(.id)')
+export INSTANCE_ID
+
+# combine labels
+RUNNER_TYPE=$(curl -s -X GET --header "Authorization: Bearer $IAM_TOKEN" --header "Accept: application/json" "https://tags.global-search-tagging.cloud.ibm.com/v3/tags?attached_to=$CRN" | jq -r '.items[] | select( .name | startswith("github-runner-type:")) | .name | split(":") | .[1]' | paste -s -d, - )
+LABELS="self-hosted,Linux,$(uname -m),$RUNNER_TYPE"
+export LABELS
+
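+# Supervise the runner: if run.sh is not running, (re-)register and start it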
+while true; do
+ runner_pid=$(pgrep run.sh)
+ echo "Got runner pid $runner_pid"
+
+ cd $RUNNER_HOME || exit 1
+ if [ -z "$runner_pid" ]; then
+ echo "Will try to remove runner"
+ sudo -u ubuntu ./config.sh remove --token "$RUNNER_TOKEN" ||:
+
+ echo "Going to configure runner"
+        sudo -u ubuntu ./config.sh --url "$RUNNER_URL" --token "$RUNNER_TOKEN" --name "$INSTANCE_ID" --runnergroup Default --labels "$LABELS" --work _work
+
+ echo "Run"
+ sudo -u ubuntu ./run.sh &
+ sleep 15
+ else
+ echo "Runner is working with pid $runner_pid, nothing to do"
+ sleep 10
+ fi
+done
diff --git a/tests/ci/worker/ubuntu_ami_for_ci_ibm.sh b/tests/ci/worker/ubuntu_ami_for_ci_ibm.sh
new file mode 100644
index 000000000000..478db4b8ebbd
--- /dev/null
+++ b/tests/ci/worker/ubuntu_ami_for_ci_ibm.sh
@@ -0,0 +1,110 @@
+#!/usr/bin/env bash
+set -xeuo pipefail
+
+echo "Running prepare script"
+export DEBIAN_FRONTEND=noninteractive
+export RUNNER_VERSION=2.293.0
+export RUNNER_HOME=/home/ubuntu/actions-runner
+
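+# Map $(uname -m) to the architecture names used by Debian packages and the runner tarball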
+deb_arch() {
+ case $(uname -m) in
+ x86_64 )
+ echo amd64;;
+ aarch64 )
+ echo arm64;;
+ esac
+}
+
+runner_arch() {
+ case $(uname -m) in
+ x86_64 )
+ echo x64;;
+ aarch64 )
+ echo arm64;;
+ esac
+}
+
+apt-get update
+
+apt-get install --yes --no-install-recommends \
+ apt-transport-https \
+ atop \
+ binfmt-support \
+ build-essential \
+ ca-certificates \
+ curl \
+ gnupg \
+ jq \
+ lsb-release \
+ pigz \
+ python3-dev \
+ python3-pip \
+ qemu-user-static \
+ unzip
+
+curl -fsSL https://download.docker.com/linux/ubuntu/gpg | gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
+
+echo "deb [arch=$(deb_arch) signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | tee /etc/apt/sources.list.d/docker.list > /dev/null
+
+apt-get update
+
+apt-get install --yes --no-install-recommends docker-ce docker-ce-cli containerd.io
+
+usermod -aG docker ubuntu
+
+# enable ipv6 in containers (fixed-cidr-v6 is some random network mask)
+cat <<EOT > /etc/docker/daemon.json
+{
+ "ipv6": true,
+ "fixed-cidr-v6": "2001:db8:1::/64",
+ "log-driver": "json-file",
+ "log-opts": {
+ "max-file": "5",
+ "max-size": "1000m"
+ }
+}
+EOT
+
+systemctl restart docker
+
+# buildx builder is user-specific
+sudo -u ubuntu docker buildx version
+sudo -u ubuntu docker buildx create --use --name default-builder
+
+pip install boto3 pygithub requests urllib3 unidiff dohq-artifactory
+
+mkdir -p $RUNNER_HOME && cd $RUNNER_HOME
+
+RUNNER_ARCHIVE="actions-runner-linux-$(runner_arch)-$RUNNER_VERSION.tar.gz"
+
+curl -O -L "https://github.com/actions/runner/releases/download/v$RUNNER_VERSION/$RUNNER_ARCHIVE"
+
+tar xzf "./$RUNNER_ARCHIVE"
+rm -f "./$RUNNER_ARCHIVE"
+./bin/installdependencies.sh
+
+chown -R ubuntu:ubuntu $RUNNER_HOME
+
+# cd /home/ubuntu
+# curl "https://awscli.amazonaws.com/awscli-exe-linux-$(uname -m).zip" -o "awscliv2.zip"
+# unzip awscliv2.zip
+# ./aws/install
+
+# rm -rf /home/ubuntu/awscliv2.zip /home/ubuntu/aws
+
+# # SSH keys of core team
+# mkdir -p /home/ubuntu/.ssh
+
+# # ~/.ssh/authorized_keys is cleaned out, so we use deprecated but working ~/.ssh/authorized_keys2
+# TEAM_KEYS_URL=$(aws ssm get-parameter --region us-east-1 --name team-keys-url --query 'Parameter.Value' --output=text)
+# curl "${TEAM_KEYS_URL}" > /home/ubuntu/.ssh/authorized_keys2
+# chown ubuntu: /home/ubuntu/.ssh -R
+# chmod 0700 /home/ubuntu/.ssh
+
+# # Download cloudwatch agent and install config for it
+# wget --directory-prefix=/tmp https://s3.amazonaws.com/amazoncloudwatch-agent/ubuntu/"$(deb_arch)"/latest/amazon-cloudwatch-agent.deb{,.sig}
+# gpg --recv-key --keyserver keyserver.ubuntu.com D58167303B789C72
+# gpg --verify /tmp/amazon-cloudwatch-agent.deb.sig
+# dpkg -i /tmp/amazon-cloudwatch-agent.deb
+# aws ssm get-parameter --region us-east-1 --name AmazonCloudWatch-github-runners --query 'Parameter.Value' --output text > /opt/aws/amazon-cloudwatch-agent/etc/amazon-cloudwatch-agent.json
+# systemctl enable amazon-cloudwatch-agent.service