Skip to content

Antalya Switch artifacts to R2 #704

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 14 commits into
base: antalya
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .github/create_combined_ci_report.py
Original file line number Diff line number Diff line change
Expand Up @@ -275,7 +275,7 @@ def main():
exit(0)

# Upload the report to S3
s3_client = boto3.client("s3")
s3_client = boto3.client("s3", endpoint_url=os.getenv("S3_URL"))

try:
s3_client.put_object(
Expand Down
8 changes: 4 additions & 4 deletions .github/workflows/docker_test_images.yml
Original file line number Diff line number Diff line change
Expand Up @@ -23,13 +23,13 @@ name: Build docker images
required: true

env:
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
S3_URL: ${{ secrets.R2_ENDPOINT }}
S3_DOWNLOAD: https://build-artifacts.altinity.io
DOCKER_USERNAME: ${{ secrets.DOCKER_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKER_PASSWORD }}


jobs:
DockerBuildAarch64:
runs-on: [self-hosted, altinity-on-demand, altinity-func-tester-aarch64]
Expand Down
10 changes: 7 additions & 3 deletions .github/workflows/release_branches.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,13 @@ name: ReleaseBranchCI
env:
# Force the stdout and stderr streams to be unbuffered
PYTHONUNBUFFERED: 1
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
# AWS region not needed for R2
# AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
# Point S3_URL to R2 endpoint
S3_URL: ${{ secrets.R2_ENDPOINT }}
S3_DOWNLOAD: https://build-artifacts.altinity.io
CLICKHOUSE_TEST_STAT_LOGIN: ${{ secrets.CLICKHOUSE_TEST_STAT_LOGIN }}
CLICKHOUSE_TEST_STAT_PASSWORD: ${{ secrets.CLICKHOUSE_TEST_STAT_PASSWORD }}
CLICKHOUSE_TEST_STAT_URL: ${{ secrets.CLICKHOUSE_TEST_STAT_URL }}
Expand Down
7 changes: 4 additions & 3 deletions .github/workflows/reusable_build.yml
Original file line number Diff line number Diff line change
Expand Up @@ -4,9 +4,10 @@
env:
# Force the stdout and stderr streams to be unbuffered
PYTHONUNBUFFERED: 1
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
S3_URL: ${{ secrets.R2_ENDPOINT }}
S3_DOWNLOAD: https://build-artifacts.altinity.io
CLICKHOUSE_TEST_STAT_LOGIN: ${{ secrets.CLICKHOUSE_TEST_STAT_LOGIN }}
CLICKHOUSE_TEST_STAT_PASSWORD: ${{ secrets.CLICKHOUSE_TEST_STAT_PASSWORD }}
CLICKHOUSE_TEST_STAT_URL: ${{ secrets.CLICKHOUSE_TEST_STAT_URL }}
Expand Down
7 changes: 4 additions & 3 deletions .github/workflows/reusable_sign.yml
Original file line number Diff line number Diff line change
Expand Up @@ -57,9 +57,10 @@ env:
# Force the stdout and stderr streams to be unbuffered
PYTHONUNBUFFERED: 1
CHECK_NAME: ${{inputs.test_name}}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
S3_URL: ${{ secrets.R2_ENDPOINT }}
S3_DOWNLOAD: https://build-artifacts.altinity.io
CLICKHOUSE_TEST_STAT_LOGIN: ${{ secrets.CLICKHOUSE_TEST_STAT_LOGIN }}
CLICKHOUSE_TEST_STAT_PASSWORD: ${{ secrets.CLICKHOUSE_TEST_STAT_PASSWORD }}
CLICKHOUSE_TEST_STAT_URL: ${{ secrets.CLICKHOUSE_TEST_STAT_URL }}
Expand Down
9 changes: 6 additions & 3 deletions .github/workflows/reusable_test.yml
Original file line number Diff line number Diff line change
Expand Up @@ -69,9 +69,11 @@ env:
# Force the stdout and stderr streams to be unbuffered
PYTHONUNBUFFERED: 1
CHECK_NAME: ${{inputs.test_name}}
AWS_ACCESS_KEY_ID: ${{ secrets.AWS_ACCESS_KEY_ID }}
AWS_DEFAULT_REGION: ${{ secrets.AWS_DEFAULT_REGION }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
AWS_ACCESS_KEY_ID: ${{ secrets.R2_ACCESS_KEY_ID }}
AWS_SECRET_ACCESS_KEY: ${{ secrets.R2_SECRET_ACCESS_KEY }}
# Point S3_URL to R2 endpoint
S3_URL: ${{ secrets.R2_ENDPOINT }}
S3_DOWNLOAD: https://build-artifacts.altinity.io
CLICKHOUSE_TEST_STAT_LOGIN: ${{ secrets.CLICKHOUSE_TEST_STAT_LOGIN }}
CLICKHOUSE_TEST_STAT_PASSWORD: ${{ secrets.CLICKHOUSE_TEST_STAT_PASSWORD }}
CLICKHOUSE_TEST_STAT_URL: ${{ secrets.CLICKHOUSE_TEST_STAT_URL }}
Expand Down Expand Up @@ -135,6 +137,7 @@ jobs:
with:
job_type: test
- name: Docker setup
if: ${{ !contains(inputs.name, 'Docker') }}
uses: ./.github/actions/docker_setup
with:
test_name: ${{ inputs.test_name }}
Expand Down
3 changes: 2 additions & 1 deletion docker/packager/packager
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ IMAGE_TYPE = "binary-builder"
IMAGE_NAME = f"altinityinfra/{IMAGE_TYPE}"
DEFAULT_TMP_PATH = SCRIPT_PATH.parent.absolute() / 'tmp'
TEMP_PATH = Path(os.getenv("TEMP_PATH", DEFAULT_TMP_PATH))
SCCACHE_ENDPOINT = os.getenv("S3_URL")

class BuildException(Exception):
    """Raised by the packager script when a build step fails."""

    pass
Expand Down Expand Up @@ -84,7 +85,7 @@ def run_docker_image_with_env(
for key, value in aws_secrets.items():
f.write(f"\n{key}={value}")

extra_parts = f"--volume={host_aws_config_file_path}:{env_part['AWS_CONFIG_FILE']}"
extra_parts = f"--volume={host_aws_config_file_path}:{env_part['AWS_CONFIG_FILE']} -e SCCACHE_ENDPOINT={SCCACHE_ENDPOINT} -e SCCACHE_REGION=auto"

env_part = " -e ".join(env_variables)
if env_part:
Expand Down
4 changes: 2 additions & 2 deletions tests/ci/ci_metadata.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,7 +45,7 @@ def __init__(
self.sync_pr_run_id = 0

if self.pr_number:
self.s3_path = f"{self._S3_PREFIX}/PRs/{self.pr_number}/"
self.s3_path = f"{self._S3_PREFIX}/{self.pr_number}/"
else:
self.s3_path = f"{self._S3_PREFIX}/{self.git_ref}/{self.sha}/"

Expand All @@ -58,7 +58,7 @@ def __init__(
and GITHUB_REPOSITORY != GITHUB_UPSTREAM_REPOSITORY
):
self.upstream_pr_number = int(self.git_ref.split("/pr/", maxsplit=1)[1])
self.s3_path_upstream = f"{self._S3_PREFIX}/PRs/{self.upstream_pr_number}/"
self.s3_path_upstream = f"{self._S3_PREFIX}/{self.upstream_pr_number}/"

self._updated = False

Expand Down
3 changes: 3 additions & 0 deletions tests/ci/env_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,9 @@
S3_SECRET_ACCESS_KEY = os.getenv("AWS_SECRET_ACCESS_KEY")
S3_TEST_REPORTS_BUCKET = os.getenv("S3_TEST_REPORTS_BUCKET", "altinity-build-artifacts")

SCCACHE_ACCESS_KEY_ID = os.getenv("SCCACHE_ACCESS_KEY_ID")
SCCACHE_SECRET_ACCESS_KEY = os.getenv("SCCACHE_SECRET_ACCESS_KEY")

S3_URL = os.getenv("S3_URL", "https://s3.amazonaws.com")
S3_DOWNLOAD = os.getenv("S3_DOWNLOAD", S3_URL)
S3_ARTIFACT_DOWNLOAD_TEMPLATE = (
Expand Down
5 changes: 4 additions & 1 deletion tests/ci/s3_helper.py
Original file line number Diff line number Diff line change
Expand Up @@ -360,7 +360,10 @@ def get_url(bucket: str, key: str) -> str:

@staticmethod
def s3_url(bucket: str, key: str) -> str:
url = f"{S3_DOWNLOAD}/{bucket}/{key}"
if 'r2.cloudflarestorage.com' in S3_URL:
url = f"{S3_DOWNLOAD}/{key}"
else:
url = f"{S3_DOWNLOAD}/{bucket}/{key}"
# last two replacements are specifics of AWS urls:
# https://jamesd3142.wordpress.com/2018/02/28/amazon-s3-and-the-plus-symbol/
url = url.replace("+", "%2B").replace(" ", "%20")
Expand Down
3 changes: 2 additions & 1 deletion tests/performance/scripts/download.sh
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,8 @@ function download
fi
declare -a urls_to_try=(
"$S3_URL/REFs/master/$REF_SHA/$build_name_new/performance.tar.zst"
"$S3_URL/PRs/0/$REF_SHA/$BUILD_NAME/performance.tar.zst"
"$S3_URL/PRs/$left_pr/$left_sha/$BUILD_NAME/performance.tar.zst"
"$S3_URL/$left_pr/$left_sha/$BUILD_NAME/performance.tar.zst"
)

for path in "${urls_to_try[@]}"
Expand Down